瀏覽代碼

Update uat zhaoshang data cert

wwd 3 年之前
父節點
當前提交
47f323f171

+ 1 - 1
README.md

@@ -1,4 +1,4 @@
-# ztk-encryptdacodedata
+# ztk-encryptdecodedata
 
 ## **概述**
 

+ 13 - 0
pom.xml

@@ -194,6 +194,19 @@
                 </configuration>
             </plugin>
         </plugins>
+        <resources>
+            <resource>
+                <directory>src/main/resources</directory>
+                <!-- src/main/resources下的指定资源放行 -->
+                <includes>
+                    <include>**/*.properties</include>
+                    <include>**/*.yml</include>
+                    <include>**/*.xml</include>
+                    <include>**/*.jks</include>
+                </includes>
+                <filtering>false</filtering>
+            </resource>
+        </resources>
     </build>
     <repositories>
         <repository>

+ 30 - 3
src/main/java/com/persagy/ztkencryptdecodedata/kafka/CloudKafkaConsumerFromEdgeTopic.java

@@ -8,6 +8,7 @@ import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.clients.consumer.ConsumerRecords;
 import org.apache.kafka.clients.consumer.KafkaConsumer;
 import org.apache.kafka.common.TopicPartition;
+import org.apache.kafka.common.config.SslConfigs;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -56,9 +57,25 @@ public class CloudKafkaConsumerFromEdgeTopic implements CommandLineRunner {
     private  String keyDeserializer;
     @Value("${spring.kafka.consumer.value-deserializer}")
     private String valueDeserializer;
-
+    @Value("${spring.kafka.producer.security.protocol}")
+    private  String securtyprotocol;
+    @Value("${spring.kafka.producer.ssl.endpoint.identification.algorithm}")
+    private  String algorithm;
+    @Value("${spring.profiles.active}")
+    private String active;
     @Value("${spring.location}")
     private String location;
+    @Value("${spring.kafka.consumer.ssl.trust-store-location}")
+    private String truststorelocaltion;
+    @Value("${spring.kafka.consumer.ssl.trust-store-password}")
+    private String truststorepassword;
+    @Value("${spring.kafka.consumer.ssl.key-store-location}")
+    private String keystorelocaltion;
+    @Value("${spring.kafka.consumer.ssl.key-store-password}")
+    private String keystorepassword;
+    @Value("${spring.kafka.consumer.ssl.key-password}")
+    private String keypassword;
+
 
 
 
@@ -70,13 +87,23 @@ public class CloudKafkaConsumerFromEdgeTopic implements CommandLineRunner {
     @Override
     public void run(String... args) throws Exception {
         int i=0;
-        System.out.println("Colud_Subscribe_Edge_Update_messages");
+       // System.out.println("Colud_Subscribe_Edge_Update_messages");
         Properties props = new Properties();
         props.put("bootstrap.servers", consumerBootstrap);
-        props.put("group.id", groupId);
+        props.put("group.id", location+groupId+active);
         props.put("auto.offset.reset", autooffet);
         props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
         props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
+        props.put("security.protocol", securtyprotocol);
+        props.put("ssl.endpoint.identification.algorithm",algorithm);
+        // properties.put("ssl.truststore.location", ResourceUtils.getFile(truststorelocaltion).getPath());
+        // properties.put("ssl.truststore.password", keystorepassword);
+        props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG,keystorelocaltion);
+        props.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG,keystorepassword);
+        props.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG,keypassword);
+        props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,truststorelocaltion);
+        props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG,truststorepassword);
+
 
         //metadata.max.age.ms 1000
         KafkaConsumer<String, String> consumer =  new KafkaConsumer<>(props);

+ 37 - 2
src/main/java/com/persagy/ztkencryptdecodedata/kafka/EdgeKafkaProducer.java

@@ -2,10 +2,12 @@ package com.persagy.ztkencryptdecodedata.kafka;
 
 import com.alibaba.fastjson.JSONObject;
 import com.persagy.ztkencryptdecodedata.dataSafety.EncryptInputMessageService;
+import lombok.SneakyThrows;
 import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.kafka.clients.producer.ProducerRecord;
 import org.apache.kafka.common.KafkaException;
+import org.apache.kafka.common.config.SslConfigs;
 import org.apache.kafka.common.errors.AuthorizationException;
 import org.apache.kafka.common.errors.OutOfOrderSequenceException;
 import org.apache.kafka.common.errors.ProducerFencedException;
@@ -14,6 +16,7 @@ import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.stereotype.Service;
+import org.springframework.util.ResourceUtils;
 
 import java.util.Properties;
 import java.util.concurrent.*;
@@ -46,12 +49,31 @@ public class EdgeKafkaProducer  {
     private String keySerializer;
     @Value("${spring.kafka.producer.value-serializer}")
     private String ValueSerializer;
-
+    @Value("${spring.kafka.producer.security.protocol}")
+    private  String securtyprotocol;
+    @Value("${spring.kafka.producer.ssl.endpoint.identification.algorithm}")
+    private  String algorithm;
+    @Value("${spring.profiles.active}")
+    private String active;
     @Value("${spring.location}")
     private String location;
+    @Value("${spring.kafka.producer.ssl.trust-store-location}")
+    private String truststorelocaltion;
+    @Value("${spring.kafka.producer.ssl.key-store-password}")
+    private String keystorepassword;
+
+    @Value("${spring.kafka.consumer.ssl.key-store-location}")
+    private String keytorelocaltion;
+    @Value("${spring.kafka.producer.ssl.trust-store-password}")
+    private String truststorepassword;
+    @Value("${spring.kafka.producer.ssl.key-password}")
+    private String keypassword;
+
+
    // private final  static  ExecutorService executorService = new ThreadPoolExecutor(10, 20, 60, TimeUnit.MILLISECONDS,new LinkedBlockingQueue<>());
    private final static ExecutorService executorService = new ThreadPoolExecutor(Runtime.getRuntime().availableProcessors(),Runtime.getRuntime().availableProcessors() * 2, 60, TimeUnit.SECONDS,new LinkedBlockingQueue(50000));
     private  int mm=0;
+    @SneakyThrows
     public Future ProducerSend(String moudle, Object object) {
         properties = new Properties();
         // 连接的 kafka 集群地址
@@ -62,11 +84,24 @@ public class EdgeKafkaProducer  {
         properties.put("max.request.size", 1048576); //信息发送最大值1MB
         //batch.size当批量的数据大小达到设定值后,就会立即发送,不顾下面的linger.ms
         properties.put("retry.backoff.ms", 500);//设定重试时间间隔避免无效的频繁重试
-        properties.put("client.id", clientID+"_"+String.valueOf(mm));//设定重试时间间隔避免无效的频繁重试
+        properties.put("client.id", location+clientID+active+"_"+String.valueOf(mm));//设定重试时间间隔避免无效的频繁重试
         properties.put("linger.ms", 5);//延迟1ms发送,这项设置将通过增加小的延迟来完成--即,不是立即发送一条记录,producer将会等待给定的延迟时间以允许其他消息记录发送,这些消息记录可以批量处理
         properties.put("buffer.memory", bfferMemory);//producer可以用来缓存数据的内存大小。
         properties.put("key.serializer", "org.apache.kafka.common.serialization.IntegerSerializer");
         properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
+        properties.put("security.protocol", securtyprotocol);
+        properties.put("ssl.endpoint.identification.algorithm",algorithm);
+       // properties.put("ssl.truststore.location", ResourceUtils.getFile(truststorelocaltion).getPath());
+       // properties.put("ssl.truststore.password", keystorepassword);
+        properties.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG,keytorelocaltion);
+        properties.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG,keystorepassword);
+        properties.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG,keypassword);
+        properties.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,truststorelocaltion);
+        properties.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG,truststorepassword);
+
+
+
+
         // producer = new KafkaProducer<String, String>(properties);
         producer = new KafkaProducer<String, String>(properties);
         String Topic = location + "_" + moudle;

+ 2 - 2
src/main/resources/application-prod.yml

@@ -17,12 +17,12 @@ spring:
       batch-size: 16384
       buffer-memory: 33554432
       acks: all
-      client-id: edge_clent_dev
+      client-id: _producer_
       key-serializer: org.apache.kafka.common.serialization.IntegerSerializer
       value-serializer: org.apache.kafka.common.serialization.StringSerializer
     consumer:
       bootstrap-servers: 192.168.2.128:9092,192.168.2.128:9093,192.168.2.128:9094
-      group-id: cloud_consumer_dev
+      group-id: _consumer_
       auto-offset-reset: earliest
       key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
       value-deserializer: org.apache.kafka.common.serialization.StringDeserializer

文件差異過大導致無法顯示
+ 29 - 2
src/main/resources/application-uat.yml


+ 1 - 1
src/main/resources/application.yml

@@ -1,7 +1,7 @@
 spring:
   profiles:
     active: dev
-  location: Cloud  # Edge 边缘测 or Cloud 云端
+  location: Edge  # Edge 边缘测 or Cloud 云端
 
 
 

二進制
src/main/resources/uatclient.keystore.jks


二進制
src/main/resources/uatclient.truststore.jks