wwd committed 3 years ago, commit 69c44e1cb6
49 changed files with 3548 additions and 103 deletions
  1. .gitignore (+30 -103)
  2. .mvn/wrapper/MavenWrapperDownloader.java (+117 -0)
  3. .mvn/wrapper/maven-wrapper.properties (+2 -0)
  4. mvnw (+310 -0)
  5. mvnw.cmd (+182 -0)
  6. pom.xml (+319 -0)
  7. src/main/java/com/persagy/ztkedgeclouddatasecurity/DirectoryMonitorService.java (+35 -0)
  8. src/main/java/com/persagy/ztkedgeclouddatasecurity/SystemConfig.java (+26 -0)
  9. src/main/java/com/persagy/ztkedgeclouddatasecurity/ZtkEdgeclouddatasecurityApplication.java (+34 -0)
  10. src/main/java/com/persagy/ztkedgeclouddatasecurity/alibabaiot/alibabaiotgateway.java (+347 -0)
  11. src/main/java/com/persagy/ztkedgeclouddatasecurity/config/AsyncConfig.java (+50 -0)
  12. src/main/java/com/persagy/ztkedgeclouddatasecurity/config/EdgeToCloudServiceInterface.java (+20 -0)
  13. src/main/java/com/persagy/ztkedgeclouddatasecurity/config/FactoryForStrategy.java (+32 -0)
  14. src/main/java/com/persagy/ztkedgeclouddatasecurity/config/SystemProperties.java (+42 -0)
  15. src/main/java/com/persagy/ztkedgeclouddatasecurity/config/YmlPropertySourceFactory.java (+34 -0)
  16. src/main/java/com/persagy/ztkedgeclouddatasecurity/controller/TestEnDeServiceController.java (+45 -0)
  17. src/main/java/com/persagy/ztkedgeclouddatasecurity/datasafety/DataOptRSA.java (+21 -0)
  18. src/main/java/com/persagy/ztkedgeclouddatasecurity/datasafety/DecrypInputMessageInterface.java (+5 -0)
  19. src/main/java/com/persagy/ztkedgeclouddatasecurity/datasafety/DecryptInputMessageService.java (+61 -0)
  20. src/main/java/com/persagy/ztkedgeclouddatasecurity/datasafety/EncryptInputMessageInterface.java (+5 -0)
  21. src/main/java/com/persagy/ztkedgeclouddatasecurity/datasafety/EncryptInputMessageService.java (+73 -0)
  22. src/main/java/com/persagy/ztkedgeclouddatasecurity/entity/ChattingUser.java (+34 -0)
  23. src/main/java/com/persagy/ztkedgeclouddatasecurity/entity/NettyMessage.java (+95 -0)
  24. src/main/java/com/persagy/ztkedgeclouddatasecurity/entity/NettyMessageQueue.java (+58 -0)
  25. src/main/java/com/persagy/ztkedgeclouddatasecurity/entity/alerm/CommonConst.java (+28 -0)
  26. src/main/java/com/persagy/ztkedgeclouddatasecurity/kafka/CloudKafkaConsumer.java (+50 -0)
  27. src/main/java/com/persagy/ztkedgeclouddatasecurity/kafka/CloudKafkaProducer.java (+67 -0)
  28. src/main/java/com/persagy/ztkedgeclouddatasecurity/kafka/EdgeKafkaProducer.java (+192 -0)
  29. src/main/java/com/persagy/ztkedgeclouddatasecurity/kafka/EdgeKakfaConsumer.java (+58 -0)
  30. src/main/java/com/persagy/ztkedgeclouddatasecurity/kafkatask/EdgeSendToKakfaTaskJob.java (+47 -0)
  31. src/main/java/com/persagy/ztkedgeclouddatasecurity/netty/Command.java (+5 -0)
  32. src/main/java/com/persagy/ztkedgeclouddatasecurity/netty/MsgHandler.java (+249 -0)
  33. src/main/java/com/persagy/ztkedgeclouddatasecurity/netty/MyChannelInitializer.java (+30 -0)
  34. src/main/java/com/persagy/ztkedgeclouddatasecurity/netty/ObjDecoder.java (+30 -0)
  35. src/main/java/com/persagy/ztkedgeclouddatasecurity/netty/ObjEncoder.java (+18 -0)
  36. src/main/java/com/persagy/ztkedgeclouddatasecurity/netty/Packet.java (+10 -0)
  37. src/main/java/com/persagy/ztkedgeclouddatasecurity/netty/PacketClazzMap.java (+17 -0)
  38. src/main/java/com/persagy/ztkedgeclouddatasecurity/netty/cloud/CenterChannelInitializer.java (+37 -0)
  39. src/main/java/com/persagy/ztkedgeclouddatasecurity/netty/cloud/CenterClientHandler.java (+141 -0)
  40. src/main/java/com/persagy/ztkedgeclouddatasecurity/netty/cloud/NettyClient.java (+110 -0)
  41. src/main/java/com/persagy/ztkedgeclouddatasecurity/netty/edge/NettyServer.java (+118 -0)
  42. src/main/java/com/persagy/ztkedgeclouddatasecurity/until/SerializationUtil.java (+69 -0)
  43. src/main/resources/application-dev.yml (+134 -0)
  44. src/main/resources/application-prod.yml (+71 -0)
  45. src/main/resources/application-uat.yml (+71 -0)
  46. src/main/resources/application.yml (+6 -0)
  47. src/main/resources/uatclient.keystore.jks (BIN)
  48. src/main/resources/uatclient.truststore.jks (BIN)
  49. src/test/java/com/persagy/ztkedgeclouddatasecurity/ZtkEdgeclouddatasecurityApplicationTests.java (+13 -0)

.gitignore (+30 -103)

@@ -1,106 +1,33 @@
-# ---> macOS
-.DS_Store
-.AppleDouble
-.LSOverride
-
-# Icon must end with two \r
-Icon
-
-
-# Thumbnails
-._*
-
-# Files that might appear in the root of a volume
-.DocumentRevisions-V100
-.fseventsd
-.Spotlight-V100
-.TemporaryItems
-.Trashes
-.VolumeIcon.icns
-
-# Directories potentially created on remote AFP share
-.AppleDB
-.AppleDesktop
-Network Trash Folder
-Temporary Items
-.apdisk
-
-# ---> Windows
-# Windows image file caches
-Thumbs.db
-ehthumbs.db
-
-# Folder config file
-Desktop.ini
-
-# Recycle Bin used on file shares
-$RECYCLE.BIN/
-
-# Windows Installer files
-*.cab
-*.msi
-*.msm
-*.msp
-
-# Windows shortcuts
-*.lnk
-
-# ---> JetBrains
-# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio
-
+HELP.md
+target/
+.mvn/wrapper/maven-wrapper.jar
+**/src/main/**/target/
+**/src/test/**/target/
+
+### STS ###
+.apt_generated
+.classpath
+.factorypath
+.project
+.settings
+.springBeans
+.sts4-cache
+
+### IntelliJ IDEA ###
+.idea
+*.iws
 *.iml
-
-## Directory-based project format:
-.idea/
-# if you remove the above rule, at least ignore the following:
-
-# User-specific stuff:
-# .idea/workspace.xml
-# .idea/tasks.xml
-# .idea/dictionaries
-
-# Sensitive or high-churn files:
-# .idea/dataSources.ids
-# .idea/dataSources.xml
-# .idea/sqlDataSources.xml
-# .idea/dynamic.xml
-# .idea/uiDesigner.xml
-
-# Gradle:
-# .idea/gradle.xml
-# .idea/libraries
-
-# Mongo Explorer plugin:
-# .idea/mongoSettings.xml
-
-## File-based project format:
 *.ipr
-*.iws
-
-## Plugin-specific files:
-
-# IntelliJ
-/out/
-
-# mpeltonen/sbt-idea plugin
-.idea_modules/
-
-# JIRA plugin
-atlassian-ide-plugin.xml
-
-# Crashlytics plugin (for Android Studio and IntelliJ)
-com_crashlytics_export_strings.xml
-crashlytics.properties
-crashlytics-build.properties
-
-# ---> Maven
-target/
-pom.xml.tag
-pom.xml.releaseBackup
-pom.xml.versionsBackup
-pom.xml.next
-release.properties
-dependency-reduced-pom.xml
-buildNumber.properties
-.mvn/timing.properties
 
+### NetBeans ###
+/nbproject/private/
+/nbbuild/
+/dist/
+/nbdist/
+/.nb-gradle/
+build/
+!**/src/main/**/build/
+!**/src/test/**/build/
+
+### VS Code ###
+.vscode/

.mvn/wrapper/MavenWrapperDownloader.java (+117 -0)

@@ -0,0 +1,117 @@
+/*
+ * Copyright 2007-present the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import java.net.*;
+import java.io.*;
+import java.nio.channels.*;
+import java.util.Properties;
+
+public class MavenWrapperDownloader {
+
+    private static final String WRAPPER_VERSION = "0.5.6";
+    /**
+     * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided.
+     */
+    private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/"
+        + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar";
+
+    /**
+     * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to
+     * use instead of the default one.
+     */
+    private static final String MAVEN_WRAPPER_PROPERTIES_PATH =
+            ".mvn/wrapper/maven-wrapper.properties";
+
+    /**
+     * Path where the maven-wrapper.jar will be saved to.
+     */
+    private static final String MAVEN_WRAPPER_JAR_PATH =
+            ".mvn/wrapper/maven-wrapper.jar";
+
+    /**
+     * Name of the property which should be used to override the default download url for the wrapper.
+     */
+    private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";
+
+    public static void main(String args[]) {
+        System.out.println("- Downloader started");
+        File baseDirectory = new File(args[0]);
+        System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath());
+
+        // If the maven-wrapper.properties exists, read it and check if it contains a custom
+        // wrapperUrl parameter.
+        File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH);
+        String url = DEFAULT_DOWNLOAD_URL;
+        if(mavenWrapperPropertyFile.exists()) {
+            FileInputStream mavenWrapperPropertyFileInputStream = null;
+            try {
+                mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile);
+                Properties mavenWrapperProperties = new Properties();
+                mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream);
+                url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url);
+            } catch (IOException e) {
+                System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'");
+            } finally {
+                try {
+                    if(mavenWrapperPropertyFileInputStream != null) {
+                        mavenWrapperPropertyFileInputStream.close();
+                    }
+                } catch (IOException e) {
+                    // Ignore ...
+                }
+            }
+        }
+        System.out.println("- Downloading from: " + url);
+
+        File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
+        if(!outputFile.getParentFile().exists()) {
+            if(!outputFile.getParentFile().mkdirs()) {
+                System.out.println(
+                        "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'");
+            }
+        }
+        System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
+        try {
+            downloadFileFromURL(url, outputFile);
+            System.out.println("Done");
+            System.exit(0);
+        } catch (Throwable e) {
+            System.out.println("- Error downloading");
+            e.printStackTrace();
+            System.exit(1);
+        }
+    }
+
+    private static void downloadFileFromURL(String urlString, File destination) throws Exception {
+        if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) {
+            String username = System.getenv("MVNW_USERNAME");
+            char[] password = System.getenv("MVNW_PASSWORD").toCharArray();
+            Authenticator.setDefault(new Authenticator() {
+                @Override
+                protected PasswordAuthentication getPasswordAuthentication() {
+                    return new PasswordAuthentication(username, password);
+                }
+            });
+        }
+        URL website = new URL(urlString);
+        ReadableByteChannel rbc;
+        rbc = Channels.newChannel(website.openStream());
+        FileOutputStream fos = new FileOutputStream(destination);
+        fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
+        fos.close();
+        rbc.close();
+    }
+
+}

.mvn/wrapper/maven-wrapper.properties (+2 -0)

@@ -0,0 +1,2 @@
+distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.1/apache-maven-3.8.1-bin.zip
+wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar

mvnw (+310 -0)

@@ -0,0 +1,310 @@
+#!/bin/sh
+# ----------------------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#    https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ----------------------------------------------------------------------------
+
+# ----------------------------------------------------------------------------
+# Maven Start Up Batch script
+#
+# Required ENV vars:
+# ------------------
+#   JAVA_HOME - location of a JDK home dir
+#
+# Optional ENV vars
+# -----------------
+#   M2_HOME - location of maven2's installed home dir
+#   MAVEN_OPTS - parameters passed to the Java VM when running Maven
+#     e.g. to debug Maven itself, use
+#       set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
+#   MAVEN_SKIP_RC - flag to disable loading of mavenrc files
+# ----------------------------------------------------------------------------
+
+if [ -z "$MAVEN_SKIP_RC" ] ; then
+
+  if [ -f /etc/mavenrc ] ; then
+    . /etc/mavenrc
+  fi
+
+  if [ -f "$HOME/.mavenrc" ] ; then
+    . "$HOME/.mavenrc"
+  fi
+
+fi
+
+# OS specific support.  $var _must_ be set to either true or false.
+cygwin=false;
+darwin=false;
+mingw=false
+case "`uname`" in
+  CYGWIN*) cygwin=true ;;
+  MINGW*) mingw=true;;
+  Darwin*) darwin=true
+    # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
+    # See https://developer.apple.com/library/mac/qa/qa1170/_index.html
+    if [ -z "$JAVA_HOME" ]; then
+      if [ -x "/usr/libexec/java_home" ]; then
+        export JAVA_HOME="`/usr/libexec/java_home`"
+      else
+        export JAVA_HOME="/Library/Java/Home"
+      fi
+    fi
+    ;;
+esac
+
+if [ -z "$JAVA_HOME" ] ; then
+  if [ -r /etc/gentoo-release ] ; then
+    JAVA_HOME=`java-config --jre-home`
+  fi
+fi
+
+if [ -z "$M2_HOME" ] ; then
+  ## resolve links - $0 may be a link to maven's home
+  PRG="$0"
+
+  # need this for relative symlinks
+  while [ -h "$PRG" ] ; do
+    ls=`ls -ld "$PRG"`
+    link=`expr "$ls" : '.*-> \(.*\)$'`
+    if expr "$link" : '/.*' > /dev/null; then
+      PRG="$link"
+    else
+      PRG="`dirname "$PRG"`/$link"
+    fi
+  done
+
+  saveddir=`pwd`
+
+  M2_HOME=`dirname "$PRG"`/..
+
+  # make it fully qualified
+  M2_HOME=`cd "$M2_HOME" && pwd`
+
+  cd "$saveddir"
+  # echo Using m2 at $M2_HOME
+fi
+
+# For Cygwin, ensure paths are in UNIX format before anything is touched
+if $cygwin ; then
+  [ -n "$M2_HOME" ] &&
+    M2_HOME=`cygpath --unix "$M2_HOME"`
+  [ -n "$JAVA_HOME" ] &&
+    JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
+  [ -n "$CLASSPATH" ] &&
+    CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
+fi
+
+# For Mingw, ensure paths are in UNIX format before anything is touched
+if $mingw ; then
+  [ -n "$M2_HOME" ] &&
+    M2_HOME="`(cd "$M2_HOME"; pwd)`"
+  [ -n "$JAVA_HOME" ] &&
+    JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`"
+fi
+
+if [ -z "$JAVA_HOME" ]; then
+  javaExecutable="`which javac`"
+  if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then
+    # readlink(1) is not available as standard on Solaris 10.
+    readLink=`which readlink`
+    if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then
+      if $darwin ; then
+        javaHome="`dirname \"$javaExecutable\"`"
+        javaExecutable="`cd \"$javaHome\" && pwd -P`/javac"
+      else
+        javaExecutable="`readlink -f \"$javaExecutable\"`"
+      fi
+      javaHome="`dirname \"$javaExecutable\"`"
+      javaHome=`expr "$javaHome" : '\(.*\)/bin'`
+      JAVA_HOME="$javaHome"
+      export JAVA_HOME
+    fi
+  fi
+fi
+
+if [ -z "$JAVACMD" ] ; then
+  if [ -n "$JAVA_HOME"  ] ; then
+    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+      # IBM's JDK on AIX uses strange locations for the executables
+      JAVACMD="$JAVA_HOME/jre/sh/java"
+    else
+      JAVACMD="$JAVA_HOME/bin/java"
+    fi
+  else
+    JAVACMD="`which java`"
+  fi
+fi
+
+if [ ! -x "$JAVACMD" ] ; then
+  echo "Error: JAVA_HOME is not defined correctly." >&2
+  echo "  We cannot execute $JAVACMD" >&2
+  exit 1
+fi
+
+if [ -z "$JAVA_HOME" ] ; then
+  echo "Warning: JAVA_HOME environment variable is not set."
+fi
+
+CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
+
+# traverses directory structure from process work directory to filesystem root
+# first directory with .mvn subdirectory is considered project base directory
+find_maven_basedir() {
+
+  if [ -z "$1" ]
+  then
+    echo "Path not specified to find_maven_basedir"
+    return 1
+  fi
+
+  basedir="$1"
+  wdir="$1"
+  while [ "$wdir" != '/' ] ; do
+    if [ -d "$wdir"/.mvn ] ; then
+      basedir=$wdir
+      break
+    fi
+    # workaround for JBEAP-8937 (on Solaris 10/Sparc)
+    if [ -d "${wdir}" ]; then
+      wdir=`cd "$wdir/.."; pwd`
+    fi
+    # end of workaround
+  done
+  echo "${basedir}"
+}
+
+# concatenates all lines of a file
+concat_lines() {
+  if [ -f "$1" ]; then
+    echo "$(tr -s '\n' ' ' < "$1")"
+  fi
+}
+
+BASE_DIR=`find_maven_basedir "$(pwd)"`
+if [ -z "$BASE_DIR" ]; then
+  exit 1;
+fi
+
+##########################################################################################
+# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
+# This allows using the maven wrapper in projects that prohibit checking in binary data.
+##########################################################################################
+if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then
+    if [ "$MVNW_VERBOSE" = true ]; then
+      echo "Found .mvn/wrapper/maven-wrapper.jar"
+    fi
+else
+    if [ "$MVNW_VERBOSE" = true ]; then
+      echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..."
+    fi
+    if [ -n "$MVNW_REPOURL" ]; then
+      jarUrl="$MVNW_REPOURL/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
+    else
+      jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
+    fi
+    while IFS="=" read key value; do
+      case "$key" in (wrapperUrl) jarUrl="$value"; break ;;
+      esac
+    done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties"
+    if [ "$MVNW_VERBOSE" = true ]; then
+      echo "Downloading from: $jarUrl"
+    fi
+    wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar"
+    if $cygwin; then
+      wrapperJarPath=`cygpath --path --windows "$wrapperJarPath"`
+    fi
+
+    if command -v wget > /dev/null; then
+        if [ "$MVNW_VERBOSE" = true ]; then
+          echo "Found wget ... using wget"
+        fi
+        if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
+            wget "$jarUrl" -O "$wrapperJarPath"
+        else
+            wget --http-user=$MVNW_USERNAME --http-password=$MVNW_PASSWORD "$jarUrl" -O "$wrapperJarPath"
+        fi
+    elif command -v curl > /dev/null; then
+        if [ "$MVNW_VERBOSE" = true ]; then
+          echo "Found curl ... using curl"
+        fi
+        if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
+            curl -o "$wrapperJarPath" "$jarUrl" -f
+        else
+            curl --user $MVNW_USERNAME:$MVNW_PASSWORD -o "$wrapperJarPath" "$jarUrl" -f
+        fi
+
+    else
+        if [ "$MVNW_VERBOSE" = true ]; then
+          echo "Falling back to using Java to download"
+        fi
+        javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java"
+        # For Cygwin, switch paths to Windows format before running javac
+        if $cygwin; then
+          javaClass=`cygpath --path --windows "$javaClass"`
+        fi
+        if [ -e "$javaClass" ]; then
+            if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
+                if [ "$MVNW_VERBOSE" = true ]; then
+                  echo " - Compiling MavenWrapperDownloader.java ..."
+                fi
+                # Compiling the Java class
+                ("$JAVA_HOME/bin/javac" "$javaClass")
+            fi
+            if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
+                # Running the downloader
+                if [ "$MVNW_VERBOSE" = true ]; then
+                  echo " - Running MavenWrapperDownloader.java ..."
+                fi
+                ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR")
+            fi
+        fi
+    fi
+fi
+##########################################################################################
+# End of extension
+##########################################################################################
+
+export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}
+if [ "$MVNW_VERBOSE" = true ]; then
+  echo $MAVEN_PROJECTBASEDIR
+fi
+MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS"
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin; then
+  [ -n "$M2_HOME" ] &&
+    M2_HOME=`cygpath --path --windows "$M2_HOME"`
+  [ -n "$JAVA_HOME" ] &&
+    JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
+  [ -n "$CLASSPATH" ] &&
+    CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
+  [ -n "$MAVEN_PROJECTBASEDIR" ] &&
+    MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"`
+fi
+
+# Provide a "standardized" way to retrieve the CLI args that will
+# work with both Windows and non-Windows executions.
+MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@"
+export MAVEN_CMD_LINE_ARGS
+
+WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
+
+exec "$JAVACMD" \
+  $MAVEN_OPTS \
+  -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \
+  "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \
+  ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@"

mvnw.cmd (+182 -0)

@@ -0,0 +1,182 @@
+@REM ----------------------------------------------------------------------------
+@REM Licensed to the Apache Software Foundation (ASF) under one
+@REM or more contributor license agreements.  See the NOTICE file
+@REM distributed with this work for additional information
+@REM regarding copyright ownership.  The ASF licenses this file
+@REM to you under the Apache License, Version 2.0 (the
+@REM "License"); you may not use this file except in compliance
+@REM with the License.  You may obtain a copy of the License at
+@REM
+@REM    https://www.apache.org/licenses/LICENSE-2.0
+@REM
+@REM Unless required by applicable law or agreed to in writing,
+@REM software distributed under the License is distributed on an
+@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+@REM KIND, either express or implied.  See the License for the
+@REM specific language governing permissions and limitations
+@REM under the License.
+@REM ----------------------------------------------------------------------------
+
+@REM ----------------------------------------------------------------------------
+@REM Maven Start Up Batch script
+@REM
+@REM Required ENV vars:
+@REM JAVA_HOME - location of a JDK home dir
+@REM
+@REM Optional ENV vars
+@REM M2_HOME - location of maven2's installed home dir
+@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
+@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending
+@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
+@REM     e.g. to debug Maven itself, use
+@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
+@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
+@REM ----------------------------------------------------------------------------
+
+@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
+@echo off
+@REM set title of command window
+title %0
+@REM enable echoing by setting MAVEN_BATCH_ECHO to 'on'
+@if "%MAVEN_BATCH_ECHO%" == "on"  echo %MAVEN_BATCH_ECHO%
+
+@REM set %HOME% to equivalent of $HOME
+if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
+
+@REM Execute a user defined script before this one
+if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
+@REM check for pre script, once with legacy .bat ending and once with .cmd ending
+if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat"
+if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd"
+:skipRcPre
+
+@setlocal
+
+set ERROR_CODE=0
+
+@REM To isolate internal variables from possible post scripts, we use another setlocal
+@setlocal
+
+@REM ==== START VALIDATION ====
+if not "%JAVA_HOME%" == "" goto OkJHome
+
+echo.
+echo Error: JAVA_HOME not found in your environment. >&2
+echo Please set the JAVA_HOME variable in your environment to match the >&2
+echo location of your Java installation. >&2
+echo.
+goto error
+
+:OkJHome
+if exist "%JAVA_HOME%\bin\java.exe" goto init
+
+echo.
+echo Error: JAVA_HOME is set to an invalid directory. >&2
+echo JAVA_HOME = "%JAVA_HOME%" >&2
+echo Please set the JAVA_HOME variable in your environment to match the >&2
+echo location of your Java installation. >&2
+echo.
+goto error
+
+@REM ==== END VALIDATION ====
+
+:init
+
+@REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
+@REM Fallback to current working directory if not found.
+
+set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
+IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
+
+set EXEC_DIR=%CD%
+set WDIR=%EXEC_DIR%
+:findBaseDir
+IF EXIST "%WDIR%"\.mvn goto baseDirFound
+cd ..
+IF "%WDIR%"=="%CD%" goto baseDirNotFound
+set WDIR=%CD%
+goto findBaseDir
+
+:baseDirFound
+set MAVEN_PROJECTBASEDIR=%WDIR%
+cd "%EXEC_DIR%"
+goto endDetectBaseDir
+
+:baseDirNotFound
+set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
+cd "%EXEC_DIR%"
+
+:endDetectBaseDir
+
+IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
+
+@setlocal EnableExtensions EnableDelayedExpansion
+for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
+@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
+
+:endReadAdditionalConfig
+
+SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
+set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
+set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
+
+set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
+
+FOR /F "tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO (
+    IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B
+)
+
+@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
+@REM This allows using the maven wrapper in projects that prohibit checking in binary data.
+if exist %WRAPPER_JAR% (
+    if "%MVNW_VERBOSE%" == "true" (
+        echo Found %WRAPPER_JAR%
+    )
+) else (
+    if not "%MVNW_REPOURL%" == "" (
+        SET DOWNLOAD_URL="%MVNW_REPOURL%/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
+    )
+    if "%MVNW_VERBOSE%" == "true" (
+        echo Couldn't find %WRAPPER_JAR%, downloading it ...
+        echo Downloading from: %DOWNLOAD_URL%
+    )
+
+    powershell -Command "&{"^
+		"$webclient = new-object System.Net.WebClient;"^
+		"if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^
+		"$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^
+		"}"^
+		"[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^
+		"}"
+    if "%MVNW_VERBOSE%" == "true" (
+        echo Finished downloading %WRAPPER_JAR%
+    )
+)
+@REM End of extension
+
+@REM Provide a "standardized" way to retrieve the CLI args that will
+@REM work with both Windows and non-Windows executions.
+set MAVEN_CMD_LINE_ARGS=%*
+
+%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
+if ERRORLEVEL 1 goto error
+goto end
+
+:error
+set ERROR_CODE=1
+
+:end
+@endlocal & set ERROR_CODE=%ERROR_CODE%
+
+if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
+@REM check for post script, once with legacy .bat ending and once with .cmd ending
+if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
+if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
+:skipRcPost
+
+@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
+if "%MAVEN_BATCH_PAUSE%" == "on" pause
+
+if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
+
+exit /B %ERROR_CODE%

pom.xml (+319 -0)

@@ -0,0 +1,319 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<parent>
+		<groupId>org.springframework.boot</groupId>
+		<artifactId>spring-boot-starter-parent</artifactId>
+		<version>2.5.3</version>
+		<relativePath/> <!-- lookup parent from repository -->
+	</parent>
+	<groupId>com.persagy</groupId>
+	<artifactId>ztk-edgeclouddatasecurity</artifactId>
+	<version>0.0.1-SNAPSHOT</version>
+	<name>ztk-edgeclouddatasecurity</name>
+	<description>Cmft project data encryption service interface processing</description>
+	<properties>
+		<java.version>1.8</java.version>
+		<protostuff.version>1.0.10</protostuff.version>
+		<objenesis.version>2.4</objenesis.version>
+	</properties>
+	<dependencies>
+		<dependency>
+			<groupId>org.springframework.boot</groupId>
+			<artifactId>spring-boot-starter</artifactId>
+		</dependency>
+
+		<dependency>
+			<groupId>org.springframework.boot</groupId>
+			<artifactId>spring-boot-starter-test</artifactId>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.springframework.boot</groupId>
+			<artifactId>spring-boot-starter</artifactId>
+		</dependency>
+		<dependency>
+			<groupId>org.springframework.boot</groupId>
+			<artifactId>spring-boot-starter-web</artifactId>
+			<!-- Remove the embedded Tomcat plugin -->
+			<!--  <exclusions>
+ 				<exclusion>
+					 <groupId>org.springframework.boot</groupId>
+ 					<artifactId>spring-boot-starter-tomcat</artifactId>
+ 				</exclusion>
+			</exclusions>  -->
+		</dependency>
+		<dependency>
+			<groupId>org.springframework.boot</groupId>
+			<artifactId>spring-boot-starter-integration</artifactId>
+			<version>2.5.2</version>
+			<exclusions>
+				<exclusion>
+					<groupId>org.springframework.boot</groupId>
+					<artifactId>spring-boot-starter-logging</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>org.springframework.integration</groupId>
+			<artifactId>spring-integration-ip</artifactId>
+			<version>5.5.1</version>
+		</dependency>
+		<dependency>
+			<groupId>org.projectlombok</groupId>
+			<artifactId>lombok</artifactId>
+			<optional>true</optional>
+		</dependency>
+		<dependency>
+			<groupId>org.springframework.boot</groupId>
+			<artifactId>spring-boot-starter-test</artifactId>
+			<scope>test</scope>
+			<exclusions>
+				<exclusion>
+					<groupId>org.junit.vintage</groupId>
+					<artifactId>junit-vintage-engine</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>org.springframework.boot</groupId>
+			<artifactId>spring-boot-starter-aop</artifactId>
+			<version>2.5.2</version>
+		</dependency>
+		<dependency>
+			<groupId>org.springframework.integration</groupId>
+			<artifactId>spring-integration-core</artifactId>
+			<version>5.5.1</version>
+		</dependency>
+
+		<dependency>
+			<groupId>org.springframework.boot</groupId>
+			<artifactId>spring-boot-configuration-processor</artifactId>
+			<optional>true</optional>
+		</dependency>
+		<dependency>
+			<groupId>org.yaml</groupId>
+			<artifactId>snakeyaml</artifactId>
+		</dependency>
+		<dependency>
+			<groupId>com.github.ulisesbocchio</groupId>
+			<artifactId>jasypt-spring-boot-starter</artifactId>
+			<version>3.0.3</version>
+		</dependency>
+<!--		<dependency>-->
+<!--			<groupId>org.apache.kafka</groupId>-->
+<!--			<artifactId>kafka-clients</artifactId>-->
+<!--			<version>0.11.0.0</version>-->
+<!--		</dependency>-->
+		<dependency>
+			<groupId>org.springframework.kafka</groupId>
+			<artifactId>spring-kafka</artifactId>
+<!--			<version>2.3.7.RELEASE</version>-->
+		</dependency>
+<!--		<dependency>-->
+<!--			<groupId>org.apache.kafka</groupId>-->
+<!--			<artifactId>kafka-streams</artifactId>-->
+<!--			<version>0.11.0.0</version>-->
+<!--		</dependency>-->
+		<dependency>
+			<groupId>com.alibaba</groupId>
+			<artifactId>fastjson</artifactId>
+			<version>1.2.54</version>
+		</dependency>
+		<dependency>
+			<groupId>commons-net</groupId>
+			<artifactId>commons-net</artifactId>
+			<version>3.3</version>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.commons</groupId>
+			<artifactId>commons-lang3</artifactId>
+			<version>3.7</version>
+		</dependency>
+		<dependency>
+			<groupId>com.aliyun.alink.linksdk</groupId>
+			<artifactId>iot-linkkit-java</artifactId>
+			<version>1.2.0.1</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>com.aliyun.alink.linksdk</groupId>
+			<artifactId>public-cmp-java</artifactId>
+			<version>1.3.6</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>com.google.code.gson</groupId>
+			<artifactId>gson</artifactId>
+			<version>2.8.1</version>
+			<scope>compile</scope>
+		</dependency>
+		<!-- https://mvnrepository.com/artifact/cn.shuibo/rsa-encrypt-body-spring-boot -->
+		<dependency>
+			<groupId>cn.shuibo</groupId>
+			<artifactId>rsa-encrypt-body-spring-boot</artifactId>
+			<version>1.0.1.RELEASE</version>
+		</dependency>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>4.13.1</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.httpcomponents</groupId>
+			<artifactId>httpclient</artifactId>
+			<version>4.5.13</version>
+		</dependency>
+		<dependency>
+			<groupId>io.netty</groupId>
+			<artifactId>netty-all</artifactId>
+			<version>4.1.43.Final</version>
+		</dependency>
+		<!-- amqp 1.0 qpid client -->
+		<dependency>
+			<groupId>org.apache.qpid</groupId>
+			<artifactId>qpid-jms-client</artifactId>
+			<version>0.56.0</version>
+
+		</dependency>
+		<!--alibaba http2 -->
+		<dependency>
+			<groupId>com.aliyun.openservices</groupId>
+			<artifactId>iot-client-message</artifactId>
+			<version>1.1.3</version>
+		</dependency>
+		<dependency>
+			<groupId>com.aliyun</groupId>
+			<artifactId>aliyun-java-sdk-core</artifactId>
+			<version>3.7.1</version>
+		</dependency>
+		<!-- util for base64-->
+		<dependency>
+			<groupId>commons-codec</groupId>
+			<artifactId>commons-codec</artifactId>
+			<version>1.10</version>
+		</dependency>
+		<dependency>
+			<groupId>com.google.guava</groupId>
+			<artifactId>guava</artifactId>
+			<version>26.0-jre</version>
+		</dependency>
+		<!-- Protostuff -->
+		<dependency>
+			<groupId>com.dyuproject.protostuff</groupId>
+			<artifactId>protostuff-core</artifactId>
+			<version>${protostuff.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>com.dyuproject.protostuff</groupId>
+			<artifactId>protostuff-runtime</artifactId>
+			<version>${protostuff.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>org.objenesis</groupId>
+			<artifactId>objenesis</artifactId>
+			<version>${objenesis.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>cn.hutool</groupId>
+			<artifactId>hutool-all</artifactId>
+			<version>5.7.9</version>
+		</dependency>
+	</dependencies>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-resources-plugin</artifactId>
+				<version>3.1.0</version>
+				<executions>
+					<execution>
+						<id>copy-resources</id>
+						<phase>package</phase>
+						<goals>
+							<goal>copy-resources</goal>
+						</goals>
+						<configuration>
+							<resources>
+								<resource>
+									<directory>src/main/resources/</directory>
+									<includes>
+										<include>**/*.properties</include>
+										<include>**/*.yml</include>
+										<include>**/*.xml</include>
+										<include>**/*.jks</include>
+									</includes>
+								</resource>
+							</resources>
+							<outputDirectory>${project.build.directory}/config</outputDirectory>
+						</configuration>
+					</execution>
+				</executions>
+			</plugin>
+			<plugin>
+				<groupId>org.springframework.boot</groupId>
+				<artifactId>spring-boot-maven-plugin</artifactId>
+				<configuration>
+					<!-- Must be configured, otherwise the jar build reports that the main class cannot be found; adjust for the specific project -->
+					<mainClass>com.persagy.ztkedgeclouddatasecurity.ZtkEdgeclouddatasecurityApplication</mainClass>
+					<executable>true</executable>
+					<fork>true</fork>
+					<excludes>
+						<exclude>
+							<groupId>org.projectlombok</groupId>
+							<artifactId>lombok</artifactId>
+						</exclude>
+					</excludes>
+				</configuration>
+			</plugin>
+		</plugins>
+		<resources>
+			<resource>
+				<directory>src/main/resources/</directory>
+				<filtering>true</filtering>
+				<!-- Include the listed resources under src/main/resources -->
+				<includes>
+					<include>**/*.properties</include>
+					<include>**/*.yml</include>
+					<include>**/*.xml</include>
+					<include>**/*.jks</include>
+				</includes>
+
+			</resource>
+		</resources>
+	</build>
+	<repositories>
+		<repository>
+			<id>alimaven</id>
+			<name>aliyun maven</name>
+			<url>http://maven.aliyun.com/nexus/content/groups/public/</url>
+		</repository>
+		<repository>
+			<id>spring-snapshots</id>
+			<name>Spring Snapshots</name>
+			<url>https://repo1.maven.org/maven2/</url>
+			<snapshots>
+				<enabled>true</enabled>
+			</snapshots>
+		</repository>
+	</repositories>
+	<pluginRepositories>
+		<pluginRepository>
+			<id>spring-milestones</id>
+			<name>Spring Milestones</name>
+			<url>https://repo1.maven.org/maven2/</url>
+		</pluginRepository>
+		<pluginRepository>
+			<id>spring-snapshots</id>
+			<name>Spring Snapshots</name>
+			<url>https://repo1.maven.org/maven2/</url>
+			<snapshots>
+				<enabled>true</enabled>
+			</snapshots>
+		</pluginRepository>
+	</pluginRepositories>
+
+</project>

src/main/java/com/persagy/ztkedgeclouddatasecurity/DirectoryMonitorService.java (+35 -0)

@@ -0,0 +1,35 @@
+package com.persagy.ztkedgeclouddatasecurity;
+
+//@Service
+//public class DirectoryMonitorService {
+//
+//    private static final Logger logger = (Logger) LogManager.getLogger(DirectoryMonitorService.class);
+//
+//    private static BlockingQueue messageQueue = null;
+//    public  ChattingUser chattingUser;
+//
+//    @Autowired
+//    private KafkaProducer watchService;
+//
+//    @Async
+//    public void monitorSourceDirectoty() {
+//        while (true) {
+//            try {
+//                messageQueue=chattingUser.getMessageTcpSendQueue();
+//                if (messageQueue !=null){
+//
+//
+//                      watchService.producerMsg(messageQueue);
+//
+//                }
+//
+//            } catch (ClosedWatchServiceException e) {
+//                logger.error("Exception occured while polling from source file", e);
+//                return;
+//            }
+//            // process the WatchEvents
+//
+//
+//        }
+//    }
+//}

src/main/java/com/persagy/ztkedgeclouddatasecurity/SystemConfig.java (+26 -0)

@@ -0,0 +1,26 @@
+package com.persagy.ztkedgeclouddatasecurity;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.ApplicationArguments;
+import org.springframework.boot.ApplicationRunner;
+import org.springframework.core.annotation.Order;
+import org.springframework.core.env.Environment;
+import org.springframework.stereotype.Component;
+
+@Component
+@Order(value = 1)
+public class SystemConfig  implements ApplicationRunner {
+
+    @Autowired // injected by the Spring container
+    private Environment environment;
+
+    @Value("${spring.location}")
+    private  String profile;
+
+
+    @Override
+    public void run(ApplicationArguments args) throws Exception {
+        System.out.println("Environment Properties Config >>> " + environment.getProperty("spring.location"));
+    }
+}

src/main/java/com/persagy/ztkedgeclouddatasecurity/ZtkEdgeclouddatasecurityApplication.java (+34 -0)

@@ -0,0 +1,34 @@
+package com.persagy.ztkedgeclouddatasecurity;
+
+import cn.shuibo.annotation.EnableSecurity;
+import com.persagy.ztkedgeclouddatasecurity.config.YmlPropertySourceFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.ApplicationRunner;
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.web.servlet.ServletComponentScan;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.PropertySource;
+import org.springframework.scheduling.annotation.EnableAsync;
+
+@EnableAsync
+@EnableSecurity
+@PropertySource(value = "classpath:application-${spring.profiles.active}.yml", encoding = "UTF-8",factory = YmlPropertySourceFactory.class)
+@ServletComponentScan
+@SpringBootApplication
+public class ZtkEdgeclouddatasecurityApplication {
+//	@Autowired
+//	private DirectoryMonitorService directoryMonitorService;
+
+//	@Bean
+//	public ApplicationRunner startDirectoryMonitorService() {
+//		return args -> directoryMonitorService.monitorSourceDirectoty();
+//	}
+
+	public static void main(String[] args) {
+		SpringApplication.run(ZtkEdgeclouddatasecurityApplication.class, args);
+	}
+
+
+
+}
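
For context: the @PropertySource annotation above pulls in application-${spring.profiles.active}.yml through the custom YmlPropertySourceFactory added in this commit (file 15, not shown in this excerpt). A minimal sketch of such a factory, assuming the conventional YamlPropertiesFactoryBean approach; the committed file may differ:

package com.persagy.ztkedgeclouddatasecurity.config;

import org.springframework.beans.factory.config.YamlPropertiesFactoryBean;
import org.springframework.core.env.PropertiesPropertySource;
import org.springframework.core.env.PropertySource;
import org.springframework.core.io.support.EncodedResource;
import org.springframework.core.io.support.PropertySourceFactory;

import java.util.Properties;

// Hypothetical sketch: lets @PropertySource read a YAML file, which it cannot do natively.
public class YmlPropertySourceFactory implements PropertySourceFactory {

    @Override
    public PropertySource<?> createPropertySource(String name, EncodedResource resource) {
        YamlPropertiesFactoryBean factory = new YamlPropertiesFactoryBean();
        factory.setResources(resource.getResource());
        // Flattens nested YAML keys into dotted property names (e.g. alibaba.iot.host).
        Properties properties = factory.getObject();
        return new PropertiesPropertySource(resource.getResource().getFilename(), properties);
    }
}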

src/main/java/com/persagy/ztkedgeclouddatasecurity/alibabaiot/alibabaiotgateway.java (+347 -0)

@@ -0,0 +1,347 @@
+package com.persagy.ztkedgeclouddatasecurity.alibabaiot;
+
+import io.netty.handler.proxy.HttpProxyHandler;
+import io.netty.handler.proxy.ProxyHandler;
+import org.apache.commons.codec.binary.Base64;
+import org.apache.qpid.jms.JmsConnection;
+import org.apache.qpid.jms.JmsConnectionExtensions;
+import org.apache.qpid.jms.JmsConnectionFactory;
+import org.apache.qpid.jms.JmsConnectionListener;
+import org.apache.qpid.jms.message.JmsInboundMessageDispatch;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.ApplicationArguments;
+import org.springframework.boot.ApplicationRunner;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.core.annotation.Order;
+import org.springframework.scheduling.annotation.Async;
+import org.springframework.scheduling.annotation.EnableAsync;
+import org.springframework.stereotype.Component;
+
+import javax.crypto.Mac;
+import javax.crypto.spec.SecretKeySpec;
+import javax.jms.*;
+import javax.naming.Context;
+import javax.naming.InitialContext;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.util.ArrayList;
+import java.util.Hashtable;
+import java.util.List;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Supplier;
+
+
+@Component
+@Order(value = 1)
+@EnableAsync
+@ConditionalOnProperty(prefix = "spring", name="location", havingValue="Cloud")
+public class alibabaiotgateway implements ApplicationRunner {
+    private final static Logger logger = LoggerFactory.getLogger(alibabaiotgateway.class);
+    @Value("${alibaba.iot.accessKey}")
+    private  String accessKey ;
+    @Value("${alibaba.iot.accessSecret}")
+    private  String accessSecret ;
+    @Value("${alibaba.iot.consumerGroupId}")
+    private  String consumerGroupId ;
+
+    // iotInstanceId: for an Enterprise Edition instance, fill in the instance ID; for a public instance, use the empty string "".
+    @Value("${alibaba.iot.iotInstanceId}")
+    private  String iotInstanceId ;
+
+    // The clientId parameter is shown in the "Client ID" column on the consumer-group status page of the console's server-side subscription.
+    // Use a unique identifier such as the machine UUID, MAC address, or IP as the clientId, so different clients can be told apart.
+    @Value("${alibaba.iot.clientId}")
+    private  String clientId ;
+
+    // ${YourHost} is the access endpoint domain; see the AMQP client access documentation.
+    @Value("${alibaba.iot.host}")
+    private  String host ;
+    @Value("${alibaba.iot.proxy}")
+    private  boolean proxy;
+    @Value("${alibaba.iot.proxyhost}")
+    private  String proxyhost;
+    @Value("${alibaba.iot.proxyport}")
+    private  Integer proxyport;
+
+
+
+    // Number of connections started by a single process.
+    // A single connection has a limited consumption rate; see the usage limits. At most 64 connections.
+    // Connection count affects consumption rate and rebalancing; adding one connection per 500 QPS is recommended.
+    private static int connectionCount = 4;
+
+    // Async thread pool for business processing; tune the pool parameters to your workload, or process received messages with another async mechanism.
+    private final static ExecutorService executorService = new ThreadPoolExecutor(
+            Runtime.getRuntime().availableProcessors(),
+            Runtime.getRuntime().availableProcessors() * 2, 60, TimeUnit.SECONDS,
+            new LinkedBlockingQueue(50000));
+
+
+//    public static void main(String[] args) throws Exception {
+//        List<Connection> connections = new ArrayList<>();
+//
+//        // For parameter details, see the AMQP client access documentation.
+//        for (int i = 0; i < connectionCount; i++) {
+//            long timeStamp = System.currentTimeMillis();
+//            // Signing method: hmacmd5, hmacsha1, and hmacsha256 are supported.
+//            String signMethod = "hmacsha1";
+//
+//            // For how userName is assembled, see the AMQP client access documentation.
+//            String userName = clientId +"-" + i + "|authMode=aksign"
+//                    + ",signMethod=" + signMethod
+//                    + ",timestamp=" + timeStamp
+//                    + ",authId=" + accessKey
+//                    + ",iotInstanceId=" + iotInstanceId
+//                    + ",consumerGroupId=" + consumerGroupId
+//                    + "|";
+//            // Compute the signature; for how password is assembled, see the AMQP client access documentation.
+//            String signContent = "authId=" + accessKey + "&timestamp=" + timeStamp;
+//            String password = doSign(signContent, accessSecret, signMethod);
+//            //amqp.idleTimeout
+//            String connectionUrl = "failover:(amqps://" + host + ":5671?amqp.idleTimeout=80000)"
+//                    + "?failover.reconnectDelay=30";
+//
+//            Hashtable<String, String> hashtable = new Hashtable<>();
+//            hashtable.put("connectionfactory.SBCF", connectionUrl);
+//            hashtable.put("queue.QUEUE", "default");
+//            hashtable.put(Context.INITIAL_CONTEXT_FACTORY, "org.apache.qpid.jms.jndi.JmsInitialContextFactory");
+//            Context context = new InitialContext(hashtable);
+//            ConnectionFactory cf = (ConnectionFactory)context.lookup("SBCF");
+//            Destination queue = (Destination)context.lookup("QUEUE");
+//            // Create the connection.
+//            Connection connection = cf.createConnection(userName, password);
+//            connections.add(connection);
+//
+//            ((JmsConnection)connection).addConnectionListener(myJmsConnectionListener);
+//            // Create the session.
+//            // Session.CLIENT_ACKNOWLEDGE: after receiving a message, message.acknowledge() must be called manually.
+//            // Session.AUTO_ACKNOWLEDGE: the SDK ACKs automatically (recommended).
+//            Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
+//
+//            connection.start();
+//            // Create the receiver.
+//            MessageConsumer consumer = session.createConsumer(queue);
+//            consumer.setMessageListener(messageListener);
+//        }
+//
+//        logger.info("amqp demo is started successfully, and will exit after 60s ");
+//
+//        // Stop the program.
+//        Thread.sleep(60 * 1000);
+//        logger.info("run shutdown");
+//
+//        connections.forEach(c-> {
+//            try {
+//                c.close();
+//            } catch (JMSException e) {
+//                logger.error("failed to close connection", e);
+//            }
+//        });
+//
+//        executorService.shutdown();
+//        if (executorService.awaitTermination(10, TimeUnit.SECONDS)) {
+//            logger.info("shutdown success");
+//        } else {
+//            logger.info("failed to handle messages");
+//        }
+//    }
+
+    private static MessageListener messageListener = new MessageListener() {
+        @Override
+        public void onMessage(final Message message) {
+            try {
+                // 1. Always ACK after a message is received.
+                // Recommended: create the session with Session.AUTO_ACKNOWLEDGE, which ACKs automatically.
+                // Alternative: create the session with Session.CLIENT_ACKNOWLEDGE and call message.acknowledge() to ACK.
+                // message.acknowledge();
+                // 2. Process received messages asynchronously and keep time-consuming logic out of onMessage.
+                // If business processing blocks the thread for too long, the SDK's normal message callbacks may be affected.
+                executorService.submit(new Runnable() {
+                    @Override
+                    public void run() {
+                        processMessage(message);
+                    }
+                });
+            } catch (Exception e) {
+                logger.error("submit task occurs exception ", e);
+            }
+        }
+    };
+
+    /**
+     * Handle the concrete business logic for received messages here.
+     */
+    private static void processMessage(Message message) {
+        try {
+            byte[] body = message.getBody(byte[].class);
+            String content = new String(body);
+            String topic = message.getStringProperty("topic");
+            String messageId = message.getStringProperty("messageId");
+            logger.info("receive message"
+                    + ",\n topic = " + topic
+                    + ",\n messageId = " + messageId
+                    + ",\n content = " + content);
+        } catch (Exception e) {
+            logger.error("processMessage occurs error ", e);
+        }
+    }
+
+    private static JmsConnectionListener myJmsConnectionListener = new JmsConnectionListener() {
+        /**
+         * The connection was established successfully.
+         */
+        @Override
+        public void onConnectionEstablished(URI remoteURI) {
+            logger.info("onConnectionEstablished, remoteUri:{}", remoteURI);
+        }
+
+        /**
+         * The connection finally failed after the maximum number of retries.
+         */
+        @Override
+        public void onConnectionFailure(Throwable error) {
+            logger.error("onConnectionFailure, {}", error.getMessage());
+        }
+
+        /**
+         * The connection was interrupted.
+         */
+        @Override
+        public void onConnectionInterrupted(URI remoteURI) {
+            logger.info("onConnectionInterrupted, remoteUri:{}", remoteURI);
+        }
+
+        /**
+         * The connection was automatically restored after an interruption.
+         */
+        @Override
+        public void onConnectionRestored(URI remoteURI) {
+            logger.info("onConnectionRestored, remoteUri:{}", remoteURI);
+        }
+
+        @Override
+        public void onInboundMessage(JmsInboundMessageDispatch envelope) {}
+
+        @Override
+        public void onSessionClosed(Session session, Throwable cause) {}
+
+        @Override
+        public void onConsumerClosed(MessageConsumer consumer, Throwable cause) {}
+
+        @Override
+        public void onProducerClosed(MessageProducer producer, Throwable cause) {}
+    };
+
+    /**
+     * Compute the signature; for how password is assembled, see the AMQP client access documentation.
+     */
+    private static String doSign(String toSignString, String secret, String signMethod) throws Exception {
+        SecretKeySpec signingKey = new SecretKeySpec(secret.getBytes(), signMethod);
+        Mac mac = Mac.getInstance(signMethod);
+        mac.init(signingKey);
+        byte[] rawHmac = mac.doFinal(toSignString.getBytes());
+        return Base64.encodeBase64String(rawHmac);
+    }
+
+    @Async
+    @Override
+    public void run(ApplicationArguments args) throws Exception {
+        List<Connection> connections = new ArrayList<>();
+
+        // For parameter details, see the AMQP client access documentation.
+        for (int i = 0; i < connectionCount; i++) {
+            long timeStamp = System.currentTimeMillis();
+            // Signing method: hmacmd5, hmacsha1, and hmacsha256 are supported.
+            String signMethod = "hmacsha1";
+
+            // For how userName is assembled, see the AMQP client access documentation.
+            String userName = clientId +"-" + i + "|authMode=aksign"
+                    + ",signMethod=" + signMethod
+                    + ",timestamp=" + timeStamp
+                    + ",authId=" + accessKey
+                    + ",iotInstanceId=" + iotInstanceId
+                    + ",consumerGroupId=" + consumerGroupId
+                    + "|";
+            // Compute the signature; for how password is assembled, see the AMQP client access documentation.
+            String signContent = "authId=" + accessKey + "&timestamp=" + timeStamp;
+            String password = doSign(signContent, accessSecret, signMethod);
+            //amqp.idleTimeout
+            String connectionUrl = "failover:(amqps://" + host + ":5671?amqp.idleTimeout=80000)"
+                    + "?failover.reconnectDelay=30";
+
+            Hashtable<String, String> hashtable = new Hashtable<>();
+            hashtable.put("connectionfactory.SBCF", connectionUrl);
+            hashtable.put("queue.QUEUE", "default");
+            hashtable.put(Context.INITIAL_CONTEXT_FACTORY, "org.apache.qpid.jms.jndi.JmsInitialContextFactory");
+            Context context = new InitialContext(hashtable);
+
+            Connection connection = null;
+            if (proxy){
+                logger.info(">>>>进入代理服务处理方法");
+                //   context =new InitialContext(hashtable);
+//                JMSConnectionFactory cf1 = (JMSConnectionFactory) context.lookup("SBCF");
+//                Supplier<ProxyHandler> proxyHandlerSupplier =()->{
+//                  return new HttpProxyHandler(new InetSocketAddress(proxyhost,proxyport)) ;
+//                };
+//                cf1.setExtension(JmsConnectionExtensions.PROXY_HANDLER_SUPPLIER.toString(),(connection1,remote)->{
+//                    return proxyHandlerSupplier;
+//                });
+                Supplier<ProxyHandler> proxyHandlerSupplier =()-> new HttpProxyHandler(new InetSocketAddress(proxyhost,proxyport));
+                JmsConnectionFactory factory = (JmsConnectionFactory) context.lookup("SBCF");
+                factory.setExtension(JmsConnectionExtensions.PROXY_HANDLER_SUPPLIER.toString(), (connection1, remote) -> {
+//                    SocketAddress proxyAddress = new InetSocketAddress(proxyhost, proxyport);
+//                    Supplier<ProxyHandler> proxyHandlerFactory = () -> {
+//                        return new HttpProxyHandler(proxyAddress);
+//                    };
+                    return proxyHandlerSupplier;
+                });
+                connection = factory.createConnection(userName, password);
+                logger.info(">>>>>>>创建阿里iot通过招商代理访问互联网");
+            }else{
+                ConnectionFactory cf = (ConnectionFactory)context.lookup("SBCF");
+                // Create the connection.
+                connection= cf.createConnection(userName, password);
+            }
+
+            Destination queue = (Destination)context.lookup("QUEUE");
+            connections.add(connection);
+
+            ((JmsConnection)connection).addConnectionListener(myJmsConnectionListener);
+            // Create the session.
+            // Session.CLIENT_ACKNOWLEDGE: after receiving a message, message.acknowledge() must be called manually.
+            // Session.AUTO_ACKNOWLEDGE: the SDK ACKs automatically (recommended).
+            // Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
+            Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
+            connection.start();
+
+            // Create the receiver.
+            MessageConsumer consumer = session.createConsumer(queue);
+            consumer.setMessageListener(messageListener);
+        }
+
+        logger.info("amqp demo is started successfully, and will exit after 60s ");
+
+        // Stop the program.
+        Thread.sleep(60 * 1000);
+        logger.info("run shutdown");
+
+        connections.forEach(c-> {
+            try {
+                c.close();
+            } catch (JMSException e) {
+                logger.error("failed to close connection", e);
+            }
+        });
+        executorService.shutdown();
+        if (executorService.awaitTermination(10, TimeUnit.SECONDS)) {
+            logger.info("shutdown success");
+        } else {
+            logger.info("failed to handle messages");
+        }
+    }
+}

+ 50 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/config/AsyncConfig.java

@@ -0,0 +1,50 @@
+package com.persagy.ztkedgeclouddatasecurity.config;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.aop.interceptor.AsyncUncaughtExceptionHandler;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.scheduling.annotation.AsyncConfigurerSupport;
+import org.springframework.scheduling.annotation.EnableAsync;
+import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
+
+import java.lang.reflect.Method;
+import java.util.concurrent.Executor;
+
+// Registered as a configuration so the @Value fields are injected; without
+// this the class is never instantiated by Spring and getAsyncExecutor is unused.
+@Configuration
+@EnableAsync
+public class AsyncConfig extends AsyncConfigurerSupport {
+
+    private static final Logger logger = LoggerFactory.getLogger(AsyncConfig.class);
+
+    private static final String THREAD_NAME_PREFIX = "Parser-";
+
+    @Value("${corePoolSize}")
+    public int corePoolSize;
+
+    @Value("${maxPoolSize}")
+    public int maxPoolSize;
+
+    @Value("${queueCapacity}")
+    public int queueCapacity;
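+
+    // Pool sizing comes from the environment; e.g. (assumed values, not part of
+    // this commit): corePoolSize=8, maxPoolSize=16, queueCapacity=10000.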
+
+    @Override
+    public Executor getAsyncExecutor() {
+        ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
+        executor.setCorePoolSize(corePoolSize);
+        executor.setMaxPoolSize(maxPoolSize);
+        executor.setQueueCapacity(queueCapacity);
+        executor.setThreadNamePrefix(THREAD_NAME_PREFIX);
+        executor.initialize();
+
+        return executor;
+    }
+
+    @Override
+    public AsyncUncaughtExceptionHandler getAsyncUncaughtExceptionHandler() {
+        return (Throwable ex, Method method, Object... params) -> {
+            logger.error("Exception message - " + ex.getMessage());
+            logger.error("Method name - " + method.getName());
+            for (Object param : params) {
+                logger.error("Parameter value - " + param);
+            }
+        };
+    }
+}

+ 20 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/config/EdgeToCloudServiceInterface.java

@@ -0,0 +1,20 @@
+package com.persagy.ztkedgeclouddatasecurity.config;
+
+public interface EdgeToCloudServiceInterface {
+
+    void SentMsgToCloudQueue();
+}

+ 32 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/config/FactoryForStrategy.java

@@ -0,0 +1,32 @@
+package com.persagy.ztkedgeclouddatasecurity.config;
+
+
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Service;
+
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+@Service
+public class FactoryForStrategy {
+
+    @Value("${listener.port}")
+    private String listenerPort;
+
+
+    Map<String, EdgeToCloudServiceInterface> strategys = new ConcurrentHashMap<>();
+
+    public EdgeToCloudServiceInterface getStrategy(String component) throws Exception {
+        EdgeToCloudServiceInterface strategy = strategys.get(component);
+        if (strategy == null) {
+            throw new RuntimeException("no strategy defined");
+        }
+        return strategy;
+    }
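+
+    // Nothing populates `strategys` in this class yet. A common Spring idiom
+    // (a sketch, not part of this commit) is constructor injection of every
+    // EdgeToCloudServiceInterface bean, keyed by bean name:
+    //
+    //     public FactoryForStrategy(Map<String, EdgeToCloudServiceInterface> impls) {
+    //         this.strategys.putAll(impls);
+    //     }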
+}

+ 42 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/config/SystemProperties.java

@@ -0,0 +1,42 @@
+package com.persagy.ztkedgeclouddatasecurity.config;
+
+
+import lombok.Data;
+import lombok.Setter;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.stereotype.Component;
+
+import java.util.List;
+
+@Component
+@ConfigurationProperties(prefix = "project")
+public class SystemProperties {
+
+    @Setter
+    private List<Config> configs;
+
+    @Data
+    private static class Config {
+        private String name;
+        private int port;
+        private String ip;
+        private String projectid;
+    }
+
+    private static SystemProperties systemProperties;
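+
+    // Static self-injection: Spring calls the setter below on the managed bean
+    // and stores it in the static field above, so the static getByName(...) can
+    // reach the bound configuration from non-managed code.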
+
+    @Autowired
+    public void setTestProperties(SystemProperties systemProperties) {
+        SystemProperties.systemProperties = systemProperties;
+    }
+
+    public static Config getByName(String name) {
+        for (Config c : systemProperties.configs) {
+            if (c.getName().equals(name)) {
+                return c;
+            }
+        }
+        return null;
+    }
+}

+ 34 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/config/YmlPropertySourceFactory.java

@@ -0,0 +1,34 @@
+package com.persagy.ztkedgeclouddatasecurity.config;
+
+import org.springframework.beans.factory.config.YamlPropertiesFactoryBean;
+import org.springframework.core.env.PropertiesPropertySource;
+import org.springframework.core.env.PropertySource;
+import org.springframework.core.io.support.DefaultPropertySourceFactory;
+import org.springframework.core.io.support.EncodedResource;
+
+import java.io.IOException;
+import java.util.Properties;
+
+public class YmlPropertySourceFactory extends DefaultPropertySourceFactory {
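+
+    // Typical usage (assumed, not shown in this commit): reference this factory
+    // from @PropertySource, e.g.
+    //   @PropertySource(value = "classpath:custom.yml", factory = YmlPropertySourceFactory.class)
+    // so YAML files can back @Value and @ConfigurationProperties like .properties files.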
+
+    @Override
+    public PropertySource<?> createPropertySource(String name, EncodedResource resource) throws IOException {
+        String sourceName = name != null ? name : resource.getResource().getFilename();
+        if (!resource.getResource().exists()) {
+            return new PropertiesPropertySource(sourceName, new Properties());
+        } else if (sourceName.endsWith(".yml") || sourceName.endsWith(".yaml")) {
+            Properties propertiesFromYaml = loadYml(resource);
+            return new PropertiesPropertySource(sourceName, propertiesFromYaml);
+        } else {
+            return super.createPropertySource(name, resource);
+        }
+    }
+
+    private Properties loadYml(EncodedResource resource) throws IOException {
+        YamlPropertiesFactoryBean factory = new YamlPropertiesFactoryBean();
+        factory.setResources(resource.getResource());
+        factory.afterPropertiesSet();
+        return factory.getObject();
+    }
+
+}

+ 45 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/controller/TestEnDeServiceController.java

@@ -0,0 +1,45 @@
+package com.persagy.ztkedgeclouddatasecurity.controller;
+
+import cn.shuibo.annotation.Decrypt;
+import cn.shuibo.annotation.Encrypt;
+import com.persagy.ztkedgeclouddatasecurity.config.SystemProperties;
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
+import org.springframework.web.bind.annotation.*;
+
+@RestController
+@EnableAutoConfiguration
+@RequestMapping("/project")
+public class TestEnDeServiceController {
+
+
+    /**
+     * Encrypt the return value.
+     * @return the input, encrypted by the @Encrypt response advice
+     */
+    @Encrypt
+    @GetMapping("/encrypt")
+    public String test01(@RequestParam("Str") String str) {
+        return str;
+    }
+
+    /**
+     * Decrypt the encrypted parameter passed in.
+     * @param testBean
+     * @return
+     */
+    @Decrypt
+    @PostMapping("/decrypt")
+    public String test02(@RequestBody String testBean){
+        return testBean;
+    }
+
+    @GetMapping("/config")
+    public Object test1(@RequestParam String name) {
+        return SystemProperties.getByName(name);
+    }
+}

+ 21 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/datasafety/DataOptRSA.java

@@ -0,0 +1,21 @@
+package com.persagy.ztkedgeclouddatasecurity.datasafety;
+
+
+import cn.shuibo.annotation.Encrypt;
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
+
+import java.nio.charset.StandardCharsets;
+
+@EnableAutoConfiguration
+public class DataOptRSA {
+
+    /**
+     * Encrypt the return value. Note that the getBytes() result below is
+     * discarded and the message is returned unchanged; any actual encryption
+     * depends on the @Encrypt annotation being processed elsewhere.
+     * @return the original message
+     */
+    @Encrypt
+    public static String DataEncrypt(String msg){
+        msg.getBytes(StandardCharsets.UTF_8);
+        return msg;
+    }
+}

+ 5 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/datasafety/DecrypInputMessageInterface.java

@@ -0,0 +1,5 @@
+package com.persagy.ztkedgeclouddatasecurity.datasafety;
+
+public interface DecrypInputMessageInterface {
+    String DecryptMsgInputMessage(String str);
+}

+ 61 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/datasafety/DecryptInputMessageService.java

@@ -0,0 +1,61 @@
+package com.persagy.ztkedgeclouddatasecurity.datasafety;
+
+import cn.shuibo.config.SecretKeyConfig;
+import cn.shuibo.util.Base64Util;
+import cn.shuibo.util.RSAUtil;
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Service;
+
+@Service
+public class DecryptInputMessageService implements DecrypInputMessageInterface {
+
+    private Logger log = LoggerFactory.getLogger(this.getClass());
+    @Value("${rsa.encrypt.privateKey}")
+    private  String privateKey;
+
+    @Value("${rsa.encrypt.showLog}")
+    private  boolean showLog;
+
+    @Autowired
+    private SecretKeyConfig secretKeyConfig;
+
+    @Override
+    public String DecryptMsgInputMessage(String inputMessage) {
+        String charset = secretKeyConfig.getCharset();
+        if (StringUtils.isEmpty(privateKey)) {
+            throw new IllegalArgumentException("privateKey is null");
+        }
+        String content = inputMessage;
+        String decryptBody = "";
+        try {
+            if (content.startsWith("{")) {
+                log.info("Unencrypted without decryption:{}", content);
+                decryptBody = content;
+            } else {
+                StringBuilder json = new StringBuilder();
+                content = content.replaceAll(" ", "+");
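+                // Form/URL decoding turns '+' into spaces, so restore them before
+                // Base64-decoding. Segments are '|'-separated because RSA can only
+                // encrypt size-limited blocks, so long payloads arrive in chunks.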
+
+                if (!StringUtils.isEmpty(content)) {
+                    String[] contents = content.split("\\|");
+                    for (String value : contents) {
+                        value = new String(RSAUtil.decrypt(Base64Util.decode(value), privateKey), charset);
+                        json.append(value);
+                    }
+                }
+                decryptBody = json.toString();
+                if (showLog) {
+                    log.info("Encrypted data received:{},After decryption:{}", content, decryptBody);
+                }
+            }
+        } catch (Exception e) {
+            log.error("failed to decrypt input message", e);
+        }
+        return decryptBody;
+    }
+
+}
+

+ 5 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/datasafety/EncryptInputMessageInterface.java

@@ -0,0 +1,5 @@
+package com.persagy.ztkedgeclouddatasecurity.datasafety;
+
+public interface EncryptInputMessageInterface {
+    Object beforeBodyWrite(Object body);
+}

+ 73 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/datasafety/EncryptInputMessageService.java

@@ -0,0 +1,73 @@
+package com.persagy.ztkedgeclouddatasecurity.datasafety;
+
+import cn.shuibo.config.SecretKeyConfig;
+import cn.shuibo.util.Base64Util;
+import cn.shuibo.util.RSAUtil;
+import com.alibaba.fastjson.JSONObject;
+import com.alibaba.fastjson.annotation.JSONField;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Service;
+import org.springframework.util.StringUtils;
+
+@Service
+public class EncryptInputMessageService implements EncryptInputMessageInterface {
+    private Logger log = LoggerFactory.getLogger(this.getClass());
+    @Value("${rsa.encrypt.open}")
+    private boolean encrypt;
+    @Autowired
+    private SecretKeyConfig secretKeyConfig;
+    private static ThreadLocal<Boolean> encryptLocal = new ThreadLocal<>();
+    @Value("${rsa.encrypt.open}")
+    private  String converterType;
+
+    public boolean supports(String converterType) {
+        encrypt = secretKeyConfig.isOpen();
+        return encrypt;
+    }
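+
+    // Wrapper so the raw JSON string in `content` is embedded verbatim
+    // (jsonDirect = true) rather than re-escaped when the Model is serialized.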
+    class Model {
+
+        @JSONField(jsonDirect=true)
+        public String content;
+    }
+
+    @Override
+    public  Object beforeBodyWrite(Object body) {
+        // EncryptResponseBodyAdvice.setEncryptStatus(false);
+        // Dynamic Settings Not Encrypted
+        Boolean status = encryptLocal.get();
+        if (null != status && !status) {
+            encryptLocal.remove();
+            return body;
+        }
+        if (encrypt) {
+            String publicKey = secretKeyConfig.getPublicKey();
+            try {
+                Model model =new Model();
+                model.content=body.toString();
+                String content = JSONObject.toJSONString(model);
+
+                if (!StringUtils.hasText(publicKey)) {
+                    throw new NullPointerException("Please configure the rsa.encrypt.publicKey parameter!");
+                }
+                byte[] data = content.getBytes();
+                byte[] encodedData = RSAUtil.encrypt(data, publicKey);
+                String result = Base64Util.encode(encodedData);
+                if(secretKeyConfig.isShowLog()) {
+                    log.info("Pre-encrypted data:{},After encryption:{}", content, result);
+                }
+                return result;
+            } catch (Exception e) {
+                log.error("Encrypted data exception", e);
+            }
+        }
+        return body;
+    }
+
+}
+

+ 34 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/entity/ChattingUser.java

@@ -0,0 +1,34 @@
+package com.persagy.ztkedgeclouddatasecurity.entity;
+
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.handler.codec.http.websocketx.WebSocketServerHandshaker;
+import lombok.Data;
+import lombok.extern.slf4j.Slf4j;
+
+import java.util.concurrent.BlockingQueue;
+
+@Data
+@Slf4j
+public class ChattingUser {
+    public enum Type{
+        edge,
+        cloud
+    }
+    private String userId;
+
+    private ChannelHandlerContext channelHandlerContext;
+
+    private Type type;
+
+    private int port;
+
+    private String ip;
+
+    private WebSocketServerHandshaker handshaker;
+
+    private BlockingQueue<String> messageTcpSendQueue;
+
+    private BlockingQueue<String> messageKafkaSendQueue;
+
+    private Object ack;
+}

+ 95 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/entity/NettyMessage.java

@@ -0,0 +1,95 @@
+package com.persagy.ztkedgeclouddatasecurity.entity;
+
+import com.alibaba.fastjson.JSONObject;
+import com.persagy.ztkedgeclouddatasecurity.netty.Command;
+import com.persagy.ztkedgeclouddatasecurity.netty.Packet;
+import lombok.AllArgsConstructor;
+import lombok.Data;
+import lombok.NoArgsConstructor;
+
+import java.io.Serializable;
+import java.util.List;
+
+@Data
+@NoArgsConstructor
+@AllArgsConstructor
+public class NettyMessage<T> extends Packet implements Serializable {
+    /**
+     * Unique identifier.
+     */
+    private long streamId;
+    private String channelId;
+    /**
+     * Operation type:
+     * 1 - request, 2 - response, 3 - notification,
+     * 4 - edge fetches alarm definitions,
+     * 5 - edge pushes alarm records,
+     * 6 - edge updates alarm-record status,
+     * 7 - cloud pushes modified alarm definitions to the edge (incremental add/update),
+     * 8 - cloud pushes alarm-record ids to the edge,
+     * 9 - marker for the cloud pushing the full set of alarm definitions requested by the edge (full sync),
+     * 10 - cloud pushes deleted alarm definitions to the edge (incremental delete),
+     * 11 - cloud sends system-isolation commands to the edge,
+     * 200 - connection established; projectId carries the project id,
+     * 12 - cloud updates alarm status.
+     */
+    private int opCode;
+    /**
+     * Request source; this id is parsed to route the message.
+     */
+    private String projectId;
+
+    private String groupCode;
+
+    /**
+     * Payload.
+     */
+    private List<T> content;
+    /**
+     * Remarks.
+     */
+    private String remark;
+    /**
+     * Success flag.
+     */
+    private Boolean success;
+
+    public NettyMessage(String channelId, int opCode, String projectId, String groupCode) {
+        this.channelId = channelId;
+        this.opCode = opCode;
+        this.projectId = projectId;
+        this.groupCode = groupCode;
+    }
+
+    public List<T> getContent() {
+        return content;
+    }
+
+    public void setContent(List<T> content) {
+        this.content = content;
+    }
+
+    @Override
+    public String toString() {
+        return JSONObject.toJSONString(this);
+    }
+
+    /**
+     * Get the protocol command.
+     *
+     * @return the command value
+     */
+    @Override
+    public Byte getCommand() {
+        return Command.NETTY_MESSAGE;
+    }
+}

+ 58 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/entity/NettyMessageQueue.java

@@ -0,0 +1,58 @@
+package com.persagy.ztkedgeclouddatasecurity.entity;
+
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.LinkedBlockingQueue;
+
+public class NettyMessageQueue {
+    // Maximum queue size.
+    static final int QUEUE_MAX_SIZE = 1000000;
+
+    static BlockingQueue<String> blockingQueue = new LinkedBlockingQueue<>(QUEUE_MAX_SIZE);
+
+    static BlockingQueue<NettyMessage> blockingQueuecloud = new LinkedBlockingQueue<>(QUEUE_MAX_SIZE);
+
+    /**
+     * Private constructor; prevents direct instantiation.
+     */
+    private NettyMessageQueue() {
+    }
+
+    // Singleton accessor.
+    public static NettyMessageQueue getNettyMessageQueue() {
+        return SingletonHolder.queue;
+    }
+
+    // Producer: enqueue.
+    public void produce(String commandResult) throws InterruptedException {
+        blockingQueue.put(commandResult);
+    }
+
+    // Edge-to-cloud producer: enqueue.
+    public void cloudproduce(NettyMessage commandResult) throws InterruptedException {
+        blockingQueuecloud.put(commandResult);
+    }
+
+    // Consumer side: returns the underlying queue; callers poll it directly.
+    public BlockingQueue<String> consume() throws InterruptedException {
+        return blockingQueue;
+    }
+
+    // Current queue size.
+    public int size() {
+        return blockingQueue.size();
+    }
+
+    /**
+     * Initialization-on-demand holder: the nested class is loaded only on first
+     * access, giving lazy, thread-safe singleton construction.
+     */
+    private static class SingletonHolder {
+        /**
+         * Static initializer; the JVM guarantees thread safety.
+         */
+        private static final NettyMessageQueue queue = new NettyMessageQueue();
+    }
+}

+ 28 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/entity/alerm/CommonConst.java

@@ -0,0 +1,28 @@
+package com.persagy.ztkedgeclouddatasecurity.entity.alerm;
+
+public class CommonConst {
+    /**
+     * Project id.
+     */
+    public static String projectId;
+    /**
+     * Group code.
+     */
+    public static String groupCode;
+
+    /**
+     * Cloud netty host.
+     */
+    public static String inetHost;
+    /**
+     * Cloud netty port.
+     */
+    public static int inetPort;
+    /**
+     * System identifier, used as the creator field.
+     */
+    public static String systemId;
+}

+ 50 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/kafka/CloudKafkaConsumer.java

@@ -0,0 +1,50 @@
+package com.persagy.ztkedgeclouddatasecurity.kafka;
+
+import com.alibaba.fastjson.JSONObject;
+import com.persagy.ztkedgeclouddatasecurity.entity.NettyMessage;
+import com.persagy.ztkedgeclouddatasecurity.netty.cloud.NettyClient;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.kafka.annotation.KafkaListener;
+import org.springframework.kafka.support.Acknowledgment;
+import org.springframework.kafka.support.KafkaHeaders;
+import org.springframework.messaging.handler.annotation.Header;
+import org.springframework.stereotype.Component;
+
+import java.util.Optional;
+
+
+@Component
+@Slf4j
+@ConditionalOnProperty(prefix = "spring", name="location", havingValue="Cloud")
+public class CloudKafkaConsumer {
+
+    @Autowired
+    public NettyClient nettyClient;
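+
+    // Subscribes to every "Edge_*" topic, unwraps the embedded NettyMessage and
+    // forwards it to the cloud-side Netty client; the record is acknowledged only
+    // after a successful send (manual ack mode is assumed in the listener config).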
+    @KafkaListener(topicPattern = "Edge_.*")
+    public void topicList(ConsumerRecord<?, ?> record, Acknowledgment ack, @Header(KafkaHeaders.RECEIVED_TOPIC) String topic) {
+        Optional<JSONObject> message = Optional.ofNullable(JSONObject.parseObject(record.value().toString()));
+        if (message.isPresent()) {
+            JSONObject msg = message.get();
+            log.info("Cloud consumed: Topic:" + topic + ",Message:" + msg);
+            try {
+                nettyClient.sendMessage(msg.getObject("msg", NettyMessage.class));
+                if (ack != null) {
+                    ack.acknowledge();
+                }
+            } catch (InterruptedException e) {
+                log.error(e.getMessage(), e);
+            }
+        }
+    }
+
+}

+ 67 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/kafka/CloudKafkaProducer.java

@@ -0,0 +1,67 @@
+package com.persagy.ztkedgeclouddatasecurity.kafka;
+
+import com.alibaba.fastjson.JSONObject;
+import com.persagy.ztkedgeclouddatasecurity.datasafety.EncryptInputMessageService;
+import lombok.extern.slf4j.Slf4j;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.kafka.core.KafkaTemplate;
+import org.springframework.kafka.support.SendResult;
+import org.springframework.stereotype.Component;
+import org.springframework.util.concurrent.ListenableFuture;
+import org.springframework.util.concurrent.ListenableFutureCallback;
+
+import java.util.concurrent.BlockingQueue;
+
+
+@Component
+@Slf4j
+public class CloudKafkaProducer {
+    private static Logger logger = LoggerFactory.getLogger(CloudKafkaProducer.class);
+
+    @Autowired
+    private KafkaTemplate<String, Object> kafkaTemplate;
+
+    @Autowired
+    private EncryptInputMessageService encryptInputMessageService;
+
+    @Value("${spring.location}")
+    private String location;
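+
+    // Note: the @ConditionalOnProperty on CloudproducerMsg below has no effect,
+    // since that annotation is only honored on @Bean methods and component
+    // classes; the method is available regardless of spring.location.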
+
+    @ConditionalOnProperty(prefix = "spring", name="location", havingValue="Cloud")
+    public  void  CloudproducerMsg(JSONObject msgObj){
+
+        String TOPIC = location+"_"+ msgObj.getString("projectid");
+        // Send the message.
+        ListenableFuture<SendResult<String, Object>> future = kafkaTemplate.send(TOPIC, encryptInputMessageService.beforeBodyWrite(msgObj));
+        future.addCallback(new ListenableFutureCallback<SendResult<String, Object>>() {
+            @Override
+            public void onFailure(Throwable throwable) {
+                // Failure handling.
+                logger.info(TOPIC + " - producer failed to send message: " + throwable.getMessage());
+            }
+
+            @Override
+            public void onSuccess(SendResult<String, Object> stringObjectSendResult) {
+                // Success handling.
+                logger.info(TOPIC + " - producer sent message successfully: " + stringObjectSendResult.toString());
+            }
+        });
+
+    }
+
+}

+ 192 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/kafka/EdgeKafkaProducer.java

@@ -0,0 +1,192 @@
+package com.persagy.ztkedgeclouddatasecurity.kafka;
+
+
+import com.alibaba.fastjson.JSONObject;
+import com.persagy.ztkedgeclouddatasecurity.datasafety.EncryptInputMessageService;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.kafka.core.KafkaTemplate;
+import org.springframework.kafka.support.SendResult;
+import org.springframework.stereotype.Service;
+import org.springframework.util.concurrent.ListenableFuture;
+import org.springframework.util.concurrent.ListenableFutureCallback;
+
+import java.util.concurrent.*;
+
+@Service
+@ConditionalOnProperty(prefix = "spring", name="location", havingValue="Edge")
+public class EdgeKafkaProducer {
+
+    @Autowired
+    private KafkaTemplate<String, Object> kafkaTemplate;
+
+    @Autowired
+    private EncryptInputMessageService encryptInputMessageService;
+
+    private static final Logger logger = LoggerFactory.getLogger(EdgeKafkaProducer.class);
+    private static BlockingQueue<String> messageQueue = null;
+//    @Value("${spring.kafka.bootstrap-servers}")
+//    private String bootstrapServer;
+//    @Value("${spring.kafka.producer.acks}")
+//    private String ackSet;
+//    @Value("${spring.kafka.producer.retries}")
+//    private int retries;
+//    @Value("${spring.kafka.producer.batch-size}")
+//    private int batchSize;
+//    @Value("${spring.kafka.producer.buffer-memory}")
+//    private int bfferMemory;
+//    @Value("${spring.kafka.producer.client-id}")
+//    private String clientID;
+//    @Value("${spring.kafka.producer.key-serializer}")
+//    private String keySerializer;
+//    @Value("${spring.kafka.producer.value-serializer}")
+//    private String ValueSerializer;
+//    @Value("${spring.kafka.producer.security.protocol}")
+//    private  String securtyprotocol;
+//    @Value("${spring.kafka.producer.ssl.endpoint.identification.algorithm}")
+//    private  String algorithm;
+    @Value("${spring.profiles.active}")
+    private String active;
+    @Value("${spring.location}")
+    private String location;
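+
+    // Drains the supplied queue and publishes each entry to a per-connection
+    // topic named "<location>_<port>_<userid>"; the loop returns once the queue
+    // is momentarily empty, and the caller re-invokes it when new data arrives.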
+//    @Value("${spring.kafka.producer.ssl.trust-store-location}")
+//    private String truststorelocaltion;
+//    @Value("${spring.kafka.producer.ssl.key-store-password}")
+//    private String keystorepassword;
+//
+//    @Value("${spring.kafka.consumer.ssl.key-store-location}")
+//    private String keytorelocaltion;
+//    @Value("${spring.kafka.producer.ssl.trust-store-password}")
+//    private String truststorepassword;
+//    @Value("${spring.kafka.producer.ssl.key-password}")
+//    private String keypassword;
+
+    private  int mm=0;
+//    @PostConstruct
+//    private void ProducerConfigbyself(){
+//
+//        // InputStream input=EdgeKafkaProducer.class.getClassLoader().getResourceAsStream("db.properties");
+//        properties = new Properties();
+//        // 连接的 kafka 集群地址
+//        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer);
+//        properties.put("acks", ackSet);//所有follower都响应了才认为消息提交成功,即"committed"
+//        properties.put("retries", retries);//retries = MAX 无限重试,直到你意识到出现了问题:)
+//        properties.put("batch.size", batchSize);//producer将试图批处理消息记录,以减少请求次数.默认的批量处理消息字节数
+//        properties.put("max.request.size", 1048576); //信息发送最大值1MB
+//        //batch.size当批量的数据大小达到设定值后,就会立即发送,不顾下面的linger.ms
+//        properties.put("retry.backoff.ms", 500);//设定重试时间间隔避免无效的频繁重试
+//
+//        properties.put("linger.ms", 5);//延迟1ms发送,这项设置将通过增加小的延迟来完成--即,不是立即发送一条记录,producer将会等待给定的延迟时间以允许其他消息记录发送,这些消息记录可以批量处理
+//        properties.put("buffer.memory", bfferMemory);//producer可以用来缓存数据的内存大小。
+//        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
+//        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
+//        properties.put("security.protocol", securtyprotocol);
+//
+//        // properties.put("ssl.truststore.location", ResourceUtils.getFile(truststorelocaltion).getPath());
+//        // properties.put("ssl.truststore.password", keystorepassword);ResourceUtils.getFile(keytorelocaltion).getPath()ResourceUtils.getFile(truststorelocaltion).getPath()
+//        if ("dev".equals(active)){
+//
+//        }else{
+//            properties.put("ssl.endpoint.identification.algorithm",algorithm);
+//            properties.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG,  new File(keytorelocaltion).getAbsolutePath());
+//            properties.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, new File(truststorelocaltion).getAbsolutePath());
+//            properties.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG,keystorepassword);
+//            properties.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG,keypassword);
+//            properties.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG,truststorepassword);
+//        }
+
+        // producer = new KafkaProducer<String, String>(properties);
+
+//    }
+    public void producerMsg(BlockingQueue<String> queue) {
+
+        while (true) {
+            messageQueue = queue;
+            try {
+                String messageObject = messageQueue.poll();
+
+                if (null == messageObject) {
+                    logger.info(">>> kafka queue size: " + messageQueue.size());
+                    Thread.sleep(100);
+                    break;
+                } else {
+                    JSONObject object = JSONObject.parseObject(messageObject);
+                    String obj2String = JSONObject.toJSONString(object);
+                    logger.info("Message about to be sent: {}", obj2String);
+                    String TOPIC = location + "_" + object.getString("port") + "_" + object.getString("userid");
+                    // Send the message.
+                    ListenableFuture<SendResult<String, Object>> future = kafkaTemplate.send(TOPIC, encryptInputMessageService.beforeBodyWrite(obj2String));
+                    future.addCallback(new ListenableFutureCallback<SendResult<String, Object>>() {
+                        @Override
+                        public void onFailure(Throwable throwable) {
+                            // Failure handling.
+                            logger.info(TOPIC + " - producer failed to send message: " + throwable.getMessage());
+                        }
+
+                        @Override
+                        public void onSuccess(SendResult<String, Object> stringObjectSendResult) {
+                            // Success handling.
+                            logger.info(TOPIC + " - producer sent message successfully: " + stringObjectSendResult.toString());
+                        }
+                    });
+                }
+            } catch (InterruptedException e) {
+                logger.error("producerMsg interrupted", e);
+            } catch (Exception e) {
+                logger.error("producerMsg error", e);
+            }
+        }
+    }
+}

+ 58 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/kafka/EdgeKakfaConsumer.java

@@ -0,0 +1,58 @@
+package com.persagy.ztkedgeclouddatasecurity.kafka;
+
+import com.alibaba.fastjson.JSONObject;
+import com.persagy.ztkedgeclouddatasecurity.entity.NettyMessage;
+import com.persagy.ztkedgeclouddatasecurity.netty.cloud.NettyClient;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.kafka.annotation.KafkaListener;
+import org.springframework.kafka.support.Acknowledgment;
+import org.springframework.kafka.support.KafkaHeaders;
+import org.springframework.messaging.handler.annotation.Header;
+import org.springframework.stereotype.Component;
+
+import java.util.Optional;
+
+@Component
+@Slf4j
+@ConditionalOnProperty(prefix = "spring", name="location", havingValue="Edge")
+public class EdgeKakfaConsumer {
+
+    @Autowired
+    public NettyClient nettyClient;
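+
+    // Edge-side listener for every "Cloud_*" topic; only messages whose topic's
+    // third "_"-separated segment is "zkt-proj-alarm" are processed and acknowledged.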
+
+    @KafkaListener(topicPattern = "Cloud_.*")
+    public void topicList(ConsumerRecord<?, ?> record, Acknowledgment ack, @Header(KafkaHeaders.RECEIVED_TOPIC) String topic) {
+
+
+        //System.out.println("............"+record.value());
+        Optional message = Optional.ofNullable(JSONObject.parseObject(record.value().toString()));
+        if (message.isPresent()) {
+            Object msg = message.get();
+            log.info("Cloud 消费了: Topic:" + topic + ",Message:" + msg);
+            String[] TopicArr = topic.split("_");
+            if ("zkt-proj-alarm".equals(TopicArr[2])){
+                JSONObject.parseObject(JSONObject.toJSONString(msg)).getObject("msg", NettyMessage.class);
+                try {
+                  //  nettyClient.sendMessage(JSONObject.parseObject(JSONObject.toJSONString(msg)).getObject("msg", NettyMessage.class));
+                    if (ack != null) {
+                        ack.acknowledge();
+                    }
+                } catch (InterruptedException e) {
+                    log.error(e.getMessage(),e);
+                }
+
+            }
+
+
+
+
+
+        }
+
+    }
+
+
+}

+ 47 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/kafkatask/EdgeSendToKakfaTaskJob.java

@@ -0,0 +1,47 @@
+package com.persagy.ztkedgeclouddatasecurity.kafkatask;
+
+import com.persagy.ztkedgeclouddatasecurity.entity.ChattingUser;
+import com.persagy.ztkedgeclouddatasecurity.entity.NettyMessageQueue;
+import com.persagy.ztkedgeclouddatasecurity.kafka.EdgeKafkaProducer;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.CommandLineRunner;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.core.annotation.Order;
+import org.springframework.scheduling.annotation.Async;
+import org.springframework.stereotype.Component;
+
+import java.util.concurrent.BlockingQueue;
+
+@Component
+@Order(2)
+@ConditionalOnProperty(prefix = "spring", name="location", havingValue="Edge")
+public class EdgeSendToKakfaTaskJob implements CommandLineRunner {
+
+    @Autowired
+    private EdgeKafkaProducer kafkaProducer;
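+
+    // Busy-polls the singleton NettyMessageQueue and hands any backlog to the
+    // Kafka producer; note this loop spins on a core while idle.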
+    @Override
+    @Async
+    public void run(String... args) throws Exception {
+
+        while (true) {
+            BlockingQueue<String> queue = NettyMessageQueue.getNettyMessageQueue().consume();
+            if (queue != null && queue.size() > 0) {
+                kafkaProducer.producerMsg(queue);
+            }
+        }
+    }
+
+
+}

+ 5 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/netty/Command.java

@@ -0,0 +1,5 @@
+package com.persagy.ztkedgeclouddatasecurity.netty;
+
+public interface Command {
+    Byte NETTY_MESSAGE = 1;                // payload type: NettyMessage
+}

+ 249 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/netty/MsgHandler.java

@@ -0,0 +1,249 @@
+package com.persagy.ztkedgeclouddatasecurity.netty;
+
+import cn.hutool.core.collection.CollectionUtil;
+import com.alibaba.fastjson.JSONObject;
+import com.persagy.ztkedgeclouddatasecurity.entity.ChattingUser;
+import com.persagy.ztkedgeclouddatasecurity.entity.NettyMessage;
+import com.persagy.ztkedgeclouddatasecurity.entity.NettyMessageQueue;
+import com.persagy.ztkedgeclouddatasecurity.kafka.EdgeKafkaProducer;
+import com.persagy.ztkedgeclouddatasecurity.netty.edge.NettyServer;
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.SimpleChannelInboundHandler;
+import io.netty.channel.group.ChannelGroup;
+import io.netty.channel.group.DefaultChannelGroup;
+import io.netty.util.concurrent.GlobalEventExecutor;
+import lombok.extern.slf4j.Slf4j;
+
+import java.net.InetSocketAddress;
+import java.net.SocketAddress;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.concurrent.*;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+@Slf4j
+public class MsgHandler extends SimpleChannelInboundHandler<NettyMessage<JSONObject>> {
+    /**
+     * Holds every channel currently connected to the server.
+     */
+    public static ChannelGroup channelGroup = new DefaultChannelGroup(GlobalEventExecutor.INSTANCE);
+    private SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss:SSS"); // timestamp format; note SimpleDateFormat is not thread-safe
+    BlockingQueue<String> messageQueue = new LinkedBlockingQueue<>(1024 * 1024);
+
+    public ChattingUser chattingUser;
+
+
+    private EdgeKafkaProducer edgeKafkaProducer;
+
+
+
+    public MsgHandler(ChattingUser chattingUser, EdgeKafkaProducer edgeKafkaProducer) {
+        this.chattingUser =chattingUser;
+        this.edgeKafkaProducer=edgeKafkaProducer;
+        this.edgeKafkaProducer.producerMsg(messageQueue);
+
+    }
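+
+    // The constructor immediately drains whatever is already queued; producerMsg
+    // returns once the queue is momentarily empty, so construction is not blocked
+    // indefinitely.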
+
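+    // Extracts the last dotted-quad IPv4 address found in the input string,
+    // e.g. "/192.168.1.10:52314" -> "192.168.1.10".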
+    public String GetRemoteIP(String str){
+        String result = "";
+        String regEx = "((2[0-4]\\d|25[0-5]|[01]?\\d\\d?)\\.){3}(2[0-4]\\d|25[0-5]|[01]?\\d\\d?)";
+        Pattern p = Pattern.compile(regEx);
+        Matcher m = p.matcher(str);
+        while (m.find()) {
+            result = m.group();
+            // add a break here to take only the first IP in the string
+        }
+        return result;
+    }
+
+    @Override
+    protected void channelRead0(ChannelHandlerContext ctx, NettyMessage<JSONObject> msg) throws Exception {
+        log.info("Received message from [{}]: {}", ctx.channel().remoteAddress(), msg);
+        InetSocketAddress inteSocket = (InetSocketAddress) ctx.channel().localAddress();
+        String localip = inteSocket.getAddress().getHostAddress();
+        String localport = String.valueOf(inteSocket.getPort());
+        log.info("server ip: {}  server port: {}", localip, localport);
+
+        String time = sdf.format(new Date());
+        JSONObject obj = new JSONObject();
+        obj.put("port", localport);
+        obj.put("msg", msg);
+        obj.put("time", time);
+        obj.put("userid", chattingUser.getUserId());
+        obj.put("targetAddress", chattingUser.getIp());
+        log.info(">>>>{}", obj.toString());
+        NettyMessageQueue.getNettyMessageQueue().produce(obj.toString());
+
+        /*
+         * Note on writing back to the client: a ByteBuf is not suitable for direct
+         * reuse; a fresh one must be created, otherwise Netty throws
+         * io.netty.util.IllegalReferenceCountException: refCnt: 0, decrement: 1.
+         * Since Netty 4, object lifecycles are managed by reference counts rather
+         * than the garbage collector; ByteBuf is the most notable case, using
+         * reference counting to speed up allocation and release. A newly created
+         * ByteBuf has a reference count of 1; release() decrements it, and at 0
+         * the buffer is deallocated and returned to the pool. Accessing a buffer
+         * whose count is 0 throws IllegalReferenceCountException; call retain()
+         * before reuse to bring the count back to 1.
+         */
+
+        log.info("{} received: {}", GetRemoteIP(String.valueOf(ctx.channel().remoteAddress())), msg);
+        Channel channel = ctx.channel();
+
+        String ip = GetRemoteIP(String.valueOf(ctx.channel().remoteAddress()));
+        if (ip.equals(chattingUser.getIp())) {
+            log.info("Certification Ip List>>>>{}", ip);
+        } else {
+            log.info("No_Certification Ip List>>>>{} Messageinfo>>>{}", ip, channel.id().asLongText());
+        }
+    }
+
+
+    @Override
+    public void channelRegistered(ChannelHandlerContext ctx) throws Exception {
+        SocketAddress socketAddress = ctx.channel().remoteAddress();
+        String remoteAddress = socketAddress.toString();
+        log.warn("--某个客户端绑定地址:[{}]--", remoteAddress);
+    }
+
+
+    /**
+     * Called when an exception is thrown during a read operation.
+     *
+     * @param ctx
+     * @param cause
+     * @throws Exception
+     */
+    @Override
+    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
+        log.error("netty 连接异常" + ctx.channel().remoteAddress(), cause);
+        ctx.close();
+    }
+
+    /**
+     * New connection established.
+     */
+    @Override
+    public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
+        channelGroup.add(ctx.channel());
+        log.info("Current connections: [{}], new connection from [{}]...", channelGroup.size(), ctx.channel().remoteAddress().toString());
+        if (CollectionUtil.isNotEmpty(channelGroup)) {
+            for (Channel channel : channelGroup) {
+                log.info("Existing connection address: [{}]", channel.remoteAddress().toString());
+            }
+        }
+        super.handlerAdded(ctx);
+    }
+
+    /**
+     * Connection closed.
+     */
+    @Override
+    public void handlerRemoved(ChannelHandlerContext ctx) throws Exception {
+        // a channel can be thought of as a connection
+        NettyServer.removeChannel(ctx);
+        // Netty also removes the channel from the group automatically
+        channelGroup.remove(ctx.channel());
+        super.handlerRemoved(ctx);
+        log.warn("----client [{}]---- left", ctx.channel().remoteAddress().toString());
+    }
+
+    /**
+     * Called exactly once when the channel becomes active; the connection is live.
+     */
+    @Override
+    public void channelActive(ChannelHandlerContext ctx) throws Exception {
+        log.warn("[{}]:channelActive", ctx.channel().remoteAddress());
+        super.channelActive(ctx);
+    }
+
+    /**
+     * Called once when the channel becomes inactive.
+     */
+    @Override
+    public void channelInactive(ChannelHandlerContext ctx) throws Exception {
+        ctx.channel().close();
+        log.warn("[{}]:channelInactive", ctx.channel().remoteAddress());
+        super.channelInactive(ctx);
+    }
+}
+

+ 30 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/netty/MyChannelInitializer.java

@@ -0,0 +1,30 @@
+package com.persagy.ztkedgeclouddatasecurity.netty;
+
+
+import com.persagy.ztkedgeclouddatasecurity.entity.ChattingUser;
+import com.persagy.ztkedgeclouddatasecurity.kafka.EdgeKafkaProducer;
+import io.netty.channel.ChannelInitializer;
+import io.netty.channel.socket.SocketChannel;
+
+public class MyChannelInitializer extends ChannelInitializer<SocketChannel> {
+
+    private final EdgeKafkaProducer edgeKafkaProducer;
+    private ChattingUser chattingUser;
+
+    public MyChannelInitializer(ChattingUser chattingUser, EdgeKafkaProducer edgeKafkaProducer) {
+        this.chattingUser = chattingUser;
+        this.edgeKafkaProducer = edgeKafkaProducer;
+    }
+
+    @Override
+    protected void initChannel(SocketChannel channel) {
+//        channel.pipeline().addLast(new IdleStateHandler(60, 0, 0, TimeUnit.SECONDS));
+        // object transport handling [decode]
+        channel.pipeline().addLast(new ObjDecoder());
+        // add our own inbound message handler to the pipeline
+        channel.pipeline().addLast(new MsgHandler(chattingUser,edgeKafkaProducer));
+        // object transport handling [encode]
+        channel.pipeline().addLast(new ObjEncoder());
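+        // Note: ObjEncoder sits after MsgHandler in the pipeline, so writes issued
+        // via channel.writeAndFlush() (which start at the tail) are encoded, while
+        // ctx.writeAndFlush() from inside MsgHandler would bypass the encoder.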
+    }
+
+}

+ 30 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/netty/ObjDecoder.java

@@ -0,0 +1,30 @@
+package com.persagy.ztkedgeclouddatasecurity.netty;
+
+
+import com.persagy.ztkedgeclouddatasecurity.until.SerializationUtil;
+import io.netty.buffer.ByteBuf;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.handler.codec.ByteToMessageDecoder;
+
+import java.util.List;
+
+public class ObjDecoder extends ByteToMessageDecoder {
+
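+    // Frame layout: [int32 length][byte command][serialized payload], where
+    // length counts the command byte plus the payload bytes.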
+    @Override
+    protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) {
+        if (in.readableBytes() < 4) {
+            return;
+        }
+        in.markReaderIndex();
+        int dataLength = in.readInt();
+        if (in.readableBytes() < dataLength) {
+            in.resetReaderIndex();
+            return;
+        }
+        byte command = in.readByte();  // read the command byte
+        byte[] data = new byte[dataLength - 1]; // the command byte is included in the length; exclude it
+        in.readBytes(data);
+        out.add(SerializationUtil.deserialize(data, PacketClazzMap.packetTypeMap.get(command)));
+    }
+
+}

+ 18 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/netty/ObjEncoder.java

@@ -0,0 +1,18 @@
+package com.persagy.ztkedgeclouddatasecurity.netty;
+
+import com.persagy.ztkedgeclouddatasecurity.until.SerializationUtil;
+import io.netty.buffer.ByteBuf;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.handler.codec.MessageToByteEncoder;
+
+public class ObjEncoder extends MessageToByteEncoder<Packet> {
+
+    @Override
+    protected void encode(ChannelHandlerContext ctx, Packet in, ByteBuf out) {
+        byte[] data = SerializationUtil.serialize(in);
+        out.writeInt(data.length + 1);
+        out.writeByte(in.getCommand()); // write the command byte
+        out.writeBytes(data);
+    }
+
+}

+ 10 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/netty/Packet.java

@@ -0,0 +1,10 @@
+package com.persagy.ztkedgeclouddatasecurity.netty;
+
+public abstract class Packet {
+    /**
+     * Get the protocol command.
+     *
+     * @return the command value
+     */
+    public abstract Byte getCommand();
+}

+ 17 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/netty/PacketClazzMap.java

@@ -0,0 +1,17 @@
+package com.persagy.ztkedgeclouddatasecurity.netty;
+
+
+import com.persagy.ztkedgeclouddatasecurity.entity.NettyMessage;
+
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+public class PacketClazzMap {
+
+    public final static Map<Byte, Class<NettyMessage>> packetTypeMap = new ConcurrentHashMap<>();
+
+    static {
+        packetTypeMap.put(Command.NETTY_MESSAGE, NettyMessage.class);
+    }
+
+}

+ 37 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/netty/cloud/CenterChannelInitializer.java

@@ -0,0 +1,37 @@
+package com.persagy.ztkedgeclouddatasecurity.netty.cloud;
+
+import com.persagy.ztkedgeclouddatasecurity.kafka.CloudKafkaConsumer;
+import com.persagy.ztkedgeclouddatasecurity.kafka.CloudKafkaProducer;
+import com.persagy.ztkedgeclouddatasecurity.netty.ObjDecoder;
+import com.persagy.ztkedgeclouddatasecurity.netty.ObjEncoder;
+import io.netty.channel.ChannelInitializer;
+import io.netty.channel.socket.SocketChannel;
+
+public class CenterChannelInitializer extends ChannelInitializer<SocketChannel> {
+    private final CloudKafkaConsumer cloudKafkaConsumer;
+    private final NettyClient nettyClient;
+    private final String projectID;
+    private final CloudKafkaProducer cloudKafkaProducer;
+
+
+
+    public CenterChannelInitializer(CloudKafkaConsumer cloudKafkaConsumer, NettyClient nettyClient, String projectID, CloudKafkaProducer cloudKafkaProducer) {
+        this.cloudKafkaConsumer = cloudKafkaConsumer;
+        this.nettyClient = nettyClient;
+        this.projectID = projectID;
+        this.cloudKafkaProducer = cloudKafkaProducer;
+        // TODO: pass additional parameters here if needed
+    }
+
+    @Override
+    protected void initChannel(SocketChannel channel) throws Exception {
+//        channel.pipeline().addLast(new IdleStateHandler(0,60,0, TimeUnit.SECONDS));
+        // object transport handling [decode]
+        channel.pipeline().addLast(new ObjDecoder());
+        // add our own inbound message handler to the pipeline
+        channel.pipeline().addLast(new CenterClientHandler(cloudKafkaConsumer, nettyClient,projectID,cloudKafkaProducer));
+        channel.pipeline().addLast(new ObjEncoder());
+    }
+
+}

+ 141 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/netty/cloud/CenterClientHandler.java

@@ -0,0 +1,141 @@
+package com.persagy.ztkedgeclouddatasecurity.netty.cloud;
+
+import cn.hutool.core.date.DateUtil;
+import cn.hutool.core.date.TimeInterval;
+import com.alibaba.fastjson.JSONObject;
+import com.persagy.ztkedgeclouddatasecurity.entity.NettyMessage;
+import com.persagy.ztkedgeclouddatasecurity.entity.alerm.CommonConst;
+import com.persagy.ztkedgeclouddatasecurity.kafka.CloudKafkaConsumer;
+import com.persagy.ztkedgeclouddatasecurity.kafka.CloudKafkaProducer;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.SimpleChannelInboundHandler;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+
+import java.util.Arrays;
+import java.util.concurrent.TimeUnit;
+
+@Slf4j
+//@ConditionalOnProperty(prefix = "spring", name="location", havingValue="Cloud")
+public class CenterClientHandler extends SimpleChannelInboundHandler<NettyMessage> {
+    // Sleep 5 seconds before a reconnection attempt.
+    static final int RECONNECT_DELAY = Integer.parseInt(System.getProperty("reconnectDelay", "5"));
+    // Reconnect when the server sends nothing for 10 seconds.
+    private static final int READ_TIMEOUT = Integer.parseInt(System.getProperty("readTimeout", "10"));
+
+    private CloudKafkaConsumer cloudKafkaConsumer;
+    private final NettyClient nettyClient;
+
+    private String projectID;
+
+    private CloudKafkaProducer cloudKafkaProducer;
+
+    @Value("${spring.location}")
+    private String location;
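+
+    // Caveat: this handler is created with `new` in CenterChannelInitializer and
+    // is not a Spring bean, so the @Value above is never injected and `location`
+    // remains null at runtime.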
+
+
+
+    public CenterClientHandler(CloudKafkaConsumer cloudKafkaConsumer, NettyClient nettyClient, String projectID, CloudKafkaProducer cloudKafkaProducer) {
+        this.cloudKafkaConsumer = cloudKafkaConsumer;
+        this.nettyClient = nettyClient;
+        this.projectID = projectID;   // project identifier for this connection
+        this.cloudKafkaProducer = cloudKafkaProducer;
+    }
+
+    /**
+     * Called once the connection to the server is established; the channel is
+     * active and can transfer data.
+     *
+     * @param ctx
+     * @throws Exception
+     */
+    @Override
+    public void channelActive(ChannelHandlerContext ctx) throws Exception {
+        log.info("Connected to: {}", ctx.channel().remoteAddress());
+        // establish the connection
+        nettyClient.sendMessage(new NettyMessage("",200,"Pj4403070003","ZS"));
+        // on startup, request the full set of alarm definitions
+        NettyMessage nettyMessage = new NettyMessage("",4,"Pj4403070003","ZS");
+        JSONObject content = new JSONObject();
+        content.put("groupCode", CommonConst.groupCode);
+        content.put("projectId", CommonConst.projectId);
+        nettyMessage.setContent(Arrays.asList(content));
+        nettyClient.sendMessage(nettyMessage);
+
+        // flush any buffered data that previously failed to send
+        iniAlarmResult(ctx);
+    }
+
+    private void iniAlarmResult(ChannelHandlerContext ctx) {
+
+    }
+
+    @Override
+    protected void channelRead0(ChannelHandlerContext ctx, NettyMessage msg) throws Exception {
+        // the pipeline has already decoded the message; no manual decoding needed
+        log.info("Client received: {}", msg);
+        log.info(">>>>>{}", projectID);
+        JSONObject cloudboj = new JSONObject();
+        cloudboj.put("projectid", projectID);
+        cloudboj.put("msg", msg);
+        try {
+            if (!"Edge".equals(location)) {
+                cloudKafkaProducer.CloudproducerMsg(cloudboj);
+            }
+
+            TimeInterval timer = DateUtil.timer();
+            handlerMsg(ctx, msg);
+            log.info("Message handling took [{}]ms >>>>>> {}", timer.interval(), ctx.channel());
+        } catch (Exception e) {
+            log.error("channelRead", e);
+        }
+    }
+
+    private void handlerMsg(ChannelHandlerContext channelHandlerContext, NettyMessage msg) throws Exception {
+
+    }
+
+    /**
+     * Called when an exception is raised during processing; handle it here,
+     * e.g. log it and close the connection.
+     *
+     * @param ctx
+     * @param cause
+     * @throws Exception
+     */
+    @Override
+    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
+        log.error("exceptionCaught", cause);
+        ctx.close();
+    }
+
+    /**
+     * Called once when the channel becomes inactive: the client and server have
+     * closed the communication channel and can no longer transfer data.
+     */
+    @Override
+    public void channelInactive(ChannelHandlerContext ctx) throws Exception {
+        // on an abnormal disconnect, the reconnect is scheduled in channelUnregistered
+        log.info("Disconnected from: " + ctx.channel().remoteAddress());
+
+    }
+
+
+    @Override
+    public void channelUnregistered(final ChannelHandlerContext ctx) throws Exception {
+        log.info("channelUnregistered and reconnecting to: {}:{} ", RECONNECT_DELAY, CommonConst.inetHost, CommonConst.inetPort);
+        ctx.channel().eventLoop().schedule(new Runnable() {
+            @Override
+            public void run() {
+                nettyClient.reConnect();
+            }
+        }, RECONNECT_DELAY, TimeUnit.SECONDS);
+    }
+
+}

+ 110 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/netty/cloud/NettyClient.java

@@ -0,0 +1,110 @@
+package com.persagy.ztkedgeclouddatasecurity.netty.cloud;
+
+import com.alibaba.fastjson.JSONObject;
+import com.alibaba.fastjson.TypeReference;
+import com.persagy.ztkedgeclouddatasecurity.entity.NettyMessage;
+import com.persagy.ztkedgeclouddatasecurity.entity.NettyMessageQueue;
+import com.persagy.ztkedgeclouddatasecurity.kafka.CloudKafkaConsumer;
+import com.persagy.ztkedgeclouddatasecurity.kafka.CloudKafkaProducer;
+import io.netty.bootstrap.Bootstrap;
+import io.netty.channel.*;
+import io.netty.channel.nio.NioEventLoopGroup;
+import io.netty.channel.socket.nio.NioSocketChannel;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.stereotype.Component;
+
+import javax.annotation.PostConstruct;
+
+@Component
+@Slf4j
+//@ConditionalOnProperty(prefix = "spring", name="location", havingValue="Cloud")
+public class NettyClient {
+    public static Channel channel;
+    public Bootstrap bootstrap = new Bootstrap();
+    @Autowired
+    private CloudKafkaConsumer cloudKafkaConsumer;
+
+    @Autowired
+    private CloudKafkaProducer cloudKafkaProducer;
+
+    @Value("${listener.cloud1}")
+    private String Connectinfo;
+
+    @PostConstruct
+    public void connect() {
+        EventLoopGroup workerGroup = new NioEventLoopGroup();
+        String[] connectInfoArr = connectInfo.split(",");
+        for (int i = 0; i < connectInfoArr.length; i++) {
+            // Entry layout (by split index): [1] project label, [2] host, [3] port
+            String[] parts = connectInfoArr[i].split(":");
+            String ip = parts[2];
+            int port = Integer.valueOf(parts[3]);
+            String projectId = parts[3] + "_" + parts[1];
+            try {
+                bootstrap.group(workerGroup);
+                bootstrap.channel(NioSocketChannel.class);
+                bootstrap.option(ChannelOption.AUTO_READ, true);
+                bootstrap.handler(new CenterChannelInitializer(cloudKafkaConsumer, this, projectId, cloudKafkaProducer));
+                ChannelFuture f = bootstrap.connect(ip, port).sync();
+                channel = f.channel();
+            } catch (Exception e) {
+                log.error("Connection failed", e);
+            }
+        }
+    }
+
+
+    public void reConnect() {
+        // Reconnect after a dropped link; the attached listener logs any failure
+        String[] connectInfoArr = connectInfo.split(",");
+        for (int i = 0; i < connectInfoArr.length; i++) {
+            String[] parts = connectInfoArr[i].split(":");
+            String ip = parts[2];
+            int port = Integer.valueOf(parts[3]);
+            channel = bootstrap.connect(ip, port).addListener((ChannelFutureListener) future -> {
+                if (future.cause() != null) {
+                    log.warn("Failed to connect: {}", future.cause().getMessage());
+                }
+            }).channel();
+        }
+    }
+
+
+    public void sendMessage(NettyMessage msg) throws InterruptedException {
+        log.info("Sending to the cloud: [{}]", msg);
+        if (channel.isWritable()) {
+            try {
+                // NOTE: NettyMessage<T> is generic. Sending the typed object directly would force the
+                // receiver to have an entity class whose fully-qualified name matches <T> exactly.
+                // The sender should not constrain the receiver, so we round-trip through JSON to
+                // erase the concrete generic type.
+                channel.writeAndFlush(JSONObject.parseObject(msg.toString(), new TypeReference<NettyMessage<JSONObject>>() {}));
+            } catch (Exception e) {
+                log.error("Send failed; buffering the message", e);
+                NettyMessageQueue.getNettyMessageQueue().cloudproduce(msg);
+                channel.close();
+            }
+        } else {
+            log.warn("Cloud Netty channel not writable; buffering the message [{}]", msg);
+            NettyMessageQueue.getNettyMessageQueue().cloudproduce(msg);
+        }
+    }
+
+    public void sendMessageNotCheck(NettyMessage msg){
+        try {
+            channel.writeAndFlush(JSONObject.parseObject(msg.toString(), NettyMessage.class));
+        } catch (Exception e) {
+            log.error(e.getMessage(),e);
+        }
+    }
+
+
+}
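
Editor's note: connect() and reConnect() assume listener.cloud1 is a comma-separated list of colon-separated entries, reading index 1 as a project label, index 2 as the host and index 3 as the port (index 0 is not read here). A hypothetical value, inferred from the split indices only:

    listener:
      cloud1: tcp:zkt-proj-alarm:127.0.0.1:9973,tcp:zkt-proj-alarm2:127.0.0.1:9974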

+ 118 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/netty/edge/NettyServer.java

@@ -0,0 +1,118 @@
+package com.persagy.ztkedgeclouddatasecurity.netty.edge;
+
+import cn.hutool.core.collection.CollectionUtil;
+import com.persagy.ztkedgeclouddatasecurity.entity.ChattingUser;
+import com.persagy.ztkedgeclouddatasecurity.kafka.EdgeKafkaProducer;
+import com.persagy.ztkedgeclouddatasecurity.netty.MyChannelInitializer;
+import io.netty.bootstrap.ServerBootstrap;
+import io.netty.channel.*;
+import io.netty.channel.nio.NioEventLoopGroup;
+import io.netty.channel.socket.nio.NioServerSocketChannel;
+import lombok.extern.slf4j.Slf4j;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.stereotype.Component;
+
+import javax.annotation.PostConstruct;
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
+
+@Component
+@Slf4j
+@ConditionalOnProperty(prefix = "spring", name="location", havingValue="Edge")
+public class NettyServer {
+
+    public static Map<String, Set<Channel>> socketChannelMap = new ConcurrentHashMap<>();
+    private Logger logger = LoggerFactory.getLogger(this.getClass());
+    @Value("${listener.port}")
+    private String listenerPort;
+
+    @Value("${spring.profiles.active}")
+    private String active;
+    @Value("${spring.location}")
+    private String location;
+
+    private ChannelFuture[] channelFutures = null;
+    private List<ChattingUser> list = new ArrayList<>();
+    @Autowired
+    private EdgeKafkaProducer edgeKafkaProducer;
+
+    public static synchronized void removeChannel(ChannelHandlerContext ctx) {
+        Channel channel = ctx.channel();
+        log.warn("删除channel[{}]", ctx.channel().remoteAddress().toString());
+        Set<Map.Entry<String, Set<Channel>>> channelList = NettyServer.socketChannelMap.entrySet();
+        for (Map.Entry<String, Set<Channel>> channelEntry : channelList) {
+            Set<Channel> channels = channelEntry.getValue();
+            if (CollectionUtil.isNotEmpty(channels)) {
+                Iterator<Channel> it = channels.iterator();
+                while (it.hasNext()) {
+                    Channel ch = it.next();
+                    if (ch == channel) {
+                        it.remove();
+                        log.warn("----项目ID[{}],地址[{}] ----离开---", channelEntry.getKey(), channel.remoteAddress().toString());
+                    }
+                }
+                if (CollectionUtil.isNotEmpty(channels)) {
+                    log.warn("项目[{}]剩余客户端数量[{}]", channelEntry.getKey(), channels.size());
+                    NettyServer.socketChannelMap.put(channelEntry.getKey(), channels);
+                } else {
+                    log.warn("项目[{}]已经全部下线", channelEntry.getKey());
+                    NettyServer.socketChannelMap.remove(channelEntry.getKey());
+                }
+            }
+        }
+    }
+
+    @PostConstruct
+    private void start() {
+        // Multi-port binding: one server per configured entry
+        String[] ipPortArr = listenerPort.split(",");
+
+        for (int i = 0; i < ipPortArr.length; i++) {
+            String[] parts = ipPortArr[i].split(":");
+            ChattingUser chattingUser = new ChattingUser();
+            chattingUser.setUserId(parts[1]);
+            String ip = "";
+            if ("Cloud".equals(location) && (!"dev".equals(active))) {
+                chattingUser.setIp(parts[1]);
+                ip = parts[1];
+            } else {
+                chattingUser.setIp(parts[2]);
+                ip = parts[2];
+            }
+
+            if ("zkt-proj-alarm".equals(parts[1])) {
+                ServerBootstrap serverBootstrap = new ServerBootstrap();
+                EventLoopGroup bossGroup = new NioEventLoopGroup();
+                NioEventLoopGroup workerGroup = new NioEventLoopGroup();
+                serverBootstrap.group(bossGroup, workerGroup);
+                serverBootstrap.channel(NioServerSocketChannel.class);
+                serverBootstrap.childOption(ChannelOption.SO_REUSEADDR, true);
+                int port = Integer.valueOf(parts[3]);
+                chattingUser.setPort(port);
+                list.add(chattingUser);
+                serverBootstrap.childHandler(new MyChannelInitializer(chattingUser, edgeKafkaProducer));
+                ChannelFuture channelFuture = serverBootstrap.bind(port);
+                if (channelFutures == null) {
+                    // Size by the number of configured entries; the previous fixed size of 8
+                    // combined with 1-based indexing could overflow the array
+                    channelFutures = new ChannelFuture[ipPortArr.length];
+                }
+                channelFutures[i] = channelFuture;
+                channelFuture.addListener(future -> {
+                    if (future.isSuccess()) {
+                        log.info("Started successfully, port: {} >>> {}", port, channelFuture.channel().id().asLongText());
+                    } else {
+                        log.error("Start failed, port: {} >>> {}", port, channelFuture.channel().id().asLongText());
+                    }
+                });
+            }
+        }
+    }
+}
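
Editor's note: the bound channels are kept in channelFutures but never closed. A sketch of an optional @PreDestroy hook (not part of this commit, and assuming the event loop groups were promoted to fields so they could be shut down too):

    @PreDestroy
    private void stop() {
        if (channelFutures != null) {
            for (ChannelFuture f : channelFutures) {
                if (f != null) {
                    f.channel().close();  // unbind the listening socket
                }
            }
        }
    }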

+ 69 - 0
src/main/java/com/persagy/ztkedgeclouddatasecurity/until/SerializationUtil.java

@@ -0,0 +1,69 @@
+package com.persagy.ztkedgeclouddatasecurity.until;
+
+import com.dyuproject.protostuff.LinkedBuffer;
+import com.dyuproject.protostuff.ProtostuffIOUtil;
+import com.dyuproject.protostuff.Schema;
+import com.dyuproject.protostuff.runtime.RuntimeSchema;
+import org.objenesis.Objenesis;
+import org.objenesis.ObjenesisStd;
+
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+public class SerializationUtil {
+
+    private static Map<Class<?>, Schema<?>> cachedSchema = new ConcurrentHashMap<>();
+
+    private static Objenesis objenesis = new ObjenesisStd();
+
+    private SerializationUtil() {
+
+    }
+
+    /**
+     * Serialize (object -> byte array).
+     *
+     * @param obj the object to serialize
+     * @return the serialized byte array
+     */
+    public static <T> byte[] serialize(T obj) {
+        Class<T> cls = (Class<T>) obj.getClass();
+        LinkedBuffer buffer = LinkedBuffer.allocate(LinkedBuffer.DEFAULT_BUFFER_SIZE);
+        try {
+            Schema<T> schema = getSchema(cls);
+            return ProtostuffIOUtil.toByteArray(obj, schema, buffer);
+        } catch (Exception e) {
+            throw new IllegalStateException(e.getMessage(), e);
+        } finally {
+            buffer.clear();
+        }
+    }
+
+    /**
+     * Deserialize (byte array -> object).
+     *
+     * @param data the serialized bytes
+     * @param cls  the target class
+     * @param <T>  the target type
+     * @return the reconstructed instance
+     */
+    public static <T> T deserialize(byte[] data, Class<T> cls) {
+        try {
+            T message = objenesis.newInstance(cls);
+            Schema<T> schema = getSchema(cls);
+            ProtostuffIOUtil.mergeFrom(data, message, schema);
+            return message;
+        } catch (Exception e) {
+            throw new IllegalStateException(e.getMessage(), e);
+        }
+    }
+
+    @SuppressWarnings("unchecked")
+    private static <T> Schema<T> getSchema(Class<T> cls) {
+        // computeIfAbsent avoids the check-then-put race on the concurrent map
+        return (Schema<T>) cachedSchema.computeIfAbsent(cls, c -> RuntimeSchema.createFrom(c));
+    }
+
+}
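
Editor's note: a minimal usage sketch of the helper above (not part of this commit). The NettyMessage constructor arguments mirror the ones used in CenterClientHandler; deserialize() instantiates via Objenesis, so no no-arg constructor is required.

    // Round-trip: object -> bytes -> object
    NettyMessage msg = new NettyMessage("", 200, "Pj4403070003", "ZS");
    byte[] bytes = SerializationUtil.serialize(msg);
    NettyMessage copy = SerializationUtil.deserialize(bytes, NettyMessage.class);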

File diff suppressed because it is too large
+ 134 - 0
src/main/resources/application-dev.yml


File diff suppressed because it is too large
+ 71 - 0
src/main/resources/application-prod.yml


File diff suppressed because it is too large
+ 71 - 0
src/main/resources/application-uat.yml


+ 6 - 0
src/main/resources/application.yml

@@ -0,0 +1,6 @@
+spring:
+  profiles:
+    active: dev
+  location: Cloud  # Edge (edge side) or Cloud (cloud side)
+
+
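
Editor's note: spring.location is the switch that @ConditionalOnProperty keys on; for example, NettyServer above is only created on the edge side:

    @ConditionalOnProperty(prefix = "spring", name = "location", havingValue = "Edge")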

BIN
src/main/resources/uatclient.keystore.jks


BIN
src/main/resources/uatclient.truststore.jks


+ 13 - 0
src/test/java/com/persagy/ztkedgeclouddatasecurity/ZtkEdgeclouddatasecurityApplicationTests.java

@@ -0,0 +1,13 @@
+package com.persagy.ztkedgeclouddatasecurity;
+
+import org.junit.jupiter.api.Test;
+import org.springframework.boot.test.context.SpringBootTest;
+
+@SpringBootTest
+class ZtkEdgeclouddatasecurityApplicationTests {
+
+	@Test
+	void contextLoads() {
+	}
+
+}