
Wrap CDC-captured data as JSON

wudianlong 3 years ago
commit 90893427dc

+ 2 - 2
dw-db/src/main/java/com/saga/hbase/phoenix/app/PhoenixDML.java

@@ -51,8 +51,8 @@ public class PhoenixDML {
         }
     }
 
-    //TODO update
-    public Integer update(){
+    //TODO upsert
+    public Integer upsert(){
         String sql1="upsert into test_phoenix_api values(1,'test1')";
         String sql2="upsert into test_phoenix_api values(2,'test2')";
         String sql3="upsert into test_phoenix_api values(3,'test3')";

+ 36 - 0
dw-flink/src/main/java/com/saga/flink/cdc/app/FlinkPhoenixDML.java

@@ -1,10 +1,46 @@
 package com.saga.flink.cdc.app;
 
+import com.alibaba.fastjson.JSONObject;
+import com.saga.flink.cdc.bean.User;
+import com.saga.flink.cdc.sink.PhoenixSinkFunction;
+import com.saga.flink.cdc.source.JSONDebeziumDeserializationSchema;
+import com.ververica.cdc.connectors.mysql.MySqlSource;
+import com.ververica.cdc.connectors.mysql.table.StartupOptions;
+import com.ververica.cdc.debezium.DebeziumSourceFunction;
+import org.apache.flink.streaming.api.datastream.DataStreamSource;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+
 public class FlinkPhoenixDML {
 
     public static void main(String[] args) {
 
+        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
+
+        env.setParallelism(3);
+        DebeziumSourceFunction<JSONObject> mysqlSource = MySqlSource.<JSONObject>builder()
+                .hostname("hadoop01")
+                .port(3306)
+                .username("root")
+                .password("j5ry0#jZ7vaUt5f4")
+                .databaseList("saga_dev")
+                .tableList("saga_dev.user") // optional; if omitted, all tables in the listed databases are captured. Note: use the "db.table" format when specifying tables
+                .startupOptions(StartupOptions.initial())
+                .deserializer(new JSONDebeziumDeserializationSchema())
+                .build();
+
+        DataStreamSource<JSONObject> userDataStreamSource = env.addSource(mysqlSource);
+        userDataStreamSource.returns(JSONObject.class).addSink(new PhoenixSinkFunction<>());
 
+        try {
+            env.execute("DML");
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
     }
 }

+ 12 - 0
dw-flink/src/main/java/com/saga/flink/cdc/bean/User.java

@@ -0,0 +1,12 @@
+package com.saga.flink.cdc.bean;
+
+import lombok.Data;
+
+@Data
+public class User {
+
+    private Integer id;
+    private String name;
+    private Integer age;
+    private String address;
+}

+ 37 - 0
dw-flink/src/main/java/com/saga/flink/cdc/constant/CDCConstant.java

@@ -0,0 +1,37 @@
+package com.saga.flink.cdc.constant;
+
+public class CDCConstant {
+
+    public static final String AFTER = "after";
+    public static final String BEFORE = "before";
+
+    /**
+     * Operation codes: r (initial snapshot read), c (insert), u (update), d (delete)
+     */
+    public static final String OP = "op";
+
+    /** initial snapshot read */
+    public static final String OP_R = "r";
+
+    /** insert */
+    public static final String OP_C = "c";
+
+    /** update */
+    public static final String OP_U = "u";
+
+    /** delete */
+    public static final String OP_D = "d";
+
+    public static final String TS_MS = "ts_ms";
+    public static final String SOURCE = "source";
+    public static final String DB = "db";
+    public static final String TABLE = "table";
+
+    /** kafka partition key */
+    public static final String KAFKA_PARTITION_KEY = "kafka_partition_key";
+
+    /** number of kafka partitions */
+    public static final Integer KAFKA_PARTITION_NUM = 3;
+
+    public static final String PRIMARY_JSON = "primary_json";
+}

+ 41 - 0
dw-flink/src/main/java/com/saga/flink/cdc/constant/OpEnum.java

@@ -0,0 +1,41 @@
+package com.saga.flink.cdc.constant;
+
+
+public enum OpEnum {
+
+    C("c", "insert"), D("d", "delete"), R("r", "insert"), U("u", "update");
+
+    private String key;
+    private String value;
+
+    private OpEnum(String key, String value) {
+        this.key = key;
+        this.value = value;
+    }
+
+    public static String getValue(String key) {
+        String value = "";
+        for (OpEnum c : OpEnum.values()) {
+            if (c.getKey().equals(key)) {
+                value = c.getValue();
+            }
+        }
+        return value;
+    }
+
+    public String getKey() {
+        return key;
+    }
+
+    public void setKey(String key) {
+        this.key = key;
+    }
+
+    public String getValue() {
+        return value;
+    }
+
+    public void setValue(String value) {
+        this.value = value;
+    }
+}
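
A quick illustration (not part of this commit) of how OpEnum maps Debezium op codes to the action strings used downstream; both "r" (snapshot rows) and "c" (new rows) map to "insert", so they share the same write path:

import com.saga.flink.cdc.constant.OpEnum;

public class OpEnumExample {
    public static void main(String[] args) {
        System.out.println(OpEnum.getValue("r")); // insert (initial snapshot row)
        System.out.println(OpEnum.getValue("c")); // insert
        System.out.println(OpEnum.getValue("u")); // update
        System.out.println(OpEnum.getValue("d")); // delete
        System.out.println(OpEnum.getValue("x")); // "" (unknown op code falls back to an empty string)
    }
}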

+ 19 - 0
dw-flink/src/main/java/com/saga/flink/cdc/constant/PhoenixConstant.java

@@ -0,0 +1,19 @@
+package com.saga.flink.cdc.constant;
+
+public class PhoenixConstant {
+
+    // Phoenix schema name
+    public static final String HBASE_SCHEMA = "SAGA_LOG";
+
+    // Phoenix JDBC driver
+    public static final String PHOENIX_DRIVER = "org.apache.phoenix.jdbc.PhoenixDriver";
+
+    // Phoenix JDBC connection URL
+    public static final String PHOENIX_SERVER = "jdbc:phoenix:hadoop01,hadoop02,hadoop03:2181";
+
+    // ClickHouse driver
+//    public static final String CLICKHOUSE_DRIVER = "ru.yandex.clickhouse.ClickHouseDriver";
+
+    // ClickHouse connection URL
+//    public static final String CLICKHOUSE_URL = "jdbc:clickhouse://hadoop02:8123/default";
+}

+ 19 - 0
dw-flink/src/main/java/com/saga/flink/cdc/sink/PhoenixSinkFunction.java

@@ -0,0 +1,19 @@
+package com.saga.flink.cdc.sink;
+
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
+
+public class PhoenixSinkFunction<T> extends RichSinkFunction<T> {
+
+    @Override
+    public void open(Configuration parameters) throws Exception {
+        System.out.println("PhoenixSinkFunction open");
+    }
+
+    @Override
+    public void invoke(T value, Context context) throws Exception {
+        System.out.println("PhoenixSinkFunction invoke");
+        System.out.println(value.toString());
+    }
+}
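
The sink above is only a logging placeholder. Below is a minimal sketch, not part of this commit, of what an actual Phoenix upsert sink for the CDC JSON could look like; the SAGA_LOG.USER target table and its ID/NAME columns are assumptions for illustration, and delete events are left unhandled:

package com.saga.flink.cdc.sink; // hypothetical companion class, not in this commit

import com.alibaba.fastjson.JSONObject;
import com.saga.flink.cdc.constant.CDCConstant;
import com.saga.flink.cdc.constant.PhoenixConstant;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;

public class PhoenixUpsertSinkSketch extends RichSinkFunction<JSONObject> {

    private transient Connection connection;

    @Override
    public void open(Configuration parameters) throws Exception {
        // One connection per parallel sink instance.
        Class.forName(PhoenixConstant.PHOENIX_DRIVER);
        connection = DriverManager.getConnection(PhoenixConstant.PHOENIX_SERVER);
        connection.setAutoCommit(true);
    }

    @Override
    public void invoke(JSONObject value, Context context) throws Exception {
        // Inserts and updates share the upsert path; deletes are ignored in this sketch.
        JSONObject after = value.getJSONObject(CDCConstant.AFTER);
        if (after == null) {
            return;
        }
        // Assumed target table and columns.
        String sql = "upsert into " + PhoenixConstant.HBASE_SCHEMA + ".USER (ID, NAME) values (?, ?)";
        try (PreparedStatement ps = connection.prepareStatement(sql)) {
            ps.setInt(1, after.getIntValue("id"));
            ps.setString(2, after.getString("name"));
            ps.executeUpdate();
        }
    }

    @Override
    public void close() throws Exception {
        if (connection != null) {
            connection.close();
        }
    }
}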

+ 22 - 0
dw-flink/src/main/java/com/saga/flink/cdc/source/JSONDebeziumDeserializationSchema.java

@@ -0,0 +1,22 @@
+package com.saga.flink.cdc.source;
+
+import com.alibaba.fastjson.JSONObject;
+import com.saga.flink.cdc.utils.CDCUtils;
+import com.ververica.cdc.debezium.DebeziumDeserializationSchema;
+import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.util.Collector;
+import org.apache.kafka.connect.source.SourceRecord;
+
+public class JSONDebeziumDeserializationSchema implements DebeziumDeserializationSchema<JSONObject> {
+    @Override
+    public void deserialize(SourceRecord sourceRecord, Collector<JSONObject> collector) throws Exception {
+
+        JSONObject cdcDataJSON = CDCUtils.getCDCDataJSON(sourceRecord);
+        collector.collect(cdcDataJSON);
+    }
+
+    @Override
+    public TypeInformation<JSONObject> getProducedType() {
+        return TypeInformation.of(JSONObject.class);
+    }
+}

+ 43 - 0
dw-flink/src/main/java/com/saga/flink/cdc/source/MysqlSourceCDC.java

@@ -0,0 +1,43 @@
+package com.saga.flink.cdc.source;
+
+import com.saga.flink.cdc.bean.User;
+import com.saga.flink.cdc.constant.PhoenixConstant;
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.sql.Statement;
+
+public class MysqlSourceCDC extends RichSourceFunction<User> {
+
+    private static Connection connection = null;
+    private static Statement statement = null;
+
+    @Override
+    public void open(Configuration parameters) throws Exception {
+        try {
+            Class.forName(PhoenixConstant.PHOENIX_DRIVER);
+            connection = DriverManager.getConnection(PhoenixConstant.PHOENIX_SERVER);
+            statement = connection.createStatement();
+
+        } catch (ClassNotFoundException e) {
+            e.printStackTrace();
+        } catch (SQLException e) {
+            e.printStackTrace();
+        }
+        System.out.println("MysqlSourceCDC open");
+    }
+
+    @Override
+    public void run(SourceContext<User> ctx) throws Exception {
+        System.out.println("MysqlSourceCDC run");
+        ctx.collect(new User());
+    }
+
+    @Override
+    public void cancel() {
+        System.out.println("MysqlSourceCDC cancel");
+    }
+}

+ 87 - 0
dw-flink/src/main/java/com/saga/flink/cdc/utils/CDCUtils.java

@@ -0,0 +1,87 @@
+package com.saga.flink.cdc.utils;
+
+import com.alibaba.fastjson.JSONObject;
+import com.saga.flink.cdc.constant.CDCConstant;
+import com.saga.flink.cdc.constant.OpEnum;
+import org.apache.kafka.connect.data.Field;
+import org.apache.kafka.connect.data.Struct;
+import org.apache.kafka.connect.source.SourceRecord;
+
+import java.util.List;
+
+public class CDCUtils {
+
+    public static JSONObject getCDCDataJSON(SourceRecord sourceRecord){
+
+        Struct dataRecord = (Struct) sourceRecord.value();
+
+        JSONObject cdcJson = new JSONObject();
+
+        // Determine the operation type
+        String op = dataRecord.getString(CDCConstant.OP);
+        String op_type = OpEnum.getValue(op);
+
+        // Unknown operation: fall back to the raw op code
+        if ("".equals(op_type)){
+            op_type = op;
+        }
+        cdcJson.put(CDCConstant.OP, op_type);
+
+        Struct source = dataRecord.getStruct(CDCConstant.SOURCE);
+        Object db = source.get(CDCConstant.DB);
+        Object table = source.get(CDCConstant.TABLE);
+        Object ts_ms = source.get(CDCConstant.TS_MS);
+
+        cdcJson.put(CDCConstant.DB, db);
+        cdcJson.put(CDCConstant.TABLE, table);
+        cdcJson.put(CDCConstant.TS_MS, ts_ms);
+
+        if (!CDCConstant.OP_D.equals(op)){
+            Struct after = dataRecord.getStruct(CDCConstant.AFTER);
+            JSONObject afterJson = fieldToJson(after);
+            cdcJson.put(CDCConstant.AFTER, afterJson);
+        }
+
+        if (CDCConstant.OP_U.equals(op) || CDCConstant.OP_D.equals(op)){
+            Struct before = dataRecord.getStruct(CDCConstant.BEFORE);
+            JSONObject beforeJson = fieldToJson(before);
+            cdcJson.put(CDCConstant.BEFORE, beforeJson);
+        }
+
+        // Primary key JSON; also used to derive the kafka partition key
+        JSONObject pkJson = new JSONObject();
+        Struct pkey = (Struct) sourceRecord.key();
+
+        List<Field> fields = pkey.schema().fields();
+        Integer hash = 0;
+        for (Field field : fields) {
+            String n = field.name();
+            Object v = pkey.get(n);
+            pkJson.put(n, v);
+            hash += v.hashCode();
+        }
+        Integer partitionNum = Math.abs(hash) % CDCConstant.KAFKA_PARTITION_NUM;
+        cdcJson.put(CDCConstant.KAFKA_PARTITION_KEY, partitionNum);
+        cdcJson.put(CDCConstant.PRIMARY_JSON, pkJson);
+
+        return cdcJson;
+    }
+
+    private static JSONObject fieldToJson(Struct struct){
+        if (struct == null){
+            return new JSONObject();
+        }
+        List<Field> fields = struct.schema().fields();
+
+        JSONObject json = new JSONObject();
+
+        for (Field field : fields) {
+            String fieldName = field.name();
+            Object fieldValue = struct.get(fieldName);
+            json.put(fieldName, fieldValue);
+        }
+
+        return json;
+    }
+
+}
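
For reference, a small self-contained sketch (not part of the commit) of the envelope shape getCDCDataJSON produces for an update on saga_dev.user, and how a consumer might branch on the mapped op value. All field values here are made up:

import com.alibaba.fastjson.JSONObject;

public class CdcEnvelopeExample {
    public static void main(String[] args) {
        // Hand-built envelope mimicking getCDCDataJSON output for an update ("u" -> "update").
        JSONObject cdc = new JSONObject();
        cdc.put("op", "update");
        cdc.put("db", "saga_dev");
        cdc.put("table", "user");
        cdc.put("ts_ms", 1700000000000L);

        JSONObject before = new JSONObject();
        before.put("id", 1);
        before.put("name", "test0");
        cdc.put("before", before);

        JSONObject after = new JSONObject();
        after.put("id", 1);
        after.put("name", "test1");
        cdc.put("after", after);

        JSONObject pk = new JSONObject();
        pk.put("id", 1);
        cdc.put("primary_json", pk);
        // Same formula as CDCUtils: abs(sum of primary-key hashes) % KAFKA_PARTITION_NUM (3).
        cdc.put("kafka_partition_key", Math.abs(Integer.valueOf(1).hashCode()) % 3);

        // A downstream consumer can branch on the mapped op value.
        switch (cdc.getString("op")) {
            case "insert":
            case "update":
                System.out.println("upsert row: " + cdc.getJSONObject("after"));
                break;
            case "delete":
                System.out.println("delete key: " + cdc.getJSONObject("primary_json"));
                break;
            default:
                System.out.println("unhandled op: " + cdc);
        }
    }
}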