@@ -0,0 +1,87 @@
+package com.saga.flink.cdc.utils;
+
+import com.alibaba.fastjson.JSONObject;
+import com.saga.flink.cdc.constant.CDCConstant;
+import com.saga.flink.cdc.constant.OpEnum;
+import org.apache.kafka.connect.data.Field;
+import org.apache.kafka.connect.data.Struct;
+import org.apache.kafka.connect.source.SourceRecord;
+
+import java.util.List;
+
+public class CDCUtils {
+
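+    // Convert a CDC SourceRecord into a flat JSON envelope: operation type,
+    // source metadata (db / table / ts_ms), before/after row images, and a
+    // Kafka partition key derived from the primary-key fields.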
+    public static JSONObject getCDCDataJSON(SourceRecord sourceRecord) {
+
+        Struct dataRecord = (Struct) sourceRecord.value();
+
+        JSONObject cdcJson = new JSONObject();
+
+        // Determine which operation this change event represents
+        String op = dataRecord.getString(CDCConstant.OP);
+        String opType = OpEnum.getValue(op);
+
+        // Unknown operation: fall back to the raw op code
+        if ("".equals(opType)) {
+            opType = op;
+        }
+        cdcJson.put(CDCConstant.OP, opType);
+
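+        // Source metadata: database name, table name, and event timestamp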
+        Struct source = dataRecord.getStruct(CDCConstant.SOURCE);
+        Object db = source.get(CDCConstant.DB);
+        Object table = source.get(CDCConstant.TABLE);
+        Object tsMs = source.get(CDCConstant.TS_MS);
+
+        cdcJson.put(CDCConstant.DB, db);
+        cdcJson.put(CDCConstant.TABLE, table);
+        cdcJson.put(CDCConstant.TS_MS, tsMs);
+
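+        // All operations except delete carry an "after" row image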
+        if (!CDCConstant.OP_D.equals(op)) {
+            Struct after = dataRecord.getStruct(CDCConstant.AFTER);
+            JSONObject afterJson = fieldToJson(after);
+            cdcJson.put(CDCConstant.AFTER, afterJson);
+        }
+
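+        // Updates and deletes also carry a "before" row image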
+        if (CDCConstant.OP_U.equals(op) || CDCConstant.OP_D.equals(op)) {
+            Struct before = dataRecord.getStruct(CDCConstant.BEFORE);
+            JSONObject beforeJson = fieldToJson(before);
+            cdcJson.put(CDCConstant.BEFORE, beforeJson);
+        }
+
+        // Primary-key JSON; its fields also determine the Kafka partition key
+        JSONObject pkJson = new JSONObject();
+        Struct pkey = (Struct) sourceRecord.key();
+
+        List<Field> fields = pkey.schema().fields();
+        int hash = 0;
+        for (Field field : fields) {
+            String name = field.name();
+            Object value = pkey.get(name);
+            pkJson.put(name, value);
+            if (value != null) {
+                // Skip null key fields so hashCode() cannot throw an NPE
+                hash += value.hashCode();
+            }
+        }
+        // floorMod keeps the partition non-negative even if hash overflows to Integer.MIN_VALUE
+        int partitionNum = Math.floorMod(hash, CDCConstant.KAFKA_PARTITION_NUM);
+        cdcJson.put(CDCConstant.KAFKA_PARTITION_KEY, partitionNum);
+        cdcJson.put(CDCConstant.PRIMARY_JSON, pkJson);
+
+        return cdcJson;
+    }
+
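+    // Flatten a Kafka Connect Struct into a JSONObject, field by field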
+    private static JSONObject fieldToJson(Struct struct) {
+        if (struct == null) {
+            return new JSONObject();
+        }
+        List<Field> fields = struct.schema().fields();
+
+        JSONObject json = new JSONObject();
+
+        for (Field field : fields) {
+            String fieldName = field.name();
+            Object fieldValue = struct.get(fieldName);
+            json.put(fieldName, fieldValue);
+        }
+
+        return json;
+    }
+
+}