wudianlong committed 7bf2d28d9d, 2 years ago (parent commit view)
5 changed files with 115 additions and 5 deletions
  1. 2 0
      function/hive_func.sh
  2. 5 0
      function/mysql_func.sh
  3. 105 0
      function/sqoop_func.sh
  4. 1 1
      hive_ddl/saga_dw_ods.sql
  5. 2 4
      job/mysql_to_hdfs.sh

+ 2 - 0
function/hive_func.sh

@@ -0,0 +1,2 @@
+#!/bin/bash
+

+ 5 - 0
function/mysql_func.sh

@@ -0,0 +1,5 @@
+#!/bin/bash
+
+insert_table() {
+  insert into tableName select id, name from tableNameB
+}

+ 105 - 0
function/sqoop_func.sh

@@ -0,0 +1,105 @@
+#! /bin/bash
+
+if [ -n "$2" ] ;then do_date=$2
+else
+do_date=`date -d '-1 day' +%F`
+fi
+
+echo $do_date
+
+
+## lzo 压缩
+bin/sqoop import  \
+--connect "jdbc:mysql://192.168.0.76:3306/sagacloud_review?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false" \
+--username root \
+--password 123456 \
+--target-dir /warehouse/saga_dw/ods/ods_energy_week_day  \
+--delete-target-dir \
+--query "select * from energy_week_day where 1 = 1 and  \$CONDITIONS" \
+--num-mappers 1 \
+--hive-drop-import-delims \
+--fields-terminated-by '\001' \
+--compress \
+--compression-codec lzo \
+--hive-import \
+--hive-database saga_dw \
+--hive-table ods_energy_week_day \
+--hive-overwrite \
+--null-string '\\N' \
+--null-non-string '\\N'
+
+#按天写入分区
+mysql_to_hdfs_by_day() {
+sqoop import -D mapred.job.queue.name=root.tianyan  \
+--connect "$1" \
+--username $2 \
+--password $3 \
+--target-dir /warehouse/saga_dw/ods/$2/$do_date  \
+--delete-target-dir \
+--query "$5 and  \$CONDITIONS" \
+--num-mappers 1 \
+--hive-drop-import-delims \
+--fields-terminated-by '\001' \
+--compress \
+--compression-codec snappy \
+--hive-import \
+--hive-database saga_dw \
+--hive-table $4 \
+--hive-overwrite \
+--hive-partition-key dt \
+--hive-partition-value $do_date \
+--null-string '\\N' \
+--null-non-string '\\N'
+}
+
+#写入没分区表
+collect_data_np() {
+sqoop import -D mapred.job.queue.name=root.tianyan  \
+--connect "$1" \
+--username $2 \
+--password $3 \
+--target-dir /warehouse/saga_dw/ods/$2  \
+--delete-target-dir \
+--query "$5 and  \$CONDITIONS" \
+--num-mappers 1 \
+--hive-drop-import-delims \
+--fields-terminated-by '\001' \
+--compress \
+--compression-codec snappy \
+--hive-import \
+--hive-database dw \
+--hive-table $4 \
+--hive-overwrite \
+--null-string '\\N' \
+--null-non-string '\\N'
+}
+
+
+## lzo压缩,mysql -> hdfs
+mysql_to_hdfs_lzo () {
+  bin/sqoop import  \
+  -D mapred.job.queue.name=root.hive  \
+  --connect "jdbc:mysql://192.168.0.76:3306/sagacloud_review?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false" \
+  --username root \
+  --password 123456 \
+  --target-dir /warehouse/saga_dw/ods/ods_energy_week_day  \
+  --delete-target-dir \
+  --query "select * from energy_week_day where 1 = 1 and  \$CONDITIONS" \
+  --num-mappers 1 \
+  --hive-drop-import-delims \
+  --fields-terminated-by '\001' \
+  --compress \
+  --compression-codec lzo \
+  --hive-import \
+  --hive-database saga_dw \
+  --hive-table ods_energy_week_day \
+  --hive-overwrite \
+  --null-string '\\N' \
+  --null-non-string '\\N'
+}
+
+hdfs_to_mysql(){
+  sqoop export \
+  -D \
+  -connect
+}

+ 1 - 1
hive_ddl/saga_dw_ods.sql

@@ -16,4 +16,4 @@ STORED AS ORC
 LOCATION
 'hdfs://sagaCluster:8020/warehouse/saga_dw/ods/ods_energy_week_day'
 TBLPROPERTIES (
-'orc.compress'='SNAPPY');
+'orc.compress'='lzo');

+ 2 - 4
job/mysql_to_hdfs.sh

@@ -1,11 +1,9 @@
 #! /bin/bash
-#. /opt/app/bi-app/config/config.sh
+. /opt/app/bi-app/config/config.sh
 . ../config/config.sh
 
 url=${MYSQL_URL}
 user=${MYSQL_USER}
 password=${MYSQL_PASSWORD}
 
-echo $url
-echo $user
-echo $password
+