#!/bin/bash
# Sqoop import helpers: pull MySQL tables into the saga_dw ODS layer on HDFS/Hive.
#
# Usage: <script> <arg1> [do_date]
#   do_date - target partition date (YYYY-MM-DD); defaults to yesterday.
# NOTE(review): the date is read from $2, not $1 — confirm the caller convention.
if [ -n "$2" ]; then
  do_date=$2
else
  # GNU date: yesterday in ISO format (%F == %Y-%m-%d)
  do_date=$(date -d '-1 day' +%F)
fi
echo "$do_date"
## One-shot full import of energy_week_day into the ODS layer, LZO-compressed.
# NOTE(review): credentials are hardcoded on the command line (visible in `ps`
# output and shell history) — prefer --password-file or a Sqoop options file.
# NOTE(review): same invocation as mysql_to_hdfs_lzo() below — consider calling
# that function instead once it is defined before this point.
bin/sqoop import \
  --connect "jdbc:mysql://192.168.0.76:3306/sagacloud_review?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false" \
  --username root \
  --password 123456 \
  --target-dir /warehouse/saga_dw/ods/ods_energy_week_day \
  --delete-target-dir \
  --query "select * from energy_week_day where 1 = 1 and \$CONDITIONS" \
  --num-mappers 1 \
  --hive-drop-import-delims \
  --fields-terminated-by '\001' \
  --compress \
  --compression-codec lzo \
  --hive-import \
  --hive-database saga_dw \
  --hive-table ods_energy_week_day \
  --hive-overwrite \
  --null-string '\\N' \
  --null-non-string '\\N'
#######################################
# Import one MySQL table into a daily Hive partition (dt=$do_date).
# Globals:   do_date (read) - partition date, YYYY-MM-DD
# Arguments: $1 - JDBC connect string
#            $2 - MySQL username
#            $3 - MySQL password
#            $4 - Hive/ODS table name
#            $5 - SELECT query ending in a WHERE clause ("... and \$CONDITIONS"
#                 is appended here)
#######################################
mysql_to_hdfs_by_day() {
  sqoop import -D mapred.job.queue.name=root.tianyan \
    --connect "$1" \
    --username "$2" \
    --password "$3" \
    --target-dir "/warehouse/saga_dw/ods/$4/$do_date" \
    --delete-target-dir \
    --query "$5 and \$CONDITIONS" \
    --num-mappers 1 \
    --hive-drop-import-delims \
    --fields-terminated-by '\001' \
    --compress \
    --compression-codec snappy \
    --hive-import \
    --hive-database saga_dw \
    --hive-table "$4" \
    --hive-overwrite \
    --hive-partition-key dt \
    --hive-partition-value "$do_date" \
    --null-string '\\N' \
    --null-non-string '\\N'
}
# FIX(review): --target-dir previously used $2 (the DB username), so every
# table imported under the same /ods/<username>/<date> path; now uses $4.
#######################################
# Import one MySQL table into a non-partitioned Hive table.
# Arguments: $1 - JDBC connect string
#            $2 - MySQL username
#            $3 - MySQL password
#            $4 - Hive/ODS table name
#            $5 - SELECT query ending in a WHERE clause ("... and \$CONDITIONS"
#                 is appended here)
#######################################
collect_data_np() {
  sqoop import -D mapred.job.queue.name=root.tianyan \
    --connect "$1" \
    --username "$2" \
    --password "$3" \
    --target-dir "/warehouse/saga_dw/ods/$4" \
    --delete-target-dir \
    --query "$5 and \$CONDITIONS" \
    --num-mappers 1 \
    --hive-drop-import-delims \
    --fields-terminated-by '\001' \
    --compress \
    --compression-codec snappy \
    --hive-import \
    --hive-database dw \
    --hive-table "$4" \
    --hive-overwrite \
    --null-string '\\N' \
    --null-non-string '\\N'
}
# FIX(review): --target-dir previously used $2 (the DB username) instead of the
# table name $4.
# NOTE(review): --hive-database is "dw" here but "saga_dw" everywhere else —
# confirm whether that is intentional before unifying.
#######################################
# LZO-compressed full import: MySQL energy_week_day -> HDFS/Hive ODS.
# Takes no arguments; connection, credentials and table are hardcoded.
# NOTE(review): duplicates the top-level invocation at the head of this
# script; keep one of the two. Credentials on the CLI leak via `ps` — prefer
# --password-file or a Sqoop options file.
#######################################
mysql_to_hdfs_lzo() {
  bin/sqoop import \
    -D mapred.job.queue.name=root.hive \
    --connect "jdbc:mysql://192.168.0.76:3306/sagacloud_review?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false" \
    --username root \
    --password 123456 \
    --target-dir /warehouse/saga_dw/ods/ods_energy_week_day \
    --delete-target-dir \
    --query "select * from energy_week_day where 1 = 1 and \$CONDITIONS" \
    --num-mappers 1 \
    --hive-drop-import-delims \
    --fields-terminated-by '\001' \
    --compress \
    --compression-codec lzo \
    --hive-import \
    --hive-database saga_dw \
    --hive-table ods_energy_week_day \
    --hive-overwrite \
    --null-string '\\N' \
    --null-non-string '\\N'
}
#######################################
# Export data from HDFS back to MySQL.
# TODO(review): the original body was an incomplete fragment
# (`sqoop export -D \` continuing into a bare `-connect` with no values) and
# could never run. Kept as an explicit unimplemented stub so callers fail
# loudly instead of invoking sqoop with a malformed command line.
# Returns:   1 always (not implemented)
#######################################
hdfs_to_mysql() {
  echo "hdfs_to_mysql: not implemented" >&2
  # Sketch of the intended call, to be completed:
  #   sqoop export -D mapred.job.queue.name=<queue> \
  #     --connect "<jdbc-url>" --username <user> --password <pass> \
  #     --table <mysql_table> --export-dir <hdfs_path> \
  #     --input-fields-terminated-by '\001' \
  #     --input-null-string '\\N' --input-null-non-string '\\N'
  return 1
}