#!/bin/bash
# Sqoop import helpers: pull MySQL data into the saga_dw Hive database,
# LZO-compressed, fields delimited by '\001'. Connection settings for the
# generic helper come from config.sh (MYSQL_URL / MYSQL_USER / MYSQL_PASSWORD).
. ./config.sh
url=$MYSQL_URL
username=$MYSQL_USER
password=$MYSQL_PASSWORD

## Non-partitioned import.
# Arguments:
#   $1 - JDBC connection URL
#   $2 - DB username
#   $3 - DB password
#   $4 - Hive table name (also used as the HDFS staging directory name)
#   $5 - free-form SQL query ending in a WHERE clause; " and $CONDITIONS"
#        is appended as required by sqoop free-form query imports
collect_data_np() {
  sqoop import -D mapred.job.queue.name=hive \
    --connect "$1" \
    --username "$2" \
    --password "$3" \
    --target-dir "/warehouse/saga_dw/ods/$4" \
    --delete-target-dir \
    --query "$5 and \$CONDITIONS" \
    --num-mappers 1 \
    --hive-drop-import-delims \
    --fields-terminated-by '\001' \
    --compress \
    --compression-codec lzo \
    --hive-import \
    --hive-database saga_dw \
    --hive-table "$4" \
    --hive-overwrite \
    --null-string '\\N' \
    --null-non-string '\\N'
}
# BUGFIX: --target-dir previously used $2 (the DB username) as the directory
# name; it now uses $4 so the staging dir matches the Hive table, consistent
# with mysql_to_hdfs_lzo below.

# One-off import of energy_15_min history into ods_energy_15_min_history.
# NOTE(review): credentials are hard-coded here instead of using the
# $url/$username/$password values loaded from config.sh above — move them
# into config.sh (or use sqoop's --password-file) to keep secrets out of
# the script and out of `ps` output.
mysql_to_hdfs_lzo() {
  sqoop import \
    -D mapred.job.queue.name=hive \
    --connect "jdbc:mysql://hadoop01:3306/saga_dev?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false" \
    --username root \
    --password 'j5ry0#jZ7vaUt5f4' \
    --target-dir /warehouse/saga_dw/ods/ods_energy_15_min_history \
    --delete-target-dir \
    --columns "building, func_id, meter, data_time, data_value, dt" \
    --query "select building, func_id, meter, data_time, data_value, dt from energy_15_min where 1 = 1 and \$CONDITIONS" \
    --num-mappers 1 \
    --hive-drop-import-delims \
    --fields-terminated-by '\001' \
    --compress \
    --compression-codec lzo \
    --hive-import \
    --hive-database saga_dw \
    --hive-table ods_energy_15_min_history \
    --hive-overwrite \
    --null-string '\\N' \
    --null-non-string '\\N'
  # NOTE(review): --columns is meant for --table imports; with --query the
  # column list comes from the SELECT itself — confirm whether this flag is
  # needed (it is likely ignored or rejected by sqoop here).
}