mysql_to_hdfs.sh
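A Sqoop import script that pulls MySQL tables into the saga_dw ODS layer in Hive as LZO-compressed files: a parameterized helper for unpartitioned tables, plus a hard-coded job for the 15-minute energy history.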

#!/bin/bash
. ./config.sh
url=$MYSQL_URL
username=$MYSQL_USER
password=$MYSQL_PASSWORD
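## config.sh is not shown here; presumably it just exports the connection
## settings consumed above, along these lines (illustrative values only):
##   export MYSQL_URL="jdbc:mysql://hadoop01:3306/saga_dev?useUnicode=true&characterEncoding=utf8"
##   export MYSQL_USER=root
##   export MYSQL_PASSWORD=...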
## No partition: import the result of a free-form query into a Hive table.
## Args: $1 = JDBC URL, $2 = username, $3 = password, $4 = Hive table, $5 = query
collect_data_np() {
  sqoop import -D mapred.job.queue.name=hive \
    --connect "$1" \
    --username "$2" \
    --password "$3" \
    --target-dir "/warehouse/saga_dw/ods/$4" \
    --delete-target-dir \
    --query "$5 and \$CONDITIONS" \
    --num-mappers 1 \
    --hive-drop-import-delims \
    --fields-terminated-by '\001' \
    --compress \
    --compression-codec lzo \
    --hive-import \
    --hive-database saga_dw \
    --hive-table "$4" \
    --hive-overwrite \
    --null-string '\\N' \
    --null-non-string '\\N'
}
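## Example invocation (illustrative only; this call does not appear in the
## original script):
## collect_data_np "$url" "$username" "$password" ods_energy_15_min_history \
##   "select building, func_id, meter, data_time, data_value, dt from energy_15_min where 1 = 1"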
## Hard-coded variant: pull the full energy_15_min history from MySQL into
## the ods_energy_15_min_history Hive table, LZO-compressed. The column
## list is carried in the query itself, so no separate --columns is needed.
mysql_to_hdfs_lzo() {
  sqoop import \
    -D mapred.job.queue.name=hive \
    --connect "jdbc:mysql://hadoop01:3306/saga_dev?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false" \
    --username root \
    --password 'j5ry0#jZ7vaUt5f4' \
    --target-dir /warehouse/saga_dw/ods/ods_energy_15_min_history \
    --delete-target-dir \
    --query "select building, func_id, meter, data_time, data_value, dt from energy_15_min where 1 = 1 and \$CONDITIONS" \
    --num-mappers 1 \
    --hive-drop-import-delims \
    --fields-terminated-by '\001' \
    --compress \
    --compression-codec lzo \
    --hive-import \
    --hive-database saga_dw \
    --hive-table ods_energy_15_min_history \
    --hive-overwrite \
    --null-string '\\N' \
    --null-non-string '\\N'
}
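
## Entry point: a minimal sketch, assuming the script is driven by a single
## CLI argument (this dispatcher is not part of the original file).
case "$1" in
"energy_15_min_history")
  mysql_to_hdfs_lzo
  ;;
*)
  echo "usage: $0 energy_15_min_history"
  ;;
esac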