sqoop_func.sh

#!/bin/bash
# the second argument, if present, is the business date; default to yesterday
if [ -n "$2" ]; then
do_date=$2
else
do_date=$(date -d '-1 day' +%F)
fi
echo $do_date
## lzo compression
bin/sqoop import \
--connect "jdbc:mysql://192.168.0.76:3306/sagacloud_review?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false" \
--username root \
--password 123456 \
--target-dir /warehouse/saga_dw/ods/ods_energy_week_day \
--delete-target-dir \
--query "select * from energy_week_day where 1 = 1 and \$CONDITIONS" \
--num-mappers 1 \
--hive-drop-import-delims \
--fields-terminated-by '\001' \
--compress \
--compression-codec lzo \
--hive-import \
--hive-database saga_dw \
--hive-table ods_energy_week_day \
--hive-overwrite \
--null-string '\\N' \
--null-non-string '\\N'
# import one day of data into a dt partition
# $1 connect string, $2 username, $3 password, $4 hive table, $5 query
mysql_to_hdfs_by_day() {
sqoop import -D mapred.job.queue.name=root.tianyan \
--connect "$1" \
--username $2 \
--password $3 \
--target-dir /warehouse/saga_dw/ods/$4/$do_date \
--delete-target-dir \
--query "$5 and \$CONDITIONS" \
--num-mappers 1 \
--hive-drop-import-delims \
--fields-terminated-by '\001' \
--compress \
--compression-codec snappy \
--hive-import \
--hive-database saga_dw \
--hive-table $4 \
--hive-overwrite \
--hive-partition-key dt \
--hive-partition-value $do_date \
--null-string '\\N' \
--null-non-string '\\N'
}
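# Usage sketch for the partitioned import (the SQL and the create_time column are assumptions;
# the connection string and credentials are the ones hard-coded above):
# mysql_to_hdfs_by_day \
#   "jdbc:mysql://192.168.0.76:3306/sagacloud_review?useUnicode=true&characterEncoding=utf8" \
#   root 123456 ods_energy_week_day \
#   "select * from energy_week_day where create_time = '$do_date'"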
# import into a non-partitioned table
# $1 connect string, $2 username, $3 password, $4 hive table, $5 query
collect_data_np() {
sqoop import -D mapred.job.queue.name=root.tianyan \
--connect "$1" \
--username $2 \
--password $3 \
--target-dir /warehouse/saga_dw/ods/$4 \
--delete-target-dir \
--query "$5 and \$CONDITIONS" \
--num-mappers 1 \
--hive-drop-import-delims \
--fields-terminated-by '\001' \
--compress \
--compression-codec snappy \
--hive-import \
--hive-database saga_dw \
--hive-table $4 \
--hive-overwrite \
--null-string '\\N' \
--null-non-string '\\N'
}
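# Usage sketch for the non-partitioned import (same assumed placeholders as above):
# collect_data_np \
#   "jdbc:mysql://192.168.0.76:3306/sagacloud_review?useUnicode=true&characterEncoding=utf8" \
#   root 123456 ods_energy_week_day \
#   "select * from energy_week_day where 1 = 1"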
## lzo compression, mysql -> hdfs
mysql_to_hdfs_lzo() {
bin/sqoop import \
-D mapred.job.queue.name=root.hive \
--connect "jdbc:mysql://192.168.0.76:3306/sagacloud_review?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false" \
--username root \
--password 123456 \
--target-dir /warehouse/saga_dw/ods/ods_energy_week_day \
--delete-target-dir \
--query "select * from energy_week_day where 1 = 1 and \$CONDITIONS" \
--num-mappers 1 \
--hive-drop-import-delims \
--fields-terminated-by '\001' \
--compress \
--compression-codec lzo \
--hive-import \
--hive-database saga_dw \
--hive-table ods_energy_week_day \
--hive-overwrite \
--null-string '\\N' \
--null-non-string '\\N'
}
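# Usage sketch: this variant takes no arguments, everything is hard-coded above.
# mysql_to_hdfs_lzo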
# hdfs -> mysql export (sketch: argument layout mirrors the import functions; queue, $4 target table and $5 export dir are assumptions)
hdfs_to_mysql() {
sqoop export -D mapred.job.queue.name=root.tianyan \
--connect "$1" \
--username $2 \
--password $3 \
--table $4 \
--export-dir $5 \
--input-fields-terminated-by '\001'
}
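# Possible entry point (sketch, assumed layout): dispatch on $1 so that $2 stays the date
# parsed at the top of the file.
# case $1 in
#   "energy_week_day")
#     mysql_to_hdfs_by_day \
#       "jdbc:mysql://192.168.0.76:3306/sagacloud_review?useUnicode=true&characterEncoding=utf8" \
#       root 123456 ods_energy_week_day \
#       "select * from energy_week_day where 1 = 1"
#     ;;
#   "all")
#     mysql_to_hdfs_lzo
#     ;;
# esac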