  1. #!/bin/bash
  2. . /opt/app/bi-app/energy/dayTask/config.sh
  3. if [ -n "$2" ] ;then
  4. echo "如果是输入的日期按照取输入日期"
  5. do_date=$2
  6. else
  7. echo "====没有输入数据的日期,取当前时间的前一天===="
  8. do_date=`date -d yesterday +"%Y-%m-%d"`
  9. fi
  10. echo $do_date
  11. env_config="
  12. use saga_dw;
  13. SET mapreduce.job.queuename=default;
  14. SET hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
  15. SET hive.exec.dynamic.partition.mode=nonstrict;"
  16. dwd_energy_15_min_hour="
  17. insert overwrite table saga_dw.dwd_energy_15_min_hour partition (dt)
  18. select building,
  19. func_id,
  20. branch_type,
  21. branch_type_name,
  22. cast(value_sum as decimal(30, 15)) as value_sum ,
  23. hour_ as current_hour,
  24. current_year,
  25. year_mouth,
  26. week_of_year,
  27. dt
  28. from (
  29. select building,
  30. func_id,
  31. branch_type,
  32. branch_type_name,
  33. dt,
  34. t1.hour_,
  35. (ap_sum - al_sum) value_sum
  36. from (
  37. select building,
  38. func_id,
  39. 'ALU' as branch_type,
  40. '其他' as branch_type_name,
  41. sum(if((branch_type == 'AP' and use_range_type == '1'), data_value, 0)) ap_sum,
  42. sum(if((branch_type == 'AL' and use_range_type in (2, 3, 4, 6, 7)), data_value, 0)) al_sum,
  43. dt,
  44. o15mh.hour_
  45. from (
  46. select building,
  47. func_id,
  48. meter,
  49. data_time,
  50. substr(data_time, 0, 13) hour_,
  51. data_value,
  52. dt
  53. from ods_energy_15_min oe15m
  54. where dt >= '2022-01-01'
  55. ) o15mh
  56. left join dim_office_meter dom on o15mh.meter = dom.meter
  57. where dt >= '2022-01-01' and branch_type in ('AP', 'AL')
  58. group by building, func_id, dt, o15mh.hour_ ) t1
  59. union
  60. select building,
  61. func_id,
  62. concat_ws('-', branch_type, use_range_type),
  63. branch_type_name,
  64. dt,
  65. o15mh.hour_,
  66. sum(data_value) value_sum
  67. from (
  68. select building,
  69. func_id,
  70. meter,
  71. data_time,
  72. substr(data_time, 0, 13) hour_,
  73. data_value,
  74. dt
  75. from ods_energy_15_min oe15m
  76. where dt >= '2022-01-01'
  77. ) o15mh
  78. left join dim_office_meter dom2 on o15mh.meter = dom2.meter
  79. group by building, func_id, branch_type, branch_type_name, use_range_type, dt, o15mh.hour_
  80. ) teh
  81. left join dim_date_day ddd on dt = date_id
  82. order by current_hour;"
  83. insert_into_table(){
  84. case $1 in
  85. "all")
  86. hive -e "$env_config$dwd_energy_15_min_hour"
  87. ;;
  88. esac
  89. }
  90. insert_into_table $1