ソースを参照

添加ods到dwd层

wudianlong 2 年 前
コミット
0147e356dd
2 ファイル変更106 行追加1 行削除
  1. 19 1
      energy/dayTask/energy_hive.sql
  2. 87 0
      energy/dayTask/ods_to_dwd.sh

+ 19 - 1
energy/dayTask/energy_hive.sql

@@ -66,7 +66,25 @@ LOCATION
 TBLPROPERTIES (
     'orc.compress'='lzo');
 
-
+-- DWD-layer table: raw 15-minute energy readings rolled up to hourly grain,
+-- loaded daily by energy/dayTask/ods_to_dwd.sh (dynamic partition on dt).
+-- NOTE(review): column is spelled `year_mouth` (presumably "year_month");
+-- kept as-is because ods_to_dwd.sh selects it under this exact name.
+-- IF NOT EXISTS added so rerunning this DDL file is idempotent.
+create table if not exists saga_dw.dwd_energy_15_min_hour
+(
+    building         string,
+    func_id          string,
+    branch_type      string,
+    branch_type_name string,
+    value_sum        decimal(30, 15),
+    current_hour     string comment '小时',
+    current_year     integer comment '年份',
+    year_mouth       string comment '年份-月份',
+    week_of_year     integer comment '这一年的第几周'
+) COMMENT '能源原始数据15min按小时维度数据'
+    PARTITIONED BY (`dt` string)
+    ROW FORMAT DELIMITED FIELDS TERMINATED BY '\001'
+    STORED AS TEXTFILE
+    LOCATION
+        'hdfs://sagaCluster:8020/warehouse/saga_dw/dwd/dwd_energy_15_min_hour';
+-- removed: TBLPROPERTIES ('orc.compress' = 'lzo') — ORC compression
+-- properties have no effect on a TEXTFILE table and only mislead readers.
 
 
 

+ 87 - 0
energy/dayTask/ods_to_dwd.sh

@@ -0,0 +1,87 @@
+#!/bin/bash
+
+. /opt/app/bi-app/energy/dayTask/config.sh
+
+if [ -n "$2" ] ;then
+    echo "如果是输入的日期按照取输入日期"
+    do_date=$2
+else
+    echo "====没有输入数据的日期,取当前时间的前一天===="
+    do_date=`date -d yesterday +"%Y-%m-%d"`
+fi
+echo $do_date
+
+env_config="
+use saga_dw;
+SET mapreduce.job.queuename=default;
+SET hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
+SET hive.exec.dynamic.partition.mode=nonstrict;"
+
+dwd_energy_15_min_hour="
+with total_energy_hour as (
+    with oe15m_hour as (
+        select building,
+               func_id,
+               meter,
+               data_time,
+               substr(data_time, 0, 13) hour_,
+               data_value,
+               dt
+        from ods_energy_15_min oe15m
+        where dt >= '2022-01-01'
+    )
+    select building,
+           func_id,
+           branch_type,
+           branch_type_name,
+           dt,
+           t1.hour_,
+           (ap_sum - al_sum) value_sum
+    from (
+      select building,
+             func_id,
+             'ALU' as branch_type,
+             '其他' as branch_type_name,
+             sum(if((branch_type == 'AP' and use_range_type == '1'), data_value, 0)) ap_sum,
+             sum(if((branch_type == 'AL' and use_range_type in (2, 3, 4, 6, 7)), data_value, 0)) al_sum,
+             dt,
+             o15mh.hour_
+      from oe15m_hour o15mh
+          left join dim_office_meter dom on o15mh.meter = dom.meter
+      where dt >= '2022-01-01' and  branch_type in ('AP', 'AL')
+      group by building, func_id, dt, o15mh.hour_ ) t1
+    union
+    select building,
+           func_id,
+           concat_ws('-', branch_type, use_range_type),
+           branch_type_name,
+           dt,
+           o15mh.hour_,
+           sum(data_value) value_sum
+    from oe15m_hour o15mh
+        left join dim_office_meter dom2 on o15mh.meter = dom2.meter
+    group by building, func_id, branch_type, branch_type_name, use_range_type, dt, o15mh.hour_
+)
+insert overwrite table saga_dw.dwd_energy_15_min_hour partition (dt)
+select building,
+       func_id,
+       branch_type,
+       branch_type_name,
+       cast(value_sum as decimal(30, 15)) as value_sum ,
+       hour_ as current_hour,
+       current_year,
+       year_mouth,
+       week_of_year,
+       dt
+from total_energy_hour teh
+    left join dim_date_day ddd on dt = date_id
+order by current_hour;"
+
+
+# Dispatch: run the Hive load selected by the first CLI argument.
+insert_into_table(){
+    case "$1" in
+    "all")
+      hive -e "$env_config$dwd_energy_15_min_hour"
+      ;;
+    *)
+      # Surface bad invocations instead of exiting silently.
+      echo "unknown target: $1 (expected: all)" >&2
+      ;;
+    esac
+}
+
+# BUG FIX(review): the function was defined but never called, so the script
+# completed without loading anything. Invoke it with the CLI target.
+insert_into_table "$1"