Example #1
    if len(sys.argv) < 6:   # reconstructed check: the original condition is not shown in this excerpt
        sys.exit(1)

    dbConfig = sys.argv[1]
    PROJECT_NAME = sys.argv[2]
    etlDate = sys.argv[3]
    businessReadyTime = sys.argv[4]
    whereCondition = sys.argv[5]
    qp_dt = etl_utils.setETLDate(etlDate)
    print qp_dt + "::::::::::::::::::::::::::::::"

    (sqoop_dic, hiveDic) = etl_utils.buildConfDics(dbConfig, "N", "etl_dt",
                                                   qp_dt, buildQuerySql)

    now_time = datetime.datetime.now()
    run_time = now_time.strftime('%Y%m%d %H:%M:%S')

    etl_checkdata.check_data(dbConfig, qp_dt, run_time, "", "source", 1)

    flag = etl_utils.execimport(sqoop_dic)
    print "=========== sqoop flag :", flag
    if flag != 0:
        exit(flag)

    flag = etl_utils.loadData2HiveTable(hiveDic)

    if flag == 0:
        etl_checkdata.check_data(dbConfig, qp_dt, run_time, "", "target", "")
        etl_checkdata.check_data(dbConfig, qp_dt, run_time, "", "source", 2)

    exit(flag)
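
The fragment above is the body of a script entry point; the imports and __main__ scaffolding it relies on are not shown on this page. A minimal sketch of that scaffolding might look like this (module names are taken from the calls above; buildQuerySql is whatever query builder the real script defines, so its signature and body below are placeholders):

    # sketch only -- the original script header is not part of this excerpt
    import sys
    import datetime

    import etl_utils       # project module: setETLDate, buildConfDics, execimport, loadData2HiveTable
    import etl_checkdata   # project module: check_data

    def buildQuerySql(*args):
        # placeholder: the real script builds the source-extraction SQL here
        raise NotImplementedError

    if __name__ == '__main__':
        # expected call (argument order taken from the sys.argv reads above):
        #   python <script>.py dbConfig PROJECT_NAME etlDate businessReadyTime whereCondition
        pass  # followed by the body shown in Example #1
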
Example #2
    dbConfig = sys.argv[1]
    PROJECT_NAME = sys.argv[2]
    etlDate = sys.argv[3]
    businessReadyTime = sys.argv[4]
    qp_dt = etl_utils.setETLDate(etlDate)
    print qp_dt + "::::::::::::::::::::::::::::::"

    (sqoop_dic, hiveDic) = etl_utils.buildConfDics(dbConfig, "N", "data_dt",
                                                   qp_dt, buildQuerySql)

    now_time = datetime.datetime.now()
    run_time = now_time.strftime('%Y%m%d %H:%M:%S')
    hive_whereCondion = "data_dt='%s'" % qp_dt

    etl_checkdata.check_data(dbConfig, qp_dt, run_time, "", "source", 1)

    # reload stg data into hive
    # flag = etl_utils.reloadStgData2HiveTable(hiveDic)
    flag = etl_utils.execimport(sqoop_dic)
    print "=========== sqoop flag :", flag
    if flag != 0:
        exit(flag)

    flag = etl_utils.loadData2HiveTable(hiveDic)

    if flag == 0:
        #  pass
        etl_checkdata.check_data(dbConfig, qp_dt, run_time, hive_whereCondion,
                                 "target", "")
        etl_checkdata.check_data(dbConfig, qp_dt, run_time, "", "source", 2)
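
Unlike Example #1, this example loads a data_dt partition and verifies the target side with a date filter built from the ETL date. As a quick illustration of how that filter renders (the date below is made up; qp_dt is whatever etl_utils.setETLDate returns, assumed here to be a yyyymmdd string):

    # illustration only, not part of the example above
    qp_dt = '20240131'
    hive_whereCondion = "data_dt='%s'" % qp_dt
    print hive_whereCondion        # data_dt='20240131'
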
Example #3
    dbConfig = sys.argv[1]
    PROJECT_NAME = sys.argv[2]
    etlDate = sys.argv[3]
    businessReadyTime = sys.argv[4]
    whereCondition = sys.argv[5]
    qp_dt = etl_utils.setETLDate(etlDate)
    print qp_dt + "::::::::::::::::::::::::::::::"

    (sqoop_dic, hiveDic) = etl_utils.buildConfDics(dbConfig, "Y", "etl_dt",
                                                   qp_dt, buildQuerySql)

    now_time = datetime.datetime.now()
    run_time = now_time.strftime('%Y%m%d %H:%M:%S')

    etl_checkdata.check_data(dbConfig, qp_dt, run_time, "", "source", 1)

    flag = etl_utils.execimport(sqoop_dic)
    print "=========== sqoop flag :", flag
    if flag != 0:
        exit(flag)

    flag = etl_utils.loadData2HiveTable(hiveDic)

    if flag == 0:
        etl_checkdata.check_data(dbConfig, qp_dt, qp_dt, "", "source", 2)

    exit(flag)

Example #4
    dbConfig = sys.argv[1]
    PROJECT_NAME = sys.argv[2]
    etlDate = sys.argv[3]
    businessReadyTime = sys.argv[4]
    qp_dt = etl_utils.setETLDate(etlDate)
    print qp_dt + "::::::::::::::::::::::::::::::"

    (sqoop_dic, hiveDic) = etl_utils.buildConfDics(dbConfig, "N", "data_dt",
                                                   qp_dt, buildQuerySql)

    now_time = datetime.datetime.now()
    run_time = now_time.strftime('%Y%m%d %H:%M:%S')
    hive_whereCondion = "data_dt='%s'" % qp_dt

    etl_checkdata.check_data(dbConfig, qp_dt, run_time, "", "source", 1)

    # reload stg data into hive
    flag = etl_utils.reloadStgData2HiveTable(hiveDic)
    print "=========== reload flag :", flag
    if flag != 0:
        exit(flag)

    if flag == 0:
        etl_checkdata.check_data(dbConfig, qp_dt, run_time, hive_whereCondion,
                                 "target", "")
        etl_checkdata.check_data(dbConfig, qp_dt, run_time, "", "source", 2)
    exit(flag)
Example #5
    max_date = '30001231'

    dic = parseconfs(dbConfig)

    hive_db = dic["hive_db"]
    tb_name = dic["hive_table"]

    joinColStr = dic["joinColStr"]
    p_k = dic["p_k"]

    colStr_md5 = dic["colStr_md5"]
    colStr_as_h = dic["colStr_as_h"]
    colStr_as_m = dic["colStr_as_m"]
    colStr_h = dic["colStr_h"]
    colStr_coalesce = dic["colStr_coalesce"]
    colStr_m = dic["colStr_m"]

    flag = chain_sql(hive_db, tb_name, tx_date, max_date, joinColStr, p_k,
                     colStr_md5, colStr_as_h, colStr_as_m, colStr_h,
                     colStr_coalesce, colStr_m)

    qp_dt = tx_date
    hive_whereCondion = "start_dt <= '%s' and end_dt > '%s'" % (qp_dt, qp_dt)

    if flag == 0:
        etl_checkdata.check_data(dbConfig, qp_dt, qp_dt, hive_whereCondion,
                                 "target", "")

    exit(flag)
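
The fragment above appears to maintain a history-chain (zipper) table: chain_sql generates and runs the merge SQL, max_date = '30001231' marks rows that are still open, and the target-side check filters on the validity window. A small illustration of that window predicate (same yyyymmdd assumption as above; the run date is made up):

    # illustration only, not part of the example above
    qp_dt = '20240131'
    hive_whereCondion = "start_dt <= '%s' and end_dt > '%s'" % (qp_dt, qp_dt)
    print hive_whereCondion
    # start_dt <= '20240131' and end_dt > '20240131'
    # an open row carries end_dt = '30001231' (max_date), so it always passes
    # the end_dt bound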