# 示例#1 (Example #1 — sample/fragment marker left over from extraction)
# 0 (stray residue line from extraction)
        # NOTE(review): this is the tail of a query-building function whose
        # `def` line lies outside this chunk; query_sql is assembled in the
        # missing lines above. Debug trace of the generated import SQL:
        print "query_sql:" + query_sql

    # Return the finished SQL string to the caller (used as the sqoop query).
    return query_sql


if __name__ == "__main__":
    if not get_argv(sys.argv):
        sys.exit(1)

    dbConfig = sys.argv[1]
    PROJECT_NAME = sys.argv[2]
    etlDate = sys.argv[3]
    businessReadyTime = sys.argv[4]
    whereCondition = sys.argv[5]
    qp_dt = etl_utils.setETLDate(etlDate)
    print qp_dt + "::::::::::::::::::::::::::::::"

    (sqoop_dic, hiveDic) = etl_utils.buildConfDics(dbConfig, "N", "etl_dt",
                                                   qp_dt, buildQuerySql)

    now_time = datetime.datetime.now()
    run_time = now_time.strftime('%Y%m%d %H:%M:%S')

    etl_checkdata.check_data(dbConfig, qp_dt, run_time, "", "source", 1)

    flag = etl_utils.execimport(sqoop_dic)
    print "=========== sqoop flag :", flag
    if flag != 0:
        exit(flag)
    cmd = "hive -v -e  \"use {db}; {sql};\"".format(db=hive_db, sql=sql_string)

    print "===========================" + cmd
    flag = 0
    flag = subprocess.call(shlex.split(cmd.encode('UTF-8')))
    print "flag :", flag

    return flag


if __name__ == "__main__":
    # Entry point (second of several duplicated __main__ blocks in this
    # file; the source appears to be a garbled concatenation of scripts).
    # NOTE(review): this block is truncated — the deeper-indented lines that
    # follow it belong to a different fragment.
    if not get_argv(sys.argv):
        sys.exit(1)

    # argv[1]: ETL date normalized by the shared helper; argv[2]: DB config.
    tx_date = etl_utils.setETLDate(sys.argv[1])
    dbConfig = sys.argv[2]

    # "End of time" sentinel date — presumably for SCD-style closing of
    # records; nothing in the visible lines uses it, confirm downstream.
    max_date = '30001231'

    dic = parseconfs(dbConfig)

    # Target hive database/table pulled from the parsed config.
    hive_db = dic["hive_db"]
    tb_name = dic["hive_table"]

    # Join columns and primary-key column list for the load.
    joinColStr = dic["joinColStr"]
    p_k = dic["p_k"]

    # Column-list variants (md5 hash, history alias, mirror alias) — TODO
    # confirm exact semantics against parseconfs.
    colStr_md5 = dic["colStr_md5"]
    colStr_as_h = dic["colStr_as_h"]
    colStr_as_m = dic["colStr_as_m"]
        # NOTE(review): fragment — the enclosing function's `def` line is not
        # visible in this chunk; jsonobj, qp_dt, colsStr and tableName are
        # bound in the missing lines above.
        # Select the SQL template keyed by the configured sqoop import type.
        importType = jsonobj["sqoop.import-type"]

        query_template  = sqoop_import_sqlDic[importType]
        # Quoted partition date; unused in the visible lines — presumably
        # consumed by some templates, confirm against the full function.
        qp_dtStr = "'%s'"%(qp_dt,)
        # Fill the template with the column list and source table name.
        query_sql = query_template%(colsStr,tableName)

        # Debug trace of the generated import SQL.
        print "query_sql:" + query_sql

    # Return the finished SQL string (used as the sqoop import query).
    return query_sql


if __name__ == "__main__":
    if not get_argv(sys.argv):
        sys.exit(1)

    dbConfig = sys.argv[1]
    PROJECT_NAME = sys.argv[2]
    etlDate = sys.argv[3]
    businessReadyTime = sys.argv[4]
    qp_dt=etl_utils.setETLDate(etlDate)
    print qp_dt+ "::::::::::::::::::::::::::::::"

    (sqoop_dic,hiveDic) = etl_utils.buildConfDics(dbConfig,"N","etl_dt",qp_dt,buildQuerySql)

    flag = etl_utils.execimport(sqoop_dic)
    print "=========== sqoop flag :", flag
   # if flag != 0:
    exit(flag)

   # flag = etl_utils.loadData2HiveTableAllDeleteAllInsert(hiveDic)
    cmd = "hive -v -e  \"use {db}; {sql};\"".format(db=hive_db, sql=sql_string)


    print "===========================" + cmd 
    flag= 0
    flag = subprocess.call(shlex.split(cmd.encode('UTF-8')))
    print "flag :", flag

    return flag

if __name__ == "__main__":
    # Entry point (fourth duplicated __main__ block; this one is cut off at
    # the end of the visible chunk — the rest of its body is not shown).
    if not get_argv(sys.argv):
        sys.exit(1)

    # argv[1]: ETL date normalized by the shared helper; argv[2]: DB config.
    tx_date= etl_utils.setETLDate(sys.argv[1])
    dbConfig = sys.argv[2]

    # "End of time" sentinel date — presumably for SCD-style closing of
    # records; not used in the visible lines, confirm downstream.
    max_date = '30001231'

    dic = parseconfs(dbConfig)

    # Target hive database/table pulled from the parsed config.
    hive_db = dic["hive_db"]
    tb_name = dic["hive_table"]

    # Join columns and primary-key column list for the load.
    joinColStr =  dic["joinColStr"]
    p_k = dic["p_k"]


    # Column-list variants (md5 hash, history alias) — TODO confirm exact
    # semantics against parseconfs.
    colStr_md5 = dic["colStr_md5"]
    colStr_as_h = dic["colStr_as_h"]