def run_remove_hive(conf: ConfigData, the_date: str, delta_day=0):
    f_date_str = StrTool.get_the_date_str(the_date, delta_day)  # "20181101"
    # "/user/hive/warehouse/rds_posflow.db/t1_trxrecprd_v2/t1_trxrecord_20181204_V2*.csv"
    del_table = conf.get_table_name()  # hive_table="rds_posflow.t1_trxrecprd_v2"
    if conf.m_project_id == 1:
        del_file = conf.get_file_name(f_date_str).replace('.', '*.')
        MyHdfsFile.delete_hive_ssh(conf.get_data("cdh_ip"), table=del_table, p_name=del_file,
                                   username=conf.get_data("cdh_user"), password=conf.get_data("cdh_pass"))
    if conf.m_project_id == 2:
        conn = connect(host=conf.hive_ip(), port=conf.hive_port(),
                       auth_mechanism=conf.hive_auth(), user=conf.hive_user())
        cur = conn.cursor()
        # "ALTER TABLE rds_posflow.t1_trxrecprd_v2_tmp DROP IF EXISTS PARTITION(p_date=20190208)"
        sql = "ALTER TABLE {} DROP IF EXISTS PARTITION( p_date={} )".format(del_table, the_date)
        print(sql)
        cur.execute(sql)
        cur.close()
        conn.close()
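
# Hedged usage sketch for run_remove_hive() above, assuming a ConfigData
# constructed as in the __main__ block at the bottom of this file (the date
# string and delta are illustrative, not taken from a real run):
#
#     conf = ConfigData(p_is_test=True)
#     run_remove_hive(conf, "20181101", delta_day=-1)  # clean up the previous day's load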
def run_hive(configData: ConfigData):
    a_client = InsecureClient(url=configData.hdfs_ip(), user="******")  # "http://10.2.201.197:50070"
    conn = connect(host=configData.hive_ip(), port=configData.hive_port(),
                   auth_mechanism=configData.hive_auth(), user=configData.hive_user())
    cur = conn.cursor()

    f_date_str = configData.get_f_date()  # "20181101"
    p_date_str = configData.get_p_date()  # "2018-11-01"
    root_path = configData.get_hdfs_path()  # "/shouyinbao/bl_shouyinbao/UTF8/"
    file_name = configData.get_file_name(f_date_str)  # "t1_trxrecord_" + f_date_str + "_V2.csv"
    table_name = configData.get_table_name()

    print("Start\n")

    idn = 0
    branches = MyHdfsFile.get_child(a_client, str(pathlib.PurePosixPath(root_path).joinpath(f_date_str)))
    for aBranch in branches:
        if MyHdfsFile.check_branch(a_client, aBranch):
            files = MyHdfsFile.get_child(a_client, aBranch)
            f_a_branch = MyHdfsFile.get_name(aBranch)
            for aFile in files:
                if MyHdfsFile.check_file(a_client, aFile, file_name):
                    # '/shouyinbao/bl_shouyinbao/UTF8/20181101/9999997900/t1_trxrecord_20181101_V2.csv'
                    to_file2 = str(pathlib.PurePosixPath(root_path).joinpath(f_date_str, f_a_branch, file_name))
                    if not configData.get_has_partition():
                        sql = "LOAD DATA INPATH '{}' INTO TABLE {}".format(to_file2, table_name)
                        # e.g. 'test.t1_trxrecprd_v2_zc'
                        # alternative: "...' OVERWRITE INTO TABLE test.t1_trxrecprd_v2_bl2"
                    else:
                        sql = "LOAD DATA INPATH '{}' INTO TABLE {} PARTITION ( p_date='{}' )".format(
                            to_file2, table_name, p_date_str)  # e.g. 'test.t1_trxrecprd_v2_zc'
                    idn += 1
                    print(str(idn) + " " + sql + "\n")
                    cur.execute(sql)  # , async=True)
    cur.close()
    conn.close()
def run_conv_file_local_to_hdfs(configData: ConfigData):
    """
    Convert the day's local CSV files to UTF-8 and upload them to HDFS.

    # client.upload('/shouyinbao/', "/home/testFolder/logflow/bl_shouyinbao/UTF8/20181101/9999100000/t1_trxrecord_20181101_V2.csv", cleanup=True)
    # dat = client.list('/shouyinbao/', status=False)
    # print(dat)

    # root_path = "/home/bd/桌面/201811_flow/zc_shouyinbao/UNZIP/"
    # dest_dir1 = "/home/bd/桌面/201811_flow/zc_shouyinbao/UTF8/"
    # dest_dir2 = "/shouyinbao/zc_shouyinbao/UTF8/"
    # root_path = "/home/testFolder/logflow/bl_shouyinbao/UNZIP/"
    # dest_dir1 = "/home/testFolder/logflow/bl_shouyinbao/UTF8/"
    # dest_dir2 = "/shouyinbao/zc_shouyinbao/UTF8/"
    # i_file = '/home/testFolder/logflow/bl_shouyinbao/20181101/9999100000/t1_trxrecord_20181101_V2.csv'
    # o_file = '/home/testFolder/logflow/bl_shouyinbao/UTF8/20181101/9999100000/t1_trxrecord_20181101_V2.csv'

    :param configData:
    :return:
    """
    f_date_str = configData.get_f_date()  # "20181101"
    a_client = InsecureClient(configData.hdfs_ip(), user="******")  # "http://10.2.201.197:50070"
    # webhdfs defaults to the dr.who user and cannot impersonate other users;
    # this can be changed in the Hadoop config via hadoop.http.staticuser.user=dr.who
    # https://www.cnblogs.com/peizhe123/p/5540845.html
    root_path = os.path.join(configData.get_data_path(), f_date_str)
    dest_dir1 = os.path.join(configData.get_utf8_path(), f_date_str)
    dest_dir2 = str(pathlib.PurePosixPath(configData.get_hdfs_path()).joinpath(f_date_str))

    print("Start\n")

    f_name = configData.get_file_name(f_date_str)  # "t1_trxrecord_" + f_date_str + "_V2.csv"
    branches = MyLocalFile.get_child_dir(root_path)
    for aBranch in branches:
        if MyLocalFile.check_branch(aBranch):
            files = MyLocalFile.get_child_file(aBranch)
            f_a_branch = os.path.basename(aBranch)
            for aFile in files:
                if MyLocalFile.check_file(aFile, f_name):
                    to_file1 = str(pathlib.PurePath(dest_dir1).joinpath(f_a_branch, f_name))
                    to_file2 = str(pathlib.PurePosixPath(dest_dir2).joinpath(f_a_branch, f_name))
                    f_add_head = configData.get_hive_add_date(f_a_branch)
                    f_add_end = configData.get_hive_add_date("789")
                    f_need_head = not configData.get_hive_head()  # False
                    MyLocalFile.conv_file_local(aFile, to_file1, need_first_line=f_need_head,
                                                p_add_head=f_add_head, p_add_tail=f_add_end, quoting="")
                    MyHdfsFile.safe_make_dir(a_client, to_file2)
                    # client.newupload(to_file2, to_file1, encoding='utf-8')
                    the_file = a_client.status(to_file2, strict=False)
                    if the_file is None:
                        a_client.upload(to_file2, to_file1)  # , encoding='utf-8')
                        a_client.set_permission(to_file2, 777)
                        # a_client.set_owner(thePath, owner='hdfs', group='supergroup')
                    elif the_file['type'].lower() == 'file':  # as opposed to 'directory'
                        a_client.set_permission(to_file2, 777)
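
# For reference, a sketch of the layout run_conv_file_local_to_hdfs() walks,
# based on the example paths in its docstring (the branch id 9999100000 is
# illustrative):
#
#     <data_path>/20181101/9999100000/t1_trxrecord_20181101_V2.csv   (source)
#     <utf8_path>/20181101/9999100000/t1_trxrecord_20181101_V2.csv   (UTF-8 copy)
#     <hdfs_path>/20181101/9999100000/t1_trxrecord_20181101_V2.csv   (HDFS target)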
def run_remove_hive(conf: ConfigData, the_date: str, delta_day=0):
    f_date_str = StrTool.get_the_date_str(the_date, delta_day)  # "20181101"
    del_table = conf.get_table_name()  # "hive_table" + str(conf.the_id)  # "rds_posflow.loginfo_rsp_bl"
    del_file = conf.get_file_name(f_date_str).replace('.', '*.')  # "file_ext" + str(conf.the_id)
    MyHdfsFile.delete_hive_ssh(conf.get_data("cdh_ip"), table=del_table, p_name=del_file,
                               username=conf.get_data("cdh_user"), password=conf.get_data("cdh_pass"))
def run_conv_file_local_to_hdfs(conf: ConfigData, the_date: str):
    """
    :param conf:
    :param the_date:
    :return:
    """
    the_date = StrTool.get_the_date_str(the_date)
    a_client = MyClient(conf.hdfs_ip())  # "http://10.2.201.197:50070"

    data_path = os.path.join(conf.get_data_path(), the_date)  # allinpay_data_bl
    dest_dir1 = os.path.join(conf.get_utf8_path(), the_date)  # allinpay_utf8_bl
    dest_dir2 = str(pathlib.PurePosixPath(conf.get_hdfs_path()).joinpath(the_date))  # hdfs_dir_bl
    # file_ext7 = conf.get_data("file_ext7")  # _loginfo_rsp_bl_new.csv  # 20181101_loginfo_rsp_bl_new.csv
    # file_ext8 = conf.get_data("file_ext8")  # _rsp_agt_bl_new.del      # 20181101_rsp_agt_bl_new.del
    # file_ext9 = conf.get_data("file_ext9")  # _rxinfo_rsp_bl.txt       # 20181101_rxinfo_rsp_bl.txt
    # f_list = [file_ext7, file_ext8, file_ext9]

    print("Start\n")

    file_name = conf.get_file_name(the_date).lower()  # "file_ext" + str(conf.the_id)
    files = MyLocalFile.get_child_file(data_path)
    for aFile in files:
        short_name = os.path.basename(aFile).lower()
        f_name = pathlib.PurePath(aFile).name
        if short_name == file_name:
            to_file1 = str(pathlib.PurePath(dest_dir1).joinpath(f_name))
            to_file2 = str(pathlib.PurePosixPath(dest_dir2).joinpath(f_name))
            f_add_date = conf.get_hive_add_date(the_date)
            f_need_head = conf.get_hive_head()
            MyLocalFile.conv_file_local(aFile, to_file1, need_first_line=f_need_head, p_add_head=f_add_date)
            MyHdfsFile.safe_make_dir(a_client, to_file2)
            # a_client.newupload(to_file2, to_file1, encoding='utf-8')
            the_file = a_client.status(to_file2, strict=False)
            if the_file is None:
                a_client.upload(to_file2, to_file1)
                a_client.set_permission(to_file2, 777)
                # a_client.set_owner(thePath, owner='hdfs', group='supergroup')
            elif the_file['type'].lower() == 'file':  # as opposed to 'directory'
                a_client.set_permission(to_file2, 777)
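
# The status()/upload()/set_permission() sequence above recurs in several
# functions in this file. A minimal sketch of that idiom as a standalone
# helper, assuming the hdfs library client API already used here (the helper
# name is hypothetical, not part of this project):

def _upload_if_absent(client, hdfs_path: str, local_path: str):
    """Upload local_path to hdfs_path unless something already exists there."""
    the_file = client.status(hdfs_path, strict=False)  # returns None when absent
    if the_file is None:
        client.upload(hdfs_path, local_path)
        client.set_permission(hdfs_path, 777)
    elif the_file['type'].lower() == 'file':
        client.set_permission(hdfs_path, 777)  # re-run: just refresh permissions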
def run_hive(conf: ConfigData, the_date: str):
    client = Client(conf.hdfs_ip())  # "http://10.2.201.197:50070"
    conn = connect(host=conf.hive_ip(), port=conf.hive_port(),
                   auth_mechanism=conf.hive_auth(), user=conf.hive_user())
    cur = conn.cursor()

    the_date = StrTool.get_the_date_str(the_date)  # "20181101"
    root_path = conf.get_data("hdfs_dir_zc")  # "/data/posflow/allinpay_utf8_zc/"
    file_ext3 = conf.get_data("file_ext3")  # _loginfo_rsp.txt      # 20181101_loginfo_rsp.txt
    file_ext4 = conf.get_data("file_ext4")  # _loginfo_rsp_agt.txt  # 20181101_loginfo_rsp_agt.txt
    file_ext5 = conf.get_data("file_ext5")  # _rxinfo_rsp.txt       # 20181101_rxinfo_rsp.txt
    file_ext6 = conf.get_data("file_ext6")  # _rxinfo_rsp_agt.txt   # 20181101_rxinfo_rsp_agt.txt

    print("Start\n")

    file3 = str(pathlib.PurePosixPath(root_path).joinpath(the_date + file_ext3))
    file4 = str(pathlib.PurePosixPath(root_path).joinpath(the_date + file_ext4))
    file5 = str(pathlib.PurePosixPath(root_path).joinpath(the_date + file_ext5))
    file6 = str(pathlib.PurePosixPath(root_path).joinpath(the_date + file_ext6))
    f_list = [file3, file4, file5, file6]
    t_list = ["hive_table3", "hive_table4", "hive_table5", "hive_table6"]
    for n in range(0, 4):
        if MyHdfsFile.isfile(client, f_list[n]):
            sql = "LOAD DATA INPATH '{}' INTO TABLE {}".format(f_list[n], conf.get_data(t_list[n]))
            # e.g. 'test.t1_trxrecprd_v2_zc'
            # alternative: "...' OVERWRITE INTO TABLE test.t1_trxrecprd_v2_bl2"
            print("OK" + " " + sql + "\n")
            cur.execute(sql)  # , async=True)
    cur.close()
    conn.close()
def run_hive(configData: ConfigData):
    a_client = InsecureClient(url=configData.hdfs_ip(), user="******")  # "http://10.2.201.197:50070"
    conn = connect(host=configData.hive_ip(), port=configData.hive_port(),
                   auth_mechanism=configData.hive_auth(), user=configData.hive_user())
    cur = conn.cursor()

    f_date_str = configData.get_f_date()  # "20181101"
    p_date_str = configData.get_p_date()  # "2018-11-01"
    root_path = str(pathlib.PurePosixPath(configData.get_hdfs_path()).joinpath(f_date_str))  # hdfs_dir_bl
    file_name = str(pathlib.PurePosixPath(root_path).joinpath(configData.get_file_name(f_date_str)))
    # "/data/posflow/allinpay_utf8_zc/20181101/"
    # 20181101_loginfo_rsp_bl_new.csv
    # 20181101_rsp_agt_bl_new.del
    # 20181101_rxinfo_rsp_bl.txt
    table_name = configData.get_table_name()

    print("Start\n")

    if MyHdfsFile.isfile(a_client, file_name):
        if not configData.get_has_partition():
            sql = "LOAD DATA INPATH '{}' INTO TABLE {}".format(file_name, table_name)
            # e.g. 'test.t1_trxrecprd_v2_zc'
            # alternative: "...' OVERWRITE INTO TABLE test.t1_trxrecprd_v2_bl2"
        else:
            sql = "LOAD DATA INPATH '{}' INTO TABLE {} PARTITION ( p_date='{}' )".format(
                file_name, table_name, p_date_str)  # e.g. 'test.t1_trxrecprd_v2_zc'
        print("OK" + " " + sql + "\n")
        cur.execute(sql)  # , async=True)
    cur.close()
    conn.close()
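
# Hedged illustration of the two statements run_hive() builds above (table and
# path names are hypothetical):
#
#     LOAD DATA INPATH '/dir/20181101/f.csv' INTO TABLE db.tbl
#     LOAD DATA INPATH '/dir/20181101/f.csv' INTO TABLE db.tbl PARTITION ( p_date='2018-11-01' )
#
# Note that Hive's LOAD DATA INPATH (non-LOCAL) moves the file into the
# table's warehouse directory, so the source path no longer exists afterwards.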
def run_hive(conf: ConfigData, the_date: str):
    a_client = Client(conf.hdfs_ip())  # "http://10.2.201.197:50070"
    conn = connect(host=conf.hive_ip(), port=conf.hive_port(),
                   auth_mechanism=conf.hive_auth(), user=conf.hive_user())
    cur = conn.cursor()

    print("Start\n")

    the_date = StrTool.get_the_date_str(the_date)  # "20181101"
    root_path = str(pathlib.PurePosixPath(conf.get_hdfs_path()).joinpath(the_date))  # hdfs_dir_bl
    file_name = str(pathlib.PurePosixPath(root_path).joinpath(conf.get_file_name(the_date)))
    # "/data/posflow/allinpay_utf8_zc/20181101/"
    # 20181101_loginfo_rsp_bl_new.csv
    # 20181101_rsp_agt_bl_new.del
    # 20181101_rxinfo_rsp_bl.txt
    table_name = conf.get_table_name()

    if MyHdfsFile.isfile(a_client, file_name):
        sql = "LOAD DATA INPATH '{}' INTO TABLE {}".format(file_name, table_name)
        # e.g. 'test.t1_trxrecprd_v2_zc'
        # alternative: "...' OVERWRITE INTO TABLE test.t1_trxrecprd_v2_bl2"
        print("OK" + " " + sql + "\n")
        cur.execute(sql)  # , async=True)
    cur.close()
    conn.close()
def run_hive(conf: ConfigData, the_date: str, is_baoli=True):
    p_client = Client(conf.hdfs_ip())  # "http://10.2.201.197:50070"
    conn = connect(host=conf.hive_ip(), port=conf.hive_port(),
                   auth_mechanism=conf.hive_auth(), user=conf.hive_user())
    cur = conn.cursor()

    the_date = StrTool.get_the_date_str(the_date)  # "20181101"
    root_path = conf.get_hdfs_path()  # "/shouyinbao/bl_shouyinbao/UTF8/"
    f_name = conf.get_file_name(the_date)  # "t1_trxrecord_" + the_date + "_V2.csv"
    table_name = conf.get_table_name()

    print("Start\n")

    idn = 0
    branches = MyHdfsFile.get_child(p_client, root_path + the_date)
    for aBranch in branches:
        if MyHdfsFile.check_branch(p_client, aBranch):
            files = MyHdfsFile.get_child(p_client, aBranch)
            f_a_branch = MyHdfsFile.get_name(aBranch)
            for aFile in files:
                if MyHdfsFile.check_file(p_client, aFile, f_name):
                    # '/shouyinbao/bl_shouyinbao/UTF8/20181101/9999997900/t1_trxrecord_20181101_V2.csv'
                    to_file2 = str(pathlib.PurePosixPath(root_path).joinpath(the_date, f_a_branch, f_name))
                    if conf.m_project_id == 1:
                        sql = "LOAD DATA INPATH '{}' INTO TABLE {}".format(to_file2, table_name)
                        # e.g. 'test.t1_trxrecprd_v2_zc'
                        # alternative: "...' OVERWRITE INTO TABLE test.t1_trxrecprd_v2_bl2"
                    elif conf.m_project_id == 2:
                        sql = "LOAD DATA INPATH '{}' INTO TABLE {} PARTITION ( p_branch='{}', p_date={} )".format(
                            to_file2, table_name, f_a_branch, the_date)  # e.g. 'test.t1_trxrecprd_v2_zc'
                    idn += 1
                    print(str(idn) + " " + sql + "\n")
                    cur.execute(sql)  # , async=True)
    cur.close()
    conn.close()
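
# Hedged illustration of the project-id 2 branch above, with hypothetical
# branch/date values; partitioning by both branch and date yields
#
#     LOAD DATA INPATH '...' INTO TABLE db.tbl PARTITION ( p_branch='9999997900', p_date=20181101 )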
def run_remove_hive(configData: ConfigData):
    f_date_str = configData.get_f_date()  # "20181101"
    p_date_str = configData.get_p_date()  # "2018-11-01"
    del_table = configData.get_table_name()  # "hive_table" + str(configData.the_id)  # "rds_posflow.loginfo_rsp_bl"
    print(configData.cdh_ip() + del_table + f_date_str + configData.get_file_name(f_date_str) + configData.hive_ip())
    if not configData.get_has_partition():
        del_file = configData.get_file_name(f_date_str).replace('.', '*.')  # "file_ext" + str(configData.the_id)
        MyHdfsFile.delete_hive_ssh(configData.cdh_ip(), table=del_table, p_name=del_file,
                                   username=configData.cdh_user(), password=configData.cdh_pass())
    else:
        conn = connect(host=configData.hive_ip(), port=configData.hive_port(),
                       auth_mechanism=configData.hive_auth(), user=configData.hive_user())
        cur = conn.cursor()
        # "ALTER TABLE rds_posflow.t1_trxrecprd_v2_tmp DROP IF EXISTS PARTITION(p_date='2019-02-08')"
        sql = "ALTER TABLE {} DROP IF EXISTS PARTITION( p_date='{}' )".format(del_table, p_date_str)
        print(sql)
        cur.execute(sql)
        cur.close()
        conn.close()
def run_conv_file_hdfs(conf: ConfigData, the_date: str, is_baoli=True):
    the_date = StrTool.get_the_date_str(the_date)
    client = Client(conf.hdfs_ip())  # "http://10.2.201.197:50070"
    root_path = conf.get_data_path()  # 'D:/DATA/UNZIP/'
    dest_dir = conf.get_hdfs_path()
    f_name = conf.get_file_name(the_date)  # "t1_trxrecord_" + the_date + "_V2.csv"

    print("Start\n")

    branches = MyLocalFile.get_child(os.path.join(root_path, the_date))
    for aBranch in branches:
        if MyLocalFile.check_branch(aBranch):
            files = MyLocalFile.get_child(aBranch)
            for aFile in files:
                if MyLocalFile.check_file(aFile, f_name):
                    MyHdfsFile.conv_file_hdfs(
                        aFile,
                        os.path.join(dest_dir, the_date, os.path.basename(aBranch), f_name),
                        client)
def run_remove_hive(configData: ConfigData):
    f_date_str = configData.get_f_date()  # "20181101"
    p_date_str = configData.get_p_date()  # "2018-11-01"
    # "/user/hive/warehouse/rds_posflow.db/t1_trxrecprd_v2/t1_trxrecord_20181204_V2*.csv"
    del_table = configData.get_table_name()  # hive_table="rds_posflow.t1_trxrecprd_v2"
    if not configData.get_has_partition():
        del_file = configData.get_file_name(f_date_str).replace('.', '*.')
        MyHdfsFile.delete_hive_ssh(configData.cdh_ip(), table=del_table, p_name=del_file,
                                   username=configData.cdh_user(), password=configData.cdh_pass())
    else:
        conn = connect(host=configData.hive_ip(), port=configData.hive_port(),
                       auth_mechanism=configData.hive_auth(), user=configData.hive_user())
        cur = conn.cursor()
        # "ALTER TABLE rds_posflow.t1_trxrecprd_v2_tmp DROP IF EXISTS PARTITION(p_date='2019-02-08')"
        sql = "ALTER TABLE {} DROP IF EXISTS PARTITION( p_date='{}' )".format(del_table, p_date_str)
        print(sql)
        cur.execute(sql)
        cur.close()
        conn.close()
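
# Hedged summary of the two cleanup paths run_remove_hive() takes above:
# without partitions it deletes the already-loaded files over SSH (the
# '.' -> '*.' rewrite turns the file name into a wildcard pattern); with
# partitions it issues, for a hypothetical table,
#
#     ALTER TABLE db.tbl DROP IF EXISTS PARTITION( p_date='2018-11-01' )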
def run_conv_file_local_to_hdfs(conf: ConfigData, the_date: str):
    """
    :param conf:
    :param the_date:
    :return:
    """
    the_date = StrTool.get_the_date_str(the_date)
    client = MyClient(conf.hdfs_ip())  # "http://10.2.201.197:50070"

    root_path = conf.get_data("allinpay_data_zc")
    dest_dir1 = conf.get_data("allinpay_utf8_zc")
    dest_dir2 = conf.get_data("hdfs_dir_zc")
    file_ext3 = conf.get_data("file_ext3")  # _loginfo_rsp.txt      # 20181101_loginfo_rsp.txt
    file_ext4 = conf.get_data("file_ext4")  # _loginfo_rsp_agt.txt  # 20181101_loginfo_rsp_agt.txt
    file_ext5 = conf.get_data("file_ext5")  # _rxinfo_rsp.txt       # 20181101_rxinfo_rsp.txt
    file_ext6 = conf.get_data("file_ext6")  # _rxinfo_rsp_agt.txt   # 20181101_rxinfo_rsp_agt.txt

    print("Start\n")

    files = MyLocalFile.get_child_file(root_path)
    for aFile in files:
        short_name = os.path.basename(aFile).lower()
        if short_name == (the_date + file_ext3).lower() or \
                short_name == (the_date + file_ext4).lower() or \
                short_name == (the_date + file_ext5).lower() or \
                short_name == (the_date + file_ext6).lower():
            to_file1 = str(pathlib.PurePath(dest_dir1).joinpath(pathlib.PurePath(aFile).name))
            to_file2 = str(pathlib.PurePosixPath(dest_dir2).joinpath(pathlib.PurePath(aFile).name))
            MyLocalFile.conv_file_local(aFile, to_file1, need_first_line=True)
            MyHdfsFile.safe_make_dir(client, to_file2)
            # client.newupload(to_file2, to_file1, encoding='utf-8')
            the_file = client.status(to_file2, strict=False)
            if the_file is None:
                client.upload(to_file2, to_file1)
                client.set_permission(to_file2, 777)
                # client.set_owner(thePath, owner='hdfs', group='supergroup')
            elif the_file['type'].lower() == 'file':  # as opposed to 'directory'
                client.set_permission(to_file2, 777)
def run_conv_file_hdfs(configData: ConfigData):
    f_date_str = configData.get_f_date()  # "20181101"
    client = InsecureClient(configData.hdfs_ip(), user="******")  # "http://10.2.201.197:50070"
    root_path = configData.get_data_path()  # 'D:/DATA/UNZIP/'
    dest_dir = configData.get_hdfs_path()
    f_name = configData.get_file_name(f_date_str)  # "t1_trxrecord_" + f_date_str + "_V2.csv"

    print("Start\n")

    branches = MyLocalFile.get_child(os.path.join(root_path, f_date_str))
    for aBranch in branches:
        if MyLocalFile.check_branch(aBranch):
            files = MyLocalFile.get_child(aBranch)
            for aFile in files:
                if MyLocalFile.check_file(aFile, f_name):
                    MyHdfsFile.conv_file_hdfs(
                        aFile,
                        os.path.join(dest_dir, f_date_str, os.path.basename(aBranch), f_name),
                        client)
    hdfs_path = str(pathlib.PurePosixPath(conf.get_hdfs_path()).joinpath(sdate))
    shutil.rmtree(data_path, ignore_errors=True)
    shutil.rmtree(utf8_path, ignore_errors=True)
    client = MyClient(conf.hdfs_ip())  # "http://10.2.201.197:50070"
    client.delete(hdfs_path, recursive=True)


# "/user/hive/warehouse/posflow.db/t1_trxrecprd_v2/t1_trxrecord_20181204_V2*.csv"
# hive_table="posflow.t1_trxrecprd_v2",
# file_pre1 = 't1_trxrecord_',
# file_ext2 = "_V2.csv",

if __name__ == "__main__":
    the_conf = ConfigData(p_is_test=False)

    client = Client(the_conf.hdfs_ip())  # "http://10.2.201.197:50070"
    a = MyHdfsFile.get_child(client, "/data/posflow/allinpay_utf8_zc")
    b = MyHdfsFile.get_child_file(client, "/data/posflow/allinpay_utf8_zc")
    c = MyHdfsFile.get_child_dir(client, "/data/posflow/allinpay_utf8_zc")  # test
    # MyHdfsFile.delete(client, "/data/posflow/allinpay_utf8_zc", "*agt_cpy*")  # test

    if the_conf.is_test():
        day_str = the_conf.test_date()
        days = 9
    else:
        day_str = StrTool.get_param_str(1, "")
        days = StrTool.get_param_int(2, 1)
    day_str = StrTool.get_the_date_str(day_str)