Example #1
def run_conv_file_local_to_hdfs(configData: ConfigData):
    """

    # client.upload('/shouyinbao/', "/home/testFolder/logflow/bl_shouyinbao/UTF8/20181101/9999100000/t1_trxrecord_20181101_V2.csv", cleanup=True)
    # dat = client.list('/shouyinbao/', status=False)
    # print(dat)

    # root_path = "/home/bd/桌面/201811_flow/zc_shouyinbao/UNZIP/"
    # dest_dir1 = "/home/bd/桌面/201811_flow/zc_shouyinbao/UTF8/"
    # dest_dir2 = "/shouyinbao/zc_shouyinbao/UTF8/"

    # root_path = "/home/testFolder/logflow/bl_shouyinbao/UNZIP/"
    # dest_dir1 = "/home/testFolder/logflow/bl_shouyinbao/UTF8/"
    # dest_dir2 = "/shouyinbao/zc_shouyinbao/UTF8/"

    # i_file = '/home/testFolder/logflow/bl_shouyinbao/20181101/9999100000/t1_trxrecord_20181101_V2.csv'
    # o_file = '/home/testFolder/logflow/bl_shouyinbao/UTF8/20181101/9999100000/t1_trxrecord_20181101_V2.csv'

    :param configData: run configuration (date, local/HDFS paths, file-name pattern)
    :return: None
    """
    f_date_str = configData.get_f_date()  # "20181101"

    a_client = InsecureClient(configData.hdfs_ip(), user="******")   # "http://10.2.201.197:50070"
    # WebHDFS defaults to the user dr.who and cannot impersonate other users; change hadoop.http.staticuser.user=dr.who in the Hadoop config to use another user.
    # https://www.cnblogs.com/peizhe123/p/5540845.html
    root_path = os.path.join(configData.get_data_path(), f_date_str)
    dest_dir1 = os.path.join(configData.get_utf8_path(), f_date_str)
    dest_dir2 = str(pathlib.PurePosixPath(configData.get_hdfs_path()).joinpath(f_date_str))

    print("Start\n")

    f_name = configData.get_file_name(f_date_str)  # e.g. "t1_trxrecord_" + f_date_str + "_V2.csv"

    branches = MyLocalFile.get_child_dir(root_path)
    for aBranch in branches:
        if MyLocalFile.check_branch(aBranch):
            files = MyLocalFile.get_child_file(aBranch)
            f_a_branch = os.path.basename(aBranch)
            for aFile in files:
                if MyLocalFile.check_file(aFile, f_name):
                    to_file1 = str(pathlib.PurePath(dest_dir1).joinpath(f_a_branch, f_name))
                    to_file2 = str(pathlib.PurePosixPath(dest_dir2).joinpath(f_a_branch, f_name))
                    f_add_head = configData.get_hive_add_date(f_a_branch)
                    f_add_end = configData.get_hive_add_date("789")
                    f_need_head = not configData.get_hive_head()  # False
                    MyLocalFile.conv_file_local(aFile, to_file1,
                                                need_first_line=f_need_head,
                                                p_add_head=f_add_head,
                                                p_add_tail=f_add_end,
                                                quoting="")
                    MyHdfsFile.safe_make_dir(a_client, to_file2)
                    # client.newupload(to_file2, to_file1, encoding='utf-8')
                    the_file = a_client.status(to_file2, strict=False)
                    if the_file is None:
                        a_client.upload(to_file2, to_file1) #, encoding='utf-8')
                        a_client.set_permission(to_file2, 777)
                    # a_client.set_owner(thePath,owner='hdfs',group='supergroup')
                    elif the_file['type'].lower() == 'file':  # the other possible 'type' value is 'DIRECTORY'
                        a_client.set_permission(to_file2, 777)
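
The tail of this example is an idempotent upload: check whether the destination already exists, upload only if it does not, and otherwise just reopen the permissions. Below is a minimal, self-contained sketch of that pattern using the hdfs client library; the namenode URL, user, and paths are placeholders taken from the comments above.

from hdfs import InsecureClient

def upload_if_missing(client: InsecureClient, hdfs_path: str, local_path: str) -> None:
    # status() with strict=False returns None instead of raising when the path is absent.
    status = client.status(hdfs_path, strict=False)
    if status is None:
        client.upload(hdfs_path, local_path)
        client.set_permission(hdfs_path, '777')  # WebHDFS expects an octal permission
    elif status['type'].lower() == 'file':
        # Already uploaded earlier: only make sure the permissions are open.
        client.set_permission(hdfs_path, '777')

# Placeholder connection values; adjust to the actual cluster and paths.
# client = InsecureClient("http://10.2.201.197:50070", user="hdfs")
# upload_if_missing(client,
#                   "/shouyinbao/zc_shouyinbao/UTF8/20181101/9999100000/t1_trxrecord_20181101_V2.csv",
#                   "/home/testFolder/logflow/bl_shouyinbao/UTF8/20181101/9999100000/t1_trxrecord_20181101_V2.csv")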
Example #2
def run_conv_file_local_to_hdfs(conf: ConfigData, the_date: str):
    """

    :param conf:
    :param the_date:
    :return:
    """
    the_date = StrTool.get_the_date_str(the_date)
    a_client = MyClient(conf.hdfs_ip())  # "http://10.2.201.197:50070"
    # allinpay_data_bl
    data_path = os.path.join(conf.get_data_path(), the_date)
    # allinpay_utf8_bl
    dest_dir1 = os.path.join(conf.get_utf8_path(), the_date)
    # hdfs_dir_bl
    dest_dir2 = str(
        pathlib.PurePosixPath(conf.get_hdfs_path()).joinpath(the_date))
    # file_ext7 = conf.get_data("file_ext7")  # _loginfo_rsp_bl_new.csv   # 20181101_loginfo_rsp_bl_new.csv
    # file_ext8 = conf.get_data("file_ext8")  # _rsp_agt_bl_new.del       # 20181101_rsp_agt_bl_new.del
    # file_ext9 = conf.get_data("file_ext9")  # _rxinfo_rsp_bl.txt        # 20181101_rxinfo_rsp_bl.txt

    # f_list = [file_ext7, file_ext8, file_ext9]

    print("Start\n")

    # "file_ext" + str(conf.the_id)
    file_name = conf.get_file_name(the_date).lower()
    files = MyLocalFile.get_child_file(data_path)
    for aFile in files:
        short_name = os.path.basename(aFile).lower()
        f_name = pathlib.PurePath(aFile).name
        if short_name == file_name:
            to_file1 = str(pathlib.PurePath(dest_dir1).joinpath(f_name))
            to_file2 = str(pathlib.PurePosixPath(dest_dir2).joinpath(f_name))
            f_add_date = conf.get_hive_add_date(the_date)
            f_need_head = conf.get_hive_head()
            MyLocalFile.conv_file_local(aFile,
                                        to_file1,
                                        need_first_line=f_need_head,
                                        p_add_head=f_add_date)
            MyHdfsFile.safe_make_dir(a_client, to_file2)
            # a_client.newupload(to_file2, to_file1, encoding='utf-8')
            the_file = a_client.status(to_file2, strict=False)
            if the_file is None:
                a_client.upload(to_file2, to_file1)
                a_client.set_permission(to_file2, 777)
            # a_client.set_owner(thePath,owner='hdfs',group='supergroup')
            elif the_file['type'].lower() == 'file':  # 'directory'
                a_client.set_permission(to_file2, 777)
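
Worth noting in Examples #1 and #2: local destinations are joined with pathlib.PurePath, which follows the running OS's path flavour, while HDFS destinations are joined with pathlib.PurePosixPath so the remote path keeps forward slashes even when the script runs on Windows. A tiny illustration; the directory names are taken from the comments above and the short file name is made up.

import pathlib

# HDFS side: always POSIX separators, on every platform.
hdfs_dest = str(pathlib.PurePosixPath("/shouyinbao/zc_shouyinbao/UTF8/20181101")
                .joinpath("9999100000", "t1.csv"))
# -> '/shouyinbao/zc_shouyinbao/UTF8/20181101/9999100000/t1.csv'

# Local side: PurePath picks the platform flavour (backslashes on Windows).
local_dest = str(pathlib.PurePath("UTF8", "20181101").joinpath("9999100000", "t1.csv"))

print(hdfs_dest)
print(local_dest)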
Example #3
def run_conv_file_local(configData: ConfigData):
    """Convert the day's local files into the UTF-8 output directory (no HDFS upload)."""
    f_date_str = configData.get_f_date()  # "20181101"

    root_path = configData.get_data_path()
    dest_dir = configData.get_utf8_path()

    f_name = configData.get_file_name(f_date_str)  # e.g. "t1_trxrecord_" + f_date_str + "_V2.csv"

    print("Start\n")

    branches = MyLocalFile.get_child(os.path.join(root_path, f_date_str))
    for aBranch in branches:
        if MyLocalFile.check_branch(aBranch):
            files = MyLocalFile.get_child(aBranch)
            for aFile in files:
                if MyLocalFile.check_file(aFile, f_name):
                    MyLocalFile.conv_file_local(aFile, os.path.join(dest_dir, f_date_str, os.path.basename(aBranch), f_name), True)
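
Examples #3 and #4 walk one sub-directory per branch under the date directory and convert every file whose name matches the expected pattern. Assuming MyLocalFile.get_child lists a directory, check_branch accepts sub-directories, and check_file matches the file name (these semantics are inferred from the call sites, not confirmed), a stdlib-only equivalent of the walk would look like this:

import os

def walk_branches(root_path: str, f_date_str: str, f_name: str):
    # Yields (branch name, source file path) pairs, mirroring the nested loops above.
    date_dir = os.path.join(root_path, f_date_str)
    for branch in os.scandir(date_dir):
        if not branch.is_dir():
            continue
        for entry in os.scandir(branch.path):
            if entry.is_file() and entry.name == f_name:
                yield branch.name, entry.path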
Example #4
def run_conv_file_local(conf: ConfigData, the_date: str, is_baoli=True):
    """Convert the files for the_date into the UTF-8 output directory (is_baoli is unused in this snippet)."""
    the_date = StrTool.get_the_date_str(the_date)
    root_path = conf.get_data_path()
    dest_dir = conf.get_utf8_path()

    f_name = conf.get_file_name(the_date)  # e.g. "t1_trxrecord_" + the_date + "_V2.csv"

    print("Start\n")

    branches = MyLocalFile.get_child(os.path.join(root_path, the_date))
    for aBranch in branches:
        if MyLocalFile.check_branch(aBranch):
            files = MyLocalFile.get_child(aBranch)
            for aFile in files:
                if MyLocalFile.check_file(aFile, f_name):
                    MyLocalFile.conv_file_local(
                        aFile,
                        os.path.join(dest_dir, the_date,
                                     os.path.basename(aBranch), f_name), True)
Example #5
def run_conv_file_local_to_hdfs(conf: ConfigData, the_date: str):
    """

    :param conf:
    :param the_date:
    :return:
    """
    the_date = StrTool.get_the_date_str(the_date)
    client = MyClient(conf.hdfs_ip())  # "http://10.2.201.197:50070"
    root_path = conf.get_data("allinpay_data_zc")
    dest_dir1 = conf.get_data("allinpay_utf8_zc")
    dest_dir2 = conf.get_data("hdfs_dir_zc")
    file_ext3 = conf.get_data("file_ext3")  # _loginfo_rsp.txt          # 20181101_loginfo_rsp.txt
    file_ext4 = conf.get_data("file_ext4")  # _loginfo_rsp_agt.txt      # 20181101_loginfo_rsp_agt.txt
    file_ext5 = conf.get_data("file_ext5")  # _rxinfo_rsp.txt           # 20181101_rxinfo_rsp.txt
    file_ext6 = conf.get_data("file_ext6")  # _rxinfo_rsp_agt.txt      # 20181101_rxinfo_rsp_agt.txt

    print("Start\n")

    files = MyLocalFile.get_child_file(root_path)
    for aFile in files:
        short_name = os.path.basename(aFile).lower()
        if short_name == (the_date + file_ext3).lower() or \
                short_name == (the_date + file_ext4).lower() or \
                short_name == (the_date + file_ext5).lower() or \
                short_name == (the_date + file_ext6).lower():
            to_file1 = str(pathlib.PurePath(dest_dir1).joinpath(pathlib.PurePath(aFile).name))
            to_file2 = str(pathlib.PurePosixPath(dest_dir2).joinpath(pathlib.PurePath(aFile).name))
            MyLocalFile.conv_file_local(aFile, to_file1, need_first_line=True)
            MyHdfsFile.safe_make_dir(client, to_file2)
            # client.newupload(to_file2, to_file1, encoding='utf-8')
            the_file = client.status(to_file2, strict=False)
            if the_file is None:
                client.upload(to_file2, to_file1)
                client.set_permission(to_file2, 777)
            # client.set_owner(thePath,owner='hdfs',group='supergroup')
            elif the_file['type'].lower() == 'file':  # 'directory'
                client.set_permission(to_file2, 777)
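
The four chained short_name comparisons in Example #5 amount to a membership test against the set of expected file names for the date. A compact sketch of that alternative follows; the suffix values are copied from the comments above and may differ from the real configuration.

def expected_names(the_date: str, suffixes) -> set:
    # Build the lower-cased file names the loop should accept, e.g. "20181101_loginfo_rsp.txt".
    return {(the_date + ext).lower() for ext in suffixes}

names = expected_names("20181101", ["_loginfo_rsp.txt", "_loginfo_rsp_agt.txt",
                                    "_rxinfo_rsp.txt", "_rxinfo_rsp_agt.txt"])
# ...and inside the loop the chained "or" comparisons become:  if short_name in names: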