Example #1
 def __db_insert(self, ib_id):
     """
     Create a new record in the dm2_storage_directory table for the current directory
     :return:
     """
     sql_insert = '''
     insert into dm2_storage_directory(dsdid, dsdparentid, dsdstorageid, dsddirectory, dsddirtype, dsdlastmodifytime, 
         dsddirectoryname, dsdpath, dsddircreatetime, dsddirlastmodifytime, dsdparentobjid, 
         dsdscanstatus, dsdscanfilestatus, dsdscandirstatus, dsd_directory_valid, dsd_ib_id) 
     values(:dsdid, :dsdparentid, :dsdstorageid, :dsddirectory, :dsddirtype, now(), 
         :dsddirectoryname, :dsdpath, :dsddircreatetime, :dsddirlastmodifytime, :dsdparentobjid,
         1, 1, 1, -1, :ib_id)
     '''
     params = dict()
     params['dsdid'] = self.my_id
     params['dsdparentid'] = self.parent_id
     params['dsdstorageid'] = self.storage_id
     params['dsddirectory'] = CFile.unify(self.file_name_with_rel_path)
     params['dsddirtype'] = self.Dir_Type_Directory
     params['dsddirectoryname'] = self.file_name_without_path
     params['dsdpath'] = CFile.unify(self.file_path_with_rel_path)
     params['dsddircreatetime'] = self.file_create_time
     params['dsddirlastmodifytime'] = self.file_modify_time
     params['dsdparentobjid'] = self.owner_obj_id
     params['ib_id'] = ib_id
     CFactory().give_me_db(self.db_server_id).execute(sql_insert, params)
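
Note: the insert relies on named :placeholder parameters bound from a dict, with CFactory().give_me_db(...) hiding the actual database layer. Below is a minimal, self-contained sketch of the same binding pattern using the standard-library sqlite3 module; the table and column names are illustrative, not the project's real schema.

import sqlite3
import uuid

conn = sqlite3.connect(':memory:')
conn.execute('create table demo_directory (id text primary key, directory text, scan_status integer)')

# sqlite3 accepts the same ":name" placeholder style, bound from a dict
sql_insert = '''
insert into demo_directory(id, directory, scan_status)
values(:id, :directory, 1)
'''
params = {'id': uuid.uuid4().hex, 'directory': '/storage/images/2021'}
conn.execute(sql_insert, params)
conn.commit()
print(conn.execute('select * from demo_directory').fetchall())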
Example #2
 def __db_insert(self, ib_id):
     """
     Create a new record in the dm2_storage_file table for the current file
     :return:
     """
     sql_insert = '''
     insert into dm2_storage_file(
         dsfid, dsfstorageid, dsfdirectoryid, dsffilerelationname, dsffilename, dsffilemainname, dsfext
         , dsffilecreatetime, dsffilemodifytime, dsfaddtime, dsflastmodifytime, dsffilevalid
         , dsfscanstatus, dsfprocessid, dsf_object_type, dsf_object_confirm, dsf_object_id
         , dsffilesize, dsfparentobjid, dsf_ib_id) 
     values(
         :dsfid, :dsfstorageid, :dsfdirectoryid, :dsffilerelationname, :dsffilename, :dsffilemainname, :dsfext
         , :dsffilecreatetime, :dsffilemodifytime, now(), now(), -1
         , 1, null, null, 0, null
         , :dsffilesize, :dsfparentobjid, :dsf_ib_id)
     '''
     params = dict()
     params['dsfid'] = self.my_id
     params['dsfdirectoryid'] = self.parent_id
     params['dsfstorageid'] = self.storage_id
     params['dsffilerelationname'] = CFile.unify(self.file_name_with_rel_path)
     params['dsffilename'] = self.file_name_without_path
     params['dsffilemainname'] = self.file_main_name
     params['dsfext'] = self.file_ext
     params['dsffilecreatetime'] = self.file_create_time
     params['dsffilemodifytime'] = self.file_modify_time
     params['dsffilesize'] = self.file_size
     params['dsfparentobjid'] = self.owner_obj_id
     params['dsf_ib_id'] = ib_id
     CFactory().give_me_db(self.db_server_id).execute(sql_insert, params)
Example #3
 def custom_init(self):
     """
     Custom initialization method
     :return:
     """
     super().custom_init()
     engine = CFactory().give_me_db(self.db_server_id)
     if self.my_id is None:
         self._ds_file_or_path = engine.one_row('''
         select dsfid, dsfstorageid, dsfdirectoryid, dsffilerelationname, dsffilename, dsffilemainname, dsfext, 
             dsffilecreatetime, dsffilemodifytime, dsffilevalid, 
             dsf_object_type, dsf_object_confirm, dsf_object_id, dsffilesize, dsfparentobjid, dsf_ib_id
         from dm2_storage_file
         where dsfstorageid = :dsfStorageID and dsfdirectoryid = :dsfDirectoryId and dsffilerelationname = :dsfFileRelationName
         ''', {'dsfStorageID': self.storage_id, 'dsfDirectoryId': self.parent_id,
               'dsfFileRelationName': CFile.unify(self.file_name_with_rel_path)})
         if not self.ds_file_or_path.is_empty():
             self.my_id = self.ds_file_or_path.value_by_name(0, 'dsfid', None)
         if self.my_id is None:
             self.my_id = CUtils.one_id()
     else:
         self._ds_file_or_path = engine.one_row('''
             select dsfid, dsfstorageid, dsfdirectoryid, dsffilerelationname, dsffilename, dsffilemainname, dsfext, 
                 dsffilecreatetime, dsffilemodifytime, dsffilevalid, 
                 dsf_object_type, dsf_object_confirm, dsf_object_id, dsffilesize, dsfparentobjid, dsf_ib_id
             from dm2_storage_file
             where dsfid = :dsfID
              ''', {'dsfID': self.my_id})
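
custom_init() follows a lookup-or-create pattern: query by storage id, directory id and relative file name, reuse the existing dsfid when a row is found, and otherwise mint a new id via CUtils.one_id(). A rough standalone sketch of the same logic with sqlite3 and uuid; the table layout and the helper name resolve_file_id are hypothetical.

import sqlite3
import uuid

def resolve_file_id(conn, storage_id, directory_id, relation_name):
    # Reuse the id of an existing row when present, otherwise generate a new one
    row = conn.execute(
        'select dsfid from demo_file '
        'where storage_id = :sid and directory_id = :did and relation_name = :rel',
        {'sid': storage_id, 'did': directory_id, 'rel': relation_name}).fetchone()
    return row[0] if row is not None else uuid.uuid4().hex

conn = sqlite3.connect(':memory:')
conn.execute('create table demo_file (dsfid text, storage_id text, directory_id text, relation_name text)')
print(resolve_file_id(conn, 's1', 'd1', 'images/a.tif'))  # no row yet -> fresh id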
Example #4
 def search_type(self):
     """
     Discover the available plugin types
     :return:
     """
     listplugin = []
     plugins_root_dir = CSys.get_plugins_root_dir()
     plugins_type_list = CFile.file_or_subpath_of_path(plugins_root_dir)
     for plugins_type in plugins_type_list:
         if CFile.is_dir(CFile.join_file(
                 plugins_root_dir, plugins_type)) and (
                     not (str(plugins_type)).startswith('_')):
             plugins_root_package_name = '{0}.{1}'.format(
                 CSys.get_plugins_package_root_name(), plugins_type)
             path = CFile.join_file(CSys.get_plugins_root_dir(),
                                    plugins_type)
             plugins_file_list = CFile.file_or_subpath_of_path(
                 path, '{0}_*.{1}'.format(self.Name_Plugins,
                                          self.FileExt_Py))
             for file_name_without_path in plugins_file_list:
                 file_main_name = CFile.file_main_name(
                     file_name_without_path)
                 class_classified_obj = CObject.create_plugins_instance(
                     plugins_root_package_name, file_main_name, None)
                 plugins_info = class_classified_obj.get_information()
                 # Get the plugin's type and name
                 plugins_info["dsodid"] = '{0}'.format(
                     plugins_type) + CFile.unify_seperator + '{0}'.format(
                         file_main_name)
                 listplugin.append(plugins_info)
     plugin_path = []
     # Iterate over listplugin
     for i in listplugin:
         file_dict = {}
         # Get the current working directory
         work_path = CFile.file_abs_path('.')
         # Build the common base path
         main_path = work_path + "/imetadata/business/metadata/inbound/plugins/"
         # Split into plugin type and name
         type_and_name = CUtils.dict_value_by_name(i, "dsodid",
                                                   '').split(CFile.unify_seperator)
         # Build the path to the plugin file
         file_path = main_path + CUtils.dict_value_by_name(
             i, "dsodid", '') + "." + self.FileExt_Py
         # Normalize the file path
         sorted_file_path = CFile.unify(file_path)
         """
         type: dir/file/layer
         source: 待压缩的文件路径
         target: 压缩后路径和名字(根据用户输入的压缩地址,然后拼接出完整的压缩文件)
         """
         file_dict[CResource.Name_Type] = type_and_name[0]
         file_dict.setdefault(CResource.Name_Source,
                              []).append(sorted_file_path)
         file_dict[CResource.Name_Target] = str(
             CUtils.dict_value_by_name(i, "dsodtype", '')) + ".zip"
         print(file_dict)
         plugin_path.append(file_dict)
     return plugin_path
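
search_type() walks the plugins root, skips packages whose names start with '_', and turns each matching plugins module into a {type, source, target} record used later for packaging. A simplified, dependency-free sketch of that directory scan with pathlib; the 'plugins_' file prefix and the directory layout are assumptions based on the code above, not confirmed values of Name_Plugins.

from pathlib import Path

def search_plugins(plugins_root: str):
    records = []
    for type_dir in Path(plugins_root).iterdir():
        # Skip plain files and private packages such as "__pycache__"
        if not type_dir.is_dir() or type_dir.name.startswith('_'):
            continue
        for plugin_file in type_dir.glob('plugins_*.py'):
            records.append({
                'type': type_dir.name,
                'source': plugin_file.as_posix(),
                'target': plugin_file.stem + '.zip',
            })
    return records

# print(search_plugins('./imetadata/business/metadata/inbound/plugins'))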
Example #5
def package(output_relation_dir):
    """
    Compile every .py file under the application root directory (including subdirectories) into .pyc files inside a new output folder
    :param output_relation_dir: output directory, relative to the directory of this file
    :return:
    """
    output_relation_dir = CFile.unify(CUtils.any_2_str(output_relation_dir))

    application_dir = CFile.file_path(CFile.file_abs_path(__file__))
    output_dir = CFile.file_abs_path(CFile.join_file(application_dir, output_relation_dir))

    for each_directory, dir_name_list, file_name_without_path_list in os.walk(application_dir):
        directory_source = each_directory
        directory_name = CFile.file_name(directory_source)
        directory_relation = CFile.file_relation_path(each_directory, application_dir)
        directory_target = CFile.join_file(output_dir, directory_relation)

        path_deploy_enable = deploy_match_pattern_list(directory_relation, 'path.white_list', True, True)
        path_deploy_enable = path_deploy_enable and deploy_match_pattern_list(directory_relation, 'path.black_list',
                                                                              False, True)
        if path_deploy_enable:
            directory_deploy_enable = deploy_match_pattern_list(directory_name, 'directory.white_list', True, True)
            directory_deploy_enable = directory_deploy_enable and deploy_match_pattern_list(
                directory_name, 'directory.black_list',
                False, True
            )

            if directory_deploy_enable:
                for file_name_without_path in file_name_without_path_list:
                    file_deploy_enable = deploy_match_pattern_list(
                        file_name_without_path, 'file.white_list', True,
                        True
                    )
                    file_deploy_enable = file_deploy_enable and deploy_match_pattern_list(
                        file_name_without_path, 'file.black_list',
                        False, True
                    )

                    file_name_with_path_source = CFile.join_file(directory_source, file_name_without_path)
                    if file_deploy_enable:
                        file_compile_enable = deploy_match_pattern_list(
                            file_name_without_path, 'compile.file.white_list',
                            True, False
                        )
                        if file_compile_enable:
                            file_compile_enable = deploy_match_pattern_list(
                                file_name_without_path, 'compile.file.black_list',
                                False, False
                            )

                        file_name_without_path_target = CFile.change_file_ext(file_name_without_path, 'pyc')
                        file_name_with_path_target = CFile.join_file(directory_target, file_name_without_path_target)
                        CFile.check_and_create_directory_itself(directory_target)
                        if file_compile_enable:
                            py_compile.compile(file_name_with_path_source, cfile=file_name_with_path_target)
                            print('{0}-compile-success'.format(file_name_with_path_source))
                        else:
                            CFile.copy_file_to(file_name_with_path_source, directory_target)
                            print('{0}-no_compile'.format(file_name_with_path_source))
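
The heart of package() is the standard-library call py_compile.compile(source, cfile=target), applied while walking the tree with os.walk and mirroring the directory structure on the output side. A minimal sketch of just that compile-and-mirror step, leaving out the white/black-list and copy handling; the paths in the usage comment are illustrative.

import os
import py_compile

def compile_tree(src_root: str, out_root: str) -> None:
    for current_dir, _dir_names, file_names in os.walk(src_root):
        rel_dir = os.path.relpath(current_dir, src_root)
        target_dir = os.path.join(out_root, rel_dir)
        os.makedirs(target_dir, exist_ok=True)
        for file_name in file_names:
            if file_name.endswith('.py'):
                # Write the byte-compiled file to its mirrored location
                py_compile.compile(os.path.join(current_dir, file_name),
                                   cfile=os.path.join(target_dir, file_name + 'c'))

# compile_tree('./imetadata', './deploy/imetadata')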
Example #6
    def __inbound_object_detail_of_schema(self, list_file_fullname):
        sql_detail_insert = '''
        INSERT INTO dm2_storage_obj_detail(
            dodid, dodobjectid, dodfilename, dodfileext, dodfilesize, 
            dodfilecreatetime, dodfilemodifytime, 
            dodlastmodifytime, dodfiletype)
        VALUES (
            :dodid, :dodobjectid, :dodfilename, :dodfileext, :dodfilesize, 
            :dodfilecreatetime, :dodfilemodifytime, now(), 
            :dodfiletype)
        '''

        sql_detail_insert_params_list = []

        # query_storage_id = self.file_info.storage_id
        query_file_relation_name = self.file_info.file_name_with_rel_path
        for item_file_name_with_path in list_file_fullname:
            CLogger().debug(item_file_name_with_path)
            if not CFile.file_or_path_exist(item_file_name_with_path):
                continue

            params = dict()
            file_relation_name = CFile.file_relation_path(
                item_file_name_with_path, self.file_info.root_path)
            if CUtils.equal_ignore_case(query_file_relation_name,
                                        file_relation_name):
                params['dodid'] = self.object_id
            else:
                params['dodid'] = CUtils.one_id()
            # File type
            params['dodfiletype'] = self.FileType_File
            if CFile.is_dir(item_file_name_with_path):
                params['dodfiletype'] = self.FileType_Dir
            params['dodobjectid'] = self.object_id
            params['dodfilename'] = CFile.unify(file_relation_name)
            params['dodfileext'] = CFile.file_ext(item_file_name_with_path)
            params['dodfilesize'] = CFile.file_size(item_file_name_with_path)
            params['dodfilecreatetime'] = CFile.file_create_time(
                item_file_name_with_path)
            params['dodfilemodifytime'] = CFile.file_modify_time(
                item_file_name_with_path)
            # params['dodstorageid'] = query_storage_id
            # params['dodfilerelationname'] = CFile.file_relation_path(
            #     item_file_name_with_path,
            #     self.file_info.root_path)
            sql_params_tuple = (sql_detail_insert, params)
            sql_detail_insert_params_list.append(sql_params_tuple)

        if len(sql_detail_insert_params_list) > 0:
            try:
                CFactory().give_me_db(
                    self.file_info.db_server_id).execute_batch(
                        sql_detail_insert_params_list)
            except Exception as error:
                CLogger().warning('Database operation raised an exception, error: {0}'.format(
                    error.__str__()))
                return CResult.merge_result(self.Failure, 'Processing failed!')
        return CResult.merge_result(self.Success, 'Processing finished!')
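
__inbound_object_detail_of_schema builds one (sql, params) tuple per file and hands the whole list to execute_batch, so all detail rows are written in a single batch. The same idea expressed with sqlite3's executemany, collecting the parameter dicts first and inserting them in one call; the obj_detail schema is illustrative.

import os
import sqlite3
import uuid

conn = sqlite3.connect(':memory:')
conn.execute('create table obj_detail (dodid text, dodfilename text, dodfilesize integer)')

rows = []
for file_name in os.listdir('.'):
    rows.append({
        'dodid': uuid.uuid4().hex,
        'dodfilename': file_name,
        'dodfilesize': os.path.getsize(file_name) if os.path.isfile(file_name) else 0,
    })

# One call for the whole batch instead of one execute per row
conn.executemany(
    'insert into obj_detail(dodid, dodfilename, dodfilesize) '
    'values (:dodid, :dodfilename, :dodfilesize)', rows)
conn.commit()
print(conn.execute('select count(*) from obj_detail').fetchone())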
Example #7
    def __stat_object_detail_of_schema(self) -> str:
        """
        Store the statistics of the data's auxiliary files in the database.
        Only applicable in Directory_Itself mode.
        :return:
        """
        result_sub_dir_count, result_file_count, result_file_size_sum = CFile.stat_of_path(
            self.__detail_file_path__, self.__detail_file_recurse__,
            self.__detail_file_match_text__, self.__detail_file_match_type__)

        query_file_relation_name = self.file_info.file_name_with_rel_path
        params = dict()
        file_relation_name = CFile.file_relation_path(
            self.__detail_file_path__, self.file_info.root_path)
        if CUtils.equal_ignore_case(query_file_relation_name,
                                    file_relation_name):
            params['dodid'] = self.object_id
        else:
            params['dodid'] = CUtils.one_id()

        params['dodfiletype'] = self.FileType_Dir
        params['dodfileext'] = None

        if CFile.is_file(self.__detail_file_path__):
            params['dodfiletype'] = self.FileType_File
            params['dodfileext'] = CFile.file_ext(self.__detail_file_path__)

        params['dodobjectid'] = self.object_id
        params['dodfilename'] = CFile.unify(file_relation_name)

        params['doddircount'] = result_sub_dir_count
        params['dodfilecount'] = result_file_count
        params['dodfilesize'] = result_file_size_sum
        params['dodfilecreatetime'] = CFile.file_create_time(
            self.__detail_file_path__)
        params['dodfilemodifytime'] = CFile.file_modify_time(
            self.__detail_file_path__)

        try:
            CFactory().give_me_db(self.file_info.db_server_id).execute(
                '''
                INSERT INTO dm2_storage_obj_detail(
                    dodid, dodobjectid, dodfilename, dodfileext, dodfilesize, doddircount, dodfilecount,
                    dodfilecreatetime, dodfilemodifytime, dodlastmodifytime, dodfiletype)
                VALUES (
                    :dodid, :dodobjectid, :dodfilename, :dodfileext, :dodfilesize, :doddircount, :dodfilecount,
                    :dodfilecreatetime, :dodfilemodifytime, now(), :dodfiletype)
                ''', params)
            return CResult.merge_result(self.Success, 'Processing finished!')
        except Exception as error:
            CLogger().warning('Database operation raised an exception, error: {0}'.format(error.__str__()))
            return CResult.merge_result(
                self.Failure, 'Database operation raised an exception, error: {0}'.format(error.__str__()))
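
CFile.stat_of_path is used here as if it returned (sub_dir_count, file_count, file_size_sum) for a directory tree, which then fills doddircount, dodfilecount and dodfilesize. A plain-Python approximation of such a helper with os.walk; this is an assumption about what the project helper computes, not its actual implementation.

import os

def stat_of_path(path: str, recurse: bool = True):
    # Count sub-directories and files, and sum file sizes under `path`
    sub_dir_count = file_count = size_sum = 0
    for current_dir, dir_names, file_names in os.walk(path):
        sub_dir_count += len(dir_names)
        file_count += len(file_names)
        size_sum += sum(os.path.getsize(os.path.join(current_dir, f)) for f in file_names)
        if not recurse:
            break
    return sub_dir_count, file_count, size_sum

# print(stat_of_path('.'))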
Example #8
    def process_mission(self, dataset) -> str:
        """
        :param dataset:
        :return:
        """
        ds_ib_id = dataset.value_by_name(0, 'query_ib_id', '')
        ds_storage_id = dataset.value_by_name(0, 'query_storage_id', '')
        ds_storage_title = dataset.value_by_name(0, 'query_storage_title', '')
        ds_storage_root_dir = dataset.value_by_name(0, 'query_rootpath', '')
        ds_ib_directory_name = dataset.value_by_name(0,
                                                     'query_ib_relation_dir',
                                                     '')
        ds_ib_directory_id = dataset.value_by_name(0,
                                                   'query_ib_relation_dir_id',
                                                   '')
        ds_ib_batch_no = dataset.value_by_name(0, 'query_ib_batchno', '')
        # Enable when needed
        # ds_ib_option = CUtils.any_2_str(dataset.value_by_name(0, 'query_ib_option', ''))

        if not CUtils.equal_ignore_case(ds_ib_directory_name, ''):
            CLogger().debug('Inbounding directory [{1}] under storage [{0}]'.format(
                ds_storage_title,
                CFile.join_file(ds_storage_root_dir, ds_ib_directory_name)))
        else:
            CLogger().debug('Inbounding directory [{1}] under storage [{0}]'.format(
                ds_storage_title, ds_storage_root_dir))

        try:
            ds_ib_information_updated = False
            # Check the directory name format and auto-correct it
            if not CUtils.equal_ignore_case(ds_ib_directory_name, ''):
                ds_ib_directory = CFile.unify(
                    CFile.add_prefix(ds_ib_directory_name))
                if not CUtils.equal_ignore_case(ds_ib_directory,
                                                ds_ib_directory_name):
                    ds_ib_directory_name = ds_ib_directory
                    ds_ib_information_updated = True

            if CUtils.equal_ignore_case(ds_ib_batch_no, ''):
                ds_ib_batch_no = CFactory().give_me_db(
                    self.get_mission_db_id()).seq_next_value(
                        self.Seq_Type_Date_AutoInc)
                ds_ib_information_updated = True

            if CUtils.equal_ignore_case(ds_ib_directory_id, ''):
                ds_ib_directory_id = CUtils.one_id()
                ds_ib_information_updated = True

            if ds_ib_information_updated:
                self.correct_ib_information(ds_ib_id, ds_ib_directory_name,
                                            ds_ib_batch_no, ds_ib_directory_id)

            if not CUtils.equal_ignore_case(ds_ib_directory_name, ''):
                ib_full_directory = CFile.join_file(ds_storage_root_dir,
                                                    ds_ib_directory_name)
            else:
                ib_full_directory = ds_storage_root_dir

            self.clear_anything_in_directory(ds_ib_id)
            metadata_rule_file_name = CFile.join_file(
                ib_full_directory, self.FileName_MetaData_Rule)
            metadata_rule_content = ''
            if CFile.file_or_path_exist(metadata_rule_file_name):
                try:
                    metadata_rule_content = CXml.file_2_str(
                        metadata_rule_file_name)
                    CLogger().debug('Found a metadata rule file under directory [{0}], its content is [{1}]'.format(
                        ib_full_directory, metadata_rule_content))
                except Exception as error:
                    result = CResult.merge_result(
                        self.Failure,
                        'Found a metadata rule file under directory [{0}], but its format is invalid, details: [{1}]'.format(
                            ib_full_directory, error.__str__()))
                    self.update_inbound_qi_result(ds_ib_id, result)
                    return result

            path_obj = CDMPathInfo(self.FileType_Dir, ib_full_directory,
                                   ds_storage_id,
                                   ds_ib_directory_id, ds_storage_id, None,
                                   self.get_mission_db_id(),
                                   metadata_rule_content)

            if path_obj.white_black_valid():
                path_obj.db_check_and_update(ds_ib_id)

                result = CResult.merge_result(
                    self.Success, 'Inbound QC task for directory [{0}] created; quality checks are running, please wait...'.format(
                        ib_full_directory))
            else:
                result = CResult.merge_result(
                    self.Failure,
                    'Directory [{0}] failed the black/white list check and may not be inbounded!'.format(ib_full_directory))

            self.update_inbound_qi_result(ds_ib_id, result)
            return result
        except Exception as error:
            result = CResult.merge_result(
                self.Failure, 'Error while creating the inbound QC task for directory [{0}], details: [{1}]'.format(
                    CFile.join_file(ds_storage_root_dir, ds_ib_directory_name),
                    error.__str__()))
            self.update_inbound_qi_result(ds_ib_id, result)
            return result
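
Before building the CDMPathInfo object, process_mission reads the metadata rule file as XML and fails the task if the file exists but does not parse. A compact sketch of that guard with the standard xml.etree module; load_rule_content and its return convention are hypothetical, only the validate-then-read idea mirrors the code above.

import xml.etree.ElementTree as ET

def load_rule_content(rule_file_name: str):
    # Returns (content, None) when the XML parses, (None, error message) otherwise
    try:
        ET.parse(rule_file_name)  # used only to validate the format
        with open(rule_file_name, encoding='utf-8') as rule_file:
            return rule_file.read(), None
    except (ET.ParseError, OSError) as error:
        return None, 'metadata rule file is not valid XML: {0}'.format(error)

# content, message = load_rule_content('metadata.rule.xml')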
Example #9
    def process_mission(self, dataset) -> str:
        """
        The detailed algorithm is complex; see the [### 数据入库调度] (data inbound scheduling) section in readme.md
        :param dataset:
        :return:
        """
        ds_src_storage_id = dataset.value_by_name(0, 'query_storage_id', '')
        ds_src_storage_type = dataset.value_by_name(0, 'query_storage_type',
                                                    self.Storage_Type_Mix)
        ds_src_root_path = dataset.value_by_name(0, 'query_rootpath', '')
        ds_src_dir_id = dataset.value_by_name(0, 'query_ib_dir_id', '')

        ds_ib_id = dataset.value_by_name(0, 'query_ib_id', '')
        ds_ib_directory_name = dataset.value_by_name(0,
                                                     'query_ib_relation_dir',
                                                     '')
        ds_ib_batch_no = dataset.value_by_name(0, 'query_ib_batchno', '')
        ds_ib_option = dataset.value_by_name(0, 'query_ib_option', '')

        src_need_storage_size = self.get_storage_size(ds_ib_id,
                                                      ds_src_storage_id,
                                                      ds_ib_directory_name,
                                                      ds_ib_option)
        src_path = ds_src_root_path
        if not CUtils.equal_ignore_case(ds_ib_directory_name, ''):
            src_path = CFile.join_file(src_path, ds_ib_directory_name)
        src_dataset_metadata_filename = CFile.join_file(
            src_path, self.FileName_MetaData_Bus_21AT)

        CLogger().debug('Directory being inbounded: {0}.{1}'.format(
            ds_ib_id, ds_ib_directory_name))
        try:
            # Check that all the files match the metadata
            all_ib_file_or_path_existed = self.check_all_ib_file_or_path_existed(
                ds_ib_id)
            if not CResult.result_success(all_ib_file_or_path_existed):
                self.update_ib_result(ds_ib_id, all_ib_file_or_path_existed)
                return all_ib_file_or_path_existed

            # Save the data-inbound record to the log
            result = self.ib_log(ds_ib_id, ds_src_storage_id,
                                 ds_ib_directory_name)
            if not CResult.result_success(result):
                self.update_ib_result(ds_ib_id, result)
                return result

            # If inbounding directly within core or mixed storage, only the metadata status needs updating
            if CUtils.equal_ignore_case(ds_src_storage_type, self.Storage_Type_Mix) \
                    or CUtils.equal_ignore_case(ds_src_storage_type, self.Storage_Type_Core):
                result_ib_in_core_or_mix_storage = self.update_ib_data_status_in_core_or_mix_storage(
                    ds_ib_id, ds_src_storage_id, ds_ib_directory_name,
                    ds_src_dir_id)
                self.update_ib_result(ds_ib_id,
                                      result_ib_in_core_or_mix_storage)
                return result_ib_in_core_or_mix_storage

            # Load the metadata file of the dataset to be inbounded under this directory
            src_dataset_xml = CXml()
            src_dataset_type = self.Name_Default
            if CFile.file_or_path_exist(src_dataset_metadata_filename):
                src_dataset_xml.load_file(src_dataset_metadata_filename)
                src_dataset_type = CXml.get_element_text(
                    src_dataset_xml.xpath_one(self.Path_MD_Bus_ProductType))
            if CUtils.equal_ignore_case(src_dataset_type, ''):
                src_dataset_type = self.Name_Default

            # Get the matching inbound schema
            src_ib_schema = self.get_ib_schema(src_dataset_type, ds_ib_option)
            if src_ib_schema is None:
                result = CResult.merge_result(
                    self.Failure,
                    'The dataset type of directory [{0}.{1}] is [{2}], and no matching inbound schema was found; please check, fix and retry!'.format(
                        ds_ib_id, ds_ib_directory_name, src_dataset_type))
                self.update_ib_result(ds_ib_id, result)
                return result

            # Compute the target storage, its root path, the id of the target sub-directory in the target storage, the target sub-path, and the feedback message
            dest_ib_storage_id, dest_ib_root_path, desc_ib_dir_id, dest_ib_subpath, message = self.get_dest_storage(
                ds_ib_batch_no, src_need_storage_size, ds_ib_option,
                src_ib_schema, src_dataset_xml)
            if dest_ib_storage_id is None or dest_ib_subpath is None:
                result = CResult.merge_result(self.Failure, message)
                self.update_ib_result(ds_ib_id, result)
                return result

            dest_ib_subpath = CFile.unify(dest_ib_subpath)
            if CJson.json_attr_value(ds_ib_option,
                                     self.Path_IB_Switch_CheckFileLocked,
                                     self.DB_False) == self.DB_True:
                src_ib_files_not_locked, message = self.check_src_ib_files_not_locked(
                    ds_src_root_path, src_path)
                if not src_ib_files_not_locked:
                    result = CResult.merge_result(self.Failure, message)
                    self.update_ib_result(ds_ib_id, result)
                    return result

            proc_ib_src_path = ds_src_root_path
            proc_ib_dest_path = dest_ib_root_path
            if not CUtils.equal_ignore_case(dest_ib_subpath, ''):
                proc_ib_dest_path = CFile.join_file(dest_ib_root_path,
                                                    dest_ib_subpath)

            if not CUtils.equal_ignore_case(ds_ib_directory_name, ''):
                proc_ib_src_path = CFile.join_file(proc_ib_src_path,
                                                   ds_ib_directory_name)
                proc_ib_dest_path = CFile.join_file(proc_ib_dest_path,
                                                    ds_ib_directory_name)

            # -------------------------------------------------------------- at this point, all pre-inbound checks are complete
            # Move the source directory to the target directory; if it is the root directory, only move the files
            result = self.ib_files_move(
                proc_ib_src_path, proc_ib_dest_path,
                CUtils.equal_ignore_case(ds_ib_directory_name, ''))
            if not CResult.result_success(result):
                # Move the data back to the original directory with the same method; in theory this should always succeed
                sub_result = self.ib_files_move(
                    proc_ib_dest_path, proc_ib_src_path,
                    CUtils.equal_ignore_case(ds_ib_directory_name, ''))
                if not CResult.result_success(sub_result):
                    sub_result_message = CResult.result_message(sub_result)
                    result_message = CResult.result_message(result)
                    result = CResult.merge_result(
                        self.Failure,
                        '{0}\n{1}'.format(result_message, sub_result_message))

                self.update_ib_result(ds_ib_id, result)
                return result

            # Move the source files' metadata to the target storage; on exception, the method rolls back internally
            result = self.src_ib_metadata_move_to_storage(
                ds_ib_id, ds_src_storage_id, ds_src_dir_id,
                ds_ib_directory_name, dest_ib_storage_id, desc_ib_dir_id,
                dest_ib_subpath)
            if not CResult.result_success(result):
                # Move the data back to the original directory with the same method; in theory this should always succeed
                sub_result = self.ib_files_move(
                    proc_ib_dest_path, proc_ib_src_path,
                    CUtils.equal_ignore_case(ds_ib_directory_name, ''))
                if not CResult.result_success(sub_result):
                    sub_result_message = CResult.result_message(sub_result)
                    result_message = CResult.result_message(result)
                    result = CResult.merge_result(
                        self.Failure,
                        '{0}\n{1}'.format(result_message, sub_result_message))

                self.update_ib_result(ds_ib_id, result)
                return result

            result = CResult.merge_result(
                self.Success,
                'Directory [{0}.{1}] inbounded successfully!'.format(ds_ib_id, ds_ib_directory_name))
            self.update_ib_result(ds_ib_id, result)
            return result
        except Exception as error:
            result = CResult.merge_result(
                self.Failure, 'Exception while inbounding directory [{0}.{1}]! Reason: {2}'.format(
                    ds_ib_id, ds_ib_directory_name, error.__str__()))
            self.update_ib_result(ds_ib_id, result)
            return result
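
A recurring pattern in this mission is move-then-rollback: move the data to the destination, and if a later step fails, move it back with the same routine before reporting the failure. A compact sketch of that idea with shutil; the verify callback and the commented paths stand in for the project's own checks.

import shutil

def move_with_rollback(src_path: str, dest_path: str, verify) -> bool:
    # Move src_path to dest_path; if `verify` rejects the result, move it back
    shutil.move(src_path, dest_path)
    if verify(dest_path):
        return True
    shutil.move(dest_path, src_path)  # rollback using the same routine
    return False

# ok = move_with_rollback('/storage/inbox/batch_001',
#                         '/storage/core/batch_001',
#                         verify=lambda path: True)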