def __init__(self, file_type, file_name_with_full_path):
    """Capture the decomposed parts (name, extension, path) and, when the
    target exists on disk, its stat information (size and timestamps).

    :param file_type: project file-type constant stored as-is
    :param file_name_with_full_path: absolute path of the file or directory
    """
    self.__file_name_with_full_path = file_name_with_full_path
    self.__file_type = file_type

    # Decompose the full path into its individual components.
    self.__file_name_without_path = CFile.file_name(self.file_name_with_full_path)
    self.__file_main_name = CFile.file_main_name(self.file_name_with_full_path)
    self.__file_ext = CFile.file_ext(self.file_name_with_full_path)
    self.__file_path = CFile.file_path(self.file_name_with_full_path)
    self.__file_main_name_with_full_path = CFile.join_file(
        self.file_path, self.file_main_name)

    self.__file_existed = CFile.file_or_path_exist(self.file_name_with_full_path)
    if not self.__file_existed:
        # Missing target: the size/time attributes are deliberately left
        # unset, matching the original behavior.
        return

    # NOTE(review): size is only meaningful for regular files; timestamps
    # are collected for directories as well — confirm this matches the
    # original nesting intent.
    if CFile.is_file(self.file_name_with_full_path):
        self.__file_size = CFile.file_size(self.file_name_with_full_path)
    self.__file_create_time = CFile.file_create_time(self.file_name_with_full_path)
    self.__file_access_time = CFile.file_access_time(self.file_name_with_full_path)
    self.__file_modify_time = CFile.file_modify_time(self.file_name_with_full_path)
def __inbound_object_detail_of_schema(self, list_file_fullname):
    """Batch-insert one dm2_storage_obj_detail row per existing file/dir in
    *list_file_fullname*.

    The row whose relative path equals the object's own relative path reuses
    ``self.object_id`` as its detail id; every other row gets a fresh id.

    :param list_file_fullname: iterable of absolute file/directory names
    :return: CResult — Success when the batch commits (or nothing to do),
             Failure when the database raises.
    """
    sql_detail_insert = '''
    INSERT INTO dm2_storage_obj_detail(
        dodid, dodobjectid, dodfilename, dodfileext, dodfilesize,
        dodfilecreatetime, dodfilemodifytime, dodlastmodifytime, dodfiletype)
    VALUES (
        :dodid, :dodobjectid, :dodfilename, :dodfileext, :dodfilesize,
        :dodfilecreatetime, :dodfilemodifytime, now(), :dodfiletype)
    '''
    sql_detail_insert_params_list = []
    query_file_relation_name = self.file_info.file_name_with_rel_path
    for item_file_name_with_path in list_file_fullname:
        CLogger().debug(item_file_name_with_path)
        # Skip entries that disappeared between scan and inbound.
        if not CFile.file_or_path_exist(item_file_name_with_path):
            continue

        params = dict()
        file_relation_name = CFile.file_relation_path(
            item_file_name_with_path, self.file_info.root_path)
        # The object's own entry keeps the object id as its detail id.
        if CUtils.equal_ignore_case(query_file_relation_name, file_relation_name):
            params['dodid'] = self.object_id
        else:
            params['dodid'] = CUtils.one_id()

        # File type: default to file, override for directories.
        params['dodfiletype'] = self.FileType_File
        if CFile.is_dir(item_file_name_with_path):
            params['dodfiletype'] = self.FileType_Dir

        params['dodobjectid'] = self.object_id
        params['dodfilename'] = CFile.unify(file_relation_name)
        params['dodfileext'] = CFile.file_ext(item_file_name_with_path)
        params['dodfilesize'] = CFile.file_size(item_file_name_with_path)
        params['dodfilecreatetime'] = CFile.file_create_time(
            item_file_name_with_path)
        params['dodfilemodifytime'] = CFile.file_modify_time(
            item_file_name_with_path)
        sql_detail_insert_params_list.append((sql_detail_insert, params))

    if sql_detail_insert_params_list:
        try:
            CFactory().give_me_db(
                self.file_info.db_server_id).execute_batch(
                sql_detail_insert_params_list)
        except Exception as error:
            CLogger().warning('数据库处理出现异常, 错误信息为: {0}'.format(
                error.__str__()))
            return CResult.merge_result(self.Failure, '处理失败!')
    return CResult.merge_result(self.Success, '处理完毕!')
def check_all_ib_file_or_path_existed(self, ib_id):
    """Verify that every file recorded for inbound batch *ib_id* still exists
    on disk with the recorded size and modify time.

    :param ib_id: inbound batch identifier
    :return: CResult — Success when every record matches; Failure with up to
             four mismatched file names (plus '...' when more were found).
    """
    sql_all_ib_file = '''
    select coalesce(dm2_storage.dstownerpath, dm2_storage.dstunipath) || dm2_storage_file.dsffilerelationname as file_name
        , dm2_storage_file.dsffilesize as file_size
        , dm2_storage_file.dsffilemodifytime as file_modify_time
    from dm2_storage_file
        left join dm2_storage on dm2_storage.dstid = dm2_storage_file.dsfstorageid
    where dsf_ib_id = :ib_id
    '''
    rows = CFactory().give_me_db(self.get_mission_db_id()).all_row(
        sql_all_ib_file, {'ib_id': ib_id})

    def record_matches(name, row_index):
        # Blank names are treated as valid, same as the original logic.
        if CUtils.equal_ignore_case(name, ''):
            return True
        if not CFile.file_or_path_exist(name):
            return False
        if not CUtils.equal_ignore_case(
                CFile.file_modify_time(name),
                rows.value_by_name(row_index, 'file_modify_time', '')):
            return False
        return CFile.file_size(name) == rows.value_by_name(
            row_index, 'file_size', 0)

    bad_files = []
    truncated = False
    for row_index in range(rows.size()):
        name = rows.value_by_name(row_index, 'file_name', '')
        if record_matches(name, row_index):
            continue
        # Report at most four offending files, then stop scanning.
        if len(bad_files) > 3:
            truncated = True
            break
        bad_files.append(name)

    if not bad_files:
        return CResult.merge_result(self.Success, '所有文件均存在, 且与库中记录统一! ')

    message = ''
    for bad_file in bad_files:
        message = CUtils.str_append(message, bad_file)
    if truncated:
        message = CUtils.str_append(message, '...')
    message = CUtils.str_append(message, '上述数据与库中记录不统一, 请重新扫描入库! ')
    return CResult.merge_result(self.Failure, message)
def __stat_object_detail_of_schema(self) -> str:
    """Persist the statistics (sub-directory count, file count, total size)
    of the object's attached path into dm2_storage_obj_detail.

    Only applicable in Directory_Itself mode.

    :return: CResult string — Success on insert, Failure on database error.
    """
    sub_dir_count, file_count, file_size_sum = CFile.stat_of_path(
        self.__detail_file_path__,
        self.__detail_file_recurse__,
        self.__detail_file_match_text__,
        self.__detail_file_match_type__)

    rel_name = CFile.file_relation_path(
        self.__detail_file_path__, self.file_info.root_path)
    # Reuse the object id when this entry is the object's own record.
    own_record = CUtils.equal_ignore_case(
        self.file_info.file_name_with_rel_path, rel_name)

    # Regular files carry an extension; directories store NULL.
    if CFile.is_file(self.__detail_file_path__):
        detail_type = self.FileType_File
        detail_ext = CFile.file_ext(self.__detail_file_path__)
    else:
        detail_type = self.FileType_Dir
        detail_ext = None

    params = {
        'dodid': self.object_id if own_record else CUtils.one_id(),
        'dodfiletype': detail_type,
        'dodfileext': detail_ext,
        'dodobjectid': self.object_id,
        'dodfilename': CFile.unify(rel_name),
        'doddircount': sub_dir_count,
        'dodfilecount': file_count,
        'dodfilesize': file_size_sum,
        'dodfilecreatetime': CFile.file_create_time(self.__detail_file_path__),
        'dodfilemodifytime': CFile.file_modify_time(self.__detail_file_path__),
    }

    try:
        CFactory().give_me_db(self.file_info.db_server_id).execute(
            '''
            INSERT INTO dm2_storage_obj_detail(
                dodid, dodobjectid, dodfilename, dodfileext, dodfilesize,
                doddircount, dodfilecount,
                dodfilecreatetime, dodfilemodifytime, dodlastmodifytime, dodfiletype)
            VALUES (
                :dodid, :dodobjectid, :dodfilename, :dodfileext, :dodfilesize,
                :doddircount, :dodfilecount,
                :dodfilecreatetime, :dodfilemodifytime, now(), :dodfiletype)
            ''',
            params)
        return CResult.merge_result(self.Success, '处理完毕!')
    except Exception as error:
        CLogger().warning('数据库处理出现异常, 错误信息为: {0}'.format(error.__str__()))
        return CResult.merge_result(
            self.Failure,
            '数据库处理出现异常, 错误信息为: {0}'.format(error.__str__()))
def process_mission(self, dataset):
    """Register (insert or reset) the root directory row for a storage and
    mark the scan mission's status.

    Gives up with an error status once the recorded retry count reaches the
    configured limit.

    :param dataset: first row must provide root_directory_id, root_directory,
                    retry_times and last_process_memo
    :return: CResult describing the outcome.
    """
    storage_id = dataset.value_by_name(0, 'root_directory_id', '')
    storage_root_path = dataset.value_by_name(0, 'root_directory', '')
    CLogger().debug('storage_id: {0}'.format(storage_id))

    # Too many failed attempts: stop retrying and flag for manual repair.
    ds_retry_times = dataset.value_by_name(0, 'retry_times', 0)
    if ds_retry_times >= self.abnormal_job_retry_times():
        ds_last_process_memo = CUtils.any_2_str(
            dataset.value_by_name(0, 'last_process_memo', None))
        process_result = CResult.merge_result(
            self.Failure,
            '{0}, \n系统已经重试{1}次, 仍然未能解决, 请人工检查修正后重试!'.format(
                ds_last_process_memo, ds_retry_times))
        self.update_status(storage_id, process_result, self.ProcStatus_Error)
        return process_result

    sql_check_root_storage_dir_exist = '''
    select dsdid from dm2_storage_directory where dsdid = :dsdid
    '''
    sql_update_root_storage_dir = '''
    update dm2_storage_directory
    set dsdParentID = '-1', dsdDirectory = '', dsdDirtype = {1}
        , dsdDirectoryName = '', dsdPath = ''
        , dsdDirCreateTime = :dsddircreatetime, dsdDirLastModifyTime = :dsddirlastmodifytime
        , dsdLastModifyTime = Now(), dsd_directory_valid = {0}
    where dsdid = :dsdid
    '''.format(self.File_Status_Unknown, self.Dir_Type_Root)
    sql_insert_root_storage_dir = '''
    insert into dm2_storage_directory(
        dsdid, dsdparentid, dsdstorageid, dsddirectory, dsddirtype, dsdlastmodifytime
        , dsddirectoryname, dsd_directory_valid, dsdpath, dsddircreatetime, dsddirlastmodifytime)
    values(:dsdid, '-1', :dsdStorageID, '', {1}, Now()
        , '', {0}, '', :dsddircreatetime, :dsddirlastmodifytime)
    '''.format(self.File_Status_Unknown, self.Dir_Type_Root)

    try:
        db = CFactory().give_me_db(self.get_mission_db_id())
        params = dict()
        params['dsdid'] = storage_id
        params['dsdStorageID'] = storage_id
        # Pre-seed the timestamp binds: both SQL statements reference them,
        # so they must exist even when the root path is missing on disk
        # (previously they were left unset, breaking the bind).
        params['dsddircreatetime'] = None
        params['dsddirlastmodifytime'] = None
        if CFile.file_or_path_exist(storage_root_path):
            # Bind keys must match the :dsddircreatetime placeholder exactly
            # (the original 'dsdDirCreateTime' key did not).
            # NOTE(review): the original sourced BOTH timestamps from
            # file_modify_time — kept as-is; confirm whether create time
            # should use CFile.file_create_time instead.
            params['dsddircreatetime'] = CFile.file_modify_time(
                storage_root_path)
            params['dsddirlastmodifytime'] = CFile.file_modify_time(
                storage_root_path)

        # Reset the existing root row, or create it on first scan.
        if db.if_exists(sql_check_root_storage_dir_exist, params):
            db.execute(sql_update_root_storage_dir, params)
        else:
            db.execute(sql_insert_root_storage_dir, params)

        process_result = CResult.merge_result(CResult.Success, '存储扫描处理成功')
        self.update_status(storage_id, process_result)
        return process_result
    except DBException as err:
        # str(err): the original formatted err.__str__ (the bound method
        # object) because the call parentheses were missing.
        process_result = CResult.merge_result(
            CResult.Exception, '存储扫描失败, 原因为{0}'.format(str(err)))
        self.update_status(storage_id, process_result)
        return process_result