def update_dir_status(self, dir_id, result, status=None):
    """
    Update the scan status, memo and last-modify time of a directory record.

    :param dir_id: primary key (dsdid) of the dm2_storage_directory row
    :param result: CResult whose message is stored in dsdscanmemo
    :param status: explicit status to store; when None the status is derived
        from result (Finished on success, retry-encoded InQueue on failure)
    """
    params = {
        'dsdid': dir_id,
        'memo': CResult.result_message(result)
    }
    if status is not None:
        sql_update_directory_status = '''
        update dm2_storage_directory
        set dsdscanstatus = :status, dsdscanmemo = :memo, dsdlastmodifytime = now()
        where dsdid = :dsdid
        '''
        # only bind :status when the statement actually references it; the
        # original always passed it, even for SQL without the placeholder
        params['status'] = status
    elif CResult.result_success(result):
        sql_update_directory_status = '''
        update dm2_storage_directory
        set dsdscanstatus = {0}, dsdscanmemo = :memo, dsdlastmodifytime = now()
        where dsdid = :dsdid
        '''.format(self.ProcStatus_Finished)
    else:
        # failure: bump the retry counter kept in the tens digit and reset
        # the units digit to InQueue
        sql_update_directory_status = '''
        update dm2_storage_directory
        set dsdscanstatus = (dsdscanstatus / 10 + 1) * 10 + {0},
            dsdscanmemo = :memo, dsdlastmodifytime = now()
        where dsdid = :dsdid
        '''.format(self.ProcStatus_InQueue)
    CFactory().give_me_db(self.get_mission_db_id()).execute(
        sql_update_directory_status, params)
def update_notify_result(self, notify_id, result):
    """
    Persist the notify-processing outcome on the inbound record: status
    becomes WaitConfirm on success, Error on failure; the result message is
    stored as the processing memo.
    """
    if CResult.result_success(result):
        next_status = self.ProcStatus_WaitConfirm
    else:
        next_status = self.ProcStatus_Error
    CFactory().give_me_db(self.get_mission_db_id()).execute(
        '''
        update dm2_storage_inbound
        set dsi_na_status = {0}
            , dsi_na_proc_id = null
            , dsi_na_proc_memo = :notify_message
            , dsiproctime = now()
        where dsiid = :notify_id
        '''.format(next_status),
        {
            'notify_id': notify_id,
            'notify_message': CResult.result_message(result)
        })
def db_update_object_exception(self, dso_id, process_result, process_status=None):
    """
    Update the metadata-parse status of an object after an exception.

    :param dso_id: primary key (dsoid) of the dm2_storage_object row
    :param process_result: CResult whose message is stored as the parse memo
    :param process_status: explicit status to store; when None the status is
        re-encoded as a retry (tens digit incremented, InQueue in the units)
    """
    if process_status is not None:
        # fixed: the original applied .format(self.ProcStatus_InQueue) to
        # this SQL, which contains no '{0}' placeholder - the call was a
        # no-op; the status actually comes from the :status bind parameter
        CFactory().give_me_db(self.get_mission_db_id()).execute(
            '''
            update dm2_storage_object
            set dsometadataparsestatus = :status
                , dsometadataparsememo = :dsometadataparsememo
                , dsolastmodifytime = now()
            where dsoid = :dsoid
            ''',
            {
                'dsoid': dso_id,
                'status': process_status,
                'dsometadataparsememo': CResult.result_message(process_result)
            })
    else:
        CFactory().give_me_db(self.get_mission_db_id()).execute(
            '''
            update dm2_storage_object
            set dsometadataparsestatus = (dsometadataparsestatus / 10 + 1) * 10 + {0}
                , dsometadataparsememo = :dsometadataparsememo
                , dsolastmodifytime = now()
            where dsoid = :dsoid
            '''.format(self.ProcStatus_InQueue),
            {
                'dsoid': dso_id,
                'dsometadataparsememo': CResult.result_message(process_result)
            })
def db_update_object_status(self, dso_id, process_result):
    """
    Store the tags-parse outcome on the object row: status Finished on
    success, Error on failure, with the result message as the memo.
    """
    CLogger().debug(CResult.result_message(process_result))
    if CResult.result_success(process_result):
        target_status = self.ProcStatus_Finished
    else:
        target_status = self.ProcStatus_Error
    CFactory().give_me_db(self.get_mission_db_id()).execute(
        '''
        update dm2_storage_object
        set dsotagsparsestatus = {0}
            , dsolastmodifytime = now()
            , dsotagsparsememo = :dsotagsparsememo
        where dsoid = :dsoid
        '''.format(target_status),
        {
            'dsoid': dso_id,
            'dsotagsparsememo': CResult.result_message(process_result)
        })
def process(self) -> str:
    """
    Process the attachment files/directories of the object recorded in
    __file_info__ according to the __detail_* settings and record them in
    dm2_storage_object_detail; custom files are handled afterwards.
    :return: CResult string
    """
    self._before_process()
    if self._only_stat_file:
        stat_result = self.__stat_object_detail_of_schema()
        if not CResult.result_success(stat_result):
            return stat_result
    elif not CUtils.equal_ignore_case(self.__detail_file_path__, ''):
        detail_file_list = CFile.file_or_dir_fullname_of_path(
            self.__detail_file_path__,
            self.__detail_file_recurse__,
            self.__detail_file_match_text__,
            self.__detail_file_match_type__)
        schema_result = self.__inbound_object_detail_of_schema(detail_file_list)
        if not CResult.result_success(schema_result):
            return schema_result
    if len(self._file_custom_list) > 0:
        return self.inbound_object_detail_of_custom(self._file_custom_list)
    return CResult.merge_result(self.Success, '对象附属文件处理成功结束!')
def update_ib_data_status_in_core_or_mix_storage(self, ib_id, storage_id, ib_directory_name, ib_dir_id):
    """
    For online (core) or mix storage, inbound completion only requires the
    business status flags to be switched to Online and the target storage to
    be recorded on the inbound row - no data movement is performed.

    :param ib_id: inbound record id
    :param ib_dir_id: inbound directory id (not used by the statements below)
    :param storage_id: target storage id written back to the inbound record
    :param ib_directory_name: directory name, used in the result messages
    :return: CResult string
    """
    sql_update_file = '''
    update dm2_storage_file set dsf_bus_status = '{0}' where dsf_ib_id = :ib_id
    '''.format(self.IB_Bus_Status_Online)
    params_update_file = {'ib_id': ib_id}
    # sub-directory status
    sql_update_directory = '''
    update dm2_storage_directory set dsd_bus_status = '{0}' where dsd_ib_id = :ib_id
    '''.format(self.IB_Bus_Status_Online)
    params_update_directory = {'ib_id': ib_id}
    # object status
    sql_update_object = '''
    update dm2_storage_object set dso_bus_status = '{0}' where dso_ib_id = :ib_id
    '''.format(self.IB_Bus_Status_Online)
    params_update_object = {'ib_id': ib_id}
    # record the target storage on the inbound row
    sql_update_ib_target_storage = '''
    update dm2_storage_inbound set dsitargetstorageid = :target_storage_id where dsiid = :ib_id
    '''
    params_update_ib_target_storage = {
        'target_storage_id': storage_id,
        'ib_id': ib_id
    }
    commands = [(sql_update_file, params_update_file),
                (sql_update_directory, params_update_directory),
                (sql_update_object, params_update_object),
                (sql_update_ib_target_storage, params_update_ib_target_storage)]
    try:
        CFactory().give_me_db(self.get_mission_db_id()).execute_batch(commands)
        return CResult.merge_result(
            self.Success,
            '存储[{0}]下的数据[{1}]入库成功!'.format(storage_id, ib_directory_name))
    except Exception as error:
        # fixed: failure message used to read '入库成功失败' ("inbound
        # success failure"); corrected to '入库失败'
        return CResult.merge_result(
            self.Failure,
            '存储[{0}]下的数据[{1}]入库失败, 错误原因为: [{2}]!'.format(
                storage_id, ib_directory_name, error.__str__()))
def init_metadata_bus(self, parser: CMetaDataParser) -> str:
    """
    Load the xml-format business metadata file into parser.metadata.

    :param parser: metadata parser that receives the business metadata file
    :return: CResult string
    """
    if not CFile.file_or_path_exist(self.__bus_metadata_xml_file_name__):
        return CResult.merge_result(
            self.Failure,
            '元数据文件[{0}]不存在, 无法解析! '.format(
                self.__bus_metadata_xml_file_name__))
    try:
        parser.metadata.set_metadata_bus_file(
            self.Success,
            '元数据文件[{0}]成功加载! '.format(self.__bus_metadata_xml_file_name__),
            self.MetaDataFormat_XML,
            self.__bus_metadata_xml_file_name__)
        return CResult.merge_result(
            self.Success,
            '元数据文件[{0}]成功加载! '.format(self.__bus_metadata_xml_file_name__))
    except Exception:
        # fixed: was a bare 'except:' which also swallows SystemExit and
        # KeyboardInterrupt; narrowed to Exception
        parser.metadata.set_metadata_bus(
            self.Failure,
            '元数据文件[{0}]格式不合法, 无法处理! '.format(
                self.__bus_metadata_xml_file_name__),
            self.MetaDataFormat_Text, '')
        return CResult.merge_result(
            self.Exception,
            '元数据文件[{0}]格式不合法, 无法处理! '.format(
                self.__bus_metadata_xml_file_name__))
def import_data(self, data_source: CDataSetSeqReader, data_target: CTable) -> str:
    """
    Import every record of data_source into data_target sequentially.

    :param data_source: sequential reader positioned over the source rows
    :param data_target: target table receiving the rows
    :return: CResult string reporting the number of imported records
    """
    success_record_count = 0
    if not data_source.first():
        return CResult.merge_result(
            self.Success, '数据源无有效导入数据, 系统自动设定导入成功! '
        )
    while True:
        try:
            result = self.__import_each_record(data_source, data_target)
            if not CResult.result_success(result):
                return result
        except Exception as error:
            # fixed off-by-one: the failing record is the one after the
            # already-imported count, so report success_record_count + 1
            # (the original reported '第0条' for a first-record failure)
            return CResult.merge_result(
                self.Failure,
                '第{0}条数据入库失败, 详细错误原因为: {1}!'.format(
                    success_record_count + 1, error.__str__())
            )
        success_record_count = success_record_count + 1
        if not data_source.next():
            break
    return CResult.merge_result(
        self.Success,
        '数据源的全部数据导入成功, 共导入记录数[{0}]! '.format(success_record_count)
    )
def process_mission(self, dataset) -> str:
    """
    Scan one storage directory mission: refresh the file and sub-directory
    records below the directory and update its scan status.

    :param dataset: first row carries the directory/storage mission fields
    :return: CResult string
    """
    ds_id = dataset.value_by_name(0, 'query_dir_id', '')
    ds_storage_id = dataset.value_by_name(0, 'query_storage_id', '')
    inbound_id = dataset.value_by_name(0, 'query_dir_ib_id', None)
    ds_subpath = dataset.value_by_name(0, 'query_subpath', '')
    ds_root_path = dataset.value_by_name(0, 'query_rootpath', '')
    ds_retry_times = dataset.value_by_name(0, 'retry_times', 0)
    # give up after the configured number of retries and flag the directory
    if ds_retry_times >= self.abnormal_job_retry_times():
        ds_last_process_memo = CUtils.any_2_str(
            dataset.value_by_name(0, 'last_process_memo', None))
        process_result = CResult.merge_result(
            self.Failure,
            '{0}, \n系统已经重试{1}次, 仍然未能解决, 请人工检查修正后重试!'.format(
                ds_last_process_memo, ds_retry_times))
        self.update_dir_status(ds_id, process_result, self.ProcStatus_Error)
        return process_result
    if ds_subpath == '':
        ds_subpath = ds_root_path
    else:
        ds_subpath = CFile.join_file(ds_root_path, ds_subpath)
    # mark validity of every sub-directory/file as unknown before re-scan
    self.init_file_or_subpath_valid_unknown(ds_id)
    try:
        # nearest ancestor (or self) directory that declares a scan rule
        sql_get_rule = '''
        select dsdScanRule
        from dm2_storage_directory
        where dsdStorageid = :dsdStorageID
            and position((dsddirectory || '{0}') in :dsdDirectory) = 1
            and dsdScanRule is not null
        order by dsddirectory desc
        limit 1
        '''.format(CFile.sep())
        rule_ds = CFactory().give_me_db(self.get_mission_db_id()).one_row(
            sql_get_rule, {
                'dsdStorageID': ds_storage_id,
                'dsdDirectory': ds_subpath
            })
        # fixed: the selected column is dsdScanRule; the original read
        # 'dsScanRule' and therefore always received the default ''
        ds_rule_content = rule_ds.value_by_name(0, 'dsdScanRule', '')
        CLogger().debug('处理的目录为: {0}'.format(ds_subpath))
        self.parser_file_or_subpath_of_path(dataset, ds_id, ds_subpath,
                                            ds_rule_content, inbound_id)
        result = CResult.merge_result(
            self.Success,
            '目录为[{0}]下的文件和子目录扫描处理成功!'.format(ds_subpath))
        self.update_dir_status(ds_id, result)
        return result
    except Exception as err:
        result = CResult.merge_result(
            self.Failure,
            '目录为[{0}]下的文件和子目录扫描处理出现错误!错误原因为: {1}'.format(
                ds_subpath, err.__str__()))
        self.update_dir_status(ds_id, result)
        return result
    finally:
        # anything still marked unknown after the scan no longer exists
        self.exchange_file_or_subpath_valid_unknown2invalid(ds_id)
def __inbound_object_detail_of_schema(self, list_file_fullname):
    """
    Insert one dm2_storage_obj_detail row per existing file/directory in
    list_file_fullname; the entry matching the object's own relative path
    reuses the object id, every other entry gets a fresh id.
    """
    sql_detail_insert = '''
    INSERT INTO dm2_storage_obj_detail(
        dodid, dodobjectid, dodfilename, dodfileext, dodfilesize,
        dodfilecreatetime, dodfilemodifytime, dodlastmodifytime, dodfiletype)
    VALUES (
        :dodid, :dodobjectid, :dodfilename, :dodfileext, :dodfilesize,
        :dodfilecreatetime, :dodfilemodifytime, now(), :dodfiletype)
    '''
    batch_commands = []
    query_file_relation_name = self.file_info.file_name_with_rel_path
    for file_full_name in list_file_fullname:
        CLogger().debug(file_full_name)
        if not CFile.file_or_path_exist(file_full_name):
            continue
        file_relation_name = CFile.file_relation_path(
            file_full_name, self.file_info.root_path)
        record = dict()
        if CUtils.equal_ignore_case(query_file_relation_name, file_relation_name):
            record['dodid'] = self.object_id
        else:
            record['dodid'] = CUtils.one_id()
        if CFile.is_dir(file_full_name):
            record['dodfiletype'] = self.FileType_Dir
        else:
            record['dodfiletype'] = self.FileType_File
        record['dodobjectid'] = self.object_id
        record['dodfilename'] = CFile.unify(file_relation_name)
        record['dodfileext'] = CFile.file_ext(file_full_name)
        record['dodfilesize'] = CFile.file_size(file_full_name)
        record['dodfilecreatetime'] = CFile.file_create_time(file_full_name)
        record['dodfilemodifytime'] = CFile.file_modify_time(file_full_name)
        batch_commands.append((sql_detail_insert, record))
    if len(batch_commands) > 0:
        try:
            CFactory().give_me_db(self.file_info.db_server_id).execute_batch(
                batch_commands)
        except Exception as error:
            CLogger().warning('数据库处理出现异常, 错误信息为: {0}'.format(
                error.__str__()))
            return CResult.merge_result(self.Failure, '处理失败!')
    return CResult.merge_result(self.Success, '处理完毕!')
def execute(self) -> str:
    """
    Fetch the mission info and dispatch it to process_mission; report a
    failure result when no mission is configured or available.
    :return: CResult string
    """
    mission_data = self.get_mission_info()
    if mission_data is None:
        return CResult.merge_result(CResult.Failure, '任务配置异常, 系统无法处理该任务!')
    if mission_data.is_empty():
        return CResult.merge_result(CResult.Failure, '没有可执行的任务!')
    return self.process_mission(mission_data)
def check_all_ib_file_or_path_existed(self, ib_id):
    """
    Verify that the recorded metadata of the pending inbound data matches
    the physical files on disk.
    . returns a CResult
    . Success when every file exists with matching size and modify time
    . Failure otherwise, with the offending file names in the message
    :param ib_id:
    :return:
    """
    sql_all_ib_file = '''
    select coalesce(dm2_storage.dstownerpath, dm2_storage.dstunipath)
            || dm2_storage_file.dsffilerelationname as file_name
        , dm2_storage_file.dsffilesize as file_size
        , dm2_storage_file.dsffilemodifytime as file_modify_time
    from dm2_storage_file
        left join dm2_storage on dm2_storage.dstid = dm2_storage_file.dsfstorageid
    where dsf_ib_id = :ib_id
    '''
    ds_ib_file = CFactory().give_me_db(self.get_mission_db_id()).all_row(
        sql_all_ib_file, {'ib_id': ib_id})
    invalid_file_list = []
    more_failure_file = False
    for row_index in range(ds_ib_file.size()):
        file_name = ds_ib_file.value_by_name(row_index, 'file_name', '')
        if CUtils.equal_ignore_case(file_name, ''):
            continue  # rows without a resolvable path are treated as valid
        if not CFile.file_or_path_exist(file_name):
            file_valid = False
        elif not CUtils.equal_ignore_case(
                CFile.file_modify_time(file_name),
                ds_ib_file.value_by_name(row_index, 'file_modify_time', '')):
            file_valid = False
        elif CFile.file_size(file_name) != ds_ib_file.value_by_name(
                row_index, 'file_size', 0):
            file_valid = False
        else:
            file_valid = True
        if file_valid:
            continue
        # collect at most a handful of names; flag that there are more
        if len(invalid_file_list) <= 3:
            invalid_file_list.append(file_name)
        else:
            more_failure_file = True
            break
    if len(invalid_file_list) == 0:
        return CResult.merge_result(self.Success, '所有文件均存在, 且与库中记录统一! ')
    message = ''
    for invalid_file in invalid_file_list:
        message = CUtils.str_append(message, invalid_file)
    if more_failure_file:
        message = CUtils.str_append(message, '...')
    message = CUtils.str_append(message, '上述数据与库中记录不统一, 请重新扫描入库! ')
    return CResult.merge_result(self.Failure, message)
def process(self) -> str:
    """
    TODO (owner: Zhao Yufei): extract the real quick views for vector data
    and store them under self.file_content.view_root_dir. For now this is a
    stub returning fixed placeholder browse/thumb names in the result.
    Note: if memory leaks appear, extract metadata in a child process into a
    file and parse it in this process.
    :return: CResult string carrying browse and thumb file names
    """
    result = CResult.merge_result(self.Success, '处理完毕!')
    result = CResult.merge_result_info(result, self.Name_Browse, '/aa/bb_browse.png')
    result = CResult.merge_result_info(result, self.Name_Thumb, '/aa/bb_thumb.png')
    return result
def process(self) -> str:
    """
    Extract browse / thumb / geotiff views for raster data, storing them
    under self.file_content.view_root_dir in a catalog/group/type/yyyy/mm/dd
    layout; the result carries the relative browse, thumb and geotiff names.
    Note: view creation runs in a separate process (CProcessUtils) to
    contain possible memory leaks of the underlying libraries.
    (owners: Zhang Yuanbo, Zhao Yufei)
    :return: CResult string
    """
    # classification of the object drives the storage sub-path; defaults
    # apply when no classification plugin is available.
    # fixed: local 'type' shadowed the builtin -> renamed; unused local
    # create_format_time removed
    obj_type = 'default'
    obj_group = 'default'
    obj_catalog = 'default'
    class_classified_obj = CObject.get_plugins_instance_by_object_id(
        self.file_info.db_server_id, self.object_id)
    if class_classified_obj is not None:
        plugins_info = class_classified_obj.get_information()
        obj_type = CUtils.dict_value_by_name(
            plugins_info, class_classified_obj.Plugins_Info_Type, 'default')
        obj_group = CUtils.dict_value_by_name(
            plugins_info, class_classified_obj.Plugins_Info_Group, 'default')
        obj_catalog = CUtils.dict_value_by_name(
            plugins_info, class_classified_obj.Plugins_Info_Catalog, 'default')
    create_time = CTime.today()
    year = CTime.format_str(create_time, '%Y')
    month = CTime.format_str(create_time, '%m')
    day = CTime.format_str(create_time, '%d')
    sep = CFile.sep()  # platform-specific path separator
    relative_path_part = sep.join([obj_catalog, obj_group, obj_type, year, month, day])
    view_relative_path_browse = r'{2}{0}{2}{1}_browse.png'.format(
        relative_path_part, self.object_id, sep)
    view_relative_path_thumb = r'{2}{0}{2}{1}_thumb.jpg'.format(
        relative_path_part, self.object_id, sep)
    view_relative_path_geotiff = r'{2}{0}{2}{1}_browse.tiff'.format(
        relative_path_part, self.object_id, sep)
    browse_full_path = CFile.join_file(
        self.file_content.view_root_dir, view_relative_path_browse)
    thumb_full_path = CFile.join_file(
        self.file_content.view_root_dir, view_relative_path_thumb)
    geotiff_full_path = CFile.join_file(
        self.file_content.view_root_dir, view_relative_path_geotiff)
    # out-of-process invocation: arguments are passed via json
    json_out_view = CJson()
    json_out_view.set_value_of_name('image_path', self.file_info.file_name_with_full_path)
    json_out_view.set_value_of_name('browse_full_path', browse_full_path)
    json_out_view.set_value_of_name('thumb_full_path', thumb_full_path)
    json_out_view.set_value_of_name('geotiff_full_path', geotiff_full_path)
    result_view = CProcessUtils.processing_method(self.create_view_json, json_out_view)
    if CResult.result_success(result_view):
        result = CResult.merge_result(self.Success, '处理完毕!')
        result = CResult.merge_result_info(result, self.Name_Browse, view_relative_path_browse)
        result = CResult.merge_result_info(result, self.Name_Thumb, view_relative_path_thumb)
        result = CResult.merge_result_info(result, self.Name_Browse_GeoTiff, view_relative_path_geotiff)
    else:
        result = result_view
    return result
def delete_data(self, session: Session = None) -> str:
    """
    Execute the prepared delete statement, optionally inside an existing
    session; any exception is converted to a Failure result.
    :param session: when provided, the statement runs on this session
    :return: CResult string
    """
    try:
        sql_text, sql_params = self.__prepare_delete()
        if session is not None:
            self.__database.session_execute(session, sql_text, sql_params)
        else:
            self.__database.execute(sql_text, sql_params)
        return CResult.merge_result(CResult.Success)
    except Exception as error:
        return CResult.merge_result(CResult.Failure, error.__str__())
def __stat_object_detail_of_schema(self) -> str:
    """
    Store the aggregate statistics (sub-directory count, file count, total
    size) of the object's attachment path into dm2_storage_obj_detail.
    . only used in Directory_Itself mode
    :return: CResult string
    """
    dir_count, file_count, file_size_sum = CFile.stat_of_path(
        self.__detail_file_path__,
        self.__detail_file_recurse__,
        self.__detail_file_match_text__,
        self.__detail_file_match_type__)
    file_relation_name = CFile.file_relation_path(
        self.__detail_file_path__, self.file_info.root_path)
    record = dict()
    # the detail row for the object's own path reuses the object id
    if CUtils.equal_ignore_case(self.file_info.file_name_with_rel_path,
                                file_relation_name):
        record['dodid'] = self.object_id
    else:
        record['dodid'] = CUtils.one_id()
    if CFile.is_file(self.__detail_file_path__):
        record['dodfiletype'] = self.FileType_File
        record['dodfileext'] = CFile.file_ext(self.__detail_file_path__)
    else:
        record['dodfiletype'] = self.FileType_Dir
        record['dodfileext'] = None
    record['dodobjectid'] = self.object_id
    record['dodfilename'] = CFile.unify(file_relation_name)
    record['doddircount'] = dir_count
    record['dodfilecount'] = file_count
    record['dodfilesize'] = file_size_sum
    record['dodfilecreatetime'] = CFile.file_create_time(self.__detail_file_path__)
    record['dodfilemodifytime'] = CFile.file_modify_time(self.__detail_file_path__)
    try:
        CFactory().give_me_db(self.file_info.db_server_id).execute(
            '''
            INSERT INTO dm2_storage_obj_detail(
                dodid, dodobjectid, dodfilename, dodfileext, dodfilesize,
                doddircount, dodfilecount, dodfilecreatetime, dodfilemodifytime,
                dodlastmodifytime, dodfiletype)
            VALUES (
                :dodid, :dodobjectid, :dodfilename, :dodfileext, :dodfilesize,
                :doddircount, :dodfilecount, :dodfilecreatetime, :dodfilemodifytime,
                now(), :dodfiletype)
            ''', record)
        return CResult.merge_result(self.Success, '处理完毕!')
    except Exception as error:
        CLogger().warning('数据库处理出现异常, 错误信息为: {0}'.format(error.__str__()))
        return CResult.merge_result(
            self.Failure,
            '数据库处理出现异常, 错误信息为: {0}'.format(error.__str__()))
def update_data(self, session: Session = None) -> str:
    """
    Execute the prepared update statements as a batch, optionally inside an
    existing session; any exception is converted to a Failure result.
    :param session: when provided, the batch runs on this session
    :return: CResult string
    """
    try:
        sql_list = self.__prepare_update_data()
        if session is not None:
            self.__database.session_execute_batch(session, sql_list)
        else:
            self.__database.execute_batch(sql_list)
        return CResult.merge_result(CResult.Success)
    except Exception as error:
        return CResult.merge_result(CResult.Failure, error.__str__())
def _do_sync(self) -> str:
    """
    Sync the object into the distribution table. The target table has a
    composite primary key, so instead of table.if_exists() we probe by aprid
    first to decide between insert and update (this also avoids touching
    columns whose defaults must only be set at insert time).
    :return: CResult string
    """
    try:
        table_name = CUtils.dict_value_by_name(self.information(), 'table_name', '')
        # security fix: aprid is passed as a bind parameter instead of being
        # formatted into the SQL text (the table name is an identifier and
        # cannot be bound)
        sql_check = '''
        select aprid from {0} where aprid = :obj_id
        '''.format(table_name)
        record_count = CFactory().give_me_db(self._db_id).one_row(
            sql_check, {'obj_id': self._obj_id}).size()
        # no row found -> insert; otherwise update
        if record_count == 0:
            insert_or_update = self.DB_True
        else:
            insert_or_update = self.DB_False
        table = CTable()
        table.load_info(self._db_id, table_name)
        # the flag is forwarded so the field list excludes columns that must
        # keep their insert-time defaults on update
        for field_dict in self.get_sync_dict_list(insert_or_update):
            field_name = CUtils.dict_value_by_name(field_dict, 'field_name', '')
            field_value = CUtils.dict_value_by_name(field_dict, 'field_value', '')
            field_value_type = CUtils.dict_value_by_name(field_dict, 'field_value_type', '')
            column = table.column_list.column_by_name(field_name)
            if CUtils.equal_ignore_case(field_value, ''):
                column.set_null()
            elif CUtils.equal_ignore_case(field_value_type, self.DataValueType_Value):
                column.set_value(field_value)
            elif CUtils.equal_ignore_case(field_value_type, self.DataValueType_SQL):
                column.set_sql(field_value)
            elif CUtils.equal_ignore_case(field_value_type, self.DataValueType_Array):
                column.set_array(field_value)
        # existence is already known, so call insert/update directly rather
        # than save_data() (which would re-query)
        if insert_or_update:
            result = table.insert_data()
        else:
            result = table.update_data()
        if CResult.result_success(result):
            return CResult.merge_result(
                self.Success,
                '对象[{0}]的同步成功! '.format(self._obj_name)
            )
        return result
    except Exception as error:
        return CResult.merge_result(
            self.Failure,
            '数据检索分发模块在进行数据同步时出现错误:同步的对象[{0}]在处理时出现异常, 详细情况: [{1}]!'.format(
                self._obj_name,
                error.__str__()
            )
        )
def access(self, obj_id, obj_name, obj_type, quality) -> str:
    """
    Report whether the identified object is usable by this third-party
    module. The returned CResult json always carries an extra Access
    attribute (Pass or Forbid), as required by the access protocol.
    :return: CResult string with the Access attribute
    """
    result = self.__test_module_obj(obj_id, obj_name)
    if CResult.result_success(result):
        access_value = self.DataAccess_Pass
    else:
        access_value = self.DataAccess_Forbid
    return CResult.merge_result_info(result, self.Name_Access, access_value)
def sync(self) -> str:
    """
    Parse the satellite business metadata and sync it into the main,
    metadata and ndi tables; the first failing step's result is returned.
    :return: CResult string
    """
    try:
        parse_result = self.process_metadata_bus_dict()
        if not CResult.result_success(parse_result):
            return CResult.merge_result(
                self.Failure,
                '卫星数据的业务元数据的详细内容解析出错!原因为{0}'.format(
                    CResult.result_message(parse_result)))
        # all three steps run before the results are inspected, matching
        # the original control flow
        step_results = (self.process_main_table(),
                        self.process_metadata_table(),
                        self.process_ndi_table())
        for step_result in step_results:
            if not CResult.result_success(step_result):
                return step_result
        return CResult.merge_result(
            self.Success, '对象[{0}]的同步成功! '.format(self._obj_name))
    except Exception as error:
        return CResult.merge_result(
            self.Failure,
            '数据检索分发模块在进行数据同步时出现错误:同步的对象[{0}]在处理时出现异常, 详细情况: [{1}]!'.format(
                self._obj_name, error.__str__()))
def access(self) -> str:
    """
    Report this module's access verdict for the current object; the result
    always carries an Access attribute, here fixed to Forbid.
    :return: CResult string with the Access attribute
    """
    module_id = CUtils.dict_value_by_name(self.information(), self.Name_ID, '')
    module_title = CUtils.dict_value_by_name(self.information(), self.Name_Title, '')
    result = CResult.merge_result(
        self.Success,
        '模块[{0}.{1}]对对象[{2}]的访问能力已经分析完毕!'.format(
            module_id, module_title, self._obj_name))
    return CResult.merge_result_info(result, self.Name_Access, self.DataAccess_Forbid)
def process_mission(self, dataset) -> str:
    """
    Sync one identified object with a third-party access module and record
    the outcome on the notify record.
    :param dataset: first row carries the na/app/object fields
    :return: CResult string
    """
    ds_na_id = dataset.value_by_name(0, 'na_id', '')
    ds_app_id = dataset.value_by_name(0, 'app_id', '')
    ds_object_id = dataset.value_by_name(0, 'object_id', '')
    ds_object_type = dataset.value_by_name(0, 'object_type', '')
    ds_object_name = dataset.value_by_name(0, 'object_name', '')
    ds_object_access = dataset.value_by_name(0, 'object_access', self.DataAccess_Forbid)
    CLogger().debug('与第三方模块[{0}]同步的对象为: [{1}]'.format(
        ds_app_id, ds_object_name))
    try:
        module_file_name = CFile.join_file(
            CSys.get_metadata_data_access_modules_root_dir(),
            '{0}.{1}'.format(ds_app_id, self.FileExt_Py))
        # the module may be missing either as a file or as a loadable
        # object; both cases pass the sync through with the same message
        module_obj = None
        if CFile.file_or_path_exist(module_file_name):
            module_obj = CObject.create_module_instance(
                CSys.get_metadata_data_access_modules_root_name(), ds_app_id,
                self.get_mission_db_id())
        if module_obj is None:
            message = '第三方模块[{0}]没有设置对应的算法, 直接通过!'.format(ds_app_id)
            result = CResult.merge_result(self.Success, message)
            self.update_sync_result(ds_na_id, result)
            return result
        # fixed: removed the unused module_title local the original computed
        result = module_obj.sync(ds_object_access, ds_object_id,
                                 ds_object_name, ds_object_type, None)
        self.update_sync_result(ds_na_id, result)
        return result
    except Exception as error:
        result = CResult.merge_result(
            self.Failure,
            '与第三方模块[{0}]同步的对象: [{1}]的同步过程出现异常, 详细情况: [{2}]!'.format(
                ds_app_id, ds_object_name, error.__str__()))
        self.update_sync_result(ds_na_id, result)
        return result
def process(self) -> str:
    """
    Build browse / thumb / geotiff views for the transformed file in a
    separate process, then remove the by-products that are not needed.
    :return: CResult string carrying the generated view file names
    """
    browse_full_path = CFile.join_file(
        self.view_path, '{0}_browse.png'.format(self.object_id))
    thumb_full_path = CFile.join_file(
        self.view_path, '{0}_thumb.jpg'.format(self.object_id))
    geotiff_full_path = CFile.join_file(
        self.view_path, '{0}_browse.tiff'.format(self.object_id))
    # out-of-process invocation: arguments are passed via json
    json_out_view = CJson()
    json_out_view.set_value_of_name('image_path', self.transform_file)
    json_out_view.set_value_of_name('browse_full_path', browse_full_path)
    json_out_view.set_value_of_name('thumb_full_path', thumb_full_path)
    json_out_view.set_value_of_name('geotiff_full_path', geotiff_full_path)
    result_view = CProcessUtils.processing_method(self.create_view_json, json_out_view)
    view_created = CResult.result_success(result_view)
    # on success only the gdal side-car files and the geotiff are removed;
    # on failure the (possibly partial) views themselves go too
    cleanup_list = []
    if not view_created:
        cleanup_list.append(browse_full_path)
        cleanup_list.append(thumb_full_path)
    cleanup_list.append('{0}.aux.xml'.format(browse_full_path))
    cleanup_list.append('{0}.aux.xml'.format(thumb_full_path))
    cleanup_list.append(geotiff_full_path)
    for cleanup_file in cleanup_list:
        if CFile.file_or_path_exist(cleanup_file):
            CFile.remove_file(cleanup_file)
    if view_created:
        result = CResult.merge_result(self.Success, '处理完毕!')
        result = CResult.merge_result_info(
            result, self.Name_Browse, CFile.file_name(browse_full_path))
        result = CResult.merge_result_info(
            result, self.Name_Thumb, CFile.file_name(thumb_full_path))
        result = CResult.merge_result_info(
            result, self.Name_Browse_GeoTiff, CFile.file_name(geotiff_full_path))
    else:
        result = result_view
    return result
def update_inbound_qi_result(self, notify_id, result, storage_type='mix', storage_option=None, ib_option=None):
    # Persist the quality-inspection (QI) outcome on the inbound record and,
    # on success, advance it to the next inbound status.
    # :param notify_id: dsiid of the dm2_storage_inbound row
    # :param result: CResult of the QI step; its message becomes the memo
    # :param storage_type: selects which configuration switch (and which
    #     default) decides whether inbound starts immediately after QI
    # :param storage_option: not used in this method body - TODO confirm
    #     whether callers rely on it elsewhere
    # :param ib_option: not used in this method body - TODO confirm
    CLogger().debug(CResult.result_message(result))
    if CResult.result_success(result):
        if CUtils.equal_ignore_case(storage_type, self.Storage_Type_InBound):
            # inbound storage: switch defaults to ON when the setting is absent
            switch_inbound_after_qi_immediately_status = CUtils.equal_ignore_case(
                settings.application.xpath_one(
                    self.path_switch(
                        self.Path_Setting_MetaData_QI_Switch,
                        self.Switch_Inbound_After_QI_Immediately_Of_IB_Storage),
                    self.Name_ON),
                self.Name_ON)
        else:
            # mix storage: switch defaults to OFF when the setting is absent
            # (the asymmetric defaults are deliberate per storage type)
            switch_inbound_after_qi_immediately_status = CUtils.equal_ignore_case(
                settings.application.xpath_one(
                    self.path_switch(
                        self.Path_Setting_MetaData_QI_Switch,
                        self.Switch_Inbound_After_QI_Immediately_Of_MIX_Storage),
                    self.Name_OFF),
                self.Name_ON)
        # either queue the inbound immediately or park it at QI-finished
        if switch_inbound_after_qi_immediately_status:
            next_status = self.IB_Status_IB_InQueue
        else:
            next_status = self.IB_Status_QI_Finished
        CFactory().give_me_db(self.get_mission_db_id()).execute(
            '''
            update dm2_storage_inbound
            set dsiStatus = {0}, dsiprocmemo = :notify_message, dsiproctime = now()
            where dsiid = :notify_id
            '''.format(next_status),
            {
                'notify_id': notify_id,
                'notify_message': CResult.result_message(result)
            })
    else:
        # QI failure: only the memo and time are updated, status unchanged
        CFactory().give_me_db(self.get_mission_db_id()).execute(
            '''
            update dm2_storage_inbound
            set dsiprocmemo = :notify_message, dsiproctime = now()
            where dsiid = :notify_id
            ''',
            {
                'notify_id': notify_id,
                'notify_message': CResult.result_message(result)
            })
def parser_metadata_spatial_after_qa(self, parser: CMetaDataParser):
    """
    After spatial QA, force the coordinate system configured for this plugin
    (if any) onto the parsed metadata, marking it as a custom projection
    source.
    """
    result = super().parser_metadata_spatial_after_qa(parser)
    try:
        prj_project = CUtils.dict_value_by_name(
            self.get_information(), self.Plugins_Info_Coordinate_System, '')
        if not CUtils.equal_ignore_case(prj_project, ''):
            parser.metadata.set_metadata_spatial(
                self.DB_True,
                '元数据文件[{0}]成功加载! '.format(self.file_info.file_name_with_full_path),
                self.Spatial_MetaData_Type_Prj_Project,
                prj_project)
            parser.metadata.set_metadata_spatial(
                self.DB_True,
                '元数据文件[{0}]成功加载! '.format(self.file_info.file_name_with_full_path),
                self.Spatial_MetaData_Type_Prj_Source,
                self.Prj_Source_Custom)
    except Exception as error:
        parser.metadata.set_metadata_spatial(
            self.DB_False,
            '元数据文件[{0}]格式不合法, 无法处理! 详细错误为: {1}'.format(
                self.file_info.file_name_with_full_path, error.__str__()),
            self.MetaDataFormat_Text, '')
        return CResult.merge_result(
            self.Exception,
            '元数据文件[{0}]格式不合法, 无法处理! '.format(
                self.file_info.file_name_with_full_path))
    return result
def db_update_object_status(self, dso_id, process_result, process_status=None):
    """
    Update the metadata-parse status of an object: an explicit status wins,
    otherwise Finished on success or a retry-encoded InQueue on failure.
    """
    if process_status is not None:
        plain_status = process_status
    elif CResult.result_success(process_result):
        plain_status = self.ProcStatus_Finished
    else:
        plain_status = None
    if plain_status is not None:
        CFactory().give_me_db(self.get_mission_db_id()).execute(
            '''
            update dm2_storage_object
            set dsometadataparsestatus = {0}
                , dsolastmodifytime = now()
            where dsoid = :dsoid
            '''.format(plain_status),
            {'dsoid': dso_id})
    else:
        # failure: bump the retry counter kept in the tens digit, reset the
        # units digit to InQueue
        CFactory().give_me_db(self.get_mission_db_id()).execute(
            '''
            update dm2_storage_object
            set dsometadataparsestatus = (dsometadataparsestatus / 10 + 1) * 10 + {0}
                , dsolastmodifytime = now()
            where dsoid = :dsoid
            '''.format(self.ProcStatus_InQueue),
            {'dsoid': dso_id})
def process(self) -> str:
    """
    Report successful loading of the source file to be transformed.
    :return: CResult string
    """
    message = '文件[{0}]成功加载! '.format(self.transformer_src_filename)
    return CResult.merge_result(self.Success, message)
def process_metadata_bus_dict(self):
    """
    Parse the object's business-metadata xml (together with the multiple
    metadata files located next to the browse image) into a dict and cache
    it on this instance.
    :return: CResult string
    """
    dataset = self._dataset
    class_plugins = self.get_class_plugins()
    try:
        metadata_xml = CXml()
        metadata_xml.load_xml(dataset.value_by_name(0, 'dsometadataxml_bus', ''))
        view_path = settings.application.xpath_one(
            self.Path_Setting_MetaData_Dir_View, None)
        browser_path = CFile.file_path(
            dataset.value_by_name(0, 'dso_browser', None))
        bus_filename_dict = class_plugins.get_multiple_metadata_bus_filename_with_path(
            CFile.join_file(view_path, browser_path))
        result, metadata_bus_dict = class_plugins.metadata_bus_xml_to_dict(
            metadata_xml, bus_filename_dict)
        self.set_metadata_bus_dict(metadata_bus_dict)
        return result
    except Exception as error:
        return CResult.merge_result(
            self.Failure,
            '卫星数据的业务元数据的详细内容解析出错!原因为{0}'.format(error.__str__()))
def save_metadata_time(self) -> str:
    """
    Write the time metadata (result, memo, content) back onto the object row.
    :return: CResult string
    """
    mdt_ext_result, mdt_ext_memo, mdt_ext_content = self.metadata.metadata_time()
    # None maps to sql null and is accepted by the jsonb column, while ''
    # is not - blank out the content on failure or empty result
    if mdt_ext_result == self.DB_False or CUtils.equal_ignore_case(mdt_ext_result, ''):
        mdt_ext_content = None
    CFactory().give_me_db(self.file_info.db_server_id).execute(
        '''
        update dm2_storage_object
        set dso_time_result = :dso_time_result
            , dso_time_parsermemo = :dso_time_parsermemo
            , dso_time = :dso_time
        where dsoid = :dsoid
        ''',
        {
            'dsoid': self.object_id,
            'dso_time_result': mdt_ext_result,
            'dso_time_parsermemo': mdt_ext_memo,
            'dso_time': mdt_ext_content
        })
    return CResult.merge_result(self.Success, '时间元数据处理完毕!')
def save_metadata_view(self) -> str:
    """
    Write the view metadata (result, memo, browse and thumb file names)
    back onto the object row; on failure both file names are nulled.
    :return: CResult string
    """
    mdt_view_result, mdt_view_memo, mdt_view_thumb_file, mdt_view_browse_file = \
        self.metadata.metadata_view()
    if mdt_view_result == self.DB_False:
        mdt_view_thumb_file = None
        mdt_view_browse_file = None
    CFactory().give_me_db(self.file_info.db_server_id).execute(
        '''
        update dm2_storage_object
        set dso_view_result = :dso_view_result
            , dso_view_parsermemo = :dso_view_parsermemo
            , dso_browser = :dso_browser
            , dso_thumb = :dso_thumb
        where dsoid = :dsoid
        ''',
        {
            'dsoid': self.object_id,
            'dso_view_result': mdt_view_result,
            'dso_view_parsermemo': mdt_view_memo,
            'dso_browser': mdt_view_browse_file,
            'dso_thumb': mdt_view_thumb_file
        })
    return CResult.merge_result(self.Success, '可视化元数据处理完毕!')