def process(self) -> str:
    """
    Scan the files/directories belonging to the object recorded in
    __file_info__, driven by the __detail_* settings, and record them into
    dm2_storage_object_detail.
    :return: CResult-formatted result string
    """
    self._before_process()
    if self._only_stat_file:
        # Statistics-only mode: no file enumeration, just the schema stats.
        schema_result = self.__stat_object_detail_of_schema()
        if not CResult.result_success(schema_result):
            return schema_result
    elif not CUtils.equal_ignore_case(self.__detail_file_path__, ''):
        # Enumerate files/dirs under the configured detail path and inbound
        # them according to the schema.
        matched_files = CFile.file_or_dir_fullname_of_path(
            self.__detail_file_path__,
            self.__detail_file_recurse__,
            self.__detail_file_match_text__,
            self.__detail_file_match_type__)
        schema_result = self.__inbound_object_detail_of_schema(matched_files)
        if not CResult.result_success(schema_result):
            return schema_result
    if len(self._file_custom_list) > 0:
        return self.inbound_object_detail_of_custom(self._file_custom_list)
    return CResult.merge_result(self.Success, '对象附属文件处理成功结束!')
def sync(self) -> str:
    """
    Synchronize the satellite object: parse the business metadata, then
    write the main, metadata and NDI tables, returning the first failing
    step's result or a success result.
    :return: CResult-formatted result string
    """
    try:
        result = self.process_metadata_bus_dict()
        if not CResult.result_success(result):
            return CResult.merge_result(
                self.Failure,
                '卫星数据的业务元数据的详细内容解析出错!原因为{0}'.format(
                    CResult.result_message(result)))
        # All three table writers run unconditionally (as in the original
        # flow); the first failure, in this order, is reported.
        for step_result in (self.process_main_table(),
                            self.process_metadata_table(),
                            self.process_ndi_table()):
            if not CResult.result_success(step_result):
                return step_result
        return CResult.merge_result(
            self.Success, '对象[{0}]的同步成功! '.format(self._obj_name))
    except Exception as error:
        return CResult.merge_result(
            self.Failure,
            '数据检索分发模块在进行数据同步时出现错误:同步的对象[{0}]在处理时出现异常, 详细情况: [{1}]!'.format(
                self._obj_name, error.__str__()))
def db_update_object_status(self, dso_id, process_result):
    """
    Persist the tag-parse outcome for a storage object: status Finished on
    success, Error on failure; the result message goes into the memo column.
    :param dso_id: id of the dm2_storage_object row
    :param process_result: CResult-formatted outcome of tag parsing
    """
    CLogger().debug(CResult.result_message(process_result))
    # Both branches of the original differed only in the status constant.
    if CResult.result_success(process_result):
        parse_status = self.ProcStatus_Finished
    else:
        parse_status = self.ProcStatus_Error
    CFactory().give_me_db(self.get_mission_db_id()).execute(
        '''
        update dm2_storage_object
        set dsotagsparsestatus = {0}
            , dsolastmodifytime = now()
            , dsotagsparsememo = :dsotagsparsememo
        where dsoid = :dsoid
        '''.format(parse_status),
        {
            'dsoid': dso_id,
            'dsotagsparsememo': CResult.result_message(process_result)
        })
def update_dir_status(self, dir_id, result, status=None):
    """
    Update the scan status and memo of a storage directory.
    :param dir_id: id of the dm2_storage_directory row
    :param result: CResult-formatted message stored in the memo column
    :param status: explicit status value; when None the status is derived
        from result (Finished on success, re-queued on failure)
    """
    if status is not None:
        update_sql = '''
        update dm2_storage_directory
        set dsdscanstatus = :status, dsdscanmemo = :memo, dsdlastmodifytime = now()
        where dsdid = :dsdid
        '''
    elif CResult.result_success(result):
        update_sql = '''
        update dm2_storage_directory
        set dsdscanstatus = {0}, dsdscanmemo = :memo, dsdlastmodifytime = now()
        where dsdid = :dsdid
        '''.format(self.ProcStatus_Finished)
    else:
        # On failure the retry counter encoded in the tens digit is bumped
        # and the directory is put back in the queue.
        update_sql = '''
        update dm2_storage_directory
        set dsdscanstatus = (dsdscanstatus / 10 + 1) * 10 + {0}, dsdscanmemo = :memo, dsdlastmodifytime = now()
        where dsdid = :dsdid
        '''.format(self.ProcStatus_InQueue)
    CFactory().give_me_db(self.get_mission_db_id()).execute(
        update_sql,
        {
            'dsdid': dir_id,
            'memo': CResult.result_message(result),
            'status': status
        })
def process(self) -> str:
    """
    todo owner 赵宇飞
    Extract the spatial information of the image object and store it as wkt
    files under self.file_content.work_root_dir; the returned result carries
    the spatial file names and projection attributes.
    Note: if memory leaks appear, extract the metadata in a child process
    into files and parse them in this process.
    :return: CResult-formatted result string
    """
    raster_result = self.process_raster()
    if not CResult.result_success(raster_result):
        return CResult.merge_result(self.Failure, CResult.result_message(raster_result))
    work_dir = self.file_content.work_root_dir
    # Result-info key -> wkt file name under the work directory.
    wkt_file_by_key = {
        self.Name_Native_Center: '{0}_native_center.wkt'.format(self.object_name),
        self.Name_Native_BBox: '{0}_native_bbox.wkt'.format(self.object_name),
        self.Name_Native_Geom: '{0}_native_geom.wkt'.format(self.object_name),
        self.Name_Wgs84_Center: '{0}_wgs84_center.wkt'.format(self.object_name),
        self.Name_Wgs84_BBox: '{0}_wgs84_bbox.wkt'.format(self.object_name),
        self.Name_Wgs84_Geom: '{0}_wgs84_geom.wkt'.format(self.object_name)
    }
    # Result-info key -> projection attribute pulled from the raster result.
    prj_value_by_key = {
        self.Name_Prj_Wkt: CResult.result_info(raster_result, self.Name_Prj_Wkt, None),
        self.Name_Prj_Proj4: CResult.result_info(raster_result, self.Name_Prj_Proj4, None),
        self.Name_Prj_Project: CResult.result_info(raster_result, self.Name_Prj_Project, None),
        self.Name_Prj_Coordinate: CResult.result_info(raster_result, self.Name_Prj_Coordinate, None),
        self.Name_Prj_Source: CResult.result_info(raster_result, self.Name_Prj_Source, None),
        self.Name_Prj_Zone: CResult.result_info(raster_result, self.Name_Prj_Zone, None),
        self.Name_Prj_Degree: CResult.result_info(raster_result, self.Name_Prj_Degree, None)
    }
    merged = CResult.merge_result(self.Success, '处理完毕!')
    for info_key, wkt_file in wkt_file_by_key.items():
        merged = CResult.merge_result_info(merged, info_key, CFile.join_file(work_dir, wkt_file))
    for info_key, prj_value in prj_value_by_key.items():
        merged = CResult.merge_result_info(merged, info_key, prj_value)
    return merged
def db_update_object_status(self, dso_id, process_result, process_status=None):
    """
    Persist the metadata-parse status of a storage object.
    :param dso_id: id of the dm2_storage_object row
    :param process_result: CResult-formatted outcome (used when process_status is None)
    :param process_status: explicit status value; overrides result-based logic
    """
    if process_status is not None:
        update_sql = '''
        update dm2_storage_object
        set dsometadataparsestatus = {0}
            , dsolastmodifytime = now()
        where dsoid = :dsoid
        '''.format(process_status)
    elif CResult.result_success(process_result):
        update_sql = '''
        update dm2_storage_object
        set dsometadataparsestatus = {0}
            , dsolastmodifytime = now()
        where dsoid = :dsoid
        '''.format(self.ProcStatus_Finished)
    else:
        # Failure: bump the retry counter in the tens digit and re-queue.
        update_sql = '''
        update dm2_storage_object
        set dsometadataparsestatus = (dsometadataparsestatus / 10 + 1) * 10 + {0}
            , dsolastmodifytime = now()
        where dsoid = :dsoid
        '''.format(self.ProcStatus_InQueue)
    CFactory().give_me_db(self.get_mission_db_id()).execute(update_sql, {'dsoid': dso_id})
def import_data(self, data_source: CDataSetSeqReader, data_target: CTable) -> str:
    """
    Import every record of the sequential data source into the target table.
    :param data_source: sequential reader positioned before the first record
    :param data_target: destination table
    :return: CResult-formatted result string; stops at the first failed record
    """
    if not data_source.first():
        # An empty source counts as a successful import.
        return CResult.merge_result(
            self.Success,
            '数据源无有效导入数据, 系统自动设定导入成功! '
        )
    imported = 0
    has_record = True
    while has_record:
        try:
            record_result = self.__import_each_record(data_source, data_target)
            if not CResult.result_success(record_result):
                return record_result
        except Exception as error:
            # imported equals the zero-based index of the failing record here.
            return CResult.merge_result(
                self.Failure,
                '第{0}条数据入库失败, 详细错误原因为: {1}!'.format(imported, error.__str__())
            )
        imported += 1
        has_record = data_source.next()
    return CResult.merge_result(
        self.Success,
        '数据源的全部数据导入成功, 共导入记录数[{0}]! '.format(imported)
    )
def update_notify_result(self, notify_id, result):
    """
    Write the notify-step outcome of an inbound record back to the database:
    status WaitConfirm on success, Error on failure; the processing slot
    (proc id) is released in both cases.
    :param notify_id: id of the dm2_storage_inbound row
    :param result: CResult-formatted outcome, stored in the memo column
    """
    # Both branches of the original shared the same SQL except for status.
    if CResult.result_success(result):
        next_status = self.ProcStatus_WaitConfirm
    else:
        next_status = self.ProcStatus_Error
    CFactory().give_me_db(self.get_mission_db_id()).execute(
        '''
        update dm2_storage_inbound
        set dsi_na_status = {0}
            , dsi_na_proc_id = null
            , dsi_na_proc_memo = :notify_message
            , dsiproctime = now()
        where dsiid = :notify_id
        '''.format(next_status),
        {
            'notify_id': notify_id,
            'notify_message': CResult.result_message(result)
        })
def process(self) -> str:
    """
    完成 owners 张源博、赵宇飞
    Build the browse/thumb/geotiff view files of the image object under
    self.file_content.view_root_dir; the returned result carries the
    relative file names of the generated views.
    Note: if memory leaks appear, extract in a child process and parse the
    result here (the CProcessUtils call below already does this).
    :return: CResult-formatted result string
    """
    # Resolve the object's classification (catalog/group/type) from its
    # recognition plugin; fall back to 'default' when unavailable.
    # (Fix: local was named `type`, shadowing the builtin.)
    obj_type = 'default'
    obj_group = 'default'
    obj_catalog = 'default'
    classify_plugin = CObject.get_plugins_instance_by_object_id(
        self.file_info.db_server_id, self.object_id)
    if classify_plugin is not None:
        plugin_info = classify_plugin.get_information()
        obj_type = CUtils.dict_value_by_name(plugin_info, classify_plugin.Plugins_Info_Type, 'default')
        obj_group = CUtils.dict_value_by_name(plugin_info, classify_plugin.Plugins_Info_Group, 'default')
        obj_catalog = CUtils.dict_value_by_name(plugin_info, classify_plugin.Plugins_Info_Catalog, 'default')
    # Views are filed under catalog/group/type/yyyy/mm/dd of today.
    create_time = CTime.today()
    year = CTime.format_str(create_time, '%Y')
    month = CTime.format_str(create_time, '%m')
    day = CTime.format_str(create_time, '%d')
    sep = CFile.sep()  # path separator differs per operating system
    relative_path_part = sep.join([obj_catalog, obj_group, obj_type, year, month, day])
    view_relative_path_browse = r'{2}{0}{2}{1}_browse.png'.format(relative_path_part, self.object_id, sep)
    view_relative_path_thumb = r'{2}{0}{2}{1}_thumb.jpg'.format(relative_path_part, self.object_id, sep)
    view_relative_path_geotiff = r'{2}{0}{2}{1}_browse.tiff'.format(relative_path_part, self.object_id, sep)
    browse_full_path = CFile.join_file(self.file_content.view_root_dir, view_relative_path_browse)
    thumb_full_path = CFile.join_file(self.file_content.view_root_dir, view_relative_path_thumb)
    geotiff_full_path = CFile.join_file(self.file_content.view_root_dir, view_relative_path_geotiff)
    # Run the view creation in a separate process to contain memory leaks.
    json_out_view = CJson()
    json_out_view.set_value_of_name('image_path', self.file_info.file_name_with_full_path)
    json_out_view.set_value_of_name('browse_full_path', browse_full_path)
    json_out_view.set_value_of_name('thumb_full_path', thumb_full_path)
    json_out_view.set_value_of_name('geotiff_full_path', geotiff_full_path)
    result_view = CProcessUtils.processing_method(self.create_view_json, json_out_view)
    if not CResult.result_success(result_view):
        return result_view
    result = CResult.merge_result(self.Success, '处理完毕!')
    result = CResult.merge_result_info(result, self.Name_Browse, view_relative_path_browse)
    result = CResult.merge_result_info(result, self.Name_Thumb, view_relative_path_thumb)
    result = CResult.merge_result_info(result, self.Name_Browse_GeoTiff, view_relative_path_geotiff)
    return result
def _do_sync(self) -> str:
    """
    Synchronize this object into its target table, deciding up front whether
    the row must be inserted or updated (the table has a composite primary
    key, so CTable.if_exists() cannot be used).
    :return: CResult-formatted result string
    """
    try:
        table_name = CUtils.dict_value_by_name(self.information(), 'table_name', '')
        # Probe the record count directly to decide insert vs update.
        # NOTE(review): table name and object id are embedded via string
        # formatting — confirm both are trusted, otherwise this is
        # SQL-injectable; prefer a bound parameter for the id.
        sql_check = '''
        select aprid from {0} where aprid='{1}'
        '''.format(table_name, self._obj_id)
        record_count = CFactory().give_me_db(self._db_id).one_row(sql_check).size()
        do_insert = self.DB_True if record_count == 0 else self.DB_False
        table = CTable()
        table.load_info(self._db_id, table_name)
        # The insert/update decision is passed down so that columns which
        # should keep their defaults on insert are not overwritten on update.
        for field_dict in self.get_sync_dict_list(do_insert):
            field_name = CUtils.dict_value_by_name(field_dict, 'field_name', '')
            field_value = CUtils.dict_value_by_name(field_dict, 'field_value', '')
            value_type = CUtils.dict_value_by_name(field_dict, 'field_value_type', '')
            if CUtils.equal_ignore_case(field_value, ''):
                table.column_list.column_by_name(field_name).set_null()
            elif CUtils.equal_ignore_case(value_type, self.DataValueType_Value):
                table.column_list.column_by_name(field_name).set_value(field_value)
            elif CUtils.equal_ignore_case(value_type, self.DataValueType_SQL):
                table.column_list.column_by_name(field_name).set_sql(field_value)
            elif CUtils.equal_ignore_case(value_type, self.DataValueType_Array):
                table.column_list.column_by_name(field_name).set_array(field_value)
        # save_data() would trigger another existence query, so the specific
        # operation is invoked directly.
        result = table.insert_data() if do_insert else table.update_data()
        if not CResult.result_success(result):
            return result
        return CResult.merge_result(
            self.Success,
            '对象[{0}]的同步成功! '.format(self._obj_name)
        )
    except Exception as error:
        return CResult.merge_result(
            self.Failure,
            '数据检索分发模块在进行数据同步时出现错误:同步的对象[{0}]在处理时出现异常, 详细情况: [{1}]!'.format(
                self._obj_name, error.__str__()
            )
        )
def access(self, obj_id, obj_name, obj_type, quality) -> str:
    """
    Check whether the recognized object is usable by the third-party module.
    The returned json string is the usual CResult format extended with an
    Access attribute (Pass or Forbid). The Access attribute is mandatory.
    :return: CResult-formatted result string carrying the Access attribute
    """
    result = self.__test_module_obj(obj_id, obj_name)
    access_flag = self.DataAccess_Pass if CResult.result_success(result) else self.DataAccess_Forbid
    return CResult.merge_result_info(result, self.Name_Access, access_flag)
def update_inbound_qi_result(self, notify_id, result, storage_type='mix', storage_option=None, ib_option=None):
    """
    Write the quality-inspection outcome back to the inbound record. On
    success the next status depends on the configured
    "inbound-immediately-after-QI" switch for the storage type; on failure
    only memo/time are updated.
    :param notify_id: id of the dm2_storage_inbound row
    :param result: CResult-formatted QI outcome
    :param storage_type: inbound or mixed storage; selects the switch used
    :param storage_option: unused here, kept for interface compatibility
    :param ib_option: unused here, kept for interface compatibility
    """
    CLogger().debug(CResult.result_message(result))
    params = {
        'notify_id': notify_id,
        'notify_message': CResult.result_message(result)
    }
    db = CFactory().give_me_db(self.get_mission_db_id())
    if not CResult.result_success(result):
        # QI failed: record the message only, without touching the status.
        db.execute(
            '''
            update dm2_storage_inbound
            set dsiprocmemo = :notify_message, dsiproctime = now()
            where dsiid = :notify_id
            ''', params)
        return
    # Default is ON for dedicated inbound storage and OFF for mixed storage.
    if CUtils.equal_ignore_case(storage_type, self.Storage_Type_InBound):
        switch_name = self.Switch_Inbound_After_QI_Immediately_Of_IB_Storage
        switch_default = self.Name_ON
    else:
        switch_name = self.Switch_Inbound_After_QI_Immediately_Of_MIX_Storage
        switch_default = self.Name_OFF
    inbound_immediately = CUtils.equal_ignore_case(
        settings.application.xpath_one(
            self.path_switch(self.Path_Setting_MetaData_QI_Switch, switch_name),
            switch_default),
        self.Name_ON)
    next_status = self.IB_Status_IB_InQueue if inbound_immediately else self.IB_Status_QI_Finished
    db.execute(
        '''
        update dm2_storage_inbound
        set dsiStatus = {0}, dsiprocmemo = :notify_message, dsiproctime = now()
        where dsiid = :notify_id
        '''.format(next_status), params)
def process(self) -> str:
    """
    Build browse/thumb view files for the transformed image, then clean up
    temporary outputs; on success the result carries the view file names.
    :return: CResult-formatted result string
    """
    browse_full_path = CFile.join_file(self.view_path, '{0}_browse.png'.format(self.object_id))
    thumb_full_path = CFile.join_file(self.view_path, '{0}_thumb.jpg'.format(self.object_id))
    geotiff_full_path = CFile.join_file(self.view_path, '{0}_browse.tiff'.format(self.object_id))
    # Run the view creation in a separate process.
    json_out_view = CJson()
    json_out_view.set_value_of_name('image_path', self.transform_file)
    json_out_view.set_value_of_name('browse_full_path', browse_full_path)
    json_out_view.set_value_of_name('thumb_full_path', thumb_full_path)
    json_out_view.set_value_of_name('geotiff_full_path', geotiff_full_path)
    result_view = CProcessUtils.processing_method(self.create_view_json, json_out_view)
    view_created = CResult.result_success(result_view)
    # Files that are always temporary: side-car .aux.xml files and the
    # intermediate geotiff. On failure the partial browse/thumb images are
    # removed as well (they come first, preserving the original order).
    cleanup_targets = [
        '{0}.aux.xml'.format(browse_full_path),
        '{0}.aux.xml'.format(thumb_full_path),
        geotiff_full_path
    ]
    if not view_created:
        cleanup_targets = [browse_full_path, thumb_full_path] + cleanup_targets
    for cleanup_target in cleanup_targets:
        if CFile.file_or_path_exist(cleanup_target):
            CFile.remove_file(cleanup_target)
    if not view_created:
        return result_view
    result = CResult.merge_result(self.Success, '处理完毕!')
    result = CResult.merge_result_info(result, self.Name_Browse, CFile.file_name(browse_full_path))
    result = CResult.merge_result_info(result, self.Name_Thumb, CFile.file_name(thumb_full_path))
    result = CResult.merge_result_info(result, self.Name_Browse_GeoTiff, CFile.file_name(geotiff_full_path))
    return result
def __update_layer_update_result(self, deploy_id, result):
    """
    Write the layer-deploy outcome back to dp_v_qfg_layer: dpStatus 0 on
    success, 21 on failure; the result message goes into dpprocessresult.
    :param deploy_id: id of the dp_v_qfg_layer row
    :param result: CResult-formatted deploy outcome
    """
    if CResult.result_success(result):
        update_sql = '''
        update dp_v_qfg_layer
        set dpStatus = 0, dpprocessresult = :message
        where dpid = :id
        '''
    else:
        update_sql = '''
        update dp_v_qfg_layer
        set dpStatus = 21, dpprocessresult = :message
        where dpid = :id
        '''
    CFactory().give_me_db(self.get_mission_db_id()).execute(
        update_sql,
        {'id': deploy_id, 'message': CResult.result_message(result)}
    )
def start(self):
    """
    Main scheduling loop: create the job, restart abnormally-stopped
    missions, then execute missions until should_stop() is true, sleeping
    5 seconds whenever an execution reports no success.
    """
    schedule = self.get_or_create_sch_job()
    if schedule is None:
        CLogger().warning('无法创建Job对象: {0}.{1}.{2}'.format(
            self.__schedule_id__, self.__schedule_trigger__, self.__schedule_algorithm__))
        return
    schedule.before_execute()
    schedule.abnormal_mission_restart()
    while True:
        if not CResult.result_success(schedule.execute()):
            # No mission processed (or it failed): back off before retrying.
            time.sleep(5)
        if self.should_stop():
            break
    schedule.before_stop()
def update_sync_result(self, na_id, result):
    """
    Write the notify-sync outcome back to dm2_storage_obj_na: status
    Finished on success, Error on failure; the processing slot is released.
    :param na_id: id of the dm2_storage_obj_na row
    :param result: CResult-formatted outcome, stored in the memo column
    """
    # Both branches of the original shared the same SQL except for status.
    notify_status = self.ProcStatus_Finished if CResult.result_success(result) else self.ProcStatus_Error
    CFactory().give_me_db(self.get_mission_db_id()).execute(
        '''
        update dm2_storage_obj_na
        set dson_notify_status = {0},
            dson_notify_proc_id = null,
            dson_notify_proc_memo = :proc_message
        where dsonid = :id
        '''.format(notify_status),
        {
            'id': na_id,
            'proc_message': CResult.result_message(result)
        })
def update_inbound_na_result(self, notify_id, result):
    """
    Write the notify outcome back to the inbound record. On success the
    notify status advances to Finished; on failure only memo/time are
    updated, leaving the status untouched for a later retry.
    :param notify_id: id of the dm2_storage_inbound row
    :param result: CResult-formatted outcome, stored in the memo column
    """
    CLogger().debug(CResult.result_message(result))
    if CResult.result_success(result):
        update_sql = '''
        update dm2_storage_inbound
        set dsi_na_status = {0}, dsi_na_proc_memo = :notify_message, dsiproctime = now()
        where dsiid = :notify_id
        '''.format(self.ProcStatus_Finished)
    else:
        update_sql = '''
        update dm2_storage_inbound
        set dsi_na_proc_memo = :notify_message, dsiproctime = now()
        where dsiid = :notify_id
        '''
    CFactory().give_me_db(self.get_mission_db_id()).execute(
        update_sql,
        {
            'notify_id': notify_id,
            'notify_message': CResult.result_message(result)
        })
def update_inbound_qi_result(self, notify_id, result):
    """
    Write the QI-dispatch outcome back to the inbound record: status
    QI_Processing on success, QI_Error on failure; the result message is
    stored in the memo column.
    :param notify_id: id of the dm2_storage_inbound row
    :param result: CResult-formatted outcome
    """
    CLogger().debug(CResult.result_message(result))
    # Both branches of the original shared the same SQL except for status.
    if CResult.result_success(result):
        next_status = self.IB_Status_QI_Processing
    else:
        next_status = self.IB_Status_QI_Error
    CFactory().give_me_db(self.get_mission_db_id()).execute(
        '''
        update dm2_storage_inbound
        set dsiStatus = {0}, dsiprocmemo = :notify_message
        where dsiid = :notify_id
        '''.format(next_status),
        {
            'notify_id': notify_id,
            'notify_message': CResult.result_message(result)
        })
def update_ib_result(self, ib_id, result):
    """
    Write the inbound-step outcome back to the inbound record. On success
    the record is marked Finished and its notify step is queued; on failure
    only the inbound status is set to IB_Error.
    :param ib_id: id of the dm2_storage_inbound row
    :param result: CResult-formatted outcome, stored in the memo column
    """
    if CResult.result_success(result):
        update_sql = '''
        update dm2_storage_inbound
        set dsistatus = {0}, dsiprocmemo = :ib_message, dsi_na_status = {1}
        where dsiid = :ib_id
        '''.format(self.ProcStatus_Finished, self.ProcStatus_InQueue)
    else:
        update_sql = '''
        update dm2_storage_inbound
        set dsistatus = {0}, dsiprocmemo = :ib_message
        where dsiid = :ib_id
        '''.format(self.IB_Status_IB_Error)
    CFactory().give_me_db(self.get_mission_db_id()).execute(
        update_sql,
        {
            'ib_id': ib_id,
            'ib_message': CResult.result_message(result)
        })
def db_update_object_status(self, dso_id, result, status=None):
    """
    Persist the detail-parse outcome of a storage object.
    :param dso_id: id of the dm2_storage_object row
    :param result: CResult-formatted outcome, stored in the memo column
    :param status: explicit status value; when None the status is derived
        from result (Finished on success, re-queued with a bumped retry
        counter on failure)
    """
    if status is not None:
        CFactory().give_me_db(self.get_mission_db_id()).execute(
            '''
            update dm2_storage_object
            set dsodetailparsestatus = :status
                , dsolastmodifytime = now()
                , dsodetailparsememo = :dsodetailparsememo
            where dsoid = :dsoid
            ''',
            {
                'dsoid': dso_id,
                'dsodetailparsememo': CResult.result_message(result),
                'status': status
            })
    elif CResult.result_success(result):
        CFactory().give_me_db(self.get_mission_db_id()).execute(
            '''
            update dm2_storage_object
            set dsodetailparsestatus = {0}
                , dsolastmodifytime = now()
                , dsodetailparsememo = :dsodetailparsememo
            where dsoid = :dsoid
            '''.format(self.ProcStatus_Finished),
            {
                'dsoid': dso_id,
                'dsodetailparsememo': CResult.result_message(result)
            })
    else:
        # BUGFIX: the original statement had a stray trailing comma after
        # the status expression ("... * 10 + {0}," immediately followed by
        # ", dsolastmodifytime"), which made the UPDATE invalid SQL on the
        # failure path.
        CFactory().give_me_db(self.get_mission_db_id()).execute(
            '''
            update dm2_storage_object
            set dsodetailparsestatus = (dsodetailparsestatus / 10 + 1) * 10 + {0}
                , dsolastmodifytime = now()
                , dsodetailparsememo = :dsodetailparsememo
            where dsoid = :dsoid
            '''.format(self.ProcStatus_InQueue),
            {
                'dsoid': dso_id,
                'dsodetailparsememo': CResult.result_message(result)
            })
def access(self, obj_id, obj_name, obj_type, quality) -> str:
    """
    Check whether the recognized object is usable by the third-party module
    by delegating to the object's module plugin. The returned json string is
    the usual CResult format extended with an Access attribute; when no
    plugin matches, the object passes by default.
    :return: CResult-formatted result string
    """
    module_obj_real, result = self.__find_module_obj(obj_id)
    if not CResult.result_success(result):
        return result
    if module_obj_real is None:
        # No matching algorithm: let the object through.
        return CResult.merge_result(self.Success, '没有对应的算法, 直接通过!')
    try:
        return module_obj_real.access()
    except Exception as error:
        return CResult.merge_result(
            self.Failure,
            '模块插件{0}检查访问可用性出现异常, 具体错误原因为: {1}'.format(
                type(module_obj_real), error.__str__()))
def sync(self, object_access, obj_id, obj_name, obj_type, quality) -> str:
    """
    Synchronize the recognized object with the third-party module. When the
    module can process the object itself this method need not be inherited;
    otherwise a matching distribution plugin is located (via the object type
    mapping in the object_def table) and its sync() is delegated to.
    Note: _quality_info is not usable here — the caller does not pass it in
    for efficiency reasons.
    :return: CResult-formatted result string
    """
    if not CUtils.equal_ignore_case(self.DataAccess_Pass, object_access):
        # todo(赵宇飞) when access is pending approval or forbidden, the
        # record should be removed from the ap3 tables here.
        return CResult.merge_result(self.Success, '这里需要从ap3系列表中删除记录, 等待实现!')
    distribution_obj_real, result = self.__find_module_obj(obj_id)
    if not CResult.result_success(result):
        return result
    if distribution_obj_real is None:
        # No matching algorithm: treat as success.
        return CResult.merge_result(self.Success, '没有对应的算法, 直接通过!')
    return distribution_obj_real.sync()
def process_mission(self, dataset) -> str:
    """
    Run the inbound step for one queued directory; the full algorithm is
    described in the [### 数据入库调度] section of readme.md.
    :param dataset: queue record carrying storage/inbound identifiers and options
    :return: CResult-formatted result string; the outcome is also written
        back through update_ib_result
    """
    ds_src_storage_id = dataset.value_by_name(0, 'query_storage_id', '')
    ds_src_storage_type = dataset.value_by_name(0, 'query_storage_type', self.Storage_Type_Mix)
    ds_src_root_path = dataset.value_by_name(0, 'query_rootpath', '')
    ds_src_dir_id = dataset.value_by_name(0, 'query_ib_dir_id', '')
    ds_ib_id = dataset.value_by_name(0, 'query_ib_id', '')
    ds_ib_directory_name = dataset.value_by_name(0, 'query_ib_relation_dir', '')
    ds_ib_batch_no = dataset.value_by_name(0, 'query_ib_batchno', '')
    ds_ib_option = dataset.value_by_name(0, 'query_ib_option', '')
    src_need_storage_size = self.get_storage_size(ds_ib_id, ds_src_storage_id, ds_ib_directory_name, ds_ib_option)
    src_path = ds_src_root_path
    if not CUtils.equal_ignore_case(ds_ib_directory_name, ''):
        src_path = CFile.join_file(src_path, ds_ib_directory_name)
    src_dataset_metadata_filename = CFile.join_file(
        src_path, self.FileName_MetaData_Bus_21AT)
    CLogger().debug('入库的目录为: {0}.{1}'.format(ds_ib_id, ds_ib_directory_name))
    try:
        # 1. Verify every declared file/path really exists for this batch.
        all_ib_file_or_path_existed = self.check_all_ib_file_or_path_existed(ds_ib_id)
        if not CResult.result_success(all_ib_file_or_path_existed):
            self.update_ib_result(ds_ib_id, all_ib_file_or_path_existed)
            return all_ib_file_or_path_existed
        # 2. Record the inbound operation in the log.
        result = self.ib_log(ds_ib_id, ds_src_storage_id, ds_ib_directory_name)
        if not CResult.result_success(result):
            self.update_ib_result(ds_ib_id, result)
            return result
        # 3. Data already sitting in core or mixed storage stays in place;
        #    only its metadata status changes.
        if CUtils.equal_ignore_case(ds_src_storage_type, self.Storage_Type_Mix) \
                or CUtils.equal_ignore_case(ds_src_storage_type, self.Storage_Type_Core):
            result_ib_in_core_or_mix_storage = self.update_ib_data_status_in_core_or_mix_storage(
                ds_ib_id, ds_src_storage_id, ds_ib_directory_name, ds_src_dir_id)
            self.update_ib_result(ds_ib_id, result_ib_in_core_or_mix_storage)
            return result_ib_in_core_or_mix_storage
        # 4. Load the dataset metadata to determine the dataset type.
        src_dataset_xml = CXml()
        src_dataset_type = self.Name_Default
        if CFile.file_or_path_exist(src_dataset_metadata_filename):
            src_dataset_xml.load_file(src_dataset_metadata_filename)
            src_dataset_type = CXml.get_element_text(
                src_dataset_xml.xpath_one(self.Path_MD_Bus_ProductType))
            if CUtils.equal_ignore_case(src_dataset_type, ''):
                src_dataset_type = self.Name_Default
        # 5. Find the inbound schema matching the dataset type.
        src_ib_schema = self.get_ib_schema(src_dataset_type, ds_ib_option)
        if src_ib_schema is None:
            result = CResult.merge_result(
                self.Failure,
                '目录为[{0}.{1}]的数据集类型为[{2}], 未找到匹配的入库模式, 请检查修正后重试!'.format(
                    ds_ib_id, ds_ib_directory_name, src_dataset_type))
            self.update_ib_result(ds_ib_id, result)
            return result
        # 6. Compute destination storage, root path, destination directory id
        #    in the destination storage, destination sub path and message.
        dest_ib_storage_id, dest_ib_root_path, desc_ib_dir_id, dest_ib_subpath, message = self.get_dest_storage(
            ds_ib_batch_no, src_need_storage_size, ds_ib_option, src_ib_schema, src_dataset_xml)
        if dest_ib_storage_id is None or dest_ib_subpath is None:
            result = CResult.merge_result(self.Failure, message)
            self.update_ib_result(ds_ib_id, result)
            return result
        dest_ib_subpath = CFile.unify(dest_ib_subpath)
        # 7. Optionally verify no source file is locked by another process.
        if CJson.json_attr_value(ds_ib_option, self.Path_IB_Switch_CheckFileLocked, self.DB_False) == self.DB_True:
            src_ib_files_not_locked, message = self.check_src_ib_files_not_locked(
                ds_src_root_path, src_path)
            if not src_ib_files_not_locked:
                result = CResult.merge_result(self.Failure, message)
                self.update_ib_result(ds_ib_id, result)
                return result
        proc_ib_src_path = ds_src_root_path
        proc_ib_dest_path = dest_ib_root_path
        if not CUtils.equal_ignore_case(dest_ib_subpath, ''):
            proc_ib_dest_path = CFile.join_file(dest_ib_root_path, dest_ib_subpath)
        if not CUtils.equal_ignore_case(ds_ib_directory_name, ''):
            proc_ib_src_path = CFile.join_file(proc_ib_src_path, ds_ib_directory_name)
            proc_ib_dest_path = CFile.join_file(proc_ib_dest_path, ds_ib_directory_name)
        # ---- all pre-inbound checks passed ----
        # 8. Move the source directory to the destination; when the data is
        #    at the storage root, only the files are moved.
        result = self.ib_files_move(
            proc_ib_src_path, proc_ib_dest_path,
            CUtils.equal_ignore_case(ds_ib_directory_name, ''))
        if not CResult.result_success(result):
            # Roll back with the same primitive; this should practically
            # always succeed.
            sub_result = self.ib_files_move(
                proc_ib_dest_path, proc_ib_src_path,
                CUtils.equal_ignore_case(ds_ib_directory_name, ''))
            if not CResult.result_success(sub_result):
                sub_result_message = CResult.result_message(sub_result)
                result_message = CResult.result_message(result)
                result = CResult.merge_result(
                    self.Failure, '{0}\n{1}'.format(result_message, sub_result_message))
            self.update_ib_result(ds_ib_id, result)
            return result
        # 9. Move the source metadata records to the destination storage; the
        #    method rolls its own changes back internally on error.
        result = self.src_ib_metadata_move_to_storage(
            ds_ib_id, ds_src_storage_id, ds_src_dir_id, ds_ib_directory_name,
            dest_ib_storage_id, desc_ib_dir_id, dest_ib_subpath)
        if not CResult.result_success(result):
            sub_result = self.ib_files_move(
                proc_ib_dest_path, proc_ib_src_path,
                CUtils.equal_ignore_case(ds_ib_directory_name, ''))
            if not CResult.result_success(sub_result):
                sub_result_message = CResult.result_message(sub_result)
                result_message = CResult.result_message(result)
                # BUGFIX: the original joined the two messages with the
                # literal text '/n' instead of the '\n' escape used by the
                # sibling rollback branch above.
                result = CResult.merge_result(
                    self.Failure, '{0}\n{1}'.format(result_message, sub_result_message))
            self.update_ib_result(ds_ib_id, result)
            return result
        result = CResult.merge_result(
            self.Success, '目录为[{0}.{1}]入库成功!'.format(ds_ib_id, ds_ib_directory_name))
        self.update_ib_result(ds_ib_id, result)
        return result
    except Exception as error:
        result = CResult.merge_result(
            self.Failure,
            '目录为[{0}.{1}]入库出现异常! 错误原因为: {2}'.format(
                ds_ib_id, ds_ib_directory_name, error.__str__()))
        self.update_ib_result(ds_ib_id, result)
        return result
def parser_metadata_custom(self, parser: CMetaDataParser) -> str:
    """
    Custom metadata parsing, triggered after QC and all other processing:
    for every sub-layer listed in the object's JSON metadata, create (or
    refresh) a child dm2_storage_object row carrying that single layer's
    metadata, queued for its own parse steps.
    :param parser: metadata parser holding the parsed object metadata
    :return: CResult-formatted result string (Failure only when at least
        one layer row could not be written)
    """
    meta_data_json = parser.metadata.metadata_json()
    if meta_data_json is None:
        # No JSON metadata available: report success, noting layers were skipped.
        return CResult.merge_result(
            self.Success,
            '数据[{0}]的质检和空间等元数据解析完毕, 但子图层解析有误, 无法获取JSON格式的元数据! '.format(
                self.file_info.file_name_with_full_path,
            )
        )
    json_data_source = meta_data_json.xpath_one('datasource', None)
    layer_list = meta_data_json.xpath_one(self.Name_Layers, None)
    if layer_list is None:
        # Metadata has no layers node: nothing to create.
        return CResult.merge_result(
            self.Success,
            '数据[{0}]的质检和空间等元数据解析完毕, 但子图层解析有误, 元数据中无法找到layers节点! '.format(
                self.file_info.file_name_with_full_path,
            )
        )
    # Child layers inherit the parent's inbound batch id.
    mdb_ib_id = CFactory().give_me_db(self.file_info.db_server_id).one_value(
        '''
        select dso_ib_id from dm2_storage_object where dsoid = :object_id
        ''',
        {
            'object_id': parser.object_id
        }
    )
    error_message_list = []
    table = CTable()
    table.load_info(self.file_info.db_server_id, self.TableName_DM_Object)
    for layer in layer_list:
        layer_name = CUtils.dict_value_by_name(layer, self.Name_Name, '')
        if CUtils.equal_ignore_case(layer_name, ''):
            # Unnamed layers cannot be registered.
            continue
        layer_alias_name = CUtils.dict_value_by_name(layer, self.Name_Description, layer_name)
        # Per-layer metadata: same datasource, but only this one layer.
        layer_metadata_json = CJson()
        layer_metadata_json.set_value_of_name('datasource', json_data_source)
        layer_metadata_json.set_value_of_name('layer_count', 1)
        layer_metadata_json.set_value_of_name('layers', [layer])
        layer_metadata_text = layer_metadata_json.to_json()
        try:
            # Reuse an existing child object id when a layer of the same
            # name already exists under this parent (case-insensitive).
            sql_find_layer_existed = '''
            select dsoid as layer_id_existed
            from dm2_storage_object
            where upper(dsoobjectname) = upper(:layer_name) and dsoparentobjid = :object_id
            '''
            layer_id_existed = CFactory().give_me_db(self.file_info.db_server_id).one_value(
                sql_find_layer_existed,
                {
                    'layer_name': layer_name,
                    'object_id': parser.object_id
                }
            )
            if layer_id_existed is None:
                layer_id_existed = CUtils.one_id()
            table.column_list.reset()
            table.column_list.column_by_name('dsoid').set_value(layer_id_existed)
            table.column_list.column_by_name('dsoobjectname').set_value(layer_name)
            table.column_list.column_by_name('dsoobjecttype').set_value(
                CUtils.dict_value_by_name(
                    self.get_information(), self.Plugins_Info_Child_Layer_Plugins_Name, ''
                )
            )
            table.column_list.column_by_name('dsodatatype').set_value(
                CUtils.dict_value_by_name(
                    self.get_information(), self.Plugins_Info_Child_Layer_Data_Type, ''
                )
            )
            table.column_list.column_by_name('dsoalphacode').set_value(CUtils.alpha_text(layer_name))
            table.column_list.column_by_name('dsoaliasname').set_value(layer_alias_name)
            table.column_list.column_by_name('dsoparentobjid').set_value(parser.object_id)
            table.column_list.column_by_name('dso_ib_id').set_value(mdb_ib_id)
            table.column_list.column_by_name('dsometadatatext').set_value(layer_metadata_text)
            table.column_list.column_by_name('dsometadatajson').set_value(layer_metadata_text)
            # Queue the new layer object for all three parse pipelines.
            table.column_list.column_by_name('dsometadataparsestatus').set_value(self.ProcStatus_InQueue)
            table.column_list.column_by_name('dsotagsparsestatus').set_value(self.ProcStatus_InQueue)
            table.column_list.column_by_name('dsodetailparsestatus').set_value(self.ProcStatus_InQueue)
            result = table.save_data()
            if not CResult.result_success(result):
                error_message_list.append(
                    '图层[{0}]的创建过程出现错误, 详细信息为: {1}'.format(
                        layer_name, CResult.result_message(result)
                    )
                )
        except Exception as error:
            # One failing layer must not stop the remaining layers.
            error_message_list.append('图层[{0}]的创建过程出现错误, 详细信息为: {1}'.format(layer_name, error.__str__()))
    if len(error_message_list) > 0:
        return CResult.merge_result(
            self.Failure,
            '数据[{0}]的质检和空间等元数据解析完毕, 但子图层解析有误, 详细情况如下: \n{1}'.format(
                self.file_info.file_name_with_full_path,
                CUtils.list_2_str(error_message_list, '', '\n', '', True)
            )
        )
    else:
        return CResult.merge_result(
            self.Success,
            '数据[{0}]的自定义元数据解析完毕! '.format(
                self.file_info.file_name_with_full_path,
            )
        )
def process_mission(self, dataset) -> str:
    """
    Notify third-party application modules about every object of one inbound batch.

    The module list is taken from the batch option JSON; when absent, the
    data-access module directory is scanned for module files instead. Each
    object's stored access-evaluation JSON (dso_da_result) is forwarded to
    every enabled module via notify_object(). The aggregate outcome is saved
    with update_notify_result() and returned.

    :param dataset: single-row dataset describing the inbound batch
    :return: a CResult-style text (Success / Failure with details)
    """
    ds_storage_id = dataset.value_by_name(0, 'query_storage_id', '')  # NOTE(review): appears unused below
    ds_storage_title = dataset.value_by_name(0, 'query_storage_title', '')
    ds_ib_id = dataset.value_by_name(0, 'query_ib_id', '')
    ds_ib_directory_name = dataset.value_by_name(0, 'query_ib_relation_dir', '')
    ds_ib_batch_no = dataset.value_by_name(0, 'query_ib_batchno', '')
    ds_ib_option = CUtils.any_2_str(
        dataset.value_by_name(0, 'query_ib_option', ''))
    CLogger().debug('与第三方模块同步的目录为: {0}.{1}'.format(ds_ib_id, ds_ib_directory_name))
    data_count = 0
    try:
        # Preferred source: the module list recorded in the batch options.
        module_name_list = CJson.json_attr_value(
            ds_ib_option, self.Path_IB_Opt_Notify_module, None)
        if module_name_list is None:
            # Fallback: every module file found under the modules root dir.
            modules_root_dir = CSys.get_metadata_data_access_modules_root_dir(
            )
            module_file_list = CFile.file_or_subpath_of_path(
                modules_root_dir,
                '{0}_*.{1}'.format(self.Name_Module, self.FileExt_Py))
            module_name_list = list()
            for module_file in module_file_list:
                module_name_list.append(CFile.file_main_name(module_file))
        sql_ib_need_notify_object = '''
            select dsoid, dsoobjecttype, dsoobjectname, dso_da_result
            from dm2_storage_object
            where dso_ib_id = :ib_id
        '''
        # NOTE: `dataset` is rebound here from the batch row to the object rows.
        dataset = CFactory().give_me_db(self.get_mission_db_id()).all_row(
            sql_ib_need_notify_object, {'ib_id': ds_ib_id})
        if dataset.is_empty():
            result = CResult.merge_result(
                self.Success,
                '存储[{0}]下, 批次为[{1}]的目录[{2}]下无任何对象, 不再通知给第三方应用!'.format(
                    ds_storage_title, ds_ib_batch_no, ds_ib_directory_name))
            self.update_notify_result(ds_ib_id, result)
            return result
        CLogger().debug(
            '存储[{0}]下, 批次为[{1}]的目录[{2}]下有[{3}]个对象等待通知给第三方应用!'.format(
                ds_storage_title, ds_ib_batch_no, ds_ib_directory_name,
                dataset.size()))
        data_count = dataset.size()
        error_message = ''
        for data_index in range(data_count):
            record_object = dataset.record(data_index)
            object_id = CUtils.dict_value_by_name(record_object, 'dsoid', '')
            object_type = CUtils.dict_value_by_name(
                record_object, 'dsoobjecttype', '')
            object_name = CUtils.dict_value_by_name(
                record_object, 'dsoobjectname', '')
            object_da_result_text = CUtils.any_2_str(
                CUtils.dict_value_by_name(record_object, 'dso_da_result', ''))
            object_da_result = CJson()
            object_da_result.load_json_text(object_da_result_text)
            for module_name in module_name_list:
                module_obj = CObject.create_module_instance(
                    CSys.get_metadata_data_access_modules_root_name(),
                    module_name, self.get_mission_db_id())
                module_id = module_name
                module_title = CUtils.dict_value_by_name(
                    module_obj.information(), self.Name_Title, '')
                module_enable = CUtils.dict_value_by_name(
                    module_obj.information(), self.Name_Enable, True)
                # Disabled modules are skipped entirely.
                if not module_enable:
                    continue
                # Access decision previously computed for this module/object.
                module_access = object_da_result.xpath_one(
                    '{0}.{1}'.format(module_id, self.Name_Result),
                    self.DataAccess_Forbid)
                module_access_memo = object_da_result.xpath_one(
                    '{0}.{1}'.format(module_id, self.Name_Message), '')
                CLogger().debug(
                    '存储[{0}]下, 批次为[{1}]的目录[{2}]下的对象[{3}], 与模块[{4}]的访问权限为[{5}]!'
                    .format(ds_storage_title, ds_ib_batch_no,
                            ds_ib_directory_name, object_name, module_title,
                            module_access))
                # TODO(王西亚): consider carefully whether to re-enable this
                #   filter - should it pass only 'pass', or 'pass' and 'wait'?
                # if not \
                #         (
                #                 CUtils.equal_ignore_case(module_access, self.DataAccess_Pass)
                #                 or CUtils.equal_ignore_case(module_access, self.DataAccess_Wait)
                #         ):
                #     continue
                result = module_obj.notify_object(ds_ib_id, module_access,
                                                  module_access_memo,
                                                  object_id, object_name,
                                                  object_type, None)
                if not CResult.result_success(result):
                    message = CResult.result_message(result)
                    CLogger().debug(
                        '存储[{0}]下, 批次为[{1}]的目录[{2}]下的对象[{3}], 与模块[{4}]的通知处理结果出现错误, 详细情况: [{5}]!'
                        .format(ds_storage_title, ds_ib_batch_no,
                                ds_ib_directory_name, object_name,
                                module_title, message))
                    error_message = CUtils.str_append(
                        error_message, message)
        if CUtils.equal_ignore_case(error_message, ''):
            result = CResult.merge_result(
                self.Success,
                '存储[{0}]下, 批次为[{1}]的目录[{2}]下有[{3}]个对象成功通知给第三方应用!'.format(
                    ds_storage_title, ds_ib_batch_no, ds_ib_directory_name,
                    data_count))
            self.update_notify_result(ds_ib_id, result)
            return result
        else:
            result = CResult.merge_result(
                self.Failure,
                '存储[{0}]下, 批次为[{1}]的目录[{2}]下有[{3}]个对象在通知给第三方应用时, 部分出现错误! 错误信息如下: \n{4}'
                .format(ds_storage_title, ds_ib_batch_no,
                        ds_ib_directory_name, data_count, error_message))
            self.update_notify_result(ds_ib_id, result)
            return result
    except Exception as error:
        result = CResult.merge_result(
            self.Failure,
            '存储[{0}]下, 批次为[{1}]的目录[{2}]下有[{3}]个对象通知给第三方应用时出现异常! 错误原因为: {4}!'
            .format(ds_storage_title, ds_ib_batch_no, ds_ib_directory_name,
                    data_count, error.__str__()))
        self.update_notify_result(ds_ib_id, result)
        return result
def process_mission(self, dataset):
    """
    Compute, for one storage object, the access level that each data-access
    module grants to it, and persist the combined evaluation as JSON.

    Entries already audited by a human (Name_User) are left untouched; all
    other entries are (re)computed by instantiating the module and calling
    its access() hook. A failure in a single module never aborts the whole
    evaluation - that module is recorded as DataAccess_Unknown instead.

    :param dataset: single-row dataset describing the object to evaluate
    :return: a CResult-style text (Success / Failure with details)
    """
    dso_id = dataset.value_by_name(0, 'dsoid', '')
    dso_data_type = dataset.value_by_name(0, 'dsodatatype', '')
    dso_object_type = dataset.value_by_name(0, 'dsoobjecttype', '')
    dso_object_name = dataset.value_by_name(0, 'dsoobjectname', '')
    da_content_text = CUtils.any_2_str(dataset.value_by_name(0, 'dso_da_result', ''))
    quality_text = dataset.value_by_name(0, 'dso_quality', '')
    retry_count = dataset.value_by_name(0, 'retry_times', 0)
    # Guard clause: after too many retries, flag the object for manual repair.
    if retry_count >= self.abnormal_job_retry_times():
        last_memo = CUtils.any_2_str(dataset.value_by_name(0, 'last_process_memo', None))
        process_result = CResult.merge_result(
            self.Failure,
            '{0}, \n系统已经重试{1}次, 仍然未能解决, 请人工检查修正后重试!'.format(
                last_memo,
                retry_count
            )
        )
        self.update_status(dso_id, None, process_result, self.ProcStatus_Error)
        return process_result
    quality_xml = CXml()
    quality_xml.load_xml(quality_text)
    da_result_json = CJson()
    da_result_json.load_json_text(da_content_text)
    CLogger().debug(
        '开始处理对象: {0}.{1}.{2}.{3}对各个子系统的支撑能力'.format(
            dso_id, dso_data_type, dso_object_type, dso_object_name))
    try:
        module_files = CFile.file_or_subpath_of_path(
            CSys.get_metadata_data_access_modules_root_dir(),
            '{0}_*.{1}'.format(self.Name_Module, self.FileExt_Py)
        )
        for module_file in module_files:
            module_key = CFile.file_main_name(module_file)
            # Skip modules whose access decision was approved manually;
            # the system must never overwrite a human audit.
            audit_kind = da_result_json.xpath_one(
                '{0}.{1}'.format(module_key, self.Name_Audit),
                self.Name_System
            )
            if CUtils.equal_ignore_case(audit_kind, self.Name_User):
                continue
            try:
                module_instance = CObject.create_module_instance(
                    CSys.get_metadata_data_access_modules_root_name(),
                    module_key,
                    self.get_mission_db_id()
                )
                module_title = CUtils.dict_value_by_name(
                    module_instance.information(), self.Name_Title, '')
                access_result = CUtils.any_2_str(
                    module_instance.access(dso_id, dso_object_name, dso_data_type, quality_xml))
                if CResult.result_success(access_result):
                    access_level = CResult.result_info(
                        access_result, self.Name_Access, self.DataAccess_Forbid)
                else:
                    CLogger().debug('模块[{0}]解析出现错误, 系统将忽略本模块, 继续处理下一个!'.format(module_key))
                    access_level = self.DataAccess_Unknown
                access_message = CResult.result_message(access_result)
                da_result_json.set_value_of_name(
                    module_key,
                    {
                        self.Name_Audit: self.Name_System,
                        self.Name_Result: access_level,
                        self.Name_Title: module_title,
                        self.Name_Message: access_message
                    }
                )
            except Exception as error:
                # Record the failing module as Unknown and carry on.
                CLogger().debug('模块[{0}]解析出现异常, 原因为[{1}], 请检查!'.format(module_key, error.__str__()))
                access_message = '模块[{0}]解析出现异常, 原因为[{1}], 请检查!'.format(module_key, error.__str__())
                da_result_json.set_value_of_name(
                    module_key,
                    {
                        self.Name_Audit: self.Name_System,
                        self.Name_Result: self.DataAccess_Unknown,
                        self.Name_Title: module_key,
                        self.Name_Message: access_message
                    }
                )
        process_result = CResult.merge_result(
            self.Success,
            '对象[{0}.{1}]访问权限解析成功!'.format(dso_id, dso_object_name)
        )
        self.update_status(
            dso_id,
            da_result_json.to_json(),
            process_result
        )
        return process_result
    except Exception as error:
        process_result = CResult.merge_result(
            self.Failure,
            '对象[{0}.{1}]访问权限解析出错, 原因为[{2}]!'.format(dso_id, dso_object_name, error.__str__())
        )
        self.update_status(dso_id, None, process_result)
        return process_result
def process_mission(self, dataset):
    """
    Parse the metadata of one storage object.

    Looks up the object's file information and the scan rule of its nearest
    ancestor directory, builds the matching type plugin, extracts metadata
    through CMetaDataParser, verifies every extraction step and writes the
    final status back to dm2_storage_object.

    Fixes applied:
      * the scan-rule lookup read column 'dsScanRule' while the query
        returns 'dsdScanRule', so the configured rule was always silently
        dropped;
      * the business-metadata step reported the entity-metadata error text
        (copy-paste error).

    :param dataset: single-row dataset describing the object to parse
    :return: a CResult-style text (Success / Failure with details)
    """
    dso_id = dataset.value_by_name(0, 'dsoid', '')
    dso_data_type = dataset.value_by_name(0, 'dsodatatype', '')
    dso_object_type = dataset.value_by_name(0, 'dsoobjecttype', '')
    dso_object_name = dataset.value_by_name(0, 'dsoobjectname', '')
    CLogger().debug('开始处理对象: {0}.{1}.{2}.{3}的元数据'.format(
        dso_id, dso_data_type, dso_object_type, dso_object_name))
    dso_object_retry_times = dataset.value_by_name(0, 'retry_times', 0)
    # Too many failed attempts: stop retrying and ask for manual repair.
    if dso_object_retry_times >= self.abnormal_job_retry_times():
        dso_object_last_process_memo = CUtils.any_2_str(
            dataset.value_by_name(0, 'dsometadataparsememo', None))
        process_result = CResult.merge_result(
            self.Failure,
            '{0}, \n系统已经重试{1}次, 仍然未能解决, 请人工检查修正后重试!'.format(
                dso_object_last_process_memo, dso_object_retry_times))
        self.db_update_object_exception(dso_id, process_result,
                                        self.ProcStatus_Error)
        return process_result
    ds_file_info = self.get_object_info(dso_id, dso_data_type)
    # A vanished file/directory is not an error: finish the job normally.
    if ds_file_info.value_by_name(0, 'query_object_valid',
                                  self.DB_False) == self.DB_False:
        process_result = CResult.merge_result(
            self.Success,
            '文件或目录[{0}]不存在,元数据无法解析, 元数据处理正常结束!'.format(
                ds_file_info.value_by_name(0, 'query_object_fullname', '')))
        self.db_update_object_status(dso_id, process_result)
        return process_result
    # Scan rule of the deepest ancestor directory that defines one.
    sql_get_rule = '''
        select dsdScanRule
        from dm2_storage_directory
        where dsdStorageid = :dsdStorageID
            and Position(dsddirectory || '{0}' in :dsdDirectory) = 1
            and dsdScanRule is not null
        order by dsddirectory desc
        limit 1
    '''.format(CFile.sep())
    rule_ds = CFactory().give_me_db(self.get_mission_db_id()).one_row(
        sql_get_rule,
        {
            'dsdStorageID': ds_file_info.value_by_name(0, 'query_object_storage_id', ''),
            'dsdDirectory': ds_file_info.value_by_name(0, 'query_object_relation_path', '')
        })
    # Fix: read the column the query actually returns (dsdScanRule); the old
    # name 'dsScanRule' never matched and always yielded ''.
    ds_rule_content = rule_ds.value_by_name(0, 'dsdScanRule', '')
    file_info_obj = CDMFilePathInfoEx(
        dso_data_type,
        ds_file_info.value_by_name(0, 'query_object_fullname', ''),
        ds_file_info.value_by_name(0, 'query_object_storage_id', ''),
        ds_file_info.value_by_name(0, 'query_object_file_id', ''),
        ds_file_info.value_by_name(0, 'query_object_file_parent_id', ''),
        ds_file_info.value_by_name(0, 'query_object_owner_id', ''),
        self.get_mission_db_id(),
        ds_rule_content)
    plugins_obj = CPluginsMng.plugins(file_info_obj, dso_object_type)
    if plugins_obj is None:
        process_result = CResult.merge_result(
            self.Failure,
            '文件或目录[{0}]的类型插件[{1}]不存在,元数据无法解析, 处理结束!'.format(
                ds_file_info.value_by_name(0, 'query_object_fullname', ''),
                dso_object_type))
        self.db_update_object_status(dso_id, process_result,
                                     self.ProcStatus_Error)
        return process_result
    plugins_obj.classified()
    if not plugins_obj.create_virtual_content():
        process_result = CResult.merge_result(
            self.Failure,
            '文件或目录[{0}]的内容提取失败, 元数据无法提取!'.format(
                ds_file_info.value_by_name(0, 'query_object_fullname', '')))
        self.db_update_object_status(dso_id, process_result)
        return process_result
    try:
        metadata_parser = CMetaDataParser(dso_id, dso_object_name,
                                          file_info_obj,
                                          plugins_obj.file_content,
                                          plugins_obj.get_information())
        process_result = plugins_obj.parser_metadata(metadata_parser)
        if CResult.result_success(process_result):
            # Each extraction step reports DB_False on failure; collect the
            # error text of every failed step.
            step_checks = (
                (metadata_parser.metadata.metadata_extract_result,
                 '实体元数据解析出现错误'),
                # Fix: this step previously reused the entity-metadata error
                # text (copy-paste error).
                (metadata_parser.metadata.metadata_bus_extract_result,
                 '业务元数据解析出现错误'),
                (metadata_parser.metadata.metadata_view_extract_result,
                 '快视图等可视化元数据解析出现错误'),
                (metadata_parser.metadata.metadata_time_extract_result,
                 '时间元数据解析出现错误'),
                (metadata_parser.metadata.metadata_spatial_extract_result,
                 '空间投影元数据解析出现错误'),
            )
            message = ''
            all_step_success = True
            for step_result, step_error_text in step_checks:
                if step_result == self.DB_False:
                    all_step_success = False
                    message = CUtils.str_append(message, step_error_text, ', ')
            if not all_step_success:
                process_result = CResult.merge_result(self.Failure, message)
            self.db_update_object_status(dso_id, process_result)
            return process_result
        else:
            self.db_update_object_status(dso_id, process_result)
            return process_result
    except Exception as error:
        process_result = CResult.merge_result(
            self.Failure,
            '文件或目录[{0}]元数据解析过程出现异常! 错误原因为: {1}'.format(
                ds_file_info.value_by_name(0, 'query_object_fullname', ''),
                error.__str__()))
        self.db_update_object_exception(dso_id, process_result)
        return process_result
    finally:
        # Always release the virtual content extracted above.
        plugins_obj.destroy_virtual_content()
def process_mission(self, dataset):
    """
    Parse the object detail (sub-file inventory) of one storage object.

    Looks up the object's file information and the scan rule of its nearest
    ancestor directory, builds the matching type plugin, runs the configured
    detail parser, copies/stats the object files and finally refreshes the
    owner object's size and modification time.

    Fix applied: the scan-rule lookup read column 'dsScanRule' while the
    query returns 'dsdScanRule', so the configured rule was always silently
    dropped.

    :param dataset: single-row dataset describing the object to parse
    :return: a CResult-style text (Success / Failure with details)
    """
    dso_id = dataset.value_by_name(0, 'dsoid', '')
    dso_data_type = dataset.value_by_name(0, 'dsodatatype', '')
    dso_object_type = dataset.value_by_name(0, 'dsoobjecttype', '')
    dso_object_name = dataset.value_by_name(0, 'dsoobjectname', '')
    CLogger().debug('开始处理对象: {0}.{1}.{2}.{3}的元数据'.format(
        dso_id, dso_data_type, dso_object_type, dso_object_name))
    ds_retry_times = dataset.value_by_name(0, 'retry_times', 0)
    # Too many failed attempts: stop retrying and ask for manual repair.
    if ds_retry_times >= self.abnormal_job_retry_times():
        ds_last_process_memo = CUtils.any_2_str(
            dataset.value_by_name(0, 'last_process_memo', None))
        process_result = CResult.merge_result(
            self.Failure,
            '{0}, \n系统已经重试{1}次, 仍然未能解决, 请人工检查修正后重试!'.format(
                ds_last_process_memo, ds_retry_times))
        self.db_update_object_status(dso_id, process_result,
                                     self.ProcStatus_Error)
        return process_result
    ds_file_info = self.get_object_info(dso_id, dso_data_type)
    # A vanished file/directory is not an error: mark it and finish normally.
    if ds_file_info.value_by_name(0, 'query_object_valid',
                                  self.DB_False) == self.DB_False:
        CFactory().give_me_db(self.get_mission_db_id()).execute(
            '''
            update dm2_storage_object
            set dsodetailparsestatus = 0
                , dsolastmodifytime = now()
                , dsodetailparsememo = '文件或目录不存在,元数据无法解析'
            where dsoid = :dsoid
            ''', {'dsoid': dso_id})
        return CResult.merge_result(
            self.Success,
            '文件或目录[{0}]不存在,元数据无法解析, 元数据处理正常结束!'.format(
                ds_file_info.value_by_name(0, 'query_object_relation_path', '')))
    # Scan rule of the deepest ancestor directory that defines one.
    sql_get_rule = '''
        select dsdScanRule
        from dm2_storage_directory
        where dsdStorageid = :dsdStorageID
            and position(dsddirectory || '{0}' in :dsdDirectory) = 1
            and dsdScanRule is not null
        order by dsddirectory desc
        limit 1
    '''.format(CFile.sep())
    rule_ds = CFactory().give_me_db(self.get_mission_db_id()).one_row(
        sql_get_rule,
        {
            'dsdStorageID': ds_file_info.value_by_name(0, 'query_object_storage_id', ''),
            'dsdDirectory': ds_file_info.value_by_name(0, 'query_object_relation_path', '')
        })
    # Fix: read the column the query actually returns (dsdScanRule); the old
    # name 'dsScanRule' never matched and always yielded ''.
    ds_rule_content = rule_ds.value_by_name(0, 'dsdScanRule', '')
    file_info_obj = CDMFilePathInfoEx(
        dso_data_type,
        ds_file_info.value_by_name(0, 'query_object_fullname', ''),
        ds_file_info.value_by_name(0, 'query_object_storage_id', ''),
        ds_file_info.value_by_name(0, 'query_object_file_id', ''),
        ds_file_info.value_by_name(0, 'query_object_file_parent_id', ''),
        ds_file_info.value_by_name(0, 'query_object_owner_id', ''),
        self.get_mission_db_id(),
        ds_rule_content)
    plugins_obj = CPluginsMng.plugins(file_info_obj, dso_object_type)
    if plugins_obj is None:
        return CResult.merge_result(
            self.Failure,
            '文件或目录[{0}]的类型插件[{1}]不存在,对象详情无法解析, 处理结束!'.format(
                ds_file_info.value_by_name(0, 'query_object_relation_path', ''),
                dso_object_type))
    try:
        plugins_obj.classified()
        plugins_information = plugins_obj.get_information()
        # The detail engine to use is declared by the plugin itself.
        detail_parser = CDetailParserMng.give_me_parser(
            CUtils.dict_value_by_name(
                plugins_information,
                plugins_obj.Plugins_Info_DetailEngine, None),
            dso_id,
            dso_object_name,
            file_info_obj,
            plugins_obj.object_detail_file_full_name_list())
        process_result = plugins_obj.parser_detail(detail_parser)
        if not CResult.result_success(process_result):
            self.db_update_object_status(dso_id, process_result)
            return process_result
        process_result = self.object_copy_stat(
            ds_file_info.value_by_name(0, 'query_object_storage_id', ''),
            dso_id,
            dso_object_name,
            ds_file_info.value_by_name(0, 'query_object_relation_path', ''))
        if CResult.result_success(process_result):
            # Refresh the parent object's size and last-modified time.
            self.__update_object_owner_object_size_and_modifytime(
                file_info_obj.owner_obj_id)
            result = CResult.merge_result(
                self.Success,
                '文件或目录[{0}]对象详情解析成功结束!'.format(
                    ds_file_info.value_by_name(
                        0, 'query_object_relation_path', '')))
            self.db_update_object_status(dso_id, result)
            return result
        else:
            self.db_update_object_status(dso_id, process_result)
            return process_result
    except Exception as error:
        result = CResult.merge_result(
            self.Failure,
            '文件或目录[{0}]对象详情解析过程出现错误! 错误原因为: {1}'.format(
                ds_file_info.value_by_name(0, 'query_object_relation_path', ''),
                error.__str__()))
        self.db_update_object_status(dso_id, result)
        return result