def get_sync_dict_list(self, insert_or_updata) -> list:
    """Build the field/value sync list for this object.

    insert_or_updata selects the statement kind: -1 means insert, 0 means
    update.  Strong convention: every field is registered through
    add_value_to_sync_dict_list(list, field_name, field_value[, option]).
    """
    field_list = self.get_sync_predefined_dict_list(insert_or_updata)
    record = self._dataset
    # Temporal fields all come from the dso_time json column.
    time_json = CJson()
    time_json.load_obj(record.value_by_name(0, 'dso_time', ''))
    self.add_value_to_sync_dict_list(
        field_list, 'begdate', time_json.xpath_one('start_time', ''))
    self.add_value_to_sync_dict_list(
        field_list, 'enddate', time_json.xpath_one('end_time', ''))
    self.add_value_to_sync_dict_list(
        field_list, 'imagedate',
        CUtils.to_day_format(time_json.xpath_one('time', ''),
                             time_json.xpath_one('time', '')))
    self.add_value_to_sync_dict_list(
        field_list, 'producetime',
        CUtils.to_day_format(time_json.xpath_one('time', ''),
                             time_json.xpath_one('time', '')))
    self.add_value_to_sync_dict_list(
        field_list, 'imagedatetag',
        self.transform_time_to_imagedatetag(
            CUtils.to_day_format(time_json.xpath_one('time', ''),
                                 time_json.xpath_one('time', ''))))
    # Only an insert (-1, truthy) marks the record as not-deleted.
    if insert_or_updata:
        self.add_value_to_sync_dict_list(field_list, 'isdel', '1')
    return field_list
def test_get_attr(self):
    """xpath_one resolves plain keys, missing keys (default) and indexed paths."""
    parsed = CJson()
    parsed.load_json_text(self.test_text)
    expectations = [
        ('a', -1, 1),
        ('aa', -1, -1),
        ('student[0].name', '', '小明'),
        ('student[1].name', '', '小王'),
    ]
    for path, default, expected in expectations:
        assert parsed.xpath_one(path, default) == expected
def create_view_json(self, params_json: CJson):
    """Unpack the process-call parameter json and delegate to create_view.

    Packs the four path parameters into one json so a child process can
    receive them as a single argument.
    """
    paths = [
        params_json.xpath_one(key, None)
        for key in ('image_path', 'browse_full_path',
                    'thumb_full_path', 'geotiff_full_path')
    ]
    return self.create_view(*paths)
def a_json_element(cls, audit_id, audit_title, audit_group, audit_result,
                   json_obj: CJson, xpath: str, qa_items: dict) -> list:
    """Audit one element of a json metadata document.

    Checks that the element addressed by *xpath* inside *json_obj*
    satisfies every inspection item in *qa_items*.

    :param audit_id: identifier of this audit entry
    :param audit_title: human-readable title of this audit entry
    :param audit_group: audit group the entry belongs to
    :param audit_result: initial result code for the audit record
    :param json_obj: parsed json metadata (may be None)
    :param xpath: json path of the element to inspect
    :param qa_items: inspection items the value must satisfy
    :return: list of audit result dicts
    """
    audit_dict = cls.__init_audit_dict__(audit_id, audit_title,
                                         audit_group, audit_result)
    # Guard: no document at all.
    if json_obj is None:
        audit_dict[cls.Name_Message] = 'Json对象不合法, 节点[{0}]不存在'.format(
            xpath)
        return [audit_dict]
    node_value = json_obj.xpath_one(xpath, None)
    # Guard: document present but the addressed node is missing.
    if node_value is None:
        audit_dict[cls.Name_Message] = 'Json对象的节点[{0}]不存在, 请检查修正!'.format(
            xpath)
        return [audit_dict]
    return cls.__a_check_value__(audit_dict, node_value,
                                 '属性[{0}]'.format(audit_title), qa_items)
def search(self, module_name: str, search_json_obj: CJson, other_option: dict = None) -> CDataSet:
    """Search storage objects matching the json search conditions.

    Returns a dataset with the columns:
    object_id, object_name, object_type, object_data_type,
    object_parent_id, object_size, object_lastmodifytime

    :param module_name: when neither empty nor ModuleName_MetaData, results
        are restricted to objects notified to that module
        (table dm2_storage_obj_na).
    :param search_json_obj: search conditions as json; None returns an
        empty dataset.
    :param other_option: reserved, currently unused
    :return: CDataSet of the matched objects
    """
    if search_json_obj is None:
        return CDataSet()

    sql_from = ''
    sql_where = ''
    filter_by_module = (not CUtils.equal_ignore_case(module_name, self.ModuleName_MetaData)) and \
                       (not CUtils.equal_ignore_case(module_name, ''))
    if filter_by_module:
        sql_from = ', dm2_storage_obj_na '
        # Fix: the module name was emitted as the literal string
        # 'module_name' instead of being substituted in.
        sql_where = " dm2_storage_obj_na.dson_app_id = '{0}' ".format(CUtils.any_2_str(module_name))
        # Fix: the join to the notify table was hardcoded in the final
        # statement even when dm2_storage_obj_na was absent from the
        # from-clause; it is only valid (and only added) here.
        sql_where = CUtils.str_append(
            sql_where,
            'dm2_storage_object.dsoid = dm2_storage_obj_na.dson_object_id',
            ' and ')
        condition_obj_access = search_json_obj.xpath_one(self.Name_Access, self.DataAccess_Pass)
        if not CUtils.equal_ignore_case(condition_obj_access, ''):
            condition = "dm2_storage_obj_na.dson_object_access = '{0}'".format(
                CUtils.any_2_str(condition_obj_access))
            sql_where = CUtils.str_append(sql_where, condition, ' and ')

    condition_inbound_id = search_json_obj.xpath_one(self.Name_InBound, None)
    if not CUtils.equal_ignore_case(condition_inbound_id, ''):
        # Fix: column was qualified as 'dm2_storage_obj', a table that is
        # never in the from-clause; dso_ib_id lives on dm2_storage_object.
        condition = "dm2_storage_object.dso_ib_id = '{0}'".format(
            CUtils.any_2_str(condition_inbound_id))
        sql_where = CUtils.str_append(sql_where, condition, ' and ')

    condition_tag = search_json_obj.xpath_one(self.Name_Tag, None)
    if condition_tag is not None:
        if isinstance(condition_tag, list):
            condition = CUtils.list_2_str(condition_tag, "'", ", ", "'", True)
        else:
            condition = CUtils.list_2_str([condition_tag], "'", ", ", "'", True)
        if not CUtils.equal_ignore_case(condition, ''):
            # Fix: postgresql array-contains operator is '@>'; the original
            # '@ >' (with an inner space) is a syntax error.
            condition = 'dm2_storage_object.dsotags @> array[{0}]:: CHARACTER VARYING[]'.format(
                condition)
            sql_where = CUtils.str_append(sql_where, condition, ' and ')

    condition_id = search_json_obj.xpath_one(self.Name_ID, None)
    if condition_id is not None:
        if isinstance(condition_id, list):
            condition = self.__condition_list_2_sql(
                'dm2_storage_object_def.dsodid', condition_id, True)
        else:
            condition = self.__condition_value_like_2_sql(
                'dm2_storage_object_def.dsodid', condition_id, True)
        sql_where = CUtils.str_append(sql_where, condition, ' and ')

    condition_name = search_json_obj.xpath_one(self.Name_Name, None)
    if condition_name is not None:
        if isinstance(condition_name, list):
            condition = self.__condition_list_2_sql(
                'dm2_storage_object_def.dsodname', condition_name, True)
        else:
            condition = self.__condition_value_like_2_sql(
                'dm2_storage_object_def.dsodname', condition_name, True)
        sql_where = CUtils.str_append(sql_where, condition, ' and ')

    condition_type = search_json_obj.xpath_one(self.Name_Type, None)
    if condition_type is not None:
        if isinstance(condition_type, list):
            condition = self.__condition_list_2_sql(
                'dm2_storage_object_def.dsodtype', condition_type, True)
        else:
            condition = self.__condition_value_like_2_sql(
                'dm2_storage_object_def.dsodtype', condition_type, True)
        sql_where = CUtils.str_append(sql_where, condition, ' and ')

    condition_group = search_json_obj.xpath_one(self.Name_Group, None)
    if condition_group is not None:
        if isinstance(condition_group, list):
            condition = self.__condition_list_2_sql(
                'dm2_storage_object_def.dsodgroup', condition_group, True)
        else:
            condition = self.__condition_value_like_2_sql(
                'dm2_storage_object_def.dsodgroup', condition_group, True)
        sql_where = CUtils.str_append(sql_where, condition, ' and ')

    if not CUtils.equal_ignore_case(sql_where, ''):
        sql_where = ' and {0}'.format(sql_where)
    sql_search = '''
    select dm2_storage_object.dsoid as object_id
         , dm2_storage_object.dsoobjectname as object_name
         , dm2_storage_object.dsoobjecttype as object_type
         , dm2_storage_object.dsodatatype as object_data_type
         , dm2_storage_object.dsoparentobjid as object_parent_id
         , dm2_storage_object.dso_volumn_now as object_size
         , dm2_storage_object.dso_obj_lastmodifytime as object_lastmodifytime
    from dm2_storage_object, dm2_storage_object_def {0}
    where dm2_storage_object.dsoobjecttype = dm2_storage_object_def.dsodid {1}
    '''.format(sql_from, sql_where)
    return CFactory().give_me_db(self.db_server_id).all_row(sql_search)
def test_load_obj(self):
    """load_obj should make dict keys addressable via xpath_one."""
    source = {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5}
    parsed = CJson()
    parsed.load_obj(source)
    assert parsed.xpath_one('a', -1) == 1
        # NOTE(review): these three assertions are the tail of a test method
        # whose `def` line falls outside this chunk (they match test_get_attr).
        assert json.xpath_one('aa', -1) == -1
        assert json.xpath_one('student[0].name', '') == '小明'
        assert json.xpath_one('student[1].name', '') == '小王'

    def test_get_attr_by_class_method(self):
        # Class-level helper reads attribute 'b' straight from the json text.
        value = CJson.json_attr_value(self.test_text, 'b', 1)
        assert value == 2

    def test_get_chn_attr_by_class_method(self):
        # Non-ascii (Chinese) attribute names must resolve as well.
        value = CJson.json_attr_value(self.test_text, '中文属性', 1)
        assert value == 5

    def test_load_obj(self):
        # Loading a plain dict should make its keys addressable by xpath.
        data = {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5}
        json_obj = CJson()
        json_obj.load_obj(data)
        assert json_obj.xpath_one('a', -1) == 1


if __name__ == '__main__':
    # Ad-hoc manual check: trim a gdb description json down to one layer.
    # NOTE(review): hard-coded local path — only works on the author's machine.
    file_name = '/Users/wangxiya/Downloads/gdb.json'
    json = CJson()
    json.load_file(file_name)
    data_source = json.xpath_one('datasource', None)
    layers = json.xpath_one('layers', None)
    target_json = CJson()
    target_json.set_value_of_name('datasource', data_source)
    target_json.set_value_of_name('layer_count', 1)
    # Keep only the first layer of the original document.
    target_json.set_value_of_name('layers', [layers[0]])
    print(target_json.to_json())
def process_mission(self, dataset) -> str:
    """Notify third-party modules about every object of one inbound batch.

    Reads the inbound batch row from *dataset*, loads the objects of that
    batch from dm2_storage_object, and calls notify_object on each enabled
    data-access module for each object.  The aggregated outcome is stored
    via update_notify_result and returned as a CResult string.

    :param dataset: one-row dataset describing the inbound batch
        (query_storage_id / query_ib_id / query_ib_option …)
    :return: merged CResult string (Success or Failure plus message)
    """
    ds_storage_id = dataset.value_by_name(0, 'query_storage_id', '')
    ds_storage_title = dataset.value_by_name(0, 'query_storage_title', '')
    ds_ib_id = dataset.value_by_name(0, 'query_ib_id', '')
    ds_ib_directory_name = dataset.value_by_name(0, 'query_ib_relation_dir', '')
    ds_ib_batch_no = dataset.value_by_name(0, 'query_ib_batchno', '')
    ds_ib_option = CUtils.any_2_str(
        dataset.value_by_name(0, 'query_ib_option', ''))
    CLogger().debug('与第三方模块同步的目录为: {0}.{1}'.format(ds_ib_id,
                                                   ds_ib_directory_name))
    data_count = 0
    try:
        # Modules to notify: taken from the batch option json; when absent,
        # fall back to scanning the modules directory for module_*.py files.
        module_name_list = CJson.json_attr_value(
            ds_ib_option, self.Path_IB_Opt_Notify_module, None)
        if module_name_list is None:
            modules_root_dir = CSys.get_metadata_data_access_modules_root_dir(
            )
            module_file_list = CFile.file_or_subpath_of_path(
                modules_root_dir, '{0}_*.{1}'.format(self.Name_Module,
                                                     self.FileExt_Py))
            module_name_list = list()
            for module_file in module_file_list:
                module_name_list.append(CFile.file_main_name(module_file))
        sql_ib_need_notify_object = '''
        select dsoid, dsoobjecttype, dsoobjectname, dso_da_result
        from dm2_storage_object where dso_ib_id = :ib_id
        '''
        # NOTE: rebinds the `dataset` parameter to the object list.
        dataset = CFactory().give_me_db(self.get_mission_db_id()).all_row(
            sql_ib_need_notify_object, {'ib_id': ds_ib_id})
        if dataset.is_empty():
            result = CResult.merge_result(
                self.Success,
                '存储[{0}]下, 批次为[{1}]的目录[{2}]下无任何对象, 不再通知给第三方应用!'.format(
                    ds_storage_title, ds_ib_batch_no, ds_ib_directory_name))
            self.update_notify_result(ds_ib_id, result)
            return result
        CLogger().debug(
            '存储[{0}]下, 批次为[{1}]的目录[{2}]下有[{3}]个对象等待通知给第三方应用!'.format(
                ds_storage_title, ds_ib_batch_no, ds_ib_directory_name,
                dataset.size()))
        data_count = dataset.size()
        error_message = ''
        for data_index in range(data_count):
            record_object = dataset.record(data_index)
            object_id = CUtils.dict_value_by_name(record_object, 'dsoid', '')
            object_type = CUtils.dict_value_by_name(
                record_object, 'dsoobjecttype', '')
            object_name = CUtils.dict_value_by_name(
                record_object,
                'dsoobjectname', '')
            # Per-object data-access verdicts, one sub-node per module.
            object_da_result_text = CUtils.any_2_str(
                CUtils.dict_value_by_name(record_object, 'dso_da_result', ''))
            object_da_result = CJson()
            object_da_result.load_json_text(object_da_result_text)
            for module_name in module_name_list:
                module_obj = CObject.create_module_instance(
                    CSys.get_metadata_data_access_modules_root_name(),
                    module_name, self.get_mission_db_id())
                module_id = module_name
                module_title = CUtils.dict_value_by_name(
                    module_obj.information(), self.Name_Title, '')
                module_enable = CUtils.dict_value_by_name(
                    module_obj.information(), self.Name_Enable, True)
                # Disabled modules are skipped entirely.
                if not module_enable:
                    continue
                # Default is Forbid when the module left no verdict.
                module_access = object_da_result.xpath_one(
                    '{0}.{1}'.format(module_id, self.Name_Result),
                    self.DataAccess_Forbid)
                module_access_memo = object_da_result.xpath_one(
                    '{0}.{1}'.format(module_id, self.Name_Message), '')
                CLogger().debug(
                    '存储[{0}]下, 批次为[{1}]的目录[{2}]下的对象[{3}], 与模块[{4}]的访问权限为[{5}]!'
                    .format(ds_storage_title, ds_ib_batch_no,
                            ds_ib_directory_name, object_name, module_title,
                            module_access))
                # TODO(王西亚): decide carefully whether to re-enable this
                # filter — only 'pass', or both 'pass' and 'wait'!
                # if not \
                #         (
                #                 CUtils.equal_ignore_case(module_access, self.DataAccess_Pass)
                #                 or CUtils.equal_ignore_case(module_access, self.DataAccess_Wait)
                #         ):
                #     continue
                result = module_obj.notify_object(ds_ib_id, module_access,
                                                  module_access_memo,
                                                  object_id, object_name,
                                                  object_type, None)
                # Collect per-module errors without aborting the batch.
                if not CResult.result_success(result):
                    message = CResult.result_message(result)
                    CLogger().debug(
                        '存储[{0}]下, 批次为[{1}]的目录[{2}]下的对象[{3}], 与模块[{4}]的通知处理结果出现错误, 详细情况: [{5}]!'
                        .format(ds_storage_title, ds_ib_batch_no,
                                ds_ib_directory_name, object_name,
                                module_title, message))
                    error_message = CUtils.str_append(
                        error_message, message)
        if CUtils.equal_ignore_case(error_message, ''):
            result = CResult.merge_result(
                self.Success,
                '存储[{0}]下, 批次为[{1}]的目录[{2}]下有[{3}]个对象成功通知给第三方应用!'.format(
                    ds_storage_title, ds_ib_batch_no, ds_ib_directory_name,
                    data_count))
            self.update_notify_result(ds_ib_id, result)
            return result
        else:
            result = CResult.merge_result(
                self.Failure,
                '存储[{0}]下, 批次为[{1}]的目录[{2}]下有[{3}]个对象在通知给第三方应用时, 部分出现错误! 错误信息如下: \n{4}'
                .format(ds_storage_title, ds_ib_batch_no,
                        ds_ib_directory_name, data_count, error_message))
            self.update_notify_result(ds_ib_id, result)
            return result
    except Exception as error:
        result = CResult.merge_result(
            self.Failure,
            '存储[{0}]下, 批次为[{1}]的目录[{2}]下有[{3}]个对象通知给第三方应用时出现异常! 错误原因为: {4}!'
            .format(ds_storage_title, ds_ib_batch_no, ds_ib_directory_name,
                    data_count, error.__str__()))
        self.update_notify_result(ds_ib_id, result)
        return result
def get_sync_mdb_dict_list(self, insert_or_updata) -> list:
    """Build the field/value sync list for the mdb product table.

    insert_or_updata: self.DB_True means insert, DB_False means update.
    Strong convention: every field is registered through
    add_value_to_sync_dict_list(list, field_name, field_value[, option]).
    """
    object_id = self._obj_id
    object_name = self._obj_name
    dsometadataxml_bus = self._dataset.value_by_name(
        0, 'dsometadataxml_bus', '')
    dso_time = self._dataset.value_by_name(0, 'dso_time', '')
    dso_time_json = CJson()  # temporal data json
    dso_time_json.load_obj(dso_time)
    metadataxml_bus_xml = CXml()  # business metadata xml
    metadataxml_bus_xml.load_xml(dsometadataxml_bus)
    sync_dict_list = self.get_sync_predefined_dict_list(insert_or_updata)
    self.add_value_to_sync_dict_list(sync_dict_list, 'aprsdid', object_id)
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'aprswid',
        self._dataset.value_by_name(0, 'dsoparentobjid', ''))
    # sync_dict['fname']  # left empty
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'fno',
        metadataxml_bus_xml.get_element_text_by_xpath_one(
            "//item[@name='th']"))
    '''
    Map-sheet name layout (1:1,000,000 sheet naming):
    object_name[0:1]  1M-sheet row, a letter
    object_name[1:3]  1M-sheet column, digits
    object_name[3:4]  scale code, a letter
    object_name[4:7]  sheet row number, digits
    object_name[7:10] sheet column number, digits
    '''
    # Only sheet-style names (leading letter) carry row/column information.
    if CUtils.text_is_alpha(object_name[0:1]):
        self.add_value_to_sync_dict_list(sync_dict_list, 'hrowno',
                                         object_name[0:1])
        self.add_value_to_sync_dict_list(sync_dict_list, 'hcolno',
                                         object_name[1:3])
        self.add_value_to_sync_dict_list(sync_dict_list, 'scalecode',
                                         object_name[3:4])
        self.add_value_to_sync_dict_list(sync_dict_list, 'rowno',
                                         object_name[4:7])
        self.add_value_to_sync_dict_list(sync_dict_list, 'colno',
                                         object_name[7:10])
    # sync_dict['expandextent']  # left empty
    # sync_dict['pupdatedate']  # left empty
    # sync_dict['pversion']  # left empty
    # sync_dict['publishdate']  # left empty
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'dataformat',
        metadataxml_bus_xml.get_element_text_by_xpath_one(
            "//item[@name='sjgs']"))
    # sync_dict['maindatasource']  # left empty
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'dsometadatajson',
        self._dataset.value_by_name(0, 'dsometadataxml_bus', ''))
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'createrorganize',
        metadataxml_bus_xml.get_element_text_by_xpath_one(
            "//item[@name='sjscdwm']"))
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'submitorganize',
        metadataxml_bus_xml.get_element_text_by_xpath_one(
            "//item[@name='sjbqdwm']"))
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'copyrightorgnize',
        metadataxml_bus_xml.get_element_text_by_xpath_one(
            "//item[@name='sjcbdwm']"))
    # sync_dict['supplyorganize']  # left empty
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'colormodel',
        metadataxml_bus_xml.get_element_text_by_xpath_one(
            "//item[@name='yxscms']"))
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'piexldepth',
        metadataxml_bus_xml.get_element_text_by_xpath_one(
            "//item[@name='xsws']"))
    # sync_dict['scale']  # left empty
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'mainrssource',
        metadataxml_bus_xml.get_element_text_by_xpath_one(
            "//item[@name='wxmc']"))
    # Fields maintained by the plugin itself:
    self.add_value_to_sync_dict_list(sync_dict_list, 'datacount', 1)
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'secrecylevel',
        metadataxml_bus_xml.get_element_text_by_xpath_one(
            "//item[@name='mj']"))
    # sync_dict['regioncode']  # left empty
    # sync_dict['regionname']  # left empty
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'resolution',
        metadataxml_bus_xml.get_element_text_by_xpath_one(
            "//item[@name='dmfbl']"))
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'imagedate',
        CUtils.to_day_format(dso_time_json.xpath_one('time', ''),
                             dso_time_json.xpath_one('time', '')))
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'begdate',
        CUtils.to_day_format(dso_time_json.xpath_one('start_time', ''),
                             dso_time_json.xpath_one('start_time', '')))
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'enddate',
        CUtils.to_day_format(dso_time_json.xpath_one('end_time', ''),
                             dso_time_json.xpath_one('end_time', '')))
    return sync_dict_list
def access(self) -> str:
    """Analyze whether this module may access the current object.

    Combines the stored quality-check summary (total / metadata.data /
    metadata.business) with the database-level checks from
    db_access_check, and returns a CResult whose Name_Access extra info
    is one of DataAccess_Pass / DataAccess_Wait / DataAccess_Forbid.
    """
    try:
        # quality_info_xml = self._quality_info  # quality-check xml (unused here)
        quality_summary = self._dataset.value_by_name(
            0, 'dso_quality_summary', '')
        quality_summary_json = CJson()
        quality_summary_json.load_obj(quality_summary)
        access_wait_flag = self.DB_False  # True when any check says "wait"
        access_forbid_flag = self.DB_False  # True when any check says "forbid"
        message = ''
        # File and image quality-check verdicts.
        file_qa = quality_summary_json.xpath_one('total', '')
        image_qa = quality_summary_json.xpath_one('metadata.data', '')
        business_qa = quality_summary_json.xpath_one(
            'metadata.business', '')
        # Any error forbids access; any warn merely delays it.
        if CUtils.equal_ignore_case(file_qa, self.QA_Result_Error) \
                or CUtils.equal_ignore_case(image_qa, self.QA_Result_Error) \
                or CUtils.equal_ignore_case(business_qa, self.QA_Result_Error):
            message = message + '[数据与其相关文件的质检存在error!请进行修正!]'
            access_forbid_flag = self.DB_True
        elif CUtils.equal_ignore_case(file_qa, self.QA_Result_Warn) \
                or CUtils.equal_ignore_case(image_qa, self.QA_Result_Warn) \
                or CUtils.equal_ignore_case(business_qa, self.QA_Result_Warn):
            message = message + '[数据与其相关文件的质检存在warn!请进行检查!]'
            access_wait_flag = self.DB_True
        else:
            pass
        # Database-level checks may escalate either flag.
        access_wait_flag, access_forbid_flag, message = \
            self.db_access_check(access_wait_flag, access_forbid_flag,
                                 message)
        # Forbid beats wait beats pass.
        access_flag = self.DataAccess_Pass
        if access_forbid_flag:
            access_flag = self.DataAccess_Forbid
        elif access_wait_flag:
            access_flag = self.DataAccess_Wait
        if CUtils.equal_ignore_case(message, ''):
            message = '模块可以进行访问!'
        result = CResult.merge_result(
            self.Success,
            '模块[{0}.{1}]对对象[{2}]的访问能力已经分析完毕!分析结果为:{3}'.format(
                CUtils.dict_value_by_name(self.information(), self.Name_ID,
                                          ''),
                CUtils.dict_value_by_name(self.information(),
                                          self.Name_Title, ''),
                self._obj_name, message))
        return CResult.merge_result_info(result, self.Name_Access,
                                         access_flag)
    except Exception as error:
        # On any unexpected failure, fail closed: report Forbid.
        result = CResult.merge_result(
            self.Failure,
            '模块[{0}.{1}]对对象[{2}]的访问能力的分析存在异常!详细情况: {3}!'.format(
                CUtils.dict_value_by_name(self.information(), self.Name_ID,
                                          ''),
                CUtils.dict_value_by_name(self.information(),
                                          self.Name_Title, ''),
                self._obj_name, error.__str__()))
        return CResult.merge_result_info(result, self.Name_Access,
                                         self.DataAccess_Forbid)
def process_main_table(self):
    """Fill and persist the product main table row for the current object.

    Copies identification, geometry, temporal and resolution fields from
    the object dataset / business metadata into the configured main table
    (default 'ap_product') and saves it; insert-only columns (addtime,
    isdel, projectnames) are set only when the row does not exist yet.

    :return: result of main_table.save_data()
    """
    object_table_id = self._obj_id  # object oid
    object_table_data = self._dataset
    metadata_bus_dict = self.get_metadata_bus_dict()
    main_table_name = CUtils.dict_value_by_name(self.information(),
                                                'main_table_name',
                                                'ap_product')
    main_table = CTable()
    main_table.load_info(self._db_id, main_table_name)
    main_table.column_list.column_by_name('id').set_value(object_table_id)
    # Fall back to the storage object name when metadata has no product name.
    productname = CUtils.dict_value_by_name(metadata_bus_dict,
                                            'productname', None)
    if CUtils.equal_ignore_case(productname, ''):
        productname = object_table_data.value_by_name(
            0, 'dsoobjectname', None)
    main_table.column_list.column_by_name('productname').set_value(
        productname)
    main_table.column_list.column_by_name('producttype').set_value(
        CUtils.dict_value_by_name(metadata_bus_dict, 'producttype', None))
    main_table.column_list.column_by_name('regioncode').set_null()
    main_table.column_list.column_by_name('productattribute').set_value(
        CUtils.dict_value_by_name(metadata_bus_dict, 'productattribute',
                                  None))
    centerlatitude = CUtils.dict_value_by_name(metadata_bus_dict,
                                               'centerlatitude', None)
    centerlongitude = CUtils.dict_value_by_name(metadata_bus_dict,
                                                'centerlongitude', None)
    # Stored as "lon,lat".
    centerlonlat = '{0},{1}'.format(centerlongitude, centerlatitude)
    main_table.column_list.column_by_name('centerlonlat').set_value(
        centerlonlat)
    # Geometry columns are computed server-side from dm2_storage_object.
    main_table.column_list.column_by_name('geomwkt').set_sql('''
        st_astext(
        (select dso_geo_wgs84 from dm2_storage_object where dsoid='{0}')
        )
        '''.format(object_table_id))
    main_table.column_list.column_by_name('geomobj').set_sql('''
        (select dso_geo_wgs84 from dm2_storage_object where dsoid='{0}')
        '''.format(object_table_id))
    main_table.column_list.column_by_name('browserimg').set_value(
        '{0}{1}'.format(
            CFile.sep(),
            object_table_data.value_by_name(0, 'dso_browser', None)))
    main_table.column_list.column_by_name('thumbimg').set_value(
        '{0}{1}'.format(
            CFile.sep(),
            object_table_data.value_by_name(0, 'dso_thumb', None)))
    main_table.column_list.column_by_name('publishdate').set_value(
        CUtils.dict_value_by_name(metadata_bus_dict, 'publishdate', None))
    main_table.column_list.column_by_name('copyright').set_value(
        CUtils.dict_value_by_name(metadata_bus_dict, 'copyright', None))
    dso_time = object_table_data.value_by_name(0, 'dso_time', None)
    dso_time_json = CJson()
    dso_time_json.load_obj(dso_time)
    imgdate = dso_time_json.xpath_one('time', None)
    # Only the year part (first four characters) is stored.
    if not CUtils.equal_ignore_case(imgdate, ''):
        main_table.column_list.column_by_name('imgdate').set_value(
            imgdate[0:4])
    else:
        main_table.column_list.column_by_name('imgdate').set_null()
    main_table.column_list.column_by_name('starttime').set_value(
        dso_time_json.xpath_one('start_time', None))
    main_table.column_list.column_by_name('endtime').set_value(
        dso_time_json.xpath_one('end_time', None))
    resolution = CUtils.any_2_str(
        CUtils.dict_value_by_name(metadata_bus_dict, 'resolution', None))
    if not CUtils.equal_ignore_case(resolution, ''):
        if '/' in resolution:
            # Multiple '/'-separated values: keep the smallest valid one.
            resolution_list = resolution.split('/')
            temp_list = list()
            # Fix: loop variable no longer shadows `resolution`.
            for resolution_item in resolution_list:
                temp_list.append(CUtils.to_decimal(resolution_item, -1))
            temp_list = list(set(temp_list))  # deduplicate
            if -1 in temp_list:
                temp_list.remove(-1)  # drop unparseable entries
            if len(temp_list) > 0:
                main_table.column_list.column_by_name(
                    'resolution').set_value(min(temp_list))
            else:
                main_table.column_list.column_by_name(
                    'resolution').set_value(0)
        else:
            main_table.column_list.column_by_name('resolution').set_value(
                resolution)
    else:
        main_table.column_list.column_by_name('resolution').set_value(0)
    main_table.column_list.column_by_name('filesize').set_sql('''
        (select sum(dodfilesize) from dm2_storage_obj_detail
        where dodobjectid='{0}')
        '''.format(object_table_id))
    # Product id: metadata first, then directory/file name, else null.
    productid = CUtils.dict_value_by_name(metadata_bus_dict, 'productid',
                                          None)
    if CUtils.equal_ignore_case(productid, ''):
        object_type = object_table_data.value_by_name(0, 'dsodatatype', '')
        if CUtils.equal_ignore_case(object_type, self.Name_Dir):
            main_table.column_list.column_by_name('productid').set_value(
                object_table_data.value_by_name(0, 'dsoobjectname', None))
        elif CUtils.equal_ignore_case(object_type, self.Name_File):
            main_table.column_list.column_by_name('productid').set_sql('''
                (SELECT dsffilename FROM dm2_storage_file
                WHERE dsf_object_id = '{0}')
                '''.format(object_table_id))
        else:
            main_table.column_list.column_by_name('productid').set_null()
    else:
        main_table.column_list.column_by_name('productid').set_value(
            productid)
    main_table.column_list.column_by_name('remark').set_value(
        CUtils.dict_value_by_name(metadata_bus_dict, 'remark', None))
    main_table.column_list.column_by_name('extent').set_sql('''
        (select dso_geo_bb_wgs84 from dm2_storage_object where dsoid='{0}')
        '''.format(object_table_id))
    main_table.column_list.column_by_name('proj').set_null()  # raw data stays empty
    main_table.column_list.column_by_name('dataid').set_value(
        object_table_id)
    main_table.column_list.column_by_name('shplog').set_null()
    if not main_table.if_exists():
        now_time = CUtils.any_2_str(
            datetime.datetime.now().strftime('%F %T'))
        main_table.column_list.column_by_name('addtime').set_value(
            now_time)
        main_table.column_list.column_by_name('isdel').set_value(0)
        # Fix: was set_value('productname') — the literal column name was
        # stored instead of the product name computed above.
        main_table.column_list.column_by_name('projectnames').set_value(
            productname)
    result = main_table.save_data()
    return result
def process_mission(self, dataset) -> str:
    """Publish one service deployment described by *dataset*.

    Detailed algorithm is documented in readme.md, section
    [##### 服务发布调度].
    TODO(张雄雄): start building the service publishing framework.

    :param dataset: one-row dataset with dpid / dptitle / dpname /
        dpserviceparams of the deployment to publish
    :return: merged CResult string; the outcome is also written back via
        update_deploy_result
    """
    deploy_id = dataset.value_by_name(0, 'dpid', '')
    deploy_s_title = dataset.value_by_name(0, 'dptitle', '')
    deploy_s_name = dataset.value_by_name(0, 'dpname', '')
    CLogger().debug('即将发布服务为: {0}.{1}.{2}'.format(deploy_id,
                                                  deploy_s_name,
                                                  deploy_s_title))
    try:
        # Service definition from the stored parameter json.
        serdef = ServiceDef(deploy_s_name, deploy_s_title)
        service_params = dataset.value_by_name(0, 'dpserviceparams', '')
        # Fix: was `service_params == '' or service_params == None`;
        # truthiness covers both, and `== None` is non-idiomatic.
        if not service_params:
            raise Exception("缺少服务参数")
        service_params_json = CJson()
        service_params_json.load_json_text(service_params)
        serdef.coordinates = service_params_json.xpath_one(
            "coordinates", ['EPSG:4326'])
        serdef.cache = service_params_json.xpath_one("cache", False)
        serdef.cachelevel = service_params_json.xpath_one("cachelevel", 16)
        # Layer definitions of the service.
        factory = CFactory()
        db = factory.give_me_db(self.get_mission_db_id())
        layer_rows = db.all_row(
            "select dpid,dplayer_id,dplayer_name,dplayer_datatype,dplayer_queryable,"
            "dplayer_resultfields,dplayer_style from dp_v_qfg_layer where dpservice_id = '{0}'"
            .format(deploy_id))
        for i in range(layer_rows.size()):
            row = layer_rows.record(i)
            ser_lyrdef = LayerDef()
            ser_lyrdef.id = row[1]
            ser_lyrdef.name = row[2]
            ser_lyrdef.type = row[3]
            ser_lyrdef.sourcetype = 'File'
            # NOTE(review): row[6] is dplayer_style in the select list —
            # confirm that classidetify really comes from the style column.
            ser_lyrdef.classidetify = row[6]
            layer_file_rows = db.all_row(
                "select dpdf_group_id,dpdf_object_fullname from dp_v_qfg_layer_file where dpdf_layer_id = '{0}'"
                .format(row[0]))
            # Group the layer files by dpdf_group_id.
            for k in range(layer_file_rows.size()):
                file_row = layer_file_rows.record(k)
                ser_lyrdef.sourcepath.setdefault(file_row[0], []).append(
                    file_row[1])
            serdef.layers.append(ser_lyrdef)
        ProcessService(serdef)
        result = CResult.merge_result(
            self.Success, '服务: {0}.{1}.{2}发布成功'.format(
                deploy_id, deploy_s_name, deploy_s_title))
        self.update_deploy_result(deploy_id, result)
        return result
    except Exception as error:
        result = CResult.merge_result(
            self.Failure, '服务: {0}.{1}.{2}发布失败, 错误原因为: {3}'.format(
                deploy_id, deploy_s_name, deploy_s_title, error.__str__()))
        self.update_deploy_result(deploy_id, result)
        return result
def get_sync_xml_dict_list(self, insert_or_updata) -> list:
    """Build the field/value sync list for the xml product table.

    insert_or_updata: self.DB_True means insert, DB_False means update.
    Strong convention: every field is registered through
    add_value_to_sync_dict_list(list, field_name, field_value[, option]).
    """
    object_id = self._obj_id
    object_name = self._obj_name
    dsometadataxml_bus = self._dataset.value_by_name(
        0, 'dsometadataxml_bus', '')
    dso_time = self._dataset.value_by_name(0, 'dso_time', '')
    dso_time_json = CJson()  # temporal data json
    dso_time_json.load_obj(dso_time)
    metadataxml_bus_xml = CXml()  # business metadata xml
    metadataxml_bus_xml.load_xml(dsometadataxml_bus)
    sync_dict_list = self.get_sync_predefined_dict_list(insert_or_updata)
    self.add_value_to_sync_dict_list(sync_dict_list, 'aprndid', object_id)
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'aprnwid',
        self._dataset.value_by_name(0, 'dsoparentobjid', ''))
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'dataformat',
        self._dataset.value_by_name(0, 'dsodatatype', ''))
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'projinfo',
        self._dataset.value_by_name(0, 'dso_prj_project', ''))
    # sync_dict_list, 'createrorganize'  # left empty
    # sync_dict_list, 'submitorganize'  # left empty
    # sync_dict_list, 'copyrightorgnize'  # left empty
    # sync_dict_list, 'supplyorganize'  # left empty
    self.add_value_to_sync_dict_list(sync_dict_list, 'metafilename',
                                     '{0}_21at.xml'.format(object_name))
    # sync_dict_list, 'networksize'  # left empty
    # sync_dict_list, 'zonetype'  # left empty
    # sync_dict_list, 'centerline'  # left empty
    # sync_dict_list, 'zoneno'  # left empty
    # sync_dict_list, 'coordinateunit'  # left empty
    # sync_dict_list, 'demname'  # left empty
    # sync_dict_list, 'demstandard'  # left empty
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'dsometadatajson',
        self._dataset.value_by_name(0, 'dsometadataxml_bus', ''))
    # Fields maintained by the plugin itself:
    self.add_value_to_sync_dict_list(sync_dict_list, 'datacount', 1)
    # sync_dict_list, 'secrecylevel'  # left empty
    # sync_dict['regioncode']  # left empty
    # sync_dict['regionname']  # left empty
    # sync_dict_list, 'resolution'  # left empty
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'imagedate',
        CUtils.to_day_format(dso_time_json.xpath_one('time', ''),
                             dso_time_json.xpath_one('time', '')))
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'begdate',
        CUtils.to_day_format(dso_time_json.xpath_one('start_time', ''),
                             dso_time_json.xpath_one('start_time', '')))
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'enddate',
        CUtils.to_day_format(dso_time_json.xpath_one('end_time', ''),
                             dso_time_json.xpath_one('end_time', '')))
    return sync_dict_list
def execute(self) -> str:
    """Check every watched inbound storage and start due scans.

    For each inbound storage with watching enabled and a finished scan
    status: start a scan immediately when it has never been scanned;
    otherwise compare the configured scan period (minute/hour/day/week/
    month/year from dstwatchoption json) against the last scan time and
    start a scan when the period has elapsed.  Per-storage errors are
    written back to dm2_storage.dstscanmemo and do not stop the loop.

    :return: merged CResult string
    """
    inbound_storage_list = CFactory().give_me_db(self.get_mission_db_id()).all_row(
        '''
        select dstid, dsttitle, dstwatchoption, dstscanlasttime
        from dm2_storage
        where dstwatch = {0} and dsttype = '{1}' and dstscanstatus = {2}
        '''.format(self.DB_True, self.Storage_Type_InBound, self.ProcStatus_Finished)
    )
    if inbound_storage_list.is_empty():
        return CResult.merge_result(CResult.Success, '本次没有需要检查的定时任务!')

    # period-constant -> (sql interval unit, default period length).
    # The original code repeated the same query six times, once per unit.
    period_units = (
        (self.Scan_Period_Minute, 'minute', 15),
        (self.Scan_Period_Hour, 'hour', 1),
        (self.Scan_Period_Day, 'day', 1),
        (self.Scan_Period_Week, 'week', 1),
        (self.Scan_Period_Month, 'month', 1),
        (self.Scan_Period_Year, 'year', 1),
    )
    for data_index in range(inbound_storage_list.size()):
        storage_id = inbound_storage_list.value_by_name(data_index, 'dstid', '')
        storage_title = inbound_storage_list.value_by_name(data_index, 'dsttitle', '')
        storage_scan_last_time = inbound_storage_list.value_by_name(
            data_index, 'dstscanlasttime', None)
        storage_scan_option = CUtils.any_2_str(
            inbound_storage_list.value_by_name(data_index, 'dstwatchoption', None))
        CLogger().debug('正在检查存储[{0}]的定时器, 分析目前是否需要启动扫描...'.format(storage_title))
        try:
            # Never scanned before: start immediately.
            if CUtils.equal_ignore_case(storage_scan_last_time, ''):
                self.start_storage_scan_immediately(storage_id)
                continue
            # Scan period from the watch option json; hour is the default.
            json_storage_scan_option = CJson()
            json_storage_scan_option.load_json_text(storage_scan_option)
            storage_scan_period_type = json_storage_scan_option.xpath_one(
                self.Name_Period, self.Scan_Period_Hour
            )
            # Resolve the interval unit; unknown types fall back to year,
            # matching the original else-branch.
            period_name, unit, default_period = period_units[-1]
            for candidate, candidate_unit, candidate_default in period_units:
                if CUtils.equal_ignore_case(storage_scan_period_type, candidate):
                    period_name = candidate
                    unit = candidate_unit
                    default_period = candidate_default
                    break
            storage_scan_period = json_storage_scan_option.xpath_one(
                period_name, default_period)
            # over_time >= 0 means the period has fully elapsed.
            over_time = CFactory().give_me_db(self.get_mission_db_id()).one_value(
                '''
                select trunc(extract(epoch FROM ((now()::timestamp + '-{0} {1}') -
                '{2}'::timestamp))::numeric) as over_time
                '''.format(storage_scan_period, unit, storage_scan_last_time)
            )
            if over_time >= 0:
                CLogger().debug('存储[{0}]的定时器将启动...'.format(storage_title))
                self.start_storage_scan_immediately(storage_id)
        except Exception as error:
            # Record the failure on the storage row and continue the loop.
            CFactory().give_me_db(self.get_mission_db_id()).execute(
                '''
                update dm2_storage set dstlastmodifytime=now() ,
                dstscanmemo=:message where dstid = :storage_id
                ''',
                {'storage_id': storage_id,
                 'message': '系统分析定时扫描条件过程中发现错误, 详细信息为: {0}!'.format(error.__str__())}
            )
            continue
    return CResult.merge_result(self.Success, '本次分析定时扫描任务成功结束!')
def _do_access(self) -> str:
    """
    Analyse whether this object can be accessed (released to the downstream
    sub-system) based on its quality-check (QA) results.

    Evidence is combined from three sources:
      1. the overall file/image QA summary (``dso_quality_summary`` JSON),
      2. the per-item business-metadata QA results (``self._quality_info`` XML),
      3. a database-level check via ``self.db_access_check``.

    :return: a ``CResult`` string whose message describes the analysis and
        which carries an extra info entry ``Name_Access`` holding one of
        ``DataAccess_Pass`` / ``DataAccess_Wait`` / ``DataAccess_Forbid``.
        Any unexpected exception yields ``DataAccess_Forbid``.
    """
    try:
        quality_info_xml = self._quality_info  # per-item QA results as XML
        quality_summary = self._dataset.value_by_name(0, 'dso_quality_summary', '')
        quality_summary_json = CJson()
        quality_summary_json.load_obj(quality_summary)
        # Wait flag: set once any check item asks the object to wait.
        access_wait_flag = self.DB_False
        # Forbid flag: set once any check item forbids access.
        # NOTE(review): the `if access_forbid_flag:` test below relies on
        # DB_False being falsy and DB_True truthy - confirm their values.
        access_forbid_flag = self.DB_False
        message = ''
        # --- file / image QA summary part ---
        file_qa = quality_summary_json.xpath_one('total', '')
        image_qa = quality_summary_json.xpath_one('metadata.data', '')
        if CUtils.equal_ignore_case(file_qa, self.QA_Result_Error) \
                or CUtils.equal_ignore_case(image_qa, self.QA_Result_Error):
            # any hard QA error forbids access
            message = message + '[数据与其相关文件的质检存在error!请进行修正!]'
            access_forbid_flag = self.DB_True
        elif CUtils.equal_ignore_case(file_qa, self.QA_Result_Warn) \
                or CUtils.equal_ignore_case(image_qa, self.QA_Result_Warn):
            # warnings only postpone access
            message = message + '[数据与其相关文件的质检存在warn!请进行检查!]'
            access_wait_flag = self.DB_True
        elif CUtils.equal_ignore_case(quality_summary, ''):
            # empty summary: QA never ran at all, treat as forbidden
            message = message + '[数据质检未进行,可能数据存在问题!请进行检查!]'
            access_forbid_flag = self.DB_True
        else:
            pass
        # --- business-metadata QA part: walk the predefined check list ---
        for qa_name, qa_id in self.access_check_dict().items():
            # locate the QA item node for this check id in the QA XML
            qa_node = quality_info_xml.xpath_one("//item[@id='{0}']".format(qa_id))
            if qa_node is not None:
                # read this item's QA verdict from the node attribute
                node_result = CXml.get_attr(qa_node, self.Name_Result, '', False)
                if CUtils.equal_ignore_case(node_result, self.QA_Result_Pass):
                    pass
                elif CUtils.equal_ignore_case(node_result, self.QA_Result_Warn):
                    # warning -> wait
                    message = message + '[业务元数据的质检中,项目{0}不符合要求,建议修正!]'.format(qa_name)
                    access_wait_flag = self.DB_True
                else:
                    # error, empty result or any other value -> forbid
                    message = message + '[业务元数据的质检中,项目{0}不符合要求,必须修改后方可入库!]'.format(qa_name)
                    access_forbid_flag = self.DB_True
            else:
                # required check item missing entirely -> forbid
                message = message + '[业务元数据的质检中,没有项目{0},请进行修正!]'.format(qa_name)
                access_forbid_flag = self.DB_True
        # --- database part: may further raise either flag and extend message ---
        access_wait_flag, access_forbid_flag, message = \
            self.db_access_check(access_wait_flag, access_forbid_flag, message)
        # fold the flags into the final verdict; forbid outranks wait
        access_flag = self.DataAccess_Pass
        if access_forbid_flag:
            access_flag = self.DataAccess_Forbid
        elif access_wait_flag:
            access_flag = self.DataAccess_Wait
        if CUtils.equal_ignore_case(message, ''):
            message = '模块可以进行访问!'
        result = CResult.merge_result(
            self.Success,
            '模块[{0}.{1}]对对象[{2}]的访问能力已经分析完毕!分析结果为:{3}'.format(
                CUtils.dict_value_by_name(self.information(), self.Name_ID, ''),
                CUtils.dict_value_by_name(self.information(), self.Name_Title, ''),
                self._obj_name,
                message
            )
        )
        return CResult.merge_result_info(result, self.Name_Access, access_flag)
    except Exception as error:
        result = CResult.merge_result(
            self.Failure,
            '模块[{0}.{1}]对对象[{2}]的访问能力的分析存在异常!详细情况: {3}!'.format(
                CUtils.dict_value_by_name(self.information(), self.Name_ID, ''),
                CUtils.dict_value_by_name(self.information(), self.Name_Title, ''),
                self._obj_name,
                error.__str__()
            )
        )
        # on failure the safe default is to forbid access
        return CResult.merge_result_info(result, self.Name_Access, self.DataAccess_Forbid)
def get_sync_predefined_dict_list(self, insert_or_updata) -> list:
    """
    Build the common (plugin-independent) part of the field/value list used to
    synchronise this object into the product table.

    Strong convention: every field is registered through
    ``add_value_to_sync_dict_list(list, field_name, field_value, special_flag)``.

    Fields deliberately left to plugins: datacount (data amount),
    secrecylevel (security level), regioncode / regionname (admin region),
    resolution, colormodel, piexldepth (pixel depth), scale (scale
    denominator), mainrssource (main satellite source).

    :param insert_or_updata: -1 configures an insert, 0 an update; any truthy
        value adds the insert-only columns.
    :return: list of sync field dicts.
    """
    sync_dict_list = list()
    object_table_id = self._obj_id  # object id (oid)
    object_table_data = self._dataset
    self.add_value_to_sync_dict_list(sync_dict_list, 'aprid', object_table_id)
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'productname',
        object_table_data.value_by_name(0, 'dsoobjectname', ''))
    self.add_value_to_sync_dict_list(sync_dict_list, 'producttype', self._obj_type_code)
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'dsodatatype',
        object_table_data.value_by_name(0, 'dsodatatype', ''))
    # time information of the object, stored as a json document
    dso_time = object_table_data.value_by_name(0, 'dso_time', '')
    dso_time_json = CJson()
    dso_time_json.load_obj(dso_time)
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'begdate', dso_time_json.xpath_one('start_time', ''))
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'enddate', dso_time_json.xpath_one('end_time', ''))
    # Hoisted: the formatted image day was previously recomputed (with two
    # xpath lookups each time) for imagedate, producetime and imagedatetag.
    dso_time_value = dso_time_json.xpath_one('time', '')
    image_day = CUtils.to_day_format(dso_time_value, dso_time_value)
    self.add_value_to_sync_dict_list(sync_dict_list, 'imagedate', image_day)
    # datacount / secrecylevel / regioncode / regionname: handled by plugins
    # sub-query: centre point x of the wgs84 geometry
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'centerx',
        '''
        (select st_x(st_centroid(
            (select dso_geo_wgs84 from dm2_storage_object where dsoid='{0}')
        ))::decimal(8, 2))
        '''.format(object_table_id),
        self.DataValueType_SQL)
    # sub-query: centre point y of the wgs84 geometry
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'centery',
        '''
        (select st_y(st_centroid(
            (select dso_geo_wgs84 from dm2_storage_object where dsoid='{0}')
        ))::decimal(8, 2))
        '''.format(object_table_id),
        self.DataValueType_SQL)
    # sub-query: wkt text of the wgs84 geometry
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'geomwkt',
        '''
        st_astext(
            (select dso_geo_wgs84 from dm2_storage_object where dsoid='{0}')
        )
        '''.format(object_table_id),
        self.DataValueType_SQL)
    # sub-query: the wgs84 geometry object itself
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'geomobj',
        '''
        (select dso_geo_wgs84 from dm2_storage_object where dsoid='{0}')
        '''.format(object_table_id),
        self.DataValueType_SQL)
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'browserimg',
        object_table_data.value_by_name(0, 'dso_browser', ''))
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'thumbimg',
        object_table_data.value_by_name(0, 'dso_thumb', ''))
    self.add_value_to_sync_dict_list(sync_dict_list, 'producetime', image_day)
    # resolution: handled by plugins
    # sub-query: total size of all object files, in MB (bytes / 1048576)
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'imgsize',
        '''
        (select round((sum(dodfilesize)/1048576),2) from dm2_storage_obj_detail
            where dodobjectid='{0}')
        '''.format(object_table_id),
        self.DataValueType_SQL)
    # colormodel / piexldepth: handled by plugins
    if insert_or_updata:
        # Insert-only columns.
        # NOTE(review): the exact extent of this insert-only branch was
        # ambiguous in the original formatting - confirm which of the
        # following columns really are insert-only.
        self.add_value_to_sync_dict_list(sync_dict_list, 'isdel', '0')
        # Fixed: '%F %T' are glibc-only shorthands; use the portable
        # equivalent '%Y-%m-%d %H:%M:%S' (identical output on POSIX).
        now_time = CUtils.any_2_str(
            datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
        self.add_value_to_sync_dict_list(sync_dict_list, 'addtime', now_time)
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'extent',
            "(select dso_geo_bb_wgs84 from dm2_storage_object where dsoid='{0}')"
            .format(object_table_id),
            self.DataValueType_SQL)
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'proj',
            object_table_data.value_by_name(0, 'dso_prj_wkt', ''))
        # remark: empty for now
        # ispublishservice: empty for now
        self.add_value_to_sync_dict_list(sync_dict_list, 'queryable', '0')
    # scale / mainrssource: handled by plugins
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'dsdid',
        object_table_data.value_by_name(0, 'query_directory_id', ''))
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'dsfid',
        object_table_data.value_by_name(0, 'query_file_id', ''))
    self.add_value_to_sync_dict_list(
        sync_dict_list, 'imagedatetag',
        self.transform_time_to_imagedatetag(image_day))
    return sync_dict_list
def process_mission(self, dataset):
    """
    Analyse one storage object's data-access capability against every
    data-access module plugin found on disk, then persist the combined
    per-module verdicts (as JSON) via ``self.update_status``.

    :param dataset: a record set whose row 0 describes the object
        (``dsoid``, ``dsodatatype``, ``dsoobjecttype``, ``dsoobjectname``,
        ``dso_da_result`` json, ``dso_quality`` xml, ``retry_times``,
        ``last_process_memo``).
    :return: a ``CResult`` string; failure when the retry budget is exhausted
        or an unexpected error occurs, success otherwise.
    """
    dso_id = dataset.value_by_name(0, 'dsoid', '')
    dso_data_type = dataset.value_by_name(0, 'dsodatatype', '')
    dso_object_type = dataset.value_by_name(0, 'dsoobjecttype', '')
    dso_object_name = dataset.value_by_name(0, 'dsoobjectname', '')
    dso_object_da_content = CUtils.any_2_str(dataset.value_by_name(0, 'dso_da_result', ''))
    dso_object_quality = dataset.value_by_name(0, 'dso_quality', '')
    ds_retry_times = dataset.value_by_name(0, 'retry_times', 0)
    # Retry budget exhausted: mark the job as an error needing manual repair.
    if ds_retry_times >= self.abnormal_job_retry_times():
        ds_last_process_memo = CUtils.any_2_str(dataset.value_by_name(0, 'last_process_memo', None))
        process_result = CResult.merge_result(
            self.Failure,
            '{0}, \n系统已经重试{1}次, 仍然未能解决, 请人工检查修正后重试!'.format(
                ds_last_process_memo, ds_retry_times
            )
        )
        self.update_status(dso_id, None, process_result, self.ProcStatus_Error)
        return process_result
    # quality xml and previous data-access result json of this object
    dso_quality = CXml()
    dso_quality.load_xml(dso_object_quality)
    dso_da_json = CJson()
    dso_da_json.load_json_text(dso_object_da_content)
    CLogger().debug(
        '开始处理对象: {0}.{1}.{2}.{3}对各个子系统的支撑能力'.format(dso_id, dso_data_type,
                                                                 dso_object_type, dso_object_name))
    try:
        # discover the data-access module plugin files on disk
        modules_root_dir = CSys.get_metadata_data_access_modules_root_dir()
        modules_file_list = CFile.file_or_subpath_of_path(
            modules_root_dir,
            '{0}_*.{1}'.format(self.Name_Module, self.FileExt_Py)
        )
        for file_name_without_path in modules_file_list:
            file_main_name = CFile.file_main_name(file_name_without_path)
            # If this module's verdict was audited manually (Name_User),
            # keep it: do not recalculate or overwrite it here.
            module_access = dso_da_json.xpath_one(
                '{0}.{1}'.format(file_main_name, self.Name_Audit),
                self.Name_System
            )
            if CUtils.equal_ignore_case(module_access, self.Name_User):
                continue
            try:
                # instantiate the plugin and ask it for an access verdict
                module_obj = CObject.create_module_instance(
                    CSys.get_metadata_data_access_modules_root_name(),
                    file_main_name,
                    self.get_mission_db_id()
                )
                module_title = CUtils.dict_value_by_name(module_obj.information(), self.Name_Title, '')
                result = CUtils.any_2_str(module_obj.access(dso_id, dso_object_name,
                                                            dso_data_type, dso_quality)
                                          )
                if CResult.result_success(result):
                    # extract the verdict carried in the result's info entry
                    module_access = CResult.result_info(result, self.Name_Access, self.DataAccess_Forbid)
                else:
                    # a failed module is ignored and recorded as Unknown
                    CLogger().debug('模块[{0}]解析出现错误, 系统将忽略本模块, 继续处理下一个!'.format(file_main_name))
                    module_access = self.DataAccess_Unknown
                module_access_message = CResult.result_message(result)
                # note: module_obj is re-bound here from the plugin instance
                # to the result dict stored under the module's name
                module_obj = {self.Name_Audit: self.Name_System,
                              self.Name_Result: module_access,
                              self.Name_Title: module_title,
                              self.Name_Message: module_access_message}
                dso_da_json.set_value_of_name(file_main_name, module_obj)
            except Exception as error:
                # plugin raised: record an Unknown verdict and keep going
                CLogger().debug('模块[{0}]解析出现异常, 原因为[{1}], 请检查!'.format(file_main_name,
                                                                                  error.__str__()))
                module_access = self.DataAccess_Unknown
                module_access_message = '模块[{0}]解析出现异常, 原因为[{1}], 请检查!'.format(file_main_name,
                                                                                         error.__str__())
                module_obj = {self.Name_Audit: self.Name_System,
                              self.Name_Result: module_access,
                              self.Name_Title: file_main_name,
                              self.Name_Message: module_access_message}
                dso_da_json.set_value_of_name(file_main_name, module_obj)
        process_result = CResult.merge_result(
            self.Success,
            '对象[{0}.{1}]访问权限解析成功!'.format(dso_id, dso_object_name)
        )
        # persist the accumulated per-module verdicts
        self.update_status(
            dso_id,
            dso_da_json.to_json(),
            process_result
        )
        return process_result
    except Exception as error:
        process_result = CResult.merge_result(
            self.Failure,
            '对象[{0}.{1}]访问权限解析出错, 原因为[{2}]!'.format(dso_id, dso_object_name, error.__str__())
        )
        self.update_status(dso_id, None, process_result)
        return process_result