def test_get_attr(self):
    json = CJson()
    json.load_json_text(self.test_text)
    assert json.xpath_one('a', -1) == 1
    assert json.xpath_one('aa', -1) == -1
    assert json.xpath_one('student[0].name', '') == '小明'
    assert json.xpath_one('student[1].name', '') == '小王'
def merge_result(cls, result, message=None, base=None) -> str:
    new_result = CJson()
    if base is not None:
        new_result.load_json_text(base)
    new_result.set_value_of_name(cls.Name_Result, result)
    if message is not None:
        new_result.set_value_of_name(cls.Name_Message, message)
    return new_result.to_json()
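
# --- Illustrative sketch only (not project code) ---
# Shows the envelope shape merge_result is assumed to build with CJson. The actual key
# names come from cls.Name_Result / cls.Name_Message, which are project constants;
# "result" and "message" below are placeholders, and the standard json module stands in
# for CJson purely for illustration.
import json


def merge_result_sketch(result, message=None, base=None) -> str:
    envelope = json.loads(base) if base else {}
    envelope['result'] = result          # assumed key; the real name is cls.Name_Result
    if message is not None:
        envelope['message'] = message    # assumed key; the real name is cls.Name_Message
    return json.dumps(envelope, ensure_ascii=False)


# merge_result_sketch(-1, 'failed') -> '{"result": -1, "message": "failed"}'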
def test_load_text(self):
    json = CJson()
    json.load_json_text(self.test_text)
    assert True
def process_mission(self, dataset):
    dso_id = dataset.value_by_name(0, 'dsoid', '')
    dso_data_type = dataset.value_by_name(0, 'dsodatatype', '')
    dso_object_type = dataset.value_by_name(0, 'dsoobjecttype', '')
    dso_object_name = dataset.value_by_name(0, 'dsoobjectname', '')
    dso_object_da_content = CUtils.any_2_str(dataset.value_by_name(0, 'dso_da_result', ''))
    dso_object_quality = dataset.value_by_name(0, 'dso_quality', '')
    ds_retry_times = dataset.value_by_name(0, 'retry_times', 0)
    if ds_retry_times >= self.abnormal_job_retry_times():
        ds_last_process_memo = CUtils.any_2_str(dataset.value_by_name(0, 'last_process_memo', None))
        process_result = CResult.merge_result(
            self.Failure,
            '{0}, \n系统已经重试{1}次, 仍然未能解决, 请人工检查修正后重试!'.format(
                ds_last_process_memo, ds_retry_times
            )
        )
        self.update_status(dso_id, None, process_result, self.ProcStatus_Error)
        return process_result

    dso_quality = CXml()
    dso_quality.load_xml(dso_object_quality)
    dso_da_json = CJson()
    dso_da_json.load_json_text(dso_object_da_content)
    CLogger().debug(
        '开始处理对象: {0}.{1}.{2}.{3}对各个子系统的支撑能力'.format(
            dso_id, dso_data_type, dso_object_type, dso_object_name))
    try:
        modules_root_dir = CSys.get_metadata_data_access_modules_root_dir()
        modules_file_list = CFile.file_or_subpath_of_path(
            modules_root_dir,
            '{0}_*.{1}'.format(self.Name_Module, self.FileExt_Py)
        )
        # Evaluate the object's access level against every data-access module found on disk.
        for file_name_without_path in modules_file_list:
            file_main_name = CFile.file_main_name(file_name_without_path)
            # Check whether this module's access decision was approved manually;
            # if so, do not recalculate or overwrite it here.
            module_access = dso_da_json.xpath_one(
                '{0}.{1}'.format(file_main_name, self.Name_Audit),
                self.Name_System
            )
            if CUtils.equal_ignore_case(module_access, self.Name_User):
                continue

            try:
                module_obj = CObject.create_module_instance(
                    CSys.get_metadata_data_access_modules_root_name(),
                    file_main_name,
                    self.get_mission_db_id()
                )
                module_title = CUtils.dict_value_by_name(module_obj.information(), self.Name_Title, '')
                result = CUtils.any_2_str(
                    module_obj.access(dso_id, dso_object_name, dso_data_type, dso_quality)
                )
                if CResult.result_success(result):
                    module_access = CResult.result_info(result, self.Name_Access, self.DataAccess_Forbid)
                else:
                    CLogger().debug('模块[{0}]解析出现错误, 系统将忽略本模块, 继续处理下一个!'.format(file_main_name))
                    module_access = self.DataAccess_Unknown
                module_access_message = CResult.result_message(result)

                module_obj = {
                    self.Name_Audit: self.Name_System,
                    self.Name_Result: module_access,
                    self.Name_Title: module_title,
                    self.Name_Message: module_access_message
                }
                dso_da_json.set_value_of_name(file_main_name, module_obj)
            except Exception as error:
                CLogger().debug('模块[{0}]解析出现异常, 原因为[{1}], 请检查!'.format(file_main_name, error.__str__()))
                module_access = self.DataAccess_Unknown
                module_access_message = '模块[{0}]解析出现异常, 原因为[{1}], 请检查!'.format(file_main_name, error.__str__())
                module_obj = {
                    self.Name_Audit: self.Name_System,
                    self.Name_Result: module_access,
                    self.Name_Title: file_main_name,
                    self.Name_Message: module_access_message
                }
                dso_da_json.set_value_of_name(file_main_name, module_obj)

        process_result = CResult.merge_result(
            self.Success,
            '对象[{0}.{1}]访问权限解析成功!'.format(dso_id, dso_object_name)
        )
        self.update_status(
            dso_id,
            dso_da_json.to_json(),
            process_result
        )
        return process_result
    except Exception as error:
        process_result = CResult.merge_result(
            self.Failure,
            '对象[{0}.{1}]访问权限解析出错, 原因为[{2}]!'.format(dso_id, dso_object_name, error.__str__())
        )
        self.update_status(dso_id, None, process_result)
        return process_result
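
# --- Illustrative sketch only (not project code) ---
# Shows the per-module entry that the loop above stores under dso_da_result. The key
# names "audit", "result", "title" and "message" stand in for self.Name_Audit,
# Name_Result, Name_Title and Name_Message, whose real values are project constants;
# the module name and values are examples, not taken from the project.
import json

da_result = {}
da_result['module_data2service'] = {   # key = the module file's main name (example)
    'audit': 'system',    # 'user' would mean a manual approval that must not be overwritten
    'result': 'pass',     # access decision returned by module_obj.access(...)
    'title': 'Data-to-service module',
    'message': '',
}
print(json.dumps(da_result, ensure_ascii=False, indent=2))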
def process_mission(self, dataset) -> str:
    """
    :param dataset:
    :return:
    """
    ds_storage_id = dataset.value_by_name(0, 'query_storage_id', '')
    ds_storage_title = dataset.value_by_name(0, 'query_storage_title', '')
    ds_ib_id = dataset.value_by_name(0, 'query_ib_id', '')
    ds_ib_directory_name = dataset.value_by_name(0, 'query_ib_relation_dir', '')
    ds_ib_batch_no = dataset.value_by_name(0, 'query_ib_batchno', '')
    ds_ib_option = CUtils.any_2_str(dataset.value_by_name(0, 'query_ib_option', ''))
    CLogger().debug('与第三方模块同步的目录为: {0}.{1}'.format(ds_ib_id, ds_ib_directory_name))

    data_count = 0
    try:
        module_name_list = CJson.json_attr_value(ds_ib_option, self.Path_IB_Opt_Notify_module, None)
        if module_name_list is None:
            modules_root_dir = CSys.get_metadata_data_access_modules_root_dir()
            module_file_list = CFile.file_or_subpath_of_path(
                modules_root_dir,
                '{0}_*.{1}'.format(self.Name_Module, self.FileExt_Py)
            )
            module_name_list = list()
            for module_file in module_file_list:
                module_name_list.append(CFile.file_main_name(module_file))

        sql_ib_need_notify_object = '''
            select dsoid, dsoobjecttype, dsoobjectname, dso_da_result
            from dm2_storage_object
            where dso_ib_id = :ib_id
        '''
        dataset = CFactory().give_me_db(self.get_mission_db_id()).all_row(
            sql_ib_need_notify_object, {'ib_id': ds_ib_id})
        if dataset.is_empty():
            result = CResult.merge_result(
                self.Success,
                '存储[{0}]下, 批次为[{1}]的目录[{2}]下无任何对象, 不再通知给第三方应用!'.format(
                    ds_storage_title, ds_ib_batch_no, ds_ib_directory_name))
            self.update_notify_result(ds_ib_id, result)
            return result

        CLogger().debug(
            '存储[{0}]下, 批次为[{1}]的目录[{2}]下有[{3}]个对象等待通知给第三方应用!'.format(
                ds_storage_title, ds_ib_batch_no, ds_ib_directory_name, dataset.size()))
        data_count = dataset.size()
        error_message = ''
        for data_index in range(data_count):
            record_object = dataset.record(data_index)
            object_id = CUtils.dict_value_by_name(record_object, 'dsoid', '')
            object_type = CUtils.dict_value_by_name(record_object, 'dsoobjecttype', '')
            object_name = CUtils.dict_value_by_name(record_object, 'dsoobjectname', '')
            object_da_result_text = CUtils.any_2_str(
                CUtils.dict_value_by_name(record_object, 'dso_da_result', ''))
            object_da_result = CJson()
            object_da_result.load_json_text(object_da_result_text)

            for module_name in module_name_list:
                module_obj = CObject.create_module_instance(
                    CSys.get_metadata_data_access_modules_root_name(),
                    module_name,
                    self.get_mission_db_id())
                module_id = module_name
                module_title = CUtils.dict_value_by_name(module_obj.information(), self.Name_Title, '')
                module_enable = CUtils.dict_value_by_name(module_obj.information(), self.Name_Enable, True)
                if not module_enable:
                    continue

                module_access = object_da_result.xpath_one(
                    '{0}.{1}'.format(module_id, self.Name_Result), self.DataAccess_Forbid)
                module_access_memo = object_da_result.xpath_one(
                    '{0}.{1}'.format(module_id, self.Name_Message), '')
                CLogger().debug(
                    '存储[{0}]下, 批次为[{1}]的目录[{2}]下的对象[{3}], 与模块[{4}]的访问权限为[{5}]!'.format(
                        ds_storage_title, ds_ib_batch_no, ds_ib_directory_name,
                        object_name, module_title, module_access))

                # todo(王西亚) Decide carefully whether to enable this filter, and whether it
                #   should let through only 'pass', or both 'pass' and 'wait'.
                # if not \
                #         (
                #                 CUtils.equal_ignore_case(module_access, self.DataAccess_Pass)
                #                 or CUtils.equal_ignore_case(module_access, self.DataAccess_Wait)
                #         ):
                #     continue

                result = module_obj.notify_object(
                    ds_ib_id, module_access, module_access_memo,
                    object_id, object_name, object_type, None)
                if not CResult.result_success(result):
                    message = CResult.result_message(result)
                    CLogger().debug(
                        '存储[{0}]下, 批次为[{1}]的目录[{2}]下的对象[{3}], 与模块[{4}]的通知处理结果出现错误, 详细情况: [{5}]!'.format(
                            ds_storage_title, ds_ib_batch_no, ds_ib_directory_name,
                            object_name, module_title, message))
                    error_message = CUtils.str_append(error_message, message)

        if CUtils.equal_ignore_case(error_message, ''):
            result = CResult.merge_result(
                self.Success,
                '存储[{0}]下, 批次为[{1}]的目录[{2}]下有[{3}]个对象成功通知给第三方应用!'.format(
                    ds_storage_title, ds_ib_batch_no, ds_ib_directory_name, data_count))
            self.update_notify_result(ds_ib_id, result)
            return result
        else:
            result = CResult.merge_result(
                self.Failure,
                '存储[{0}]下, 批次为[{1}]的目录[{2}]下有[{3}]个对象在通知给第三方应用时, 部分出现错误! 错误信息如下: \n{4}'.format(
                    ds_storage_title, ds_ib_batch_no, ds_ib_directory_name, data_count, error_message))
            self.update_notify_result(ds_ib_id, result)
            return result
    except Exception as error:
        result = CResult.merge_result(
            self.Failure,
            '存储[{0}]下, 批次为[{1}]的目录[{2}]下有[{3}]个对象通知给第三方应用时出现异常! 错误原因为: {4}!'.format(
                ds_storage_title, ds_ib_batch_no, ds_ib_directory_name, data_count, error.__str__()))
        self.update_notify_result(ds_ib_id, result)
        return result
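
# --- Illustrative sketch only (not project code) ---
# Shows how the notify step above is assumed to read a module's access decision back out
# of dso_da_result, mirroring object_da_result.xpath_one('{module}.{result}', default).
# xpath_one_sketch is a standalone stand-in for CJson.xpath_one, and the key names
# "result" / "message" are placeholders for self.Name_Result / self.Name_Message.
import json


def xpath_one_sketch(obj: dict, path: str, default):
    node = obj
    for part in path.split('.'):   # walk the dotted path one key at a time
        if not isinstance(node, dict) or part not in node:
            return default
        node = node[part]
    return node


da_result = json.loads('{"module_data2service": {"result": "pass", "message": ""}}')
access = xpath_one_sketch(da_result, 'module_data2service.result', 'forbid')
memo = xpath_one_sketch(da_result, 'module_data2service.message', '')
print(access, memo)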
def process_mission(self, dataset) -> str:
    """
    The detailed algorithm is complex; see the [##### 服务发布调度] section of readme.md.
    todo(张雄雄) start developing the service publishing framework
    :param dataset:
    :return:
    """
    deploy_id = dataset.value_by_name(0, 'dpid', '')
    deploy_s_title = dataset.value_by_name(0, 'dptitle', '')
    deploy_s_name = dataset.value_by_name(0, 'dpname', '')
    CLogger().debug('即将发布服务为: {0}.{1}.{2}'.format(deploy_id, deploy_s_name, deploy_s_title))
    try:
        # Load the service settings
        serdef = ServiceDef(deploy_s_name, deploy_s_title)
        service_params = dataset.value_by_name(0, 'dpserviceparams', '')
        if service_params == '' or service_params is None:
            raise Exception("缺少服务参数")
        service_params_json = CJson()
        service_params_json.load_json_text(service_params)
        serdef.coordinates = service_params_json.xpath_one("coordinates", ['EPSG:4326'])
        serdef.cache = service_params_json.xpath_one("cache", False)
        serdef.cachelevel = service_params_json.xpath_one("cachelevel", 16)

        # Load the related layer settings
        factory = CFactory()
        db = factory.give_me_db(self.get_mission_db_id())
        layer_rows = db.all_row(
            "select dpid,dplayer_id,dplayer_name,dplayer_datatype,dplayer_queryable,"
            "dplayer_resultfields,dplayer_style from dp_v_qfg_layer where dpservice_id = '{0}'"
            .format(deploy_id))
        # for row in layer_rows:
        for i in range(layer_rows.size()):
            row = layer_rows.record(i)
            ser_lyrdef = LayerDef()
            ser_lyrdef.id = row[1]
            ser_lyrdef.name = row[2]
            ser_lyrdef.type = row[3]
            ser_lyrdef.sourcetype = 'File'
            ser_lyrdef.classidetify = row[6]
            layer_file_rows = db.all_row(
                "select dpdf_group_id,dpdf_object_fullname from dp_v_qfg_layer_file where dpdf_layer_id = '{0}'"
                .format(row[0]))
            # for file_row in layer_file_rows:
            for k in range(layer_file_rows.size()):
                file_row = layer_file_rows.record(k)
                if file_row[0] not in ser_lyrdef.sourcepath:
                    ser_lyrdef.sourcepath[file_row[0]] = []
                ser_lyrdef.sourcepath[file_row[0]].append(file_row[1])
            serdef.layers.append(ser_lyrdef)

        ProcessService(serdef)
        result = CResult.merge_result(
            self.Success,
            '服务: {0}.{1}.{2}发布成功'.format(deploy_id, deploy_s_name, deploy_s_title))
        self.update_deploy_result(deploy_id, result)
        return result
    except Exception as error:
        result = CResult.merge_result(
            self.Failure,
            '服务: {0}.{1}.{2}发布失败, 错误原因为: {3}'.format(
                deploy_id, deploy_s_name, deploy_s_title, error.__str__()))
        self.update_deploy_result(deploy_id, result)
        return result
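
# --- Illustrative sketch only (not project code) ---
# Shows the dpserviceparams JSON shape that the process_mission above expects. The field
# names match the xpath_one calls; the concrete values here are examples only.
import json

service_params_example = json.dumps({
    'coordinates': ['EPSG:4326', 'EPSG:3857'],  # defaults to ['EPSG:4326'] when absent
    'cache': True,                              # defaults to False when absent
    'cachelevel': 16,                           # defaults to 16 when absent
})
print(json.loads(service_params_example)['cachelevel'])  # 16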
def ReplaceSameTemplate(service_def) -> tuple:
    '''Create the service configuration files, including the mapfile, the mapproxy yaml file
    and the wsgi file.
    :return: service extent in geographic coordinates, all mapproxy cache names, wsgi file path
    '''
    # Compare spatial references; if data within one layer use different ones, raise an error.
    '''
    for lyr_def in service_def.layers:
        geo_data_paths = GetAllSources(service_def, lyr_def, False)
        if not geoUtils.CompareCoords(geo_data_paths):
            CLogger().info("geo datas in layer {0} have different spatialreference".format(lyr_def.id))
            raise Exception("geo datas in layer {0} have different spatialreference".format(lyr_def.id))
        del geo_data_paths
    '''
    # Configuration file paths
    yamlfile = os.path.join(application.xpath_one('data2service.yaml_dir', None), service_def.id + '.yaml')
    wsgifile = os.path.join(application.xpath_one('data2service.wsgi_dir', None), service_def.id + '.wsgi')
    temp_dir = os.path.join(basedir, 'template')
    with open(os.path.join(temp_dir, 'service_yaml_layer.txt'), 'r') as fp:
        service_yaml_lyr = fp.read()
    with open(os.path.join(temp_dir, 'service_yaml_cache.txt'), 'r') as fp:
        service_yaml_cache = fp.read()
    with open(os.path.join(temp_dir, 'service_yaml_source.txt'), 'r') as fp:
        service_yaml_source = fp.read()

    all_prjextent = []
    all_geoextent = []
    all_map_lyr_name = ''
    all_service_yaml_lyr = ''
    all_service_yaml_cache = ''
    all_service_yaml_source = ''
    all_service_yaml_cache_name = ''

    # Process the layers one by one
    for lyr_def in service_def.layers:
        cur_index = 0
        shp_path_list = GetAllSources(service_def, lyr_def, True)
        lyr_geoextent = []
        if len(shp_path_list) < 1:
            CLogger().info("no vector/raster in layer " + lyr_def.id)
            continue

        mapHelper = mapfileHelper.MapfileHelper()
        mapHelper.loads(os.path.join(basedir, "template/service_id.map"))
        mapHelper.setvalue("MAP.NAME", '"map_' + service_def.id + '_' + lyr_def.id + '"')
        # proj4 = geoUtils.GetProj4(shp_path_list[0])
        proj4 = "+proj=longlat +datum=WGS84 +no_defs"
        CLogger().info(proj4)
        proj4 = proj4.replace('+', '')
        map_prj = '\r\n'
        map_unit = 'dd'
        for defs in proj4.split():
            map_prj += '"' + defs + '"\n'
            if defs.startswith('unit') and defs.split('=')[-1] == 'm':
                map_unit = 'meters'
        map_prj += '\r\n'
        mapHelper.setvalue("MAP.UNITS", map_unit)
        mapHelper.setvalue("MAP.PROJECTION", map_prj)
        mapHelper.setvalue("MAP.WEB.METADATA.'WMS_TITLE'", 'map_' + service_def.id)
        ori_lyr = mapHelper.getvalue("MAP.LAYER.0")
        copy_lyr = copy.deepcopy(ori_lyr)

        for shp in shp_path_list:
            shp_prj = '\r\n'
            shp_proj4 = geoUtils.GetProj4(shp)
            shp_proj4 = shp_proj4.replace('+', '')
            for defs in shp_proj4.split():
                shp_prj += '"' + defs + '"\n'
            shp_prj += '\r\n'

            geoextent, prjextent = geoUtils.GetVectorExtent(shp)
            if prjextent == (0, 0, 0, 0):
                CLogger().info(shp + 'is null')
                continue
            geoextent = ExtendEnvlope(geoextent)
            all_prjextent.append(prjextent)
            all_geoextent.append(geoextent)
            lyr_geoextent.append(geoextent)

            # map layer
            if cur_index > 0:
                cur_lyr = copy.deepcopy(copy_lyr)
                mapHelper.setvalue("MAP.LAYER.{0}".format(cur_index), cur_lyr)
            base_name = lyr_def.id + str(cur_index)
            CLogger().info(base_name + '正在准备创建')
            mapHelper.setvalue("MAP.LAYER.{0}.NAME".format(cur_index), u"'{0}'".format(base_name))
            mapHelper.setvalue("MAP.LAYER.{0}.METADATA.'WMS_TITLE'".format(cur_index), u"'{0}'".format(base_name))
            mapHelper.setvalue("MAP.LAYER.{0}.PROJECTION".format(cur_index), shp_prj)
            # print(lyr_def.classidetify)
            if len(lyr_def.classidetify) > 10:
                json = CJson()
                json.load_json_text(lyr_def.classidetify)
                mapHelper.setvalue("MAP.LAYER.{0}.CLASS.0".format(cur_index), json.json_obj)
            if lyr_def.type.lower() == "vector":
                mapHelper.setvalue("MAP.LAYER.{0}.CONNECTION".format(cur_index), u"'{0}'".format(shp))
                mapHelper.setvalue("MAP.LAYER.{0}.CONNECTIONTYPE".format(cur_index), "OGR")
                mapHelper.setvalue("MAP.LAYER.{0}.TYPE".format(cur_index), "POLYGON")
                if lyr_def.sourcetype.lower() not in ["file", "folder"]:
                    mapHelper.setvalue("MAP.LAYER.{0}.DATA".format(cur_index), u"'{0}'".format(os.path.split(shp)[-1]))
            else:
                mapHelper.setvalue("MAP.LAYER.{0}.TILEINDEX".format(cur_index), u"'{0}'".format(shp))
                mapHelper.setvalue("MAP.LAYER.{0}.TYPE".format(cur_index), "RASTER")
            all_map_lyr_name = all_map_lyr_name + base_name + ','
            cur_index += 1

        if len(all_prjextent) < 1:
            continue

        # one layer, one mapfile
        mapfile = os.path.join(application.xpath_one('data2service.map_dir', None),
                               service_def.id + '_' + lyr_def.id + '.map')
        res_prjextent = geoUtils.UnionExt(all_prjextent)
        all_ext_prj = "{0} {1} {2} {3}".format(res_prjextent[0], res_prjextent[2],
                                               res_prjextent[1], res_prjextent[3])
        mapHelper.setvalue("MAP.EXTENT", all_ext_prj)
        mapHelper.dumps(mapfile)
        all_prjextent = []

        # yaml values
        if len(lyr_geoextent) < 1:
            continue
        lyr_cache_name = 'cache_' + lyr_def.id
        cache_source_name = 'source_' + lyr_def.id
        res_geoextent = geoUtils.UnionExt(lyr_geoextent)
        ext_yaml_lyr = "{0},{1},{2},{3}".format(res_geoextent[0], res_geoextent[2],
                                                res_geoextent[1], res_geoextent[3])
        all_service_yaml_cache_name = all_service_yaml_cache_name + lyr_cache_name + ','

        # yaml layer values
        yaml_lyr_values = dict({'$lyr_name$': lyr_def.id, '$lyr_cache$': lyr_cache_name})
        all_service_yaml_lyr = all_service_yaml_lyr + '\r\n' + RepString(service_yaml_lyr, yaml_lyr_values)

        # yaml cache values
        yaml_cache_values = dict({'$cache_source$': cache_source_name,
                                  '$lyr_cache$': lyr_cache_name,
                                  '$grids$': service_def.getGrids()})
        CLogger().info("the service grids are: " + service_def.getGrids())
        all_service_yaml_cache = all_service_yaml_cache + '\r\n' + RepString(service_yaml_cache, yaml_cache_values)

        # yaml source values
        all_map_lyr_name = all_map_lyr_name.rstrip(',')
        yaml_source_values = dict({'$cache_source$': cache_source_name,
                                   '$map_file$': mapfile,
                                   '$lyr_name$': all_map_lyr_name,
                                   '$coverage_bbox$': ext_yaml_lyr})
        all_map_lyr_name = ''
        yaml_source_values['$server_bin$'] = application.xpath_one('data2service.service_yaml.server_bin', None)
        yaml_source_values['$server_dir$'] = application.xpath_one('data2service.service_yaml.server_dir', None)
        all_service_yaml_source = all_service_yaml_source + '\r\n' + RepString(service_yaml_source, yaml_source_values)

    # yaml
    yaml_cache_dir = application.xpath_one('data2service.service_yaml.cache_dir', None).replace('$orderid$', service_def.id)
    if not os.path.exists(yaml_cache_dir):
        os.makedirs(yaml_cache_dir)
    # yaml values
    yaml_values = dict({'$cache_dir$': yaml_cache_dir,
                        '$yaml_layer$': all_service_yaml_lyr,
                        '$yaml_cache$': all_service_yaml_cache,
                        '$yaml_source$': all_service_yaml_source})
    RepFile(os.path.join(basedir, "template/service_id.yaml"), yamlfile, yaml_values)

    # wsgi
    logfile = os.path.join(application.xpath_one('data2service.wsgi_dir', None),
                           'service_' + service_def.id + '.log')
    wsgivalues = dict({'$log_file$': logfile, '$yaml_file$': yamlfile})
    RepFile(os.path.join(basedir, "template/service_id.wsgi"), wsgifile, wsgivalues)

    res_all_geoextent = geoUtils.UnionExt(all_geoextent)
    ext_yaml_lyr = "{0},{1},{2},{3}".format(res_all_geoextent[0], res_all_geoextent[2],
                                            res_all_geoextent[1], res_all_geoextent[3])
    return ext_yaml_lyr, all_service_yaml_cache_name.rstrip(','), wsgifile
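
# --- Illustrative sketch only (not project code) ---
# Shows the '$placeholder$' substitution that RepString/RepFile are assumed to perform on
# the yaml and wsgi templates above. rep_string_sketch is a standalone stand-in, not the
# project helper, and the template text is an invented example.
def rep_string_sketch(template: str, values: dict) -> str:
    # Replace each '$name$' token with its value; order of replacement does not matter here.
    for key, value in values.items():
        template = template.replace(key, str(value))
    return template


yaml_lyr_template = 'layers:\n  - name: $lyr_name$\n    sources: [$lyr_cache$]'
print(rep_string_sketch(yaml_lyr_template,
                        {'$lyr_name$': 'layer_01', '$lyr_cache$': 'cache_layer_01'}))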
class CMetaData(CResource):
    def __init__(self):
        self.__quality__ = CQuality()

        self.__metadata_extract_result__ = self.Not_Support
        self.__metadata_extract_memo__ = ''
        self.__metadata_text__ = None
        self.__metadata_xml__ = CXml()
        self.__metadata_json__ = CJson()
        self.__metadata_type__ = self.MetaDataFormat_Text

        self.__metadata_bus_extract_result__ = self.Not_Support
        self.__metadata_bus_extract_memo__ = ''
        self.__metadata_bus_text__ = None
        self.__metadata_bus_xml__ = CXml()
        self.__metadata_bus_json__ = CJson()
        self.__metadata_bus_type__ = self.MetaDataFormat_Text

        self.__thumb_img_file_name__ = ''
        self.__browse_img_file_name__ = ''
        self.__metadata_view_extract_result__ = self.Not_Support
        self.__metadata_view_extract_memo__ = ''

        self.__time_information__ = CJson()
        self.__metadata_time_extract_result__ = self.Not_Support
        self.__metadata_time_extract_memo__ = ''

        self.__metadata_spatial_extract_result__ = self.Not_Support
        self.__metadata_spatial_extract_memo__ = ''
        self.__metadata_spatial__ = CMDSpatial()

    def metadata_time(self):
        if self.__metadata_time_extract_result__ == self.DB_True:
            return self.__metadata_time_extract_result__, self.__metadata_time_extract_memo__, self.__time_information__.to_json()
        else:
            return self.__metadata_time_extract_result__, self.__metadata_time_extract_memo__, ''

    def metadata_view(self):
        if self.__metadata_view_extract_result__ == self.DB_True:
            return self.__metadata_view_extract_result__, self.__metadata_view_extract_memo__, self.__thumb_img_file_name__, self.__browse_img_file_name__
        else:
            return self.__metadata_view_extract_result__, self.__metadata_view_extract_memo__, '', ''

    def metadata_spatial(self):
        if self.__metadata_spatial_extract_result__ == self.DB_True:
            return self.__metadata_spatial_extract_result__, self.__metadata_spatial_extract_memo__, self.__metadata_spatial__
        else:
            return self.__metadata_spatial_extract_result__, self.__metadata_spatial_extract_memo__, self.__metadata_spatial__

    @property
    def quality(self):
        return self.__quality__

    @property
    def thumb_img_file_name(self):
        return self.__thumb_img_file_name__

    @thumb_img_file_name.setter
    def thumb_img_file_name(self, value):
        self.__thumb_img_file_name__ = value

    @property
    def browse_img_file_name(self):
        return self.__browse_img_file_name__

    @browse_img_file_name.setter
    def browse_img_file_name(self, value):
        self.__browse_img_file_name__ = value

    @property
    def time_information(self) -> CJson:
        return self.__time_information__

    @property
    def metadata_extract_result(self):
        return self.__metadata_extract_result__

    @property
    def metadata_bus_extract_result(self):
        return self.__metadata_bus_extract_result__

    @property
    def metadata_view_extract_result(self):
        return self.__metadata_view_extract_result__

    @property
    def metadata_time_extract_result(self):
        return self.__metadata_time_extract_result__

    @property
    def metadata_spatial_extract_result(self):
        return self.__metadata_spatial_extract_result__

    def metadata(self):
        if self.__metadata_extract_result__ != self.DB_True:
            return self.__metadata_extract_result__, self.__metadata_extract_memo__, self.__metadata_type__, None
        elif self.__metadata_type__ == self.MetaDataFormat_Json:
            return self.__metadata_extract_result__, self.__metadata_extract_memo__, self.__metadata_type__, self.__metadata_json__.to_json()
        elif self.__metadata_type__ == self.MetaDataFormat_XML:
            return self.__metadata_extract_result__, self.__metadata_extract_memo__, self.__metadata_type__, self.__metadata_xml__.to_xml()
        else:
            return self.__metadata_extract_result__, self.__metadata_extract_memo__, self.__metadata_type__, self.__metadata_text__

    @property
    def metadata_type(self):
        return self.__metadata_type__

    @property
    def metadata_bus_type(self):
        return self.__metadata_bus_type__

    def metadata_xml(self) -> CXml:
        return self.__metadata_xml__

    def metadata_json(self) -> CJson:
        return self.__metadata_json__

    def metadata_bus_xml(self) -> CXml:
        return self.__metadata_bus_xml__

    def metadata_bus_json(self) -> CJson:
        return self.__metadata_bus_json__

    def metadata_spatial_obj(self) -> CMDSpatial:
        return self.__metadata_spatial__

    def set_metadata_spatial(self, result: int, memo: str, spatial_metadata_type=None, spatial_metadata=None):
        self.__metadata_spatial_extract_result__ = result
        self.__metadata_spatial_extract_memo__ = memo
        if spatial_metadata_type is None:
            return

        if spatial_metadata_type == CResource.Spatial_MetaData_Type_Native_Center:
            self.__metadata_spatial__.native_center = spatial_metadata
        elif spatial_metadata_type == CResource.Spatial_MetaData_Type_Native_BBox:
            self.__metadata_spatial__.native_box = spatial_metadata
        elif spatial_metadata_type == CResource.Spatial_MetaData_Type_Native_Geom:
            self.__metadata_spatial__.native_geom = spatial_metadata
        elif spatial_metadata_type == CResource.Spatial_MetaData_Type_Wgs84_Center:
            self.__metadata_spatial__.wgs84_center = spatial_metadata
        elif spatial_metadata_type == CResource.Spatial_MetaData_Type_Wgs84_BBox:
            self.__metadata_spatial__.wgs84_bbox = spatial_metadata
        elif spatial_metadata_type == CResource.Spatial_MetaData_Type_Wgs84_Geom:
            self.__metadata_spatial__.wgs84_geom = spatial_metadata
        elif spatial_metadata_type == CResource.Spatial_MetaData_Type_Prj_Wkt:
            self.__metadata_spatial__.prj_wkt = spatial_metadata
        elif spatial_metadata_type == CResource.Spatial_MetaData_Type_Prj_Proj4:
            self.__metadata_spatial__.prj_proj4 = spatial_metadata
        elif spatial_metadata_type == CResource.Spatial_MetaData_Type_Prj_Project:
            self.__metadata_spatial__.prj_project = spatial_metadata
        elif spatial_metadata_type == CResource.Spatial_MetaData_Type_Prj_Coordinate:
            self.__metadata_spatial__.prj_coordinate = spatial_metadata
        elif spatial_metadata_type == CResource.Spatial_MetaData_Type_Prj_Degree:
            self.__metadata_spatial__.prj_degree = spatial_metadata
        elif spatial_metadata_type == CResource.Spatial_MetaData_Type_Prj_Zone:
            self.__metadata_spatial__.prj_zone = spatial_metadata
        elif spatial_metadata_type == CResource.Spatial_MetaData_Type_Prj_Source:
            self.__metadata_spatial__.prj_source = spatial_metadata
        else:
            pass

    def set_metadata_view(self, result: int, memo: str, view_metadata_type=None, view_metadata=None):
        self.__metadata_view_extract_result__ = result
        self.__metadata_view_extract_memo__ = memo
        if view_metadata_type is None:
            self.__browse_img_file_name__ = None
            self.__thumb_img_file_name__ = None
            return

        if view_metadata_type == CResource.View_MetaData_Type_Browse:
            self.__browse_img_file_name__ = view_metadata
        elif view_metadata_type == CResource.View_MetaData_Type_Thumb:
            self.__thumb_img_file_name__ = view_metadata
        else:
            pass

    def set_metadata_time(self, result: int, memo: str, time_attr_name=None, time_attr_value=None):
        self.__metadata_time_extract_result__ = result
        self.__metadata_time_extract_memo__ = memo
        if time_attr_name is not None:
            self.__time_information__.set_value_of_name(CUtils.any_2_str(time_attr_name), time_attr_value)

    def set_metadata(self, metadata_extract_result: int, metadata_extract_memo: str,
                     metadata_type: int, metadata_text):
        self.__metadata_extract_result__ = metadata_extract_result
        self.__metadata_extract_memo__ = metadata_extract_memo
        self.__metadata_type__ = metadata_type
        if self.__metadata_type__ == self.MetaDataFormat_Json:
            self.__metadata_text__ = ''
            self.__metadata_xml__ = CXml()
            self.__metadata_json__.load_json_text(metadata_text)
        elif self.__metadata_type__ == self.MetaDataFormat_XML:
            self.__metadata_text__ = ''
            self.__metadata_xml__.load_xml(metadata_text)
            self.__metadata_json__ = CJson()
        else:
            self.__metadata_text__ = metadata_text
            self.__metadata_xml__ = CXml()
            self.__metadata_json__ = CJson()

    def set_metadata_file(self, metadata_extract_result: int, metadata_extract_memo: str,
                          metadata_type: int, file_name):
        self.__metadata_extract_result__ = metadata_extract_result
        self.__metadata_extract_memo__ = metadata_extract_memo
        self.__metadata_type__ = metadata_type
        if self.__metadata_type__ == self.MetaDataFormat_Json:
            self.__metadata_text__ = ''
            self.__metadata_xml__ = CXml()
            self.__metadata_json__.load_file(file_name)
        elif self.__metadata_type__ == self.MetaDataFormat_XML:
            self.__metadata_text__ = ''
            self.__metadata_xml__.load_file(file_name)
            self.__metadata_json__ = CJson()
        else:
            self.__metadata_text__ = CFile.file_2_str(file_name)
            self.__metadata_xml__ = CXml()
            self.__metadata_json__ = CJson()

    def metadata_bus(self):
        if self.__metadata_bus_extract_result__ != self.DB_True:
            return self.__metadata_bus_extract_result__, self.__metadata_bus_extract_memo__, self.__metadata_bus_type__, None
        elif self.__metadata_bus_type__ == self.MetaDataFormat_Json:
            return self.__metadata_bus_extract_result__, self.__metadata_bus_extract_memo__, self.__metadata_bus_type__, self.__metadata_bus_json__.to_json()
        elif self.__metadata_bus_type__ == self.MetaDataFormat_XML:
            return self.__metadata_bus_extract_result__, self.__metadata_bus_extract_memo__, self.__metadata_bus_type__, self.__metadata_bus_xml__.to_xml()
        else:
            return self.__metadata_bus_extract_result__, self.__metadata_bus_extract_memo__, self.__metadata_bus_type__, self.__metadata_bus_text__

    def set_metadata_bus(self, metadata_bus_extract_result: int, metadata_bus_extract_memo: str,
                         metadata_bus_type: int, metadata_bus_text):
        self.__metadata_bus_extract_result__ = metadata_bus_extract_result
        self.__metadata_bus_extract_memo__ = metadata_bus_extract_memo
        self.__metadata_bus_type__ = metadata_bus_type
        if self.__metadata_bus_type__ == self.MetaDataFormat_Json:
            self.__metadata_bus_text__ = ''
            self.__metadata_bus_xml__ = CXml()
            self.__metadata_bus_json__.load_json_text(metadata_bus_text)
        elif self.__metadata_bus_type__ == self.MetaDataFormat_XML:
            self.__metadata_bus_text__ = ''
            self.__metadata_bus_xml__.load_xml(metadata_bus_text)
            self.__metadata_bus_json__ = CJson()
        else:
            self.__metadata_bus_text__ = metadata_bus_text
            self.__metadata_bus_xml__ = CXml()
            self.__metadata_bus_json__ = CJson()

    def set_metadata_bus_file(self, metadata_bus_extract_result: int, metadata_bus_extract_memo: str,
                              metadata_type: int, file_name):
        self.__metadata_bus_extract_result__ = metadata_bus_extract_result
        self.__metadata_bus_extract_memo__ = metadata_bus_extract_memo
        self.__metadata_bus_type__ = metadata_type
        if self.__metadata_bus_type__ == self.MetaDataFormat_Json:
            self.__metadata_bus_text__ = ''
            self.__metadata_bus_xml__ = CXml()
            self.__metadata_bus_json__.load_file(file_name)
        elif self.__metadata_bus_type__ == self.MetaDataFormat_XML:
            self.__metadata_bus_text__ = ''
            self.__metadata_bus_xml__.load_file(file_name)
            self.__metadata_bus_json__ = CJson()
        else:
            self.__metadata_bus_text__ = CFile.file_2_str(file_name)
            self.__metadata_bus_xml__ = CXml()
            self.__metadata_bus_json__ = CJson()
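
# --- Illustrative sketch only (not project code) ---
# Shows the (result, memo, payload) pattern each CMetaData facet follows: the payload is
# only exposed once extraction succeeded, mirroring metadata_time() and metadata() above.
# FacetSketch is a standalone stand-in, and DB_TRUE is an assumed placeholder for
# CResource.DB_True, whose real value is a project constant.
DB_TRUE = 1  # assumed placeholder value


class FacetSketch:
    def __init__(self):
        self.result = 0      # mirrors the Not_Support default in CMetaData.__init__
        self.memo = ''
        self.payload = None

    def get(self):
        # Return the payload only when extraction succeeded; otherwise an empty string.
        if self.result == DB_TRUE:
            return self.result, self.memo, self.payload
        return self.result, self.memo, ''


facet = FacetSketch()
facet.result, facet.memo, facet.payload = DB_TRUE, 'ok', '{"time": "2020-01-01"}'
print(facet.get())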
def process_mission(self, dataset) -> str:
    """
    The detailed algorithm is complex; see the [##### 服务发布调度] section of readme.md.

    :param dataset:
    :return:
    """
    layer_id = dataset.value_by_name(0, 'dpid', '')
    layer_name = dataset.value_by_name(0, 'dplayer_id', '')
    layer_title = dataset.value_by_name(0, 'dplayer_name', '')
    layer_service_name = dataset.value_by_name(0, 'dpname', '')
    layer_service_title = dataset.value_by_name(0, 'dptitle', '')
    layer_object = CUtils.any_2_str(dataset.value_by_name(0, 'dplayer_object', None))
    CLogger().debug(
        '即将更新服务[{0}.{1}]的图层[{2}.{3}.{4}]...'.format(
            layer_service_name, layer_service_title, layer_id, layer_name, layer_title
        )
    )
    object_da_result = CJson()
    try:
        self.__layer_init(layer_id)
        object_da_result.load_json_text(layer_object)
        object_catalog = CMDObjectCatalog(self.get_mission_db_id())
        object_dataset = object_catalog.search(self.ModuleName_Data2Service, object_da_result)
        if object_dataset.is_empty():
            self.__layer_file_empty(layer_id)
            result = CResult.merge_result(
                self.Success,
                '服务[{0}.{1}]的图层[{2}.{3}.{4}]检查更新成功完成'.format(
                    layer_service_name, layer_service_title, layer_id, layer_name, layer_title
                )
            )
            return result

        CLogger().debug(
            '服务[{0}.{1}]的图层[{2}.{3}.{4}], 发现[{5}]个符合要求的数据对象!'.format(
                layer_service_name, layer_service_title, layer_id, layer_name, layer_title,
                object_dataset.size()
            )
        )
        for data_index in range(object_dataset.size()):
            object_id = object_dataset.value_by_name(data_index, 'object_id', '')
            object_name = object_dataset.value_by_name(data_index, 'object_name', '')
            CLogger().debug(
                '服务[{0}.{1}]的图层[{2}.{3}.{4}], 发现[{5}]个符合要求的数据对象!\n第[{6}]个可用的对象为[{7}.{8}]'.format(
                    layer_service_name, layer_service_title, layer_id, layer_name, layer_title,
                    object_dataset.size(), data_index, object_id, object_name
                )
            )
            layer_file_id = self.__layer_object_id(layer_id, object_id)
            if layer_file_id is None:
                layer_file_id = CUtils.one_id()
                object_full_name = object_catalog.object_full_name_by_id(object_id)
                CFactory().give_me_db(self.get_mission_db_id()).execute(
                    '''
                    insert into dp_v_qfg_layer_file(
                        dpdf_id, dpdf_layer_id, dpdf_group_id, dpdf_object_id
                        , dpdf_object_fullname, dpdf_object_title, dpdf_object_size, dpdf_object_date)
                    values(:layer_file_id, :layer_id, :group_id, :object_id
                        , :object_fullname, :object_title, :object_size, :object_date)
                    ''',
                    {
                        'object_id': object_id,
                        'object_title': object_name,
                        'object_fullname': object_full_name,
                        'object_date': object_dataset.value_by_name(data_index, 'object_lastmodifytime', None),
                        'object_size': object_dataset.value_by_name(data_index, 'object_size', 0),
                        'layer_file_id': layer_file_id,
                        'layer_id': layer_id,
                        'group_id': layer_id
                    }
                )
            else:
                CFactory().give_me_db(self.get_mission_db_id()).execute(
                    '''
                    update dp_v_qfg_layer_file
                    set dpdf_object_size = :object_size
                        , dpdf_object_date = :object_date
                    where dpdf_id = :layer_file_id
                    ''',
                    {
                        'object_date': object_dataset.value_by_name(data_index, 'object_lastmodifytime', None),
                        'object_size': object_dataset.value_by_name(data_index, 'object_size', 0),
                        'layer_file_id': layer_file_id
                    }
                )
            CFactory().give_me_db(self.get_mission_db_id()).execute(
                '''
                update dp_v_qfg_layer_file
                set dpdf_object_fp = MD5(
                    coalesce(dpdf_object_title, '')||'-'||
                    coalesce(dpdf_object_size, 0)::text||'-'||
                    coalesce(dpdf_object_date, now())::text
                )
                where dpdf_id = :layer_file_id
                ''',
                {'layer_file_id': layer_file_id}
            )
            CFactory().give_me_db(self.get_mission_db_id()).execute(
                '''
                update dp_v_qfg_layer_file
                set dpdf_processtype = :process_type
                where dpdf_id = :layer_file_id
                    and dpdf_object_fp = dpdf_object_fp_lastdeploy
                ''',
                {'layer_file_id': layer_file_id, 'process_type': self.ProcType_Same}
            )
            CFactory().give_me_db(self.get_mission_db_id()).execute(
                '''
                update dp_v_qfg_layer_file
                set dpdf_processtype = :process_type
                where dpdf_id = :layer_file_id
                    and (
                        dpdf_object_fp <> dpdf_object_fp_lastdeploy
                        or dpdf_object_fp_lastdeploy is null)
                ''',
                {'layer_file_id': layer_file_id, 'process_type': self.ProcType_Update}
            )

        self.__layer_clear(layer_id)
        self.__layer_re_calc_group(layer_id)
        result = CResult.merge_result(
            self.Success,
            '服务[{0}.{1}]的图层[{2}.{3}.{4}]检查更新成功完成'.format(
                layer_service_name, layer_service_title, layer_id, layer_name, layer_title
            )
        )
        self.__update_layer_update_result(layer_id, result)
        return result
    except Exception as error:
        result = CResult.merge_result(
            self.Failure,
            '服务[{0}.{1}]的图层[{2}.{3}.{4}]检查更新失败, 错误原因为: {5}'.format(
                layer_service_name, layer_service_title, layer_id, layer_name, layer_title, error.__str__()
            )
        )
        self.__update_layer_update_result(layer_id, result)
        return result
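
# --- Illustrative sketch only (not project code) ---
# Shows the fingerprint comparison the SQL above delegates to the database: an MD5 over
# title-size-date is compared with the fingerprint recorded at the last deploy to decide
# between ProcType_Same and ProcType_Update. compute_fp_sketch is a standalone stand-in
# for the MD5(...) expression in the SQL, and all values below are examples.
import hashlib


def compute_fp_sketch(title: str, size: int, date: str) -> str:
    # Mirrors MD5(coalesce(title,'')||'-'||size::text||'-'||date::text)
    return hashlib.md5('{0}-{1}-{2}'.format(title, size, date).encode('utf-8')).hexdigest()


fp_now = compute_fp_sketch('scene_001', 1024, '2021-06-01 10:00:00')
fp_last_deploy = compute_fp_sketch('scene_001', 1024, '2021-05-01 09:00:00')
process_type = 'same' if fp_now == fp_last_deploy else 'update'  # ProcType_Same / ProcType_Update
print(process_type)  # update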
def execute(self) -> str:
    inbound_storage_list = CFactory().give_me_db(self.get_mission_db_id()).all_row(
        '''
        select dstid, dsttitle, dstwatchoption, dstscanlasttime
        from dm2_storage
        where dstwatch = {0} and dsttype = '{1}' and dstscanstatus = {2}
        '''.format(self.DB_True, self.Storage_Type_InBound, self.ProcStatus_Finished)
    )
    if inbound_storage_list.is_empty():
        return CResult.merge_result(CResult.Success, '本次没有需要检查的定时任务!')

    for data_index in range(inbound_storage_list.size()):
        storage_id = inbound_storage_list.value_by_name(data_index, 'dstid', '')
        storage_title = inbound_storage_list.value_by_name(data_index, 'dsttitle', '')
        storage_scan_last_time = inbound_storage_list.value_by_name(data_index, 'dstscanlasttime', None)
        storage_scan_option = CUtils.any_2_str(
            inbound_storage_list.value_by_name(data_index, 'dstwatchoption', None))
        CLogger().debug('正在检查存储[{0}]的定时器, 分析目前是否需要启动扫描...'.format(storage_title))
        try:
            # If the storage has never been scanned, start a scan immediately.
            if CUtils.equal_ignore_case(storage_scan_last_time, ''):
                self.start_storage_scan_immediately(storage_id)
            else:
                # Scan period
                json_storage_scan_option = CJson()
                json_storage_scan_option.load_json_text(storage_scan_option)
                storage_scan_period_type = json_storage_scan_option.xpath_one(
                    self.Name_Period, self.Scan_Period_Hour
                )
                if CUtils.equal_ignore_case(storage_scan_period_type, self.Scan_Period_Minute):
                    storage_scan_period = json_storage_scan_option.xpath_one(self.Scan_Period_Minute, 15)
                    over_time = CFactory().give_me_db(self.get_mission_db_id()).one_value(
                        '''
                        select trunc(extract(epoch FROM ((now()::timestamp + '-{0} minute') - '{1}'::timestamp))::numeric) as over_time
                        '''.format(storage_scan_period, storage_scan_last_time)
                    )
                elif CUtils.equal_ignore_case(storage_scan_period_type, self.Scan_Period_Hour):
                    storage_scan_period = json_storage_scan_option.xpath_one(self.Scan_Period_Hour, 1)
                    over_time = CFactory().give_me_db(self.get_mission_db_id()).one_value(
                        '''
                        select trunc(extract(epoch FROM ((now()::timestamp + '-{0} hour') - '{1}'::timestamp))::numeric) as over_time
                        '''.format(storage_scan_period, storage_scan_last_time)
                    )
                elif CUtils.equal_ignore_case(storage_scan_period_type, self.Scan_Period_Day):
                    storage_scan_period = json_storage_scan_option.xpath_one(self.Scan_Period_Day, 1)
                    over_time = CFactory().give_me_db(self.get_mission_db_id()).one_value(
                        '''
                        select trunc(extract(epoch FROM ((now()::timestamp + '-{0} day') - '{1}'::timestamp))::numeric) as over_time
                        '''.format(storage_scan_period, storage_scan_last_time)
                    )
                elif CUtils.equal_ignore_case(storage_scan_period_type, self.Scan_Period_Week):
                    storage_scan_period = json_storage_scan_option.xpath_one(self.Scan_Period_Week, 1)
                    over_time = CFactory().give_me_db(self.get_mission_db_id()).one_value(
                        '''
                        select trunc(extract(epoch FROM ((now()::timestamp + '-{0} week') - '{1}'::timestamp))::numeric) as over_time
                        '''.format(storage_scan_period, storage_scan_last_time)
                    )
                elif CUtils.equal_ignore_case(storage_scan_period_type, self.Scan_Period_Month):
                    storage_scan_period = json_storage_scan_option.xpath_one(self.Scan_Period_Month, 1)
                    over_time = CFactory().give_me_db(self.get_mission_db_id()).one_value(
                        '''
                        select trunc(extract(epoch FROM ((now()::timestamp + '-{0} month') - '{1}'::timestamp))::numeric) as over_time
                        '''.format(storage_scan_period, storage_scan_last_time)
                    )
                else:  # self.Scan_Period_Year
                    storage_scan_period = json_storage_scan_option.xpath_one(self.Scan_Period_Year, 1)
                    over_time = CFactory().give_me_db(self.get_mission_db_id()).one_value(
                        '''
                        select trunc(extract(epoch FROM ((now()::timestamp + '-{0} year') - '{1}'::timestamp))::numeric) as over_time
                        '''.format(storage_scan_period, storage_scan_last_time)
                    )
                if over_time >= 0:
                    CLogger().debug('存储[{0}]的定时器将启动...'.format(storage_title))
                    self.start_storage_scan_immediately(storage_id)
        except Exception as error:
            CFactory().give_me_db(self.get_mission_db_id()).execute(
                '''
                update dm2_storage
                set dstlastmodifytime = now()
                    , dstscanmemo = :message
                where dstid = :storage_id
                ''',
                {
                    'storage_id': storage_id,
                    'message': '系统分析定时扫描条件过程中发现错误, 详细信息为: {0}!'.format(error.__str__())
                }
            )
            continue
    return CResult.merge_result(self.Success, '本次分析定时扫描任务成功结束!')
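
# --- Illustrative sketch only (not project code) ---
# Shows the overdue test that the SQL above delegates to PostgreSQL: a scan is due when
# now() minus the configured period has reached the last scan time (over_time >= 0).
# scan_is_due_sketch is a standalone stand-in written with the standard library; month
# and year periods are omitted because timedelta has no such units, so they fall back to
# the hour branch here purely for illustration.
from datetime import datetime, timedelta


def scan_is_due_sketch(last_scan: datetime, period_type: str, period: int) -> bool:
    unit = {'minute': timedelta(minutes=period), 'hour': timedelta(hours=period),
            'day': timedelta(days=period), 'week': timedelta(weeks=period)}
    delta = unit.get(period_type, timedelta(hours=period))
    return datetime.now() - delta >= last_scan


print(scan_is_due_sketch(datetime(2021, 1, 1, 8, 0), 'hour', 1))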