def create_inbound_mission(self, storage_id):
    database = CFactory().give_me_db(self.get_mission_db_id())
    new_batch_no = database.seq_next_value(self.Seq_Type_Date_AutoInc)
    database.execute(
        '''
        insert into dm2_storage_inbound(dsiid, dsistorageid, dsidirectory, dsibatchno, dsidirectoryid, dsistatus)
        VALUES(:dsiid, :storageid, :directory, :batch_no, :directory_id, :status)
        ''',
        {
            'dsiid': CUtils.one_id(),
            'storageid': storage_id,
            'directory': '',
            'batch_no': new_batch_no,
            'directory_id': CUtils.one_id(),
            'status': self.IB_Status_QI_InQueue
        }
    )
def get_mission_info(self):
    mission_flag = CUtils.one_id()
    mission_seize_sql = self.get_mission_seize_sql()
    mission_info_sql = self.get_mission_info_sql()
    if mission_seize_sql is None:
        return None
    if mission_info_sql is None:
        return None
    mission_seize_sql = mission_seize_sql.replace(self.SYSTEM_NAME_MISSION_ID, mission_flag)
    mission_info_sql = mission_info_sql.replace(self.SYSTEM_NAME_MISSION_ID, mission_flag)
    try:
        factory = CFactory()
        db = factory.give_me_db(self.get_mission_db_id())
        db.execute(mission_seize_sql)
    except Exception:
        CLogger().debug('任务抢占查询语句有误, 请修正! 详细错误信息为: {0}'.format(mission_seize_sql))
        return CDataSet()
    try:
        return db.one_row(mission_info_sql)
    except Exception:
        CLogger().debug('任务信息查询语句有误, 请修正! 详细错误信息为: {0}'.format(mission_info_sql))
        return CDataSet()
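# A hedged illustration (not part of the original source) of the contract get_mission_info relies on:
# subclasses return a "seize" SQL and an "info" SQL whose text contains the literal
# SYSTEM_NAME_MISSION_ID token; the method above replaces that token with a freshly generated id,
# so one worker atomically claims a record and then reads back exactly the record it claimed.
# Table and column names below are hypothetical.
#
# def get_mission_seize_sql(self) -> str:
#     return '''
#         update some_mission_table
#         set status = 'processing', mission_flag = '{0}'
#         where status = 'queued'
#     '''.format(self.SYSTEM_NAME_MISSION_ID)
#
# def get_mission_info_sql(self) -> str:
#     return "select * from some_mission_table where mission_flag = '{0}'".format(self.SYSTEM_NAME_MISSION_ID)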
def custom_init(self):
    """
    Custom initialization method.
    :return:
    """
    super().custom_init()
    engine = CFactory().give_me_db(self.db_server_id)
    if self.my_id is None:
        self._ds_file_or_path = engine.one_row(
            '''
            select dsdid, dsdparentid, dsddirectory, dsddirtype, dsddirectoryname, dsd_object_type,
                dsd_object_confirm, dsd_object_id, dsd_directory_valid, dsdpath, dsddircreatetime,
                dsddirlastmodifytime, dsdparentobjid, dsdscanrule, dsd_ib_id
            from dm2_storage_directory
            where dsdstorageid = :dsdStorageID and dsddirectory = :dsdDirectory
            ''',
            {
                'dsdStorageID': self.storage_id,
                'dsdDirectory': self.file_name_with_rel_path
            }
        )
        if not self.ds_file_or_path.is_empty():
            self.my_id = self.ds_file_or_path.value_by_name(0, 'dsdid', None)
        if self.my_id is None:
            self.my_id = CUtils.one_id()
    else:
        self._ds_file_or_path = engine.one_row(
            '''
            select dsdid, dsdparentid, dsddirectory, dsddirtype, dsddirectoryname, dsd_object_type,
                dsd_object_confirm, dsd_object_id, dsd_directory_valid, dsdpath, dsddircreatetime,
                dsddirlastmodifytime, dsdparentobjid, dsdscanrule, dsd_ib_id
            from dm2_storage_directory
            where dsdid = :dsdid
            ''',
            {'dsdid': self.my_id}
        )
def custom_init(self):
    """
    Custom initialization method.
    :return:
    """
    super().custom_init()
    engine = CFactory().give_me_db(self.db_server_id)
    if self.my_id is None:
        self._ds_file_or_path = engine.one_row(
            '''
            select dsfid, dsfstorageid, dsfdirectoryid, dsffilerelationname, dsffilename, dsffilemainname,
                dsfext, dsffilecreatetime, dsffilemodifytime, dsffilevalid, dsf_object_type,
                dsf_object_confirm, dsf_object_id, dsffilesize, dsfparentobjid, dsf_ib_id
            from dm2_storage_file
            where dsfstorageid = :dsfStorageID
                and dsfdirectoryid = :dsfDirectoryId
                and dsffilerelationname = :dsfFileRelationName
            ''',
            {
                'dsfStorageID': self.storage_id,
                'dsfDirectoryId': self.parent_id,
                'dsfFileRelationName': CFile.unify(self.file_name_with_rel_path)
            }
        )
        if not self.ds_file_or_path.is_empty():
            self.my_id = self.ds_file_or_path.value_by_name(0, 'dsfid', None)
        if self.my_id is None:
            self.my_id = CUtils.one_id()
    else:
        self._ds_file_or_path = engine.one_row(
            '''
            select dsfid, dsfstorageid, dsfdirectoryid, dsffilerelationname, dsffilename, dsffilemainname,
                dsfext, dsffilecreatetime, dsffilemodifytime, dsffilevalid, dsf_object_type,
                dsf_object_confirm, dsf_object_id, dsffilesize, dsfparentobjid, dsf_ib_id
            from dm2_storage_file
            where dsfid = :dsfid
            ''',
            {'dsfid': self.my_id}
        )
def __inbound_object_detail_of_schema(self, list_file_fullname):
    sql_detail_insert = '''
        INSERT INTO dm2_storage_obj_detail(
            dodid, dodobjectid, dodfilename, dodfileext, dodfilesize,
            dodfilecreatetime, dodfilemodifytime, dodlastmodifytime, dodfiletype)
        VALUES (
            :dodid, :dodobjectid, :dodfilename, :dodfileext, :dodfilesize,
            :dodfilecreatetime, :dodfilemodifytime, now(), :dodfiletype)
    '''
    sql_detail_insert_params_list = []
    # query_storage_id = self.file_info.storage_id
    query_file_relation_name = self.file_info.file_name_with_rel_path
    for item_file_name_with_path in list_file_fullname:
        CLogger().debug(item_file_name_with_path)
        if not CFile.file_or_path_exist(item_file_name_with_path):
            continue
        params = dict()
        file_relation_name = CFile.file_relation_path(
            item_file_name_with_path, self.file_info.root_path)
        if CUtils.equal_ignore_case(query_file_relation_name, file_relation_name):
            params['dodid'] = self.object_id
        else:
            params['dodid'] = CUtils.one_id()
        # file type: file by default, directory when the path is a directory
        params['dodfiletype'] = self.FileType_File
        if CFile.is_dir(item_file_name_with_path):
            params['dodfiletype'] = self.FileType_Dir
        params['dodobjectid'] = self.object_id
        params['dodfilename'] = CFile.unify(file_relation_name)
        params['dodfileext'] = CFile.file_ext(item_file_name_with_path)
        params['dodfilesize'] = CFile.file_size(item_file_name_with_path)
        params['dodfilecreatetime'] = CFile.file_create_time(item_file_name_with_path)
        params['dodfilemodifytime'] = CFile.file_modify_time(item_file_name_with_path)
        # params['dodstorageid'] = query_storage_id
        # params['dodfilerelationname'] = CFile.file_relation_path(
        #     item_file_name_with_path, self.file_info.root_path)
        sql_params_tuple = (sql_detail_insert, params)
        sql_detail_insert_params_list.append(sql_params_tuple)
    if len(sql_detail_insert_params_list) > 0:
        try:
            CFactory().give_me_db(self.file_info.db_server_id).execute_batch(
                sql_detail_insert_params_list)
        except Exception as error:
            CLogger().warning('数据库处理出现异常, 错误信息为: {0}'.format(error.__str__()))
            return CResult.merge_result(self.Failure, '处理失败!')
    return CResult.merge_result(self.Success, '处理完毕!')
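# For reference (a sketch, not from the original source): execute_batch above receives a list of
# (sql, params) tuples, one per surviving file, e.g.
#     [
#         (sql_detail_insert, {'dodid': '...', 'dodobjectid': '...', 'dodfilename': 'a/b.tif', ...}),
#         (sql_detail_insert, {'dodid': '...', 'dodobjectid': '...', 'dodfilename': 'a/b.tfw', ...}),
#     ]
# so the whole detail list of one object is written in a single call, assuming execute_batch runs
# the statements together (as its use here suggests) rather than one connection per row.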
def start_process():
    """
    Writes inbound-tracking statistics into dm2_import_step. The table is only used for test
    monitoring and is not needed in a production release.
    A standalone scheduler: every 30 seconds, count directories, files, objects, attached detail
    files, and objects with / without tags, and append one row.
    """
    while True:
        try:
            db = CFactory().give_me_db()
            sql_result_count = '''
                SELECT now() AS query_time,
                    (SELECT COUNT(*) FROM dm2_storage_directory) AS count_dir,
                    (SELECT COUNT(*) FROM dm2_storage_file) AS count_file,
                    (SELECT COUNT(*) FROM dm2_storage_object) AS count_object,
                    (SELECT COUNT(*) FROM dm2_storage_obj_detail) AS count_obj_detail,
                    (SELECT COUNT(*) FROM dm2_storage_object WHERE dsotags IS NOT NULL) AS count_object_tag,
                    (SELECT COUNT(*) FROM dm2_storage_object WHERE dsotags IS NULL) AS count_object_notag
            '''
            count_dataset = db.one_row(sql_result_count)
            dis_query_time = count_dataset.value_by_name(0, 'query_time', '')
            dis_directory_count = count_dataset.value_by_name(0, 'count_dir', '')
            dis_file_count = count_dataset.value_by_name(0, 'count_file', '')
            dis_object_count = count_dataset.value_by_name(0, 'count_object', '')
            dis_detail_count = count_dataset.value_by_name(0, 'count_obj_detail', '')
            dis_object_tag_count = count_dataset.value_by_name(0, 'count_object_tag', '')
            dis_object_notag_count = count_dataset.value_by_name(0, 'count_object_notag', '')
            sql_insert = '''
                insert into dm2_import_step(
                    "dis_query_time", "dis_id", "dis_directory_count", "dis_file_count",
                    "dis_object_count", "dis_detail_count", "dis_object_tag_count",
                    "dis_object_notag_count", "dis_addtime")
                values(
                    :query_time, :disid, :directory_count, :file_count, :object_count,
                    :detail_count, :object_tag_count, :object_notag_count, now())
            '''
            db.execute(
                sql_insert,
                {
                    'query_time': dis_query_time,
                    'disid': CUtils.one_id(),
                    'directory_count': dis_directory_count,
                    'file_count': dis_file_count,
                    'object_count': dis_object_count,
                    'detail_count': dis_detail_count,
                    'object_tag_count': dis_object_tag_count,
                    'object_notag_count': dis_object_notag_count
                })
            time.sleep(30)
        except Exception as error:
            raise Exception(error.__str__())
def __stat_object_detail_of_schema(self) -> str:
    """
    Persist the statistics of the data object's attached files.
    Only applicable to the Directory_Itself mode.
    :return:
    """
    result_sub_dir_count, result_file_count, result_file_size_sum = CFile.stat_of_path(
        self.__detail_file_path__,
        self.__detail_file_recurse__,
        self.__detail_file_match_text__,
        self.__detail_file_match_type__)
    query_file_relation_name = self.file_info.file_name_with_rel_path
    params = dict()
    file_relation_name = CFile.file_relation_path(
        self.__detail_file_path__, self.file_info.root_path)
    if CUtils.equal_ignore_case(query_file_relation_name, file_relation_name):
        params['dodid'] = self.object_id
    else:
        params['dodid'] = CUtils.one_id()
    params['dodfiletype'] = self.FileType_Dir
    params['dodfileext'] = None
    if CFile.is_file(self.__detail_file_path__):
        params['dodfiletype'] = self.FileType_File
        params['dodfileext'] = CFile.file_ext(self.__detail_file_path__)
    params['dodobjectid'] = self.object_id
    params['dodfilename'] = CFile.unify(file_relation_name)
    params['doddircount'] = result_sub_dir_count
    params['dodfilecount'] = result_file_count
    params['dodfilesize'] = result_file_size_sum
    params['dodfilecreatetime'] = CFile.file_create_time(self.__detail_file_path__)
    params['dodfilemodifytime'] = CFile.file_modify_time(self.__detail_file_path__)
    try:
        CFactory().give_me_db(self.file_info.db_server_id).execute(
            '''
            INSERT INTO dm2_storage_obj_detail(
                dodid, dodobjectid, dodfilename, dodfileext, dodfilesize, doddircount, dodfilecount,
                dodfilecreatetime, dodfilemodifytime, dodlastmodifytime, dodfiletype)
            VALUES (
                :dodid, :dodobjectid, :dodfilename, :dodfileext, :dodfilesize, :doddircount, :dodfilecount,
                :dodfilecreatetime, :dodfilemodifytime, now(), :dodfiletype)
            ''', params)
        return CResult.merge_result(self.Success, '处理完毕!')
    except Exception as error:
        CLogger().warning('数据库处理出现异常, 错误信息为: {0}'.format(error.__str__()))
        return CResult.merge_result(
            self.Failure, '数据库处理出现异常, 错误信息为: {0}'.format(error.__str__()))
def execute(self) -> str:
    try:
        db = CFactory().give_me_db()
        sql_result_count = '''
            SELECT now() AS query_time,
                (SELECT COUNT(*) FROM dm2_storage_directory) AS count_dir,
                (SELECT COUNT(*) FROM dm2_storage_file) AS count_file,
                (SELECT COUNT(*) FROM dm2_storage_object) AS count_object,
                (SELECT COUNT(*) FROM dm2_storage_obj_detail) AS count_obj_detail,
                (SELECT COUNT(*) FROM dm2_storage_object WHERE dsotags IS NOT NULL) AS count_object_tag,
                (SELECT COUNT(*) FROM dm2_storage_object WHERE dsotags IS NULL) AS count_object_notag
        '''
        count_dataset = db.one_row(sql_result_count)
        dis_query_time = count_dataset.value_by_name(0, 'query_time', '')
        dis_directory_count = count_dataset.value_by_name(0, 'count_dir', '')
        dis_file_count = count_dataset.value_by_name(0, 'count_file', '')
        dis_object_count = count_dataset.value_by_name(0, 'count_object', '')
        dis_detail_count = count_dataset.value_by_name(0, 'count_obj_detail', '')
        dis_object_tag_count = count_dataset.value_by_name(0, 'count_object_tag', '')
        dis_object_notag_count = count_dataset.value_by_name(0, 'count_object_notag', '')
        sql_insert = '''
            insert into dm2_import_step(
                "dis_query_time", "dis_id", "dis_directory_count", "dis_file_count",
                "dis_object_count", "dis_detail_count", "dis_object_tag_count",
                "dis_object_notag_count", "dis_addtime")
            values(
                :query_time, :disid, :directory_count, :file_count, :object_count,
                :detail_count, :object_tag_count, :object_notag_count, now())
        '''
        db.execute(
            sql_insert,
            {
                'query_time': dis_query_time,
                'disid': CUtils.one_id(),
                'directory_count': dis_directory_count,
                'file_count': dis_file_count,
                'object_count': dis_object_count,
                'detail_count': dis_detail_count,
                'object_tag_count': dis_object_tag_count,
                'object_notag_count': dis_object_notag_count
            })
        return CResult.merge_result(self.Success, '本次分析定时扫描任务成功结束!')
    except Exception as error:
        raise Exception(error.__str__())
def get_test_obj(self, file_type, test_file_with_full_path):
    file_info = CDMFilePathInfoEx(
        file_type,
        test_file_with_full_path,
        None,  # storage_id
        None,  # file_id
        None,  # file_parent_id
        None,  # owner_id
        self.DB_Server_ID_Default,
        None
    )
    plugins_obj = self.create_plugins(file_info)
    plugins_obj.classified()
    plugins_obj.create_virtual_content()
    metadata_parser = CMetaDataParser(
        CUtils.one_id(),
        test_file_with_full_path.replace(self._test_file_parent_path, ''),
        file_info,
        plugins_obj.file_content,
        plugins_obj.get_information()
    )
    return file_info, plugins_obj, metadata_parser
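# Hedged usage sketch (the file-type constant and path below are hypothetical):
#     file_info, plugins_obj, metadata_parser = self.get_test_obj(
#         self.FileType_File, '/test/data/sample.tif')
#     print(plugins_obj.get_information())
# The three returned objects give a test direct access to the path info, the classified plugin
# instance and a parser that is already wired to the plugin's virtual content.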
def parser_metadata_custom(self, parser: CMetaDataParser) -> str:
    """
    Custom metadata parsing, triggered after all quality checks and other processing.
    :param parser:
    :return:
    """
    meta_data_json = parser.metadata.metadata_json()
    if meta_data_json is None:
        return CResult.merge_result(
            self.Success,
            '数据[{0}]的质检和空间等元数据解析完毕, 但子图层解析有误, 无法获取JSON格式的元数据! '.format(
                self.file_info.file_name_with_full_path,
            )
        )
    json_data_source = meta_data_json.xpath_one('datasource', None)
    layer_list = meta_data_json.xpath_one(self.Name_Layers, None)
    if layer_list is None:
        return CResult.merge_result(
            self.Success,
            '数据[{0}]的质检和空间等元数据解析完毕, 但子图层解析有误, 元数据中无法找到layers节点! '.format(
                self.file_info.file_name_with_full_path,
            )
        )
    mdb_ib_id = CFactory().give_me_db(self.file_info.db_server_id).one_value(
        '''
        select dso_ib_id from dm2_storage_object where dsoid = :object_id
        ''',
        {'object_id': parser.object_id}
    )
    error_message_list = []
    table = CTable()
    table.load_info(self.file_info.db_server_id, self.TableName_DM_Object)
    for layer in layer_list:
        layer_name = CUtils.dict_value_by_name(layer, self.Name_Name, '')
        if CUtils.equal_ignore_case(layer_name, ''):
            continue
        layer_alias_name = CUtils.dict_value_by_name(layer, self.Name_Description, layer_name)
        layer_metadata_json = CJson()
        layer_metadata_json.set_value_of_name('datasource', json_data_source)
        layer_metadata_json.set_value_of_name('layer_count', 1)
        layer_metadata_json.set_value_of_name('layers', [layer])
        layer_metadata_text = layer_metadata_json.to_json()
        try:
            sql_find_layer_existed = '''
                select dsoid as layer_id_existed
                from dm2_storage_object
                where upper(dsoobjectname) = upper(:layer_name) and dsoparentobjid = :object_id
            '''
            layer_id_existed = CFactory().give_me_db(self.file_info.db_server_id).one_value(
                sql_find_layer_existed,
                {'layer_name': layer_name, 'object_id': parser.object_id}
            )
            if layer_id_existed is None:
                layer_id_existed = CUtils.one_id()
            table.column_list.reset()
            table.column_list.column_by_name('dsoid').set_value(layer_id_existed)
            table.column_list.column_by_name('dsoobjectname').set_value(layer_name)
            table.column_list.column_by_name('dsoobjecttype').set_value(
                CUtils.dict_value_by_name(
                    self.get_information(), self.Plugins_Info_Child_Layer_Plugins_Name, ''))
            table.column_list.column_by_name('dsodatatype').set_value(
                CUtils.dict_value_by_name(
                    self.get_information(), self.Plugins_Info_Child_Layer_Data_Type, ''))
            table.column_list.column_by_name('dsoalphacode').set_value(CUtils.alpha_text(layer_name))
            table.column_list.column_by_name('dsoaliasname').set_value(layer_alias_name)
            table.column_list.column_by_name('dsoparentobjid').set_value(parser.object_id)
            table.column_list.column_by_name('dso_ib_id').set_value(mdb_ib_id)
            table.column_list.column_by_name('dsometadatatext').set_value(layer_metadata_text)
            table.column_list.column_by_name('dsometadatajson').set_value(layer_metadata_text)
            table.column_list.column_by_name('dsometadataparsestatus').set_value(self.ProcStatus_InQueue)
            table.column_list.column_by_name('dsotagsparsestatus').set_value(self.ProcStatus_InQueue)
            table.column_list.column_by_name('dsodetailparsestatus').set_value(self.ProcStatus_InQueue)
            result = table.save_data()
            if not CResult.result_success(result):
                error_message_list.append(
                    '图层[{0}]的创建过程出现错误, 详细信息为: {1}'.format(
                        layer_name, CResult.result_message(result)))
        except Exception as error:
            error_message_list.append(
                '图层[{0}]的创建过程出现错误, 详细信息为: {1}'.format(layer_name, error.__str__()))
    if len(error_message_list) > 0:
        return CResult.merge_result(
            self.Failure,
            '数据[{0}]的质检和空间等元数据解析完毕, 但子图层解析有误, 详细情况如下: \n{1}'.format(
                self.file_info.file_name_with_full_path,
                CUtils.list_2_str(error_message_list, '', '\n', '', True)
            )
        )
    else:
        return CResult.merge_result(
            self.Success,
            '数据[{0}]的自定义元数据解析完毕! '.format(
                self.file_info.file_name_with_full_path,
            )
        )
def process_mission(self, dataset) -> str:
    """
    :param dataset:
    :return:
    """
    ds_ib_id = dataset.value_by_name(0, 'query_ib_id', '')
    ds_storage_id = dataset.value_by_name(0, 'query_storage_id', '')
    ds_storage_title = dataset.value_by_name(0, 'query_storage_title', '')
    ds_storage_root_dir = dataset.value_by_name(0, 'query_rootpath', '')
    ds_ib_directory_name = dataset.value_by_name(0, 'query_ib_relation_dir', '')
    ds_ib_directory_id = dataset.value_by_name(0, 'query_ib_relation_dir_id', '')
    ds_ib_batch_no = dataset.value_by_name(0, 'query_ib_batchno', '')
    # enable when needed:
    # ds_ib_option = CUtils.any_2_str(dataset.value_by_name(0, 'query_ib_option', ''))
    if not CUtils.equal_ignore_case(ds_ib_directory_name, ''):
        CLogger().debug('正在入库的是存储[{0}]下的目录[{1}]'.format(
            ds_storage_title, CFile.join_file(ds_storage_root_dir, ds_ib_directory_name)))
    else:
        CLogger().debug('正在入库的是存储[{0}]下的目录[{1}]'.format(
            ds_storage_title, ds_storage_root_dir))
    try:
        ds_ib_information_updated = False
        # check the directory name format and auto-correct it
        if not CUtils.equal_ignore_case(ds_ib_directory_name, ''):
            ds_ib_directory = CFile.unify(CFile.add_prefix(ds_ib_directory_name))
            if not CUtils.equal_ignore_case(ds_ib_directory, ds_ib_directory_name):
                ds_ib_directory_name = ds_ib_directory
                ds_ib_information_updated = True
        if CUtils.equal_ignore_case(ds_ib_batch_no, ''):
            ds_ib_batch_no = CFactory().give_me_db(self.get_mission_db_id()).seq_next_value(
                self.Seq_Type_Date_AutoInc)
            ds_ib_information_updated = True
        if CUtils.equal_ignore_case(ds_ib_directory_id, ''):
            ds_ib_directory_id = CUtils.one_id()
            ds_ib_information_updated = True
        if ds_ib_information_updated:
            self.correct_ib_information(ds_ib_id, ds_ib_directory_name, ds_ib_batch_no, ds_ib_directory_id)
        if not CUtils.equal_ignore_case(ds_ib_directory_name, ''):
            ib_full_directory = CFile.join_file(ds_storage_root_dir, ds_ib_directory_name)
        else:
            ib_full_directory = ds_storage_root_dir
        self.clear_anything_in_directory(ds_ib_id)
        metadata_rule_file_name = CFile.join_file(ib_full_directory, self.FileName_MetaData_Rule)
        metadata_rule_content = ''
        if CFile.file_or_path_exist(metadata_rule_file_name):
            try:
                metadata_rule_content = CXml.file_2_str(metadata_rule_file_name)
                CLogger().debug('在目录[{0}]下发现元数据规则文件, 它的内容为[{1}]'.format(
                    ib_full_directory, metadata_rule_content))
            except Exception as error:
                result = CResult.merge_result(
                    self.Failure,
                    '在目录[{0}]下发现元数据规则文件, 但它的格式不合法, 详细错误为: [{1}]'.format(
                        ib_full_directory, error.__str__()))
                self.update_inbound_qi_result(ds_ib_id, result)
                return result
        path_obj = CDMPathInfo(
            self.FileType_Dir, ib_full_directory, ds_storage_id, ds_ib_directory_id,
            ds_storage_id, None, self.get_mission_db_id(), metadata_rule_content)
        if path_obj.white_black_valid():
            path_obj.db_check_and_update(ds_ib_id)
            result = CResult.merge_result(
                self.Success,
                '目录[{0}]的入库质检任务创建成功, 系统正在质检, 请稍后...'.format(ib_full_directory))
        else:
            result = CResult.merge_result(
                self.Failure,
                '目录[{0}]未通过黑白名单检验, 不允许入库! '.format(ib_full_directory))
        self.update_inbound_qi_result(ds_ib_id, result)
        return result
    except Exception as error:
        result = CResult.merge_result(
            self.Failure,
            '目录[{0}]的入库质检任务创建过程出现错误, 详细错误为: [{1}]'.format(
                CFile.join_file(ds_storage_root_dir, ds_ib_directory_name), error.__str__()))
        self.update_inbound_qi_result(ds_ib_id, result)
        return result
def __init__(self, target_name):
    self.__target_name__ = target_name
    self.__temp_subpath_name__ = CUtils.one_id()
    self.__work_root_dir__ = CFile.join_file(CSys.get_work_root_dir(), self.__temp_subpath_name__)
    self.__view_root_dir__ = CSys.get_metadata_view_root_dir()
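# For orientation (a sketch, not from the original source): with a work root of, say, '/data/work'
# and a generated one_id() of 'a1b2c3', __work_root_dir__ becomes '/data/work/a1b2c3', i.e. a
# private scratch directory per instance, while __view_root_dir__ points at the shared metadata
# view root. The example values are hypothetical.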
def db_file2object(self):
    """
    :return:
    """
    db_object_confirm = self.ds_file_or_path.value_by_name(
        0, 'dsf_object_confirm', self.Object_Confirm_IUnKnown)
    db_object_id = CUtils.one_id()
    if (db_object_confirm == self.Object_Confirm_IKnown) or (db_object_confirm == self.Object_Confirm_Maybe):
        db_object_size = self.ds_file_or_path.value_by_name(0, 'dsffilesize', 0)
        db_path_modify_time = self.ds_file_or_path.value_by_name(0, 'dsffilemodifytime', '')
        if CUtils.equal_ignore_case(
                CUtils.any_2_str(db_path_modify_time),
                CUtils.any_2_str(self.file_modify_time)) and (db_object_size == self.file_size):
            CLogger().info(
                '文件[{0}]的大小和最后修改时间, 和库中登记的都没有变化, 对象识别将被忽略! '.format(
                    self.file_name_with_full_path))
            return
        else:
            # delete the object record and clear the object fields
            db_object_id = self.ds_file_or_path.value_by_name(0, 'dsf_object_id', '')
            db_object_type = self.ds_file_or_path.value_by_name(0, 'dsf_object_type', '')
            CLogger().debug(
                '系统发现文件[{0}]的大小或最后修改时间有变化, 将删除它关联的对象{1}.{2}, 重新识别'.format(
                    self.file_main_name, db_object_type, db_object_id))
            self.db_delete_object_by_id(db_object_id)
    object_confirm = self.Object_Confirm_IUnKnown
    object_name = None
    object_type = None
    classified_obj = CPluginsMng.plugins_classified(self)
    if classified_obj is not None:
        object_confirm = classified_obj.classified_object_confirm()
        object_name = classified_obj.classified_object_name()
        object_type = classified_obj.get_id()
    if (object_confirm == self.Object_Confirm_IUnKnown) or (object_confirm == self.Object_Confirm_IKnown_Not):
        sql_update_file_object = '''
            update dm2_storage_file
            set dsf_object_confirm = :dsf_object_confirm, dsf_object_id = null, dsf_object_type = null,
                dsffilevalid = -1, dsffilesize = :dsfFileSize, dsffilemodifytime = :fileModifyTime
            where dsfid = :dsfid
        '''
        CFactory().give_me_db(self.db_server_id).execute(
            sql_update_file_object,
            {
                'dsfid': self.my_id,
                'dsf_object_confirm': object_confirm,
                'dsfFileSize': self.file_size,
                'fileModifyTime': CUtils.any_2_str(self.file_modify_time)
            }
        )
    else:
        sql_insert_object = '''
            insert into dm2_storage_object(
                dsoid, dsoobjectname, dsoobjecttype, dsodatatype, dsoalphacode, dsoaliasname,
                dsoparentobjid, dso_ib_id)
            values(
                :dsoid, :dsoobjectname, :dsoobjecttype, :dsodatatype, :dsoalphacode, :dsoaliasname,
                :dsoparentobjid, :dso_ib_id)
        '''
        new_dso_id = db_object_id
        sql_update_file_object = '''
            update dm2_storage_file
            set dsf_object_confirm = :dsf_object_confirm, dsf_object_id = :dsf_object_id,
                dsf_object_type = :dsf_object_type, dsffilevalid = -1, dsffilesize = :dsfFileSize,
                dsffilemodifytime = :fileModifyTime
            where dsfid = :dsfid
        '''
        engine = CFactory().give_me_db(self.db_server_id)
        session = engine.give_me_session()
        try:
            params = dict()
            params['dsoid'] = new_dso_id
            params['dsoobjectname'] = object_name
            params['dsoobjecttype'] = object_type
            params['dsodatatype'] = self.FileType_File
            params['dsoalphacode'] = CUtils.alpha_text(object_name)
            params['dsoaliasname'] = object_name
            params['dsoparentobjid'] = self.owner_obj_id
            params['dso_ib_id'] = self.ds_file_or_path.value_by_name(0, 'dsf_ib_id', None)
            engine.session_execute(session, sql_insert_object, params)

            params = dict()
            params['dsfid'] = self.my_id
            params['dsf_object_confirm'] = object_confirm
            params['dsf_object_id'] = new_dso_id
            params['dsf_object_type'] = object_type
            params['dsfFileSize'] = self.file_size
            params['fileModifyTime'] = CUtils.any_2_str(self.file_modify_time)
            engine.session_execute(session, sql_update_file_object, params)

            engine.session_commit(session)
        except Exception as error:
            CLogger().warning('数据库处理出现异常, 错误信息为: {0}'.format(error.__str__()))
            engine.session_rollback(session)
        finally:
            engine.session_close(session)
def execute(self) -> str:
    dm2_storage_list = CFactory().give_me_db(self.get_mission_db_id()).all_row(
        '''
        SELECT dstid, dsttitle, dstunipath, dstscanlasttime, dstlastmodifytime, dstotheroption,
            dstotheroption -> 'mount' ->> 'username' AS dm2_username,
            dstotheroption -> 'mount' ->> 'password' AS dm2_password
        FROM dm2_storage
        WHERE dstscanstatus = 0 AND (dsttype = 'mix' OR dsttype = 'core')
        '''
    )
    gis_server_list = CFactory().give_me_db(self.get_mission_db_id()).all_row(
        '''
        select dgsid, dgstitle, dgslastmodifytime from dp_gis_server
        '''
    )
    if dm2_storage_list.is_empty():
        return CResult.merge_result(CResult.Success, '本次没有需要检查的入库存储!')
    if gis_server_list.is_empty():
        return CResult.merge_result(CResult.Success, '本次没有需要检查的服务!')
    hostname = settings.application.xpath_one('data2service.system.connect.host', None)
    port = settings.application.xpath_one('data2service.system.connect.port', None)
    username = settings.application.xpath_one('data2service.system.connect.username', None)
    password = settings.application.xpath_one('data2service.system.connect.password', None)
    for data_index in range(dm2_storage_list.size()):
        storage_id = dm2_storage_list.value_by_name(data_index, 'dstid', '')
        storage_title = dm2_storage_list.value_by_name(data_index, 'dsttitle', '')
        storage_dstscanlasttime = dm2_storage_list.value_by_name(data_index, 'dstscanlasttime', None)
        storage_dstunipath = dm2_storage_list.value_by_name(data_index, 'dstunipath', '')
        storage_username = dm2_storage_list.value_by_name(data_index, 'dm2_username', '')
        storage_password = dm2_storage_list.value_by_name(data_index, 'dm2_password', '')
        CLogger().debug('正在检查和启动存储[{0}.{1}]的定时扫描...'.format(storage_id, storage_title))
        for server_index in range(gis_server_list.size()):
            server_id = gis_server_list.value_by_name(server_index, 'dgsid', '')
            server_title = gis_server_list.value_by_name(server_index, 'dgstitle', '')
            server_dgslastmodifytime = gis_server_list.value_by_name(server_index, 'dgslastmodifytime', '')
            CLogger().debug('正在检查和启动服务[{0}.{1}]的定时扫描...'.format(server_id, server_title))
            if storage_dstscanlasttime is not None:
                gis_storage_list = CFactory().give_me_db(self.get_mission_db_id()).all_row(
                    '''
                    select dgsid, dgsserverid, dgsstorageid, dgsstoragelastcfgtime, dgslastmodifytime
                    from dp_gis_storage
                    where dgsstorageid = :storage_id and dgsserverid = :server_id
                    ''',
                    {'storage_id': storage_id, 'server_id': server_id}
                )
                # the query returns at most one record for this storage/server pair
                gis_storage_id = gis_storage_list.value_by_name(0, 'dgsid', '')
                # gis_storage_server_id = gis_storage_list.value_by_name(0, 'dgsserverid', '')
                # gis_storage_storage_id = gis_storage_list.value_by_name(0, 'dgsstorageid', '')
                gis_storage_dgslastmodifytime = gis_storage_list.value_by_name(0, 'dgslastmodifytime', '')
                if gis_storage_list.is_empty():
                    gis_storage_id = CUtils.one_id()
                    database = CFactory().give_me_db(self.get_mission_db_id())
                    database.execute(
                        '''
                        insert into dp_gis_storage(
                            dgsid, dgsserverid, dgsstorageid, dgsstoragelastcfgtime, dgsstatus,
                            dgsmountprocid, dgsmounturl, dgsmountmemo, dgsmemo, dgsdefinetype,
                            dgsdefine, dgslastmodifytime)
                        VALUES(
                            :dgsid, :dgsserverid, :dgsstorageid, :dgsstoragelastcfgtime, :dgsstatus,
                            :dgsmountprocid, :dgsmounturl, :dgsmountmemo, :dgsmemo, :dgsdefinetype,
                            :dgsdefine, :dgslastmodifytime)
                        ''',
                        {
                            'dgsid': gis_storage_id,
                            'dgsserverid': server_id,
                            'dgsstorageid': storage_id,
                            'dgsstoragelastcfgtime': None,
                            'dgsstatus': 2,
                            'dgsmountprocid': None,
                            'dgsmounturl': None,
                            'dgsmountmemo': None,
                            'dgsmemo': None,
                            'dgsdefinetype': None,
                            'dgsdefine': None,
                            'dgslastmodifytime': None
                        }
                    )
                    CLogger().info("--------------begin mount---------------")
                    try:
                        # mount command and /etc/fstab entry templates
                        mountcmd = "mount -t cifs {0} {1} -o username={2},password={3} "
                        fstab_line = "echo '{0} {1} cifs defaults,username={2},password={3} 0 0' >> /etc/fstab"
                        # start the ssh client and connect to the linux server
                        client = paramiko.SSHClient()
                        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
                        client.connect(hostname=hostname, port=port, username=username, password=password)
                        # create the mount point
                        win_path = storage_dstunipath.replace('\\', '/')
                        linux_path = '/mnt/store_' + storage_id
                        if not os.path.exists(linux_path):
                            CLogger().info("to make dir " + linux_path)
                            cmdin, cmdout, cmderr = client.exec_command("mkdir " + linux_path)
                            if cmderr is not None:
                                error_msg = cmderr.read().decode('utf-8')
                                if len(error_msg) > 1:
                                    CLogger().error(error_msg)
                        # check whether linux_path is already mounted
                        CLogger().info("check if the linux_path mounted " + linux_path)
                        cmdin, cmdout, cmderr = client.exec_command('mount')
                        if cmdout is not None:
                            mount_info = cmdout.read().decode('utf-8')
                            if linux_path in mount_info:
                                CLogger().info("the linux_path mounted " + linux_path)
                                continue
                        CLogger().info(
                            "to " + mountcmd.format(win_path, linux_path, storage_username, storage_password))
                        # perform the mount
                        cmdin, cmdout, cmderr = client.exec_command(
                            mountcmd.format(win_path, linux_path, storage_username, storage_password))
                        if cmderr is not None:
                            error_msg = cmderr.read().decode('utf-8')
                            if len(error_msg) > 1:
                                CLogger().error(error_msg)
                                message = error_msg
                        # persist the mount in /etc/fstab (using the storage share credentials)
                        CLogger().info(fstab_line.format(win_path, linux_path, storage_username, storage_password))
                        cmdin, cmdout, cmderr = client.exec_command(
                            fstab_line.format(win_path, linux_path, storage_username, storage_password))
                        message = 'mount success'
                        CLogger().info('mount success')
                        # update the mount status
                        lastmodifytime = CTime.now()
                        CFactory().give_me_db(self.get_mission_db_id()).execute(
                            '''
                            update dp_gis_storage
                            set dgsstatus = 1, dgsserverid = :server_id, dgsmounturl = :linux_path,
                                dgslastmodifytime = :lastmodifytime, dgsmountmemo = :message
                            where dgsid = :gis_storage_id
                            ''',
                            {'server_id': server_id, 'linux_path': linux_path,
                             'gis_storage_id': gis_storage_id, 'lastmodifytime': lastmodifytime,
                             'message': message}
                        )
                        CFactory().give_me_db(self.get_mission_db_id()).execute(
                            '''
                            update dm2_storage set dstlastmodifytime = :lastmodifytime
                            where dstid = :storage_id
                            ''',
                            {'storage_id': storage_id, 'lastmodifytime': lastmodifytime}
                        )
                    except Exception as error:
                        message = 'mount服务[{0}.{1}]的状态过程出现异常! 错误信息为: {2}'.format(
                            server_id, server_title, error.__str__())
                        CFactory().give_me_db(self.get_mission_db_id()).execute(
                            '''
                            update dp_gis_storage set dgsstatus = 3, dgsmountmemo = :message
                            where dgsid = :gis_storage_id
                            ''',
                            {'gis_storage_id': gis_storage_id, 'message': message}
                        )
                        CLogger().error(message)
                else:
                    if server_dgslastmodifytime is not None:
                        if CUtils.equal_ignore_case(storage_dstscanlasttime, gis_storage_dgslastmodifytime):
                            pass
                        else:
                            CFactory().give_me_db(self.get_mission_db_id()).execute(
                                '''
                                update dp_gis_storage set dgsstatus = 2 where dgsid = :gis_storage_id
                                ''',
                                {'gis_storage_id': gis_storage_id}
                            )
                            try:
                                mountcmd = "mount -t cifs {0} {1} -o username={2},password={3} "
                                fstab_line = "echo '{0} {1} cifs defaults,username={2},password={3} 0 0' >> /etc/fstab"
                                # start the ssh client and connect to the linux server
                                client = paramiko.SSHClient()
                                client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
                                client.connect(hostname=hostname, port=port, username=username, password=password)
                                # create the mount point
                                win_path = storage_dstunipath.replace('\\', '/')
                                linux_path = '/mnt/store_' + storage_id
                                if not os.path.exists(linux_path):
                                    CLogger().info("to make dir " + linux_path)
                                    cmdin, cmdout, cmderr = client.exec_command("mkdir " + linux_path)
                                    if cmderr is not None:
                                        error_msg = cmderr.read().decode('utf-8')
                                        if len(error_msg) > 1:
                                            CLogger().debug(error_msg)
                                            # self.report_error(start_response, error_msg)
                                # check whether linux_path is already mounted
                                CLogger().info("check if the linux_path mounted " + linux_path)
                                cmdin, cmdout, cmderr = client.exec_command('mount')
                                if cmdout is not None:
                                    mount_info = cmdout.read().decode('utf-8')
                                    if linux_path in mount_info:
                                        CLogger().info("the linux_path mounted " + linux_path)
                                        continue
                                CLogger().info(
                                    "to " + mountcmd.format(win_path, linux_path, storage_username, storage_password))
                                # perform the mount
                                cmdin, cmdout, cmderr = client.exec_command(
                                    mountcmd.format(win_path, linux_path, storage_username, storage_password))
                                if cmderr is not None:
                                    error_msg = cmderr.read().decode('utf-8')
                                    if len(error_msg) > 1:
                                        CLogger().error(error_msg)
                                # persist the mount in /etc/fstab (using the storage share credentials)
                                CLogger().info(fstab_line.format(win_path, linux_path, storage_username, storage_password))
                                cmdin, cmdout, cmderr = client.exec_command(
                                    fstab_line.format(win_path, linux_path, storage_username, storage_password))
                                message = 'mount success'
                                # update the mount status
                                lastmodifytime = CTime.now()
                                CFactory().give_me_db(self.get_mission_db_id()).execute(
                                    '''
                                    update dp_gis_storage
                                    set dgsstatus = 1, dgsserverid = :server_id, dgsmounturl = :linux_path,
                                        dgslastmodifytime = :lastmodifytime, dgsmountmemo = :message
                                    where dgsid = :gis_storage_id
                                    ''',
                                    {'server_id': server_id, 'linux_path': linux_path,
                                     'gis_storage_id': gis_storage_id, 'lastmodifytime': lastmodifytime,
                                     'message': message}
                                )
                                CFactory().give_me_db(self.get_mission_db_id()).execute(
                                    '''
                                    update dm2_storage set dstlastmodifytime = :lastmodifytime
                                    where dstid = :storage_id
                                    ''',
                                    {'storage_id': storage_id, 'lastmodifytime': lastmodifytime}
                                )
                                CLogger().info(message)
                            except Exception as error:
                                message = 'mount服务[{0}.{1}]的状态过程出现异常! 错误信息为: {2}'.format(
                                    server_id, server_title, error.__str__())
                                CFactory().give_me_db(self.get_mission_db_id()).execute(
                                    '''
                                    update dp_gis_storage set dgsstatus = 3, dgsmountmemo = :message
                                    where dgsid = :gis_storage_id
                                    ''',
                                    {'gis_storage_id': gis_storage_id, 'message': message}
                                )
                                CLogger().debug(message)
    return CResult.merge_result(CResult.Success, '服务mount监控任务执行成功结束!')
def process_mission(self, dataset) -> str:
    """
    The detailed algorithm is complex; see the [##### 服务发布调度] section in readme.md.
    :param dataset:
    :return:
    """
    layer_id = dataset.value_by_name(0, 'dpid', '')
    layer_name = dataset.value_by_name(0, 'dplayer_id', '')
    layer_title = dataset.value_by_name(0, 'dplayer_name', '')
    layer_service_name = dataset.value_by_name(0, 'dpname', '')
    layer_service_title = dataset.value_by_name(0, 'dptitle', '')
    layer_object = CUtils.any_2_str(dataset.value_by_name(0, 'dplayer_object', None))
    CLogger().debug(
        '即将更新服务[{0}.{1}]的图层[{2}.{3}.{4}]...'.format(
            layer_service_name, layer_service_title, layer_id, layer_name, layer_title))
    object_da_result = CJson()
    try:
        self.__layer_init(layer_id)
        object_da_result.load_json_text(layer_object)
        object_catalog = CMDObjectCatalog(self.get_mission_db_id())
        object_dataset = object_catalog.search(self.ModuleName_Data2Service, object_da_result)
        if object_dataset.is_empty():
            self.__layer_file_empty(layer_id)
            result = CResult.merge_result(
                self.Success,
                '服务[{0}.{1}]的图层[{2}.{3}.{4}]检查更新成功完成'.format(
                    layer_service_name, layer_service_title, layer_id, layer_name, layer_title))
            return result
        CLogger().debug(
            '服务[{0}.{1}]的图层[{2}.{3}.{4}], 发现[{5}]个符合要求的数据对象!'.format(
                layer_service_name, layer_service_title, layer_id, layer_name, layer_title,
                object_dataset.size()))
        for data_index in range(object_dataset.size()):
            object_id = object_dataset.value_by_name(data_index, 'object_id', '')
            object_name = object_dataset.value_by_name(data_index, 'object_name', '')
            CLogger().debug(
                '服务[{0}.{1}]的图层[{2}.{3}.{4}], 发现[{5}]个符合要求的数据对象!\n第[{6}]个可用的对象为[{7}.{8}]'.format(
                    layer_service_name, layer_service_title, layer_id, layer_name, layer_title,
                    object_dataset.size(), data_index, object_id, object_name))
            layer_file_id = self.__layer_object_id(layer_id, object_id)
            if layer_file_id is None:
                layer_file_id = CUtils.one_id()
                object_full_name = object_catalog.object_full_name_by_id(object_id)
                CFactory().give_me_db(self.get_mission_db_id()).execute(
                    '''
                    insert into dp_v_qfg_layer_file(
                        dpdf_id, dpdf_layer_id, dpdf_group_id, dpdf_object_id, dpdf_object_fullname,
                        dpdf_object_title, dpdf_object_size, dpdf_object_date)
                    values(
                        :layer_file_id, :layer_id, :group_id, :object_id, :object_fullname,
                        :object_title, :object_size, :object_date)
                    ''',
                    {
                        'object_id': object_id,
                        'object_title': object_name,
                        'object_fullname': object_full_name,
                        'object_date': object_dataset.value_by_name(data_index, 'object_lastmodifytime', None),
                        'object_size': object_dataset.value_by_name(data_index, 'object_size', 0),
                        'layer_file_id': layer_file_id,
                        'layer_id': layer_id,
                        'group_id': layer_id
                    }
                )
            else:
                CFactory().give_me_db(self.get_mission_db_id()).execute(
                    '''
                    update dp_v_qfg_layer_file
                    set dpdf_object_size = :object_size, dpdf_object_date = :object_date
                    where dpdf_id = :layer_file_id
                    ''',
                    {
                        'object_date': object_dataset.value_by_name(data_index, 'object_lastmodifytime', None),
                        'object_size': object_dataset.value_by_name(data_index, 'object_size', 0),
                        'layer_file_id': layer_file_id
                    }
                )
            CFactory().give_me_db(self.get_mission_db_id()).execute(
                '''
                update dp_v_qfg_layer_file
                set dpdf_object_fp = MD5(
                    coalesce(dpdf_object_title, '') || '-' ||
                    coalesce(dpdf_object_size, 0)::text || '-' ||
                    coalesce(dpdf_object_date, now())::text)
                where dpdf_id = :layer_file_id
                ''',
                {'layer_file_id': layer_file_id}
            )
            CFactory().give_me_db(self.get_mission_db_id()).execute(
                '''
                update dp_v_qfg_layer_file
                set dpdf_processtype = :process_type
                where dpdf_id = :layer_file_id and dpdf_object_fp = dpdf_object_fp_lastdeploy
                ''',
                {'layer_file_id': layer_file_id, 'process_type': self.ProcType_Same}
            )
            CFactory().give_me_db(self.get_mission_db_id()).execute(
                '''
                update dp_v_qfg_layer_file
                set dpdf_processtype = :process_type
                where dpdf_id = :layer_file_id
                    and (dpdf_object_fp <> dpdf_object_fp_lastdeploy or dpdf_object_fp_lastdeploy is null)
                ''',
                {'layer_file_id': layer_file_id, 'process_type': self.ProcType_Update}
            )
        self.__layer_clear(layer_id)
        self.__layer_re_calc_group(layer_id)
        result = CResult.merge_result(
            self.Success,
            '服务[{0}.{1}]的图层[{2}.{3}.{4}]检查更新成功完成'.format(
                layer_service_name, layer_service_title, layer_id, layer_name, layer_title))
        self.__update_layer_update_result(layer_id, result)
        return result
    except Exception as error:
        result = CResult.merge_result(
            self.Failure,
            '服务[{0}.{1}]的图层[{2}.{3}.{4}]检查更新失败, 错误原因为: {5}'.format(
                layer_service_name, layer_service_title, layer_id, layer_name, layer_title,
                error.__str__()))
        self.__update_layer_update_result(layer_id, result)
        return result
def __prepare_update_data(self) -> list:
    sql_list = []
    temp_helper_code_list = []
    sql_update_set = ''
    sql_update_params = dict()
    for column_index in range(self.__column_list.size()):
        column = self.__column_list.column_by_index(column_index)
        if column.is_primary_key or (column.value is None):
            continue
        try:
            column_type = self.__database.db_column_type_by_name(column.db_column_type)
            column_value_type = CUtils.dict_value_by_name(
                column.value, self.Name_Type, self.DataValueType_SQL)
            column_value_as_text = CUtils.any_2_str(
                CUtils.dict_value_by_name(column.value, self.Name_Text, ''))
            # if the value is raw sql, write the sql into the update statement directly,
            # regardless of the column type
            if CUtils.equal_ignore_case(column_value_type, self.DataValueType_SQL):
                column_update_set = '{0}={1}'.format(column.name, column_value_as_text)
            elif CUtils.equal_ignore_case(column_value_type, self.DataValueType_File):
                column_update_set = '{0}={1}'.format(
                    column.name, ':{0}'.format(self.__param_name(column.name)))
                self.__database.file2param(
                    sql_update_params, self.__param_name(column.name), column_value_as_text)
            else:
                if CUtils.equal_ignore_case(column_type.set_value_method, self.DB_Column_Set_Method_Function):
                    if len(column_value_as_text) > column_type.function_param_max_size >= 0:
                        column_data_id = CUtils.one_id()
                        temp_helper_code_list.append(column_data_id)
                        sql_exchange = '''
                            insert into ro_global_spatialhandle(code, data) values(:code, :data)
                        '''
                        param_exchange = {'code': column_data_id, 'data': column_value_as_text}
                        sql_list.append((sql_exchange, param_exchange))
                        column_update_set = '{0}={1}'.format(
                            column.name,
                            CUtils.replace_placeholder(
                                column_type.set_value_template,
                                dict({
                                    self.Name_Value:
                                        "(select data from ro_global_spatialhandle where code = '{0}')".format(
                                            column_data_id)
                                })))
                    else:
                        if column_type.function_param_quoted:
                            column_value_as_text = CUtils.quote(column_value_as_text)
                        column_update_set = '{0}={1}'.format(
                            column.name,
                            CUtils.replace_placeholder(
                                column_type.set_value_template,
                                dict({self.Name_Value: column_value_as_text})))
                elif CUtils.equal_ignore_case(column_type.set_value_method, self.DB_Column_Set_Method_Geometry):
                    if len(column_value_as_text) > column_type.function_param_max_size >= 0:
                        column_data_id = CUtils.one_id()
                        temp_helper_code_list.append(column_data_id)
                        sql_exchange = '''
                            insert into ro_global_spatialhandle(code, data) values(:code, :data)
                        '''
                        param_exchange = {'code': column_data_id, 'data': column_value_as_text}
                        sql_list.append((sql_exchange, param_exchange))
                        column_update_set = '{0}={1}'.format(
                            column.name,
                            CUtils.replace_placeholder(
                                column_type.set_value_template,
                                dict({
                                    self.Name_Value:
                                        "(select data from ro_global_spatialhandle where code = '{0}')".format(
                                            column_data_id),
                                    self.Name_Srid: CUtils.dict_value_by_name(
                                        column.value, self.Name_Srid,
                                        settings.application.xpath_one(
                                            self.Path_Setting_Spatial_Srid, self.SRID_WGS84))
                                })))
                    else:
                        if column_type.function_param_quoted:
                            column_value_as_text = CUtils.quote(column_value_as_text)
                        column_update_set = '{0}={1}'.format(
                            column.name,
                            CUtils.replace_placeholder(
                                column_type.set_value_template,
                                dict({
                                    self.Name_Value: column_value_as_text,
                                    self.Name_Srid: CUtils.dict_value_by_name(
                                        column.value, self.Name_Srid,
                                        settings.application.xpath_one(
                                            self.Path_Setting_Spatial_Srid, self.SRID_WGS84))
                                })))
                else:
                    # if CUtils.equal_ignore_case(column_type.set_value_method, self.DB_Column_Set_Method_Param):
                    column_update_set = '{0}={1}'.format(
                        column.name, ':{0}'.format(self.__param_name(column.name)))
                    sql_update_params[self.__param_name(column.name)] = column_value_as_text
            sql_update_set = CUtils.str_append(sql_update_set, column_update_set, ', ')
        except Exception as error:
            print(error.__str__())
            raise
    sql_where = ''
    for column_index in range(self.__column_list.size()):
        column = self.__column_list.column_by_index(column_index)
        if column.is_primary_key:
            sql_where, sql_update_params = self.__prepare_where_condition(
                column, sql_where, sql_update_params)
    if not CUtils.equal_ignore_case(sql_where, ''):
        sql_where = CUtils.str_append(' where ', sql_where, ' ')
    sql_update = 'update {0} set {1} {2}'.format(self.__table_name, sql_update_set, sql_where)
    sql_list.append((sql_update, sql_update_params))
    for temp_helper_code in temp_helper_code_list:
        sql_list.append(
            ("delete from ro_global_spatialhandle where code = '{0}'".format(temp_helper_code), None))
    return sql_list
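# A sketch (not from the original source) of the list this method returns when one oversized
# geometry column goes through the ro_global_spatialhandle exchange table; '<id>' stands for a
# generated helper code, and the table, column and function names in the update are hypothetical:
#     [
#         ("insert into ro_global_spatialhandle(code, data) values(:code, :data)",
#          {'code': '<id>', 'data': '<very long wkt text>'}),
#         ("update my_table set geom=st_geomfromtext((select data from ro_global_spatialhandle "
#          "where code = '<id>'), 4326), title=:title  where my_id = :my_id",
#          {'title': '...', 'my_id': '...'}),
#         ("delete from ro_global_spatialhandle where code = '<id>'", None),
#     ]
# The caller is expected to run the statements in order, ideally in one transaction, so the helper
# rows never outlive the update.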
def sync(self, object_access, obj_id, obj_name, obj_type, quality) -> str:
    if CUtils.equal_ignore_case(self.DataAccess_Pass, object_access):
        sql_query = '''
            SELECT ddad_id, ddad_title,
                ddad_datatype ->> 'dsodid' AS dsodid,
                ddad_datatype ->> 'dsodtype' AS dsodtype,
                ddad_datatype ->> 'dsodgroup' AS dsodgroup,
                ddad_datatype ->> 'dsodcatalog' AS dsodcatalog,
                ddad_startdate, ddad_enddate, ddad_spatial
            FROM dp_dm2_auto_deploy
        '''
        db_id = self._db_id
        dataset = CFactory().give_me_db(db_id).all_row(sql_query)
        sql_query = '''
            SELECT dso_time ->> 'start_time' AS dm2_start_time,
                dso_time ->> 'end_time' AS dm2_end_time,
                dso_geo_wgs84 AS dpservicegeom,
                dso_prj_wkt AS dpproject
            FROM dm2_storage_object
            WHERE dsoid = :obj_id
        '''
        objectset = CFactory().give_me_db(db_id).one_row(sql_query, {'obj_id': obj_id})
        dm2_start_time = objectset.value_by_name(0, 'dm2_start_time', None)
        dm2_end_time = objectset.value_by_name(0, 'dm2_end_time', None)
        # dm2_start_time = CTime.from_datetime_str(dm2_start_time, '%Y-%m-%d %H:%M:%S')
        # dm2_end_time = CTime.from_datetime_str(dm2_end_time, '%Y-%m-%d %H:%M:%S')
        dm2_dpproject = objectset.value_by_name(0, 'dpproject', None)
        dm2_dpservicegeom = objectset.value_by_name(0, 'dpservicegeom', None)
        sql_query = '''
            SELECT dsodid AS dsodid, dsodtype AS dsodtype, dsodgroup AS dsodgroup, dsodcatalog AS dsodcatalog
            FROM dm2_storage_object_def
            WHERE dsodid = :obj_type
        '''
        object_def_set = CFactory().give_me_db(db_id).one_row(sql_query, {'obj_type': obj_type})
        dsodid = object_def_set.value_by_name(0, 'dsodid', '')
        dsodtype = object_def_set.value_by_name(0, 'dsodtype', '')
        dsodgroup = object_def_set.value_by_name(0, 'dsodgroup', '')
        dsodcatalog = object_def_set.value_by_name(0, 'dsodcatalog', '')
        obj_type_list = [dsodid, dsodtype, dsodgroup, dsodcatalog]
        sql_query = '''
            SELECT * FROM dp_v_qfg WHERE dptitle = :obj_id
        '''
        dp_v_qfg_set = CFactory().give_me_db(db_id).all_row(sql_query, {'obj_id': obj_id})
        if not dataset.is_empty():
            for data_index in range(dataset.size()):
                ddad_dsodid = dataset.value_by_name(data_index, 'dsodid', '')
                ddad_dsodtype = dataset.value_by_name(data_index, 'dsodtype', '')
                ddad_dsodgroup = dataset.value_by_name(data_index, 'dsodgroup', '')
                ddad_dsodcatalog = dataset.value_by_name(data_index, 'dsodcatalog', '')
                if ddad_dsodid is not None:
                    ddad_datatype = ddad_dsodid
                elif ddad_dsodtype is not None:
                    ddad_datatype = ddad_dsodtype
                elif ddad_dsodgroup is not None:
                    ddad_datatype = ddad_dsodgroup
                elif ddad_dsodcatalog is not None:
                    ddad_datatype = ddad_dsodcatalog
                else:
                    ddad_datatype = None
                # ddad_id = dataset.value_by_name(data_index, 'ddad_id', '')
                ddad_startdate = dataset.value_by_name(data_index, 'ddad_startdate', None)
                # ddad_startdate = CTime.from_datetime_str(ddad_startdate, '%Y-%m-%d %H:%M:%S')
                ddad_enddate = dataset.value_by_name(data_index, 'ddad_enddate', None)
                # ddad_enddate = CTime.from_datetime_str(ddad_enddate, '%Y-%m-%d %H:%M:%S')
                # ddad_spatial = dataset.value_by_name(data_index, 'ddad_spatial', '')
                if ddad_datatype in obj_type_list \
                        or ddad_startdate > dm2_start_time \
                        or ddad_enddate < dm2_end_time:
                    if dp_v_qfg_set.is_empty():
                        dpid = CUtils.one_id()
                        layer_dpid = CUtils.one_id()
                        CFactory().give_me_db(db_id).execute(
                            '''
                            insert into dp_v_qfg(
                                dpid, dpstatus, dpprocesstype, dpschemaid, dptitle, dpname, dpdeploydir,
                                dpproject, dpservicetype, dpservicegeom)
                            values(
                                :dpid, :dpstatus, :dpprocesstype, :dpschemaid, :dptitle, :dpname, :dpdeploydir,
                                :dpproject, :dpservicetype, :dpservicegeom)
                            ''',
                            {
                                'dpid': dpid,
                                'dpstatus': 5,
                                'dpprocesstype': 'new',
                                'dpschemaid': '',
                                'dptitle': obj_id,
                                'dpname': obj_name,
                                'dpdeploydir': None,
                                'dpproject': dm2_dpproject,
                                'dpservicetype': None,
                                'dpservicegeom': dm2_dpservicegeom
                            })
                        CFactory().give_me_db(self._db_id).execute(
                            '''
                            insert into dp_v_qfg_layer(
                                dpid, dpservice_id, dplayerschema_id, dpprocesstype, dplayer_id,
                                dplayer_name, dplayer_datatype, dplayer_resultfields)
                            values(
                                :dpid, :dpservice_id, :dplayerschema_id, :dpprocesstype, :dplayer_id,
                                :dplayer_name, :dplayer_datatype, :dplayer_resultfields)
                            ''',
                            {
                                'dpid': layer_dpid,
                                'dpservice_id': dpid,
                                'dplayerschema_id': '',
                                'dpprocesstype': 'new',
                                'dplayer_id': obj_id,
                                'dplayer_name': obj_name,
                                'dplayer_datatype': 'Raster',
                                'dplayer_resultfields': ''
                            })
                        CFactory().give_me_db(self._db_id).execute(
                            '''
                            insert into dp_v_qfg_layer_file(
                                dpdf_id, dpdf_layer_id, dpdf_object_id, dpdf_service_filepath,
                                dpdf_processtype, dpdf_publish_filename)
                            values(
                                :dpdf_id, :dpdf_layer_id, :dpdf_object_id, :dpdf_service_filepath,
                                :dpdf_processtype, :dpdf_publish_filename)
                            ''',
                            {
                                'dpdf_id': CUtils.one_id(),
                                'dpdf_layer_id': layer_dpid,
                                'dpdf_object_id': obj_id,
                                'dpdf_service_filepath': None,
                                'dpdf_processtype': 'new',
                                'dpdf_publish_filename': obj_name
                            })
                    else:
                        CFactory().give_me_db(db_id).execute(
                            '''
                            update dp_v_qfg
                            set dpstatus = 5, dpprocesstype = :dpprocesstype, dpproject = :dm2_dpproject,
                                dpservicetype = dpservicetype, dpservicegeom = :dm2_dpservicegeom
                            where dptitle = :obj_id
                            ''',
                            {
                                'dpprocesstype': 'updata',
                                'dm2_dpproject': dm2_dpproject,
                                'dm2_dpservicegeom': dm2_dpservicegeom,
                                'obj_id': obj_id
                            })
                        CFactory().give_me_db(db_id).execute(
                            '''
                            update dp_v_qfg_layer set dpprocesstype = :dpprocesstype
                            where dplayer_id = :obj_id
                            ''',
                            {'dpprocesstype': 'updata', 'obj_id': obj_id})
                        CFactory().give_me_db(db_id).execute(
                            '''
                            update dp_v_qfg_layer_file
                            set dpdf_service_filepath = :dpdf_service_filepath,
                                dpdf_processtype = :dpprocesstype,
                                dpdf_publish_filename = :obj_name
                            where dpdf_object_id = :obj_id
                            ''',
                            {
                                'dpdf_service_filepath': None,
                                'dpprocesstype': 'updata',
                                'obj_name': obj_name,
                                'obj_id': obj_id
                            })
                else:
                    message = '没有对应的规则, 直接通过!'
                    result = CResult.merge_result(self.Success, message)
                    return result
    else:
        pass