def custom_init(self):
    """
    Custom initialization method
    :return:
    """
    super().custom_init()
    engine = CFactory().give_me_db(self.db_server_id)
    if self.my_id is None:
        self._ds_file_or_path = engine.one_row(
            '''
            select dsdid, dsdparentid, dsddirectory, dsddirtype, dsddirectoryname, dsd_object_type,
                dsd_object_confirm, dsd_object_id, dsd_directory_valid, dsdpath, dsddircreatetime,
                dsddirlastmodifytime, dsdparentobjid, dsdscanrule, dsd_ib_id
            from dm2_storage_directory
            where dsdstorageid = :dsdStorageID and dsddirectory = :dsdDirectory
            ''',
            {
                'dsdStorageID': self.storage_id,
                'dsdDirectory': self.file_name_with_rel_path
            })
        if not self.ds_file_or_path.is_empty():
            self.my_id = self.ds_file_or_path.value_by_name(0, 'dsdid', None)
        if self.my_id is None:
            self.my_id = CUtils.one_id()
    else:
        self._ds_file_or_path = engine.one_row(
            '''
            select dsdid, dsdparentid, dsddirectory, dsddirtype, dsddirectoryname, dsd_object_type,
                dsd_object_confirm, dsd_object_id, dsd_directory_valid, dsdpath, dsddircreatetime,
                dsddirlastmodifytime, dsdparentobjid, dsdscanrule, dsd_ib_id
            from dm2_storage_directory
            where dsdid = :dsdID
            ''',
            {'dsdid': self.my_id})
def should_stop(self) -> bool:
    if self.__application_id is None:
        sql = '''
        select scmid
        from sch_center_mission
        where (scmcommand <> '{0}' or scmstatus <> {1})
            and (
                (scmcenterid is null)
                or (scmcenterid in (select scid from sch_center where scserver is null))
            )
        limit 1
        '''.format(self.CMD_SHUTDOWN, self.Status_Finish)
    else:
        sql = '''
        select scmid
        from sch_center_mission
        where (scmcommand <> '{0}' or scmstatus <> {1})
            and (scmcenterid in (select scid from sch_center where scserver = '{2}'))
        limit 1
        '''.format(self.CMD_SHUTDOWN, self.Status_Finish, CUtils.any_2_str(self.__application_id))

    try:
        factory = CFactory()
        db = factory.give_me_db()
        return not db.if_exists(sql)
    except:
        return True
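# A minimal usage sketch (not part of the original sources): a scheduler's main
# loop could poll should_stop() and exit once the only remaining missions are
# finished shutdown commands. The CScheduler class name and the 10-second
# interval below are hypothetical.
import time

scheduler = CScheduler()              # hypothetical concrete class exposing should_stop()
while not scheduler.should_stop():
    time.sleep(10)                    # keep working until only finished shutdown missions remain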
def get_mission_info(self):
    mission_flag = CUtils.one_id()
    mission_seize_sql = self.get_mission_seize_sql()
    mission_info_sql = self.get_mission_info_sql()
    if mission_seize_sql is None:
        return None
    if mission_info_sql is None:
        return None

    mission_seize_sql = mission_seize_sql.replace(self.SYSTEM_NAME_MISSION_ID, mission_flag)
    mission_info_sql = mission_info_sql.replace(self.SYSTEM_NAME_MISSION_ID, mission_flag)

    try:
        factory = CFactory()
        db = factory.give_me_db(self.get_mission_db_id())
        db.execute(mission_seize_sql)
    except:
        CLogger().debug('任务抢占查询语句有误, 请修正! 详细错误信息为: {0}'.format(mission_seize_sql))
        return CDataSet()

    try:
        return db.one_row(mission_info_sql)
    except:
        CLogger().debug('任务信息查询语句有误, 请修正! 详细错误信息为: {0}'.format(mission_info_sql))
        return CDataSet()
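# A hedged illustration (not taken from the original sources) of the contract
# get_mission_info() relies on: a subclass supplies a "seize" UPDATE and an
# "info" SELECT that both embed the SYSTEM_NAME_MISSION_ID placeholder, so the
# substituted mission_flag ties the seized row to the follow-up query. The
# class names and the dsdprocessid column below are hypothetical stand-ins.
class CDemoDirScanJob(CMissionBase):  # hypothetical subclass / base-class names

    def get_mission_seize_sql(self):
        return '''
        update dm2_storage_directory
        set dsdprocessid = '{0}'
        where dsdid = (
            select dsdid from dm2_storage_directory
            where dsdprocessid is null
            limit 1
        )
        '''.format(self.SYSTEM_NAME_MISSION_ID)

    def get_mission_info_sql(self):
        return '''
        select dsdid as query_dir_id
        from dm2_storage_directory
        where dsdprocessid = '{0}'
        '''.format(self.SYSTEM_NAME_MISSION_ID)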
def get_plugins_instance_by_object_id(cls, db_id, object_id):
    """
    Get the recognition plugin instance for the given object_id
    """
    sql_query = '''
    SELECT dsoobjecttype, dsodatatype FROM dm2_storage_object WHERE dsoid = '{0}'
    '''.format(object_id)
    dataset = CFactory().give_me_db(db_id).one_row(sql_query)
    object_plugin_file_main_name = dataset.value_by_name(0, 'dsoobjecttype', '')  # e.g. plugins_8000_dom_10
    object_plugin_type = dataset.value_by_name(0, 'dsodatatype', '')  # data type: dir - directory; file - file
    class_classified_obj_real = None

    # build the recognition plugin that matches the data object
    plugins_root_package_name = '{0}.{1}'.format(CSys.get_plugins_package_root_name(), object_plugin_type)
    # check whether the plugin module exists
    plugins_root_dir = CSys.get_plugins_root_dir()
    plugins_type_root_dir = CFile.join_file(plugins_root_dir, object_plugin_type)
    plugins_file = CFile.join_file(plugins_type_root_dir, '{0}.py'.format(object_plugin_file_main_name))
    if CFile.file_or_path_exist(plugins_file):
        class_classified_obj = cls.create_plugins_instance(
            plugins_root_package_name, object_plugin_file_main_name, None)
        class_classified_obj_real = class_classified_obj
    return class_classified_obj_real
def metadata_bus_dict_process_custom(self, metadata_bus_dict):
    """
    Process the metadata values that require additional computation
    """
    super().metadata_bus_dict_process_custom(metadata_bus_dict)
    centerlatitude = CUtils.dict_value_by_name(metadata_bus_dict, 'centerlatitude', None)
    centerlongitude = CUtils.dict_value_by_name(metadata_bus_dict, 'centerlongitude', None)
    if (not CUtils.equal_ignore_case(centerlatitude, '')) \
            and (not CUtils.equal_ignore_case(centerlongitude, '')):
        try:
            try:
                db_id = self.file_info.db_server_id
            except:
                db_id = self.DB_Server_ID_Distribution
            if CUtils.equal_ignore_case(db_id, ''):
                db_id = self.DB_Server_ID_Distribution
            db = CFactory().give_me_db(db_id)
            wkt = db.one_row(
                '''
                select st_astext(st_envelope(st_geomfromewkt(st_astext(st_buffer(st_geographyfromtext(
                    'POINT({0} {1})'), 5000))))) as wkt
                '''.format(centerlatitude, centerlongitude)).value_by_name(0, 'wkt', None)
            wkt = wkt.replace('POLYGON((', '').replace('))', '').strip()
            coordinates_list = re.split(r'[,]|\s+', wkt)
            metadata_bus_dict['bottomleftlatitude'] = coordinates_list[0]
            metadata_bus_dict['bottomleftlongitude'] = coordinates_list[1]
            metadata_bus_dict['topleftlatitude'] = coordinates_list[2]
            metadata_bus_dict['topleftlongitude'] = coordinates_list[3]
            metadata_bus_dict['toprightlatitude'] = coordinates_list[4]
            metadata_bus_dict['toprightlongitude'] = coordinates_list[5]
            metadata_bus_dict['bottomrightlatitude'] = coordinates_list[6]
            metadata_bus_dict['bottomrightlongitude'] = coordinates_list[7]
        except Exception as error:
            raise Exception('通过中心点坐标计算四至坐标时出现异常,详细原因为:{0}'.format(error.__str__()))
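# A small worked example (not part of the original sources) of the string
# handling above: the envelope comes back as a closed 5-vertex ring, and the
# code keeps only the first four corners, pairing list positions with the
# bottom-left / top-left / top-right / bottom-right keys in that order.
# The sample values below are illustrative only.
import re

sample_wkt = 'POLYGON((100.1 30.1,100.1 30.2,100.2 30.2,100.2 30.1,100.1 30.1))'
sample_wkt = sample_wkt.replace('POLYGON((', '').replace('))', '').strip()
coordinates_list = re.split(r'[,]|\s+', sample_wkt)
# coordinates_list[0:8] -> ['100.1', '30.1', '100.1', '30.2', '100.2', '30.2', '100.2', '30.1']
# indices 0/1 -> bottom-left, 2/3 -> top-left, 4/5 -> top-right, 6/7 -> bottom-right;
# the trailing pair repeats the first vertex and is ignored.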
def register_dm_modules(self):
    sql_register_dm_metadata_modules_clear = '''
    truncate table dm2_modules cascade
    '''
    sql_register_dm_metadata_modules = '''
    insert into dm2_modules(dmid, dmtitle) values (:dmid, :dmtitle)
    '''
    CFactory().give_me_db().execute(sql_register_dm_metadata_modules_clear)

    modules_root_dir = CSys.get_metadata_data_access_modules_root_dir()
    module_file_name_list = CFile.file_or_subpath_of_path(
        modules_root_dir, '*.{0}'.format(self.FileExt_Py))
    for module_file_name in module_file_name_list:
        if CFile.is_file(CFile.join_file(modules_root_dir, module_file_name)) \
                and (not (str(module_file_name)).startswith('_')):
            module_name = CFile.file_main_name(module_file_name)
            module_obj = CObject.create_module_instance(
                CSys.get_metadata_data_access_modules_root_name(),
                module_name,
                CResource.DB_Server_ID_Default)
            module_info = module_obj.information()
            CFactory().give_me_db().execute(
                sql_register_dm_metadata_modules,
                {
                    'dmid': module_name,
                    'dmtitle': CUtils.dict_value_by_name(module_info, CResource.Name_Title, module_name)
                })
def db_update_object_status(self, dso_id, process_result):
    CLogger().debug(CResult.result_message(process_result))
    if CResult.result_success(process_result):
        CFactory().give_me_db(self.get_mission_db_id()).execute(
            '''
            update dm2_storage_object
            set dsotagsparsestatus = {0}
                , dsolastmodifytime = now()
                , dsotagsparsememo = :dsotagsparsememo
            where dsoid = :dsoid
            '''.format(self.ProcStatus_Finished),
            {
                'dsoid': dso_id,
                'dsotagsparsememo': CResult.result_message(process_result)
            })
    else:
        CFactory().give_me_db(self.get_mission_db_id()).execute(
            '''
            update dm2_storage_object
            set dsotagsparsestatus = {0}
                , dsolastmodifytime = now()
                , dsotagsparsememo = :dsotagsparsememo
            where dsoid = :dsoid
            '''.format(self.ProcStatus_Error),
            {
                'dsoid': dso_id,
                'dsotagsparsememo': CResult.result_message(process_result)
            })
def add_file_to_detail_list(self, match_name):
    """
    Validation rule for the auxiliary files of the "Third National Land Survey"
    data of the land-resources domain (mosaic imagery).
    Done; owner: Wang Xueqian.
    The survey's auxiliary files are checked here.
    :return:
    """
    file_main_name = self._object_name
    file_path = self.file_info.file_path
    # fuzzy-match the auxiliary files
    if not CUtils.equal_ignore_case(file_path, ''):
        match_str = '{0}*xq.*'.format(match_name)
        match_file_list = CFile.file_or_dir_fullname_of_path(
            file_path, False, match_str, CFile.MatchType_Common)
        for file_with_path in match_file_list:
            if not CUtils.equal_ignore_case(CFile.file_main_name(file_with_path), file_main_name):
                # skip the file itself and files with the same main name
                self.add_file_to_details(file_with_path)  # add the file to the auxiliary file list
        try:
            db = CFactory().give_me_db(self.file_info.db_server_id)
            metadata_name_before_six_name = db.one_row(
                "select gdstitle from ro_global_dim_space "
                "where gdsid = '{0}'".format(match_name)).value_by_name(0, 'gdstitle', None)
            metadata_file_name = '{0}{1}.mdb'.format(match_name, metadata_name_before_six_name)
            metadata_file_name_with_path = CFile.join_file(file_path, metadata_file_name)
            if CFile.file_or_path_exist(metadata_file_name_with_path):
                self.add_file_to_details(metadata_file_name_with_path)
        except:
            pass
def get_storage_size(self, ds_ib_id: str, storage_id: str, relation_dir: str, ib_option) -> int:
    """
    Get the storage volume required by the data under the given path of the given storage:
    . files in the file table that were not recognized as data, dsf_object_confirm=2
    . plus the volume recorded in the object table
    :param ds_ib_id: inbound batch id: inbound.dsiid
    :param ib_option: inbound options, e.g. whether everything is inbound, only QC-passed
        data, or a manual selection; these options affect how the storage size is calculated
    :param storage_id:
    :param relation_dir:
    :return:
    """
    # size of the unrecognized files, plus the size of the recognized objects
    inbound_file_size = CFactory().give_me_db(self.get_mission_db_id()).one_value(
        '''
        select sum(dsffilesize) from dm2_storage_file
        where dsf_ib_id = :ib_id and dsf_object_confirm = 0
        ''',
        {'ib_id': ds_ib_id}, 0
    ) + CFactory().give_me_db(self.get_mission_db_id()).one_value(
        '''
        select sum(dso_volumn_now) from dm2_storage_object
        where dso_ib_id = :ib_id and dso_bus_status = '{0}'
        '''.format(self.IB_Bus_Status_InBound),
        {'ib_id': ds_ib_id}, 0
    )
    return inbound_file_size
def custom_init(self):
    """
    Custom initialization method
    :return:
    """
    super().custom_init()
    engine = CFactory().give_me_db(self.db_server_id)
    if self.my_id is None:
        self._ds_file_or_path = engine.one_row(
            '''
            select dsfid, dsfstorageid, dsfdirectoryid, dsffilerelationname, dsffilename, dsffilemainname,
                dsfext, dsffilecreatetime, dsffilemodifytime, dsffilevalid, dsf_object_type,
                dsf_object_confirm, dsf_object_id, dsffilesize, dsfparentobjid, dsf_ib_id
            from dm2_storage_file
            where dsfstorageid = :dsfStorageID
                and dsfdirectoryid = :dsfDirectoryId
                and dsffilerelationname = :dsfFileRelationName
            ''',
            {
                'dsfStorageID': self.storage_id,
                'dsfDirectoryId': self.parent_id,
                'dsfFileRelationName': CFile.unify(self.file_name_with_rel_path)
            })
        if not self.ds_file_or_path.is_empty():
            self.my_id = self.ds_file_or_path.value_by_name(0, 'dsfid', None)
        if self.my_id is None:
            self.my_id = CUtils.one_id()
    else:
        self._ds_file_or_path = engine.one_row(
            '''
            select dsfid, dsfstorageid, dsfdirectoryid, dsffilerelationname, dsffilename, dsffilemainname,
                dsfext, dsffilecreatetime, dsffilemodifytime, dsffilevalid, dsf_object_type,
                dsf_object_confirm, dsf_object_id, dsffilesize, dsfparentobjid, dsf_ib_id
            from dm2_storage_file
            where dsfid = :dsfID
            ''',
            {'dsfid': self.my_id})
def db_update_object_status(self, dso_id, process_result, process_status=None):
    if process_status is not None:
        CFactory().give_me_db(self.get_mission_db_id()).execute(
            '''
            update dm2_storage_object
            set dsometadataparsestatus = {0}
                , dsolastmodifytime = now()
            where dsoid = :dsoid
            '''.format(process_status),
            {'dsoid': dso_id})
    elif CResult.result_success(process_result):
        CFactory().give_me_db(self.get_mission_db_id()).execute(
            '''
            update dm2_storage_object
            set dsometadataparsestatus = {0}
                , dsolastmodifytime = now()
            where dsoid = :dsoid
            '''.format(self.ProcStatus_Finished),
            {'dsoid': dso_id})
    else:
        CFactory().give_me_db(self.get_mission_db_id()).execute(
            '''
            update dm2_storage_object
            set dsometadataparsestatus = (dsometadataparsestatus / 10 + 1) * 10 + {0}
                , dsolastmodifytime = now()
            where dsoid = :dsoid
            '''.format(self.ProcStatus_InQueue),
            {'dsoid': dso_id})
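# A hedged reading (not stated explicitly in the original sources) of the
# failure branch above: with integer division, the tens of the status column
# act as a retry counter while the ones digit carries the process status, so
# each failed pass re-queues the object and bumps the counter. Below is a
# minimal Python mirror of the SQL expression, assuming ProcStatus_InQueue is a
# single-digit constant (illustrative value 1).
PROC_STATUS_IN_QUEUE = 1  # hypothetical value, for illustration only

def requeue_status(current_status: int) -> int:
    # mirrors: (dsometadataparsestatus / 10 + 1) * 10 + ProcStatus_InQueue
    return (current_status // 10 + 1) * 10 + PROC_STATUS_IN_QUEUE

# e.g. 1 -> 11 -> 21 -> 31: three failed passes leave a retry count of 3 in the
# tens, which jobs such as process_mission() can compare against a retry limit.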
def db_update_object_exception(self, dso_id, process_result, process_status=None):
    if process_status is not None:
        CFactory().give_me_db(self.get_mission_db_id()).execute(
            '''
            update dm2_storage_object
            set dsometadataparsestatus = :status
                , dsometadataparsememo = :dsometadataparsememo
                , dsolastmodifytime = now()
            where dsoid = :dsoid
            ''',
            {
                'dsoid': dso_id,
                'status': process_status,
                'dsometadataparsememo': CResult.result_message(process_result)
            })
    else:
        CFactory().give_me_db(self.get_mission_db_id()).execute(
            '''
            update dm2_storage_object
            set dsometadataparsestatus = (dsometadataparsestatus / 10 + 1) * 10 + {0}
                , dsometadataparsememo = :dsometadataparsememo
                , dsolastmodifytime = now()
            where dsoid = :dsoid
            '''.format(self.ProcStatus_InQueue),
            {
                'dsoid': dso_id,
                'dsometadataparsememo': CResult.result_message(process_result)
            })
def update_notify_result(self, notify_id, result):
    if CResult.result_success(result):
        CFactory().give_me_db(self.get_mission_db_id()).execute(
            '''
            update dm2_storage_inbound
            set dsi_na_status = {0}
                , dsi_na_proc_id = null
                , dsi_na_proc_memo = :notify_message
                , dsiproctime = now()
            where dsiid = :notify_id
            '''.format(self.ProcStatus_WaitConfirm),
            {
                'notify_id': notify_id,
                'notify_message': CResult.result_message(result)
            })
    else:
        CFactory().give_me_db(self.get_mission_db_id()).execute(
            '''
            update dm2_storage_inbound
            set dsi_na_status = {0}
                , dsi_na_proc_id = null
                , dsi_na_proc_memo = :notify_message
                , dsiproctime = now()
            where dsiid = :notify_id
            '''.format(self.ProcStatus_Error),
            {
                'notify_id': notify_id,
                'notify_message': CResult.result_message(result)
            })
def __a_check_value_in_sql__(cls, result_template: dict, value, title_prefix, db_server_id: str, sql: str):
    """
    Validate a value against the rule. Note: the value may be None!
    Done; owner: Zhao Yufei.
    Here the value must be found in the result of the given sql query.
    :param result_template: template for the check result
    :param value: the value to check; may be None
    :param title_prefix: prefix for the message text
    :param db_server_id: the database in which the value must exist
    :param sql: the sql whose result must contain the value; note that the sql has only one parameter
    :return:
    """
    result_dict = copy.deepcopy(result_template)
    is_exist_in_sql = False
    ds = CFactory().give_me_db(db_server_id).one_row(sql)
    if not ds.is_empty():
        # field_count = ds.field_count()
        for row_index in range(ds.size()):
            row_value = ds.value_by_index(row_index, 0, "")  # value of the first column of the row
            if CUtils.equal_ignore_case(row_value, value):
                is_exist_in_sql = True
                break
    if is_exist_in_sql:
        result_dict[cls.Name_Message] = '{0}的值在指定的sql查询结果中, 符合要求!'.format(title_prefix)
        result_dict[cls.Name_Result] = cls.QA_Result_Pass
    else:
        result_dict[cls.Name_Message] = '{0}的值[{1}], 不在指定的sql查询结果中, 请检查修正,sql为【{2}】!'.format(
            title_prefix, value, sql)
    return result_dict
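# A hedged usage sketch (not part of the original sources): checking that a
# storage id appears in dm2_storage. The CQualityCheck class name, the template
# dict, the concrete sql, and the '0' db_server_id are illustrative only; the
# dstid column and server id '0' follow the unit tests elsewhere in this code.
check_result = CQualityCheck.__a_check_value_in_sql__(
    {CQualityCheck.Name_Message: '', CQualityCheck.Name_Result: None},  # hypothetical result template
    'storage_01',                        # the value to validate
    '存储标识',                           # message prefix shown to the operator
    '0',                                 # database server id
    'select dstid from dm2_storage'      # single-column lookup sql
)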
def process_mission(self, dataset) -> str:
    ds_id = dataset.value_by_name(0, 'query_dir_id', '')
    ds_storage_id = dataset.value_by_name(0, 'query_storage_id', '')
    inbound_id = dataset.value_by_name(0, 'query_dir_ib_id', None)
    ds_subpath = dataset.value_by_name(0, 'query_subpath', '')
    ds_root_path = dataset.value_by_name(0, 'query_rootpath', '')
    ds_retry_times = dataset.value_by_name(0, 'retry_times', 0)
    if ds_retry_times >= self.abnormal_job_retry_times():
        ds_last_process_memo = CUtils.any_2_str(dataset.value_by_name(0, 'last_process_memo', None))
        process_result = CResult.merge_result(
            self.Failure,
            '{0}, \n系统已经重试{1}次, 仍然未能解决, 请人工检查修正后重试!'.format(ds_last_process_memo, ds_retry_times))
        self.update_dir_status(ds_id, process_result, self.ProcStatus_Error)
        return process_result

    if ds_subpath == '':
        ds_subpath = ds_root_path
    else:
        ds_subpath = CFile.join_file(ds_root_path, ds_subpath)

    # reset the validity of all subdirectories and files to unknown!!!!
    self.init_file_or_subpath_valid_unknown(ds_id)
    try:
        sql_get_rule = '''
        select dsdScanRule
        from dm2_storage_directory
        where dsdStorageid = :dsdStorageID
            and position((dsddirectory || '{0}') in :dsdDirectory) = 1
            and dsdScanRule is not null
        order by dsddirectory desc
        limit 1
        '''.format(CFile.sep())
        rule_ds = CFactory().give_me_db(self.get_mission_db_id()).one_row(
            sql_get_rule,
            {
                'dsdStorageID': ds_storage_id,
                'dsdDirectory': ds_subpath
            })
        # read the rule back by the selected column name, dsdScanRule
        ds_rule_content = rule_ds.value_by_name(0, 'dsdScanRule', '')

        CLogger().debug('处理的目录为: {0}'.format(ds_subpath))
        self.parser_file_or_subpath_of_path(dataset, ds_id, ds_subpath, ds_rule_content, inbound_id)

        result = CResult.merge_result(self.Success, '目录为[{0}]下的文件和子目录扫描处理成功!'.format(ds_subpath))
        self.update_dir_status(ds_id, result)
        return result
    except Exception as err:
        result = CResult.merge_result(
            self.Failure,
            '目录为[{0}]下的文件和子目录扫描处理出现错误!错误原因为: {1}'.format(ds_subpath, err.__str__()))
        self.update_dir_status(ds_id, result)
        return result
    finally:
        self.exchange_file_or_subpath_valid_unknown2invalid(ds_id)
def test_if_exist(self):
    try:
        factory = CFactory()
        db = factory.give_me_db('0')
        assert not db.if_exists("select * from dm2_storage where dstid = '0'")
    except DBException as err:
        assert False
def abnormal_mission_restart(self):
    sql = self.get_abnormal_mission_restart_sql()
    if sql is not None:
        try:
            factory = CFactory()
            db = factory.give_me_db(self.get_mission_db_id())
            db.execute(sql)
        except:
            pass
def start_process():
    """
    Fills the inbound-tracking table dm2_import_step. It is only used for test
    monitoring; the table is not needed in the actual release.
    Behaviour: a standalone schedule that, every 30 seconds, records the counts of
    directories, files, data objects, auxiliary files, and objects with / without
    attached tags.
    """
    while True:
        try:
            db = CFactory().give_me_db()
            sql_result_count = '''
            SELECT now() as query_time,
                ( SELECT COUNT ( * ) FROM dm2_storage_directory ) AS count_dir,
                ( SELECT COUNT ( * ) FROM dm2_storage_file ) AS count_file,
                ( SELECT COUNT ( * ) FROM dm2_storage_object ) AS count_object,
                ( SELECT COUNT ( * ) FROM dm2_storage_obj_detail ) AS count_obj_detail,
                ( SELECT COUNT ( * ) FROM dm2_storage_object WHERE dsotags IS NULL ) AS count_object_tag,
                ( SELECT COUNT ( * ) FROM dm2_storage_object WHERE dsotags IS NOT NULL ) AS count_object_notag
            '''
            count_dataset = db.one_row(sql_result_count)
            dis_query_time = count_dataset.value_by_name(0, 'query_time', '')
            dis_directory_count = count_dataset.value_by_name(0, 'count_dir', '')
            dis_file_count = count_dataset.value_by_name(0, 'count_file', '')
            dis_object_count = count_dataset.value_by_name(0, 'count_object', '')
            dis_detail_count = count_dataset.value_by_name(0, 'count_obj_detail', '')
            dis_object_tag_count = count_dataset.value_by_name(0, 'count_object_tag', '')
            dis_object_notag_count = count_dataset.value_by_name(0, 'count_object_notag', '')

            sql_insert = '''
            insert into dm2_import_step (
                "dis_query_time", "dis_id", "dis_directory_count", "dis_file_count", "dis_object_count",
                "dis_detail_count", "dis_object_tag_count", "dis_object_notag_count", "dis_addtime")
            values (
                :query_time, :disid, :directory_count, :file_count, :object_count, :detail_count,
                :object_tag_count, :object_notag_count, now())
            '''
            db.execute(
                sql_insert,
                {
                    'query_time': dis_query_time,
                    'disid': CUtils.one_id(),
                    'directory_count': dis_directory_count,
                    'file_count': dis_file_count,
                    'object_count': dis_object_count,
                    'detail_count': dis_detail_count,
                    'object_tag_count': dis_object_tag_count,
                    'object_notag_count': dis_object_notag_count
                })
            time.sleep(30)
        except Exception as error:
            raise Exception(error.__str__())
def register_dm_metadata_plugins(self):
    sql_register_dm_metadata_plugins_clear = '''
    truncate table dm2_storage_object_def cascade
    '''
    sql_unregister_dm_metadata_plugins = '''
    delete from dm2_storage_object_def where dsodid = :dsodid
    '''
    sql_register_dm_metadata_plugins = '''
    insert into dm2_storage_object_def(
        dsodid, dsodtitle, dsodtype, dsodtypetitle, dsodtypecode, dsodgroup, dsodgrouptitle,
        dsodcatalog, dsodcatalogtitle, dsod_otheroption)
    values (
        :dsodid, :dsodtitle, :dsodtype, :dsodtypetitle, :dsodtypecode, :dsodgroup, :dsodgrouptitle,
        :dsodcatalog, :dsodcatalogtitle, :dsod_otheroption)
    '''
    CFactory().give_me_db().execute(sql_register_dm_metadata_plugins_clear)

    plugins_root_dir = CSys.get_plugins_root_dir()
    plugins_type_list = CFile.file_or_subpath_of_path(plugins_root_dir)
    for plugins_type in plugins_type_list:
        if CFile.is_dir(CFile.join_file(plugins_root_dir, plugins_type)) \
                and (not (str(plugins_type)).startswith('_')):
            plugins_root_package_name = '{0}.{1}'.format(CSys.get_plugins_package_root_name(), plugins_type)
            path = CFile.join_file(CSys.get_plugins_root_dir(), plugins_type)
            plugins_file_list = CFile.file_or_subpath_of_path(
                path, '{0}_*.{1}'.format(self.Name_Plugins, self.FileExt_Py))
            for file_name_without_path in plugins_file_list:
                file_main_name = CFile.file_main_name(file_name_without_path)
                class_classified_obj = CObject.create_plugins_instance(
                    plugins_root_package_name, file_main_name, None)
                plugins_info = class_classified_obj.get_information()

                json_obj = CJson()
                json_obj.set_value_of_name(
                    self.Name_Is_Spatial,
                    CUtils.dict_value_by_name(plugins_info, CPlugins.Plugins_Info_Is_Spatial, False))
                json_obj.set_value_of_name(
                    self.Name_Is_DataSet,
                    CUtils.dict_value_by_name(plugins_info, CPlugins.Plugins_Info_Is_Dataset, False))
                plugins_info['dsod_otheroption'] = json_obj.to_json()

                print('{0}/{1}:{2}'.format(plugins_type, file_main_name, plugins_info))
                CFactory().give_me_db().execute(sql_unregister_dm_metadata_plugins, plugins_info)
                CFactory().give_me_db().execute(sql_register_dm_metadata_plugins, plugins_info)
def test_fatch_multi_row(self):
    try:
        factory = CFactory()
        db = factory.give_me_db('0')
        dataset = db.all_row("select * from dm2_storage where dstid = '01'")
        assert dataset.size() == 1
    except DBException as err:
        assert False
def test_fatch_one_row(self):
    try:
        factory = CFactory()
        db = factory.give_me_db('0')
        dataset = db.all_row(
            "select * from dm2_storage where dstid in :id",
            {'id': "'01', '02'"})
        assert dataset.size() == 2
    except DBException as err:
        assert False
def db_check_and_update(self, ib_id):
    """
    Check and update the record in the dm2_storage_directory table
    :return:
    """
    if not self.ds_file_or_path.is_empty():
        # the record already exists
        db_path_modify_time = self.ds_file_or_path.value_by_name(0, 'dsddirlastmodifytime', '')
        if CUtils.equal_ignore_case(
                CUtils.any_2_str(db_path_modify_time),
                CUtils.any_2_str(self.file_modify_time)):
            CLogger().info(
                '目录[{0}]的最后修改时间, 和库中登记的没有变化, 子目录将被设置为忽略刷新! '.format(self.file_name_with_full_path))
            CFactory().give_me_db(self.db_server_id).execute(
                '''
                update dm2_storage_directory
                set dsdScanStatus = 0, dsdScanFileStatus = 0, dsd_directory_valid = -1,
                    dsd_ib_id = :ib_id, dsdscanmemo = :message
                where dsdid = :dsdid
                ''',
                {
                    'dsdid': self.my_id,
                    'ib_id': ib_id,
                    'message': '目录[{0}]的最后修改时间, 和库中登记的没有变化, 子目录将被设置为忽略刷新! '.format(
                        self.file_name_with_full_path)
                })
        else:
            CLogger().info(
                '目录[{0}]的最后修改时间, 和库中登记的有变化, 子目录将被设置为重新刷新! '.format(self.file_name_with_full_path))
            CFactory().give_me_db(self.db_server_id).execute(
                '''
                update dm2_storage_directory
                set dsdScanStatus = 1, dsdScanFileStatus = 1, dsd_directory_valid = -1,
                    dsd_ib_id = :ib_id, dsdscanmemo = :message
                where dsdid = :dsdid
                ''',
                {
                    'dsdid': self.my_id,
                    'ib_id': ib_id,
                    'message': '目录[{0}]的最后修改时间, 和库中登记的有变化, 子目录将被设置为重新刷新! '.format(
                        self.file_name_with_full_path)
                })
    else:
        CLogger().info('目录[{0}]未在库中登记, 系统将登记该记录! '.format(self.file_name_with_full_path))
        self.__db_insert(ib_id)
def execute(self) -> str:
    try:
        db = CFactory().give_me_db()
        sql_result_count = '''
        SELECT now() as query_time,
            ( SELECT COUNT ( * ) FROM dm2_storage_directory ) AS count_dir,
            ( SELECT COUNT ( * ) FROM dm2_storage_file ) AS count_file,
            ( SELECT COUNT ( * ) FROM dm2_storage_object ) AS count_object,
            ( SELECT COUNT ( * ) FROM dm2_storage_obj_detail ) AS count_obj_detail,
            ( SELECT COUNT ( * ) FROM dm2_storage_object WHERE dsotags IS NULL ) AS count_object_tag,
            ( SELECT COUNT ( * ) FROM dm2_storage_object WHERE dsotags IS NOT NULL ) AS count_object_notag
        '''
        count_dataset = db.one_row(sql_result_count)
        dis_query_time = count_dataset.value_by_name(0, 'query_time', '')
        dis_directory_count = count_dataset.value_by_name(0, 'count_dir', '')
        dis_file_count = count_dataset.value_by_name(0, 'count_file', '')
        dis_object_count = count_dataset.value_by_name(0, 'count_object', '')
        dis_detail_count = count_dataset.value_by_name(0, 'count_obj_detail', '')
        dis_object_tag_count = count_dataset.value_by_name(0, 'count_object_tag', '')
        dis_object_notag_count = count_dataset.value_by_name(0, 'count_object_notag', '')

        sql_insert = '''
        insert into dm2_import_step (
            "dis_query_time", "dis_id", "dis_directory_count", "dis_file_count", "dis_object_count",
            "dis_detail_count", "dis_object_tag_count", "dis_object_notag_count", "dis_addtime")
        values (
            :query_time, :disid, :directory_count, :file_count, :object_count, :detail_count,
            :object_tag_count, :object_notag_count, now())
        '''
        db.execute(
            sql_insert,
            {
                'query_time': dis_query_time,
                'disid': CUtils.one_id(),
                'directory_count': dis_directory_count,
                'file_count': dis_file_count,
                'object_count': dis_object_count,
                'detail_count': dis_detail_count,
                'object_tag_count': dis_object_tag_count,
                'object_notag_count': dis_object_notag_count
            })
        return CResult.merge_result(self.Success, '本次分析定时扫描任务成功结束!')
    except Exception as error:
        raise Exception(error.__str__())
def create_inbound_mission(self, storage_id):
    database = CFactory().give_me_db(self.get_mission_db_id())
    new_batch_no = database.seq_next_value(self.Seq_Type_Date_AutoInc)
    database.execute(
        '''
        insert into dm2_storage_inbound(dsiid, dsistorageid, dsidirectory, dsibatchno, dsidirectoryid, dsistatus)
        VALUES(:dsiid, :storageid, :directory, :batch_no, :directory_id, :status)
        ''',
        {
            'dsiid': CUtils.one_id(),
            'storageid': storage_id,
            'directory': '',
            'batch_no': new_batch_no,
            'directory_id': CUtils.one_id(),
            'status': self.IB_Status_QI_InQueue
        })
def update_inbound_qi_result(self, notify_id, result, storage_type='mix', storage_option=None, ib_option=None):
    CLogger().debug(CResult.result_message(result))
    if CResult.result_success(result):
        if CUtils.equal_ignore_case(storage_type, self.Storage_Type_InBound):
            switch_inbound_after_qi_immediately_status = CUtils.equal_ignore_case(
                settings.application.xpath_one(
                    self.path_switch(
                        self.Path_Setting_MetaData_QI_Switch,
                        self.Switch_Inbound_After_QI_Immediately_Of_IB_Storage),
                    self.Name_ON),
                self.Name_ON)
        else:
            switch_inbound_after_qi_immediately_status = CUtils.equal_ignore_case(
                settings.application.xpath_one(
                    self.path_switch(
                        self.Path_Setting_MetaData_QI_Switch,
                        self.Switch_Inbound_After_QI_Immediately_Of_MIX_Storage),
                    self.Name_OFF),
                self.Name_ON)

        if switch_inbound_after_qi_immediately_status:
            next_status = self.IB_Status_IB_InQueue
        else:
            next_status = self.IB_Status_QI_Finished

        CFactory().give_me_db(self.get_mission_db_id()).execute(
            '''
            update dm2_storage_inbound
            set dsiStatus = {0},
                dsiprocmemo = :notify_message,
                dsiproctime = now()
            where dsiid = :notify_id
            '''.format(next_status),
            {
                'notify_id': notify_id,
                'notify_message': CResult.result_message(result)
            })
    else:
        CFactory().give_me_db(self.get_mission_db_id()).execute(
            '''
            update dm2_storage_inbound
            set dsiprocmemo = :notify_message,
                dsiproctime = now()
            where dsiid = :notify_id
            ''',
            {
                'notify_id': notify_id,
                'notify_message': CResult.result_message(result)
            })
def update_dir_status(self, dir_id, result, status=None):
    if status is not None:
        sql_update_directory_status = '''
        update dm2_storage_directory
        set dsdscanstatus = :status,
            dsdscanmemo = :memo,
            dsdlastmodifytime = now()
        where dsdid = :dsdid
        '''
    elif CResult.result_success(result):
        sql_update_directory_status = '''
        update dm2_storage_directory
        set dsdscanstatus = {0},
            dsdscanmemo = :memo,
            dsdlastmodifytime = now()
        where dsdid = :dsdid
        '''.format(self.ProcStatus_Finished)
    else:
        sql_update_directory_status = '''
        update dm2_storage_directory
        set dsdscanstatus = (dsdscanstatus / 10 + 1) * 10 + {0},
            dsdscanmemo = :memo,
            dsdlastmodifytime = now()
        where dsdid = :dsdid
        '''.format(self.ProcStatus_InQueue)

    params = dict()
    params['dsdid'] = dir_id
    params['memo'] = CResult.result_message(result)
    params['status'] = status
    CFactory().give_me_db(self.get_mission_db_id()).execute(sql_update_directory_status, params)
def update_ib_data_status_in_core_or_mix_storage(self, ib_id, storage_id, ib_directory_name, ib_dir_id):
    """
    For online (core) or mixed storage, it is enough to update the business status directly
    :param ib_id:
    :param ib_dir_id:
    :param storage_id:
    :param ib_directory_name:
    :return:
    """
    sql_update_file = '''
    update dm2_storage_file
    set dsf_bus_status = '{0}'
    where dsf_ib_id = :ib_id
    '''.format(self.IB_Bus_Status_Online)
    params_update_file = {'ib_id': ib_id}

    # update the subdirectory status
    sql_update_directory = '''
    update dm2_storage_directory
    set dsd_bus_status = '{0}'
    where dsd_ib_id = :ib_id
    '''.format(self.IB_Bus_Status_Online)
    params_update_directory = {'ib_id': ib_id}

    # update the object status
    sql_update_object = '''
    update dm2_storage_object
    set dso_bus_status = '{0}'
    where dso_ib_id = :ib_id
    '''.format(self.IB_Bus_Status_Online)
    params_update_object = {'ib_id': ib_id}

    # update the target storage id of the inbound record
    sql_update_ib_target_storage = '''
    update dm2_storage_inbound
    set dsitargetstorageid = :target_storage_id
    where dsiid = :ib_id
    '''
    params_update_ib_target_storage = {
        'target_storage_id': storage_id,
        'ib_id': ib_id
    }

    commands = [
        (sql_update_file, params_update_file),
        (sql_update_directory, params_update_directory),
        (sql_update_object, params_update_object),
        (sql_update_ib_target_storage, params_update_ib_target_storage)
    ]
    try:
        CFactory().give_me_db(self.get_mission_db_id()).execute_batch(commands)
        return CResult.merge_result(
            self.Success,
            '存储[{0}]下的数据[{1}]入库成功!'.format(storage_id, ib_directory_name))
    except Exception as error:
        return CResult.merge_result(
            self.Failure,
            '存储[{0}]下的数据[{1}]入库失败, 错误原因为: [{2}]!'.format(storage_id, ib_directory_name, error.__str__()))
def save_metadata_view(self) -> str:
    """
    Complete the database update of the visual (browse/thumbnail) metadata
    :return:
    """
    mdt_view_result, mdt_view_memo, mdt_view_thumb_file, mdt_view_browse_file = self.metadata.metadata_view()
    if mdt_view_result == self.DB_False:
        mdt_view_thumb_file = None
        mdt_view_browse_file = None

    # write the metadata to the database
    CFactory().give_me_db(self.file_info.db_server_id).execute(
        '''
        update dm2_storage_object
        set dso_view_result = :dso_view_result
            , dso_view_parsermemo = :dso_view_parsermemo
            , dso_browser = :dso_browser
            , dso_thumb = :dso_thumb
        where dsoid = :dsoid
        ''',
        {
            'dsoid': self.object_id,
            'dso_view_result': mdt_view_result,
            'dso_view_parsermemo': mdt_view_memo,
            'dso_browser': mdt_view_browse_file,
            'dso_thumb': mdt_view_thumb_file
        }
    )
    return CResult.merge_result(self.Success, '可视化元数据处理完毕!')
def save_metadata_time(self) -> str:
    """
    Complete the database update of the time metadata
    :return:
    """
    mdt_ext_result, mdt_ext_memo, mdt_ext_content = self.metadata.metadata_time()
    if mdt_ext_result == self.DB_False:
        mdt_ext_content = None
    if CUtils.equal_ignore_case(mdt_ext_result, ''):
        mdt_ext_content = None
        # None maps to sql null and can be written to the database, while '' cannot be stored in a jsonb column

    # write the metadata to the database
    CFactory().give_me_db(self.file_info.db_server_id).execute(
        '''
        update dm2_storage_object
        set dso_time_result = :dso_time_result
            , dso_time_parsermemo = :dso_time_parsermemo
            , dso_time = :dso_time
        where dsoid = :dsoid
        ''',
        {
            'dsoid': self.object_id,
            'dso_time_result': mdt_ext_result,
            'dso_time_parsermemo': mdt_ext_memo,
            'dso_time': mdt_ext_content
        }
    )
    return CResult.merge_result(self.Success, '时间元数据处理完毕!')
def __db_insert(self, ib_id):
    """
    Create a new record for the current file in the dm2_storage_file table
    :return:
    """
    sql_insert = '''
    insert into dm2_storage_file(
        dsfid, dsfstorageid, dsfdirectoryid, dsffilerelationname, dsffilename, dsffilemainname, dsfext
        , dsffilecreatetime, dsffilemodifytime, dsfaddtime, dsflastmodifytime, dsffilevalid
        , dsfscanstatus, dsfprocessid, dsf_object_type, dsf_object_confirm, dsf_object_id
        , dsffilesize, dsfparentobjid, dsf_ib_id)
    values(
        :dsfid, :dsfstorageid, :dsfdirectoryid, :dsffilerelationname, :dsffilename, :dsffilemainname, :dsfext
        , :dsffilecreatetime, :dsffilemodifytime, now(), now(), -1
        , 1, null, null, 0, null
        , :dsffilesize, :dsfparentobjid, :dsf_ib_id)
    '''
    params = dict()
    params['dsfid'] = self.my_id
    params['dsfdirectoryid'] = self.parent_id
    params['dsfstorageid'] = self.storage_id
    params['dsffilerelationname'] = CFile.unify(self.file_name_with_rel_path)
    params['dsffilename'] = self.file_name_without_path
    params['dsffilemainname'] = self.file_main_name
    params['dsfext'] = self.file_ext
    params['dsffilecreatetime'] = self.file_create_time
    params['dsffilemodifytime'] = self.file_modify_time
    params['dsffilesize'] = self.file_size
    params['dsfparentobjid'] = self.owner_obj_id
    params['dsf_ib_id'] = ib_id
    CFactory().give_me_db(self.db_server_id).execute(sql_insert, params)