Example No. 1
 def get_sync_dict_list(self, insert_or_updata) -> list:
     """
     insert_or_updata indicates whether an update or an insert is being configured: -1 means insert, 0 means update.
     The layout of this method follows a strict convention: call add_value_to_sync_dict_list, where the
     first argument is the list, the second the field name, the third the field value, and the fourth any special configuration.
     """
     sync_dict_list = self.get_sync_predefined_dict_list(insert_or_updata)
     object_table_data = self._dataset
     # time information
     dso_time = object_table_data.value_by_name(0, 'dso_time', '')
     dso_time_json = CJson()
     dso_time_json.load_obj(dso_time)
     self.add_value_to_sync_dict_list(
         sync_dict_list, 'begdate',
         dso_time_json.xpath_one('start_time', ''))
     self.add_value_to_sync_dict_list(
         sync_dict_list, 'enddate', dso_time_json.xpath_one('end_time', ''))
     self.add_value_to_sync_dict_list(
         sync_dict_list, 'imagedate',
         CUtils.to_day_format(dso_time_json.xpath_one('time', ''),
                              dso_time_json.xpath_one('time', '')))
     self.add_value_to_sync_dict_list(
         sync_dict_list, 'producetime',
         CUtils.to_day_format(dso_time_json.xpath_one('time', ''),
                              dso_time_json.xpath_one('time', '')))
     self.add_value_to_sync_dict_list(
         sync_dict_list, 'imagedatetag',
         self.transform_time_to_imagedatetag(
             CUtils.to_day_format(dso_time_json.xpath_one('time', ''),
                                  dso_time_json.xpath_one('time', ''))))
     if insert_or_updata:
         self.add_value_to_sync_dict_list(sync_dict_list, 'isdel', '1')
     return sync_dict_list
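The helper add_value_to_sync_dict_list is part of the surrounding framework and is not shown on this page. A minimal sketch of what the convention described in the docstring implies, assuming the helper simply appends one field record (name, value, optional special marker) to the list the caller accumulates:

def add_value_to_sync_dict_list(sync_dict_list, field_name, field_value,
                                value_type='plain'):
    """Append one field record; value_type marks special handling such as raw SQL."""
    sync_dict_list.append({
        'field': field_name,
        'value': field_value,
        'type': value_type,
    })


if __name__ == '__main__':
    fields = []
    add_value_to_sync_dict_list(fields, 'begdate', '2021-01-01')
    add_value_to_sync_dict_list(fields, 'isdel', '1')
    print(fields)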
Example No. 2
    def get_sync_predefined_dict_list(self, insert_or_updata) -> list:
        """
        insert_or_updata indicates whether an update or an insert is being configured: -1 means insert, 0 means update.
        The layout of this method follows a strict convention: call add_value_to_sync_dict_list, where the
        first argument is the list, the second the field name, the third the field value, and the fourth any special configuration.
        This method handles the fields shared by all product types.
        datacount (data count), secrecylevel (security classification), regioncode (administrative region code),
        regionname (administrative region), resolution, colormodel (color model), piexldepth (pixel depth),
        scale (scale denominator) and mainrssource (main satellite source) are left to the plugins.
        """
        sync_dict_list = list()
        object_table_id = self._obj_id  # object id (oid)
        object_table_data = self._dataset
        self.add_value_to_sync_dict_list(sync_dict_list, 'aprid',
                                         object_table_id)
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'productname',
            object_table_data.value_by_name(0, 'dsoobjectname', ''))
        self.add_value_to_sync_dict_list(sync_dict_list, 'producttype',
                                         self._obj_type_code)
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'dsodatatype',
            object_table_data.value_by_name(0, 'dsodatatype', ''))
        dso_time = object_table_data.value_by_name(0, 'dso_time', '')
        dso_time_json = CJson()
        dso_time_json.load_obj(dso_time)
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'begdate',
            dso_time_json.xpath_one('start_time', ''))
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'enddate', dso_time_json.xpath_one('end_time', ''))
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'imagedate',
            CUtils.to_day_format(dso_time_json.xpath_one('time', ''),
                                 dso_time_json.xpath_one('time', '')))
        # datacount: data count
        # secrecylevel: security classification
        # regioncode: administrative region code
        # regionname: administrative region; the four fields above are handled by the plugins
        self.add_value_to_sync_dict_list(  # configure a sub-query as the value
            sync_dict_list, 'centerx', '''
            (select 
            st_x(st_centroid(
            (select dso_geo_wgs84 from dm2_storage_object where dsoid='{0}')
            ))::decimal(8, 2))
            '''.format(object_table_id), self.DataValueType_SQL)
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'centery', '''
            (select 
            st_y(st_centroid(
            (select dso_geo_wgs84 from dm2_storage_object where dsoid='{0}')
            ))::decimal(8, 2))
            '''.format(object_table_id), self.DataValueType_SQL)
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'geomwkt', '''
            st_astext(
            (select dso_geo_wgs84 from dm2_storage_object where dsoid='{0}')
            )
            '''.format(object_table_id), self.DataValueType_SQL)
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'geomobj', '''
            (select dso_geo_wgs84 from dm2_storage_object where dsoid='{0}')
            '''.format(object_table_id), self.DataValueType_SQL)

        self.add_value_to_sync_dict_list(
            sync_dict_list, 'browserimg',
            object_table_data.value_by_name(0, 'dso_browser', ''))
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'thumbimg',
            object_table_data.value_by_name(0, 'dso_thumb', ''))
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'producetime',
            CUtils.to_day_format(dso_time_json.xpath_one('time', ''),
                                 dso_time_json.xpath_one('time', '')))
        # resolution: handled by the plugins
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'imgsize', '''
            (select round((sum(dodfilesize)/1048576),2) from dm2_storage_obj_detail where dodobjectid='{0}')
            '''.format(object_table_id), self.DataValueType_SQL)
        # colormodel: handled by the plugins
        # piexldepth: handled by the plugins
        if insert_or_updata:
            self.add_value_to_sync_dict_list(sync_dict_list, 'isdel', '0')
            now_time = CUtils.any_2_str(
                datetime.datetime.now().strftime('%F %T'))
            self.add_value_to_sync_dict_list(sync_dict_list, 'addtime',
                                             now_time)
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'extent',
            "(select dso_geo_bb_wgs84 from dm2_storage_object where dsoid='{0}')"
            .format(object_table_id), self.DataValueType_SQL)
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'proj',
            object_table_data.value_by_name(0, 'dso_prj_wkt', ''))
        # remark: left empty for now
        # ispublishservice: left empty for now
        self.add_value_to_sync_dict_list(sync_dict_list, 'queryable', '0')
        # scale: handled by the plugins
        # mainrssource: handled by the plugins
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'dsdid',
            object_table_data.value_by_name(0, 'query_directory_id', ''))
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'dsfid',
            object_table_data.value_by_name(0, 'query_file_id', ''))
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'imagedatetag',
            self.transform_time_to_imagedatetag(
                CUtils.to_day_format(dso_time_json.xpath_one('time', ''),
                                     dso_time_json.xpath_one('time', ''))))

        return sync_dict_list
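Entries added with self.DataValueType_SQL carry a raw SQL fragment (for example the centerx/centery sub-selects above) rather than a literal value. The framework's renderer is not shown on this page; the sketch below only illustrates, under the assumption that each entry stores a field name, a value and a value-type marker, how SQL-typed values could be spliced into an INSERT verbatim while plain values stay parameterized:

VALUE_TYPE_PLAIN = 'plain'
VALUE_TYPE_SQL = 'sql'


def build_insert_statement(table_name: str, sync_dict_list: list) -> tuple:
    """Turn a field/value list into an INSERT statement plus its bind parameters."""
    columns, expressions, params = [], [], {}
    for entry in sync_dict_list:
        field = entry['field']
        columns.append(field)
        if entry.get('type') == VALUE_TYPE_SQL:
            # raw SQL fragment, e.g. a sub-select computing centerx/centery
            expressions.append(entry['value'])
        else:
            expressions.append(':{0}'.format(field))
            params[field] = entry['value']
    sql = 'insert into {0} ({1}) values ({2})'.format(
        table_name, ', '.join(columns), ', '.join(expressions))
    return sql, params


if __name__ == '__main__':
    demo_list = [
        {'field': 'aprid', 'value': 'OBJ-001', 'type': VALUE_TYPE_PLAIN},
        {'field': 'centerx',
         'value': "(select st_x(st_centroid(dso_geo_wgs84)) "
                  "from dm2_storage_object where dsoid='OBJ-001')",
         'type': VALUE_TYPE_SQL},
    ]
    statement, bind_params = build_insert_statement('ap_product', demo_list)
    print(statement)
    print(bind_params)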
Example No. 3
    def _do_access(self) -> str:
        try:
            quality_info_xml = self._quality_info  # quality-check xml
            quality_summary = self._dataset.value_by_name(0, 'dso_quality_summary', '')
            quality_summary_json = CJson()
            quality_summary_json.load_obj(quality_summary)
            access_wait_flag = self.DB_False  # wait flag: True if any check item requires waiting
            access_forbid_flag = self.DB_False  # forbid flag: True if any check item forbids access
            message = ''

            # file and image quality-check section
            file_qa = quality_summary_json.xpath_one('total', '')
            image_qa = quality_summary_json.xpath_one('metadata.data', '')
            if CUtils.equal_ignore_case(file_qa, self.QA_Result_Error) \
                    or CUtils.equal_ignore_case(image_qa, self.QA_Result_Error):
                message = message + '[The quality check of the data and its related files reports an error! Please fix it!]'
                access_forbid_flag = self.DB_True
            elif CUtils.equal_ignore_case(file_qa, self.QA_Result_Warn) \
                    or CUtils.equal_ignore_case(image_qa, self.QA_Result_Warn):
                message = message + '[The quality check of the data and its related files reports a warning! Please review it!]'
                access_wait_flag = self.DB_True
            elif CUtils.equal_ignore_case(quality_summary, ''):
                message = message + '[The data quality check has not been performed; the data may have problems! Please review it!]'
                access_forbid_flag = self.DB_True
            else:
                pass

            for qa_name, qa_id in self.access_check_dict().items():  # iterate over the configured check list
                # qa_id = CUtils.dict_value_by_name(access_check_dict, 'qa_id', '')  # get the id
                qa_node = quality_info_xml.xpath_one("//item[@id='{0}']".format(qa_id))  # look up the node in the xml
                if qa_node is not None:
                    node_result = CXml.get_attr(qa_node, self.Name_Result, '', False)  # get the check result
                    if CUtils.equal_ignore_case(node_result, self.QA_Result_Pass):
                        pass
                    elif CUtils.equal_ignore_case(node_result, self.QA_Result_Warn):  # a warning puts the data into the wait state
                        message = message + '[In the business metadata quality check, item {0} does not meet the requirements; correction is recommended!]'.format(qa_name)
                        access_wait_flag = self.DB_True
                    else:  # errors and anything else, e.g. '' or an unexpected value
                        message = message + '[In the business metadata quality check, item {0} does not meet the requirements and must be fixed before it can be archived!]'.format(qa_name)
                        access_forbid_flag = self.DB_True
                else:
                    message = message + '[In the business metadata quality check, item {0} is missing; please fix it!]'.format(qa_name)
                    access_forbid_flag = self.DB_True

            # database section
            access_wait_flag, access_forbid_flag, message = \
                self.db_access_check(access_wait_flag, access_forbid_flag, message)

            # resolve the final access result from the flags
            access_flag = self.DataAccess_Pass
            if access_forbid_flag:
                access_flag = self.DataAccess_Forbid
            elif access_wait_flag:
                access_flag = self.DataAccess_Wait
            if CUtils.equal_ignore_case(message, ''):
                message = 'The module can proceed with the access!'

            result = CResult.merge_result(
                self.Success,
                'Module [{0}.{1}] has finished analyzing its access capability for object [{2}]! Result: {3}'.format(
                    CUtils.dict_value_by_name(self.information(), self.Name_ID, ''),
                    CUtils.dict_value_by_name(self.information(), self.Name_Title, ''),
                    self._obj_name,
                    message
                )
            )
            return CResult.merge_result_info(result, self.Name_Access, access_flag)
        except Exception as error:
            result = CResult.merge_result(
                self.Failure,
                'An exception occurred while module [{0}.{1}] analyzed its access capability for object [{2}]! Details: {3}!'.format(
                    CUtils.dict_value_by_name(self.information(), self.Name_ID, ''),
                    CUtils.dict_value_by_name(self.information(), self.Name_Title, ''),
                    self._obj_name,
                    error.__str__()
                )
            )
            return CResult.merge_result_info(result, self.Name_Access, self.DataAccess_Forbid)
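The loop in _do_access above looks up each configured check item by id in the quality XML and reads its result attribute through the project's CXml wrapper. Below is a minimal standard-library sketch of the same lookup; the XML layout and the 'result' attribute name are assumptions based on the XPath and self.Name_Result used above:

import xml.etree.ElementTree as ET

QUALITY_XML = '''
<quality>
  <item id="qa_001" result="pass"/>
  <item id="qa_002" result="warn"/>
</quality>
'''


def item_result(root: ET.Element, qa_id: str, default: str = '') -> str:
    """Return the 'result' attribute of the item with the given id, or the default."""
    node = root.find(".//item[@id='{0}']".format(qa_id))
    if node is None:
        return default
    return node.get('result', default)


if __name__ == '__main__':
    root = ET.fromstring(QUALITY_XML)
    print(item_result(root, 'qa_001'))             # pass
    print(item_result(root, 'qa_002'))             # warn
    print(item_result(root, 'qa_999', 'missing'))  # missing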
Example No. 4
 def test_load_obj(self):
     data = {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5}
     json_obj = CJson()
     json_obj.load_obj(data)
     assert json_obj.xpath_one('a', -1) == 1
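CJson is a project-specific wrapper rather than a standard-library class. A minimal stand-in, assuming load_obj keeps a Python object and xpath_one resolves a dot-separated path with a default (the behaviour the examples above rely on), is sketched below:

class SimpleJson:
    """Toy stand-in for CJson: load an object, read it back by dotted path."""

    def __init__(self):
        self._data = None

    def load_obj(self, obj):
        self._data = obj

    def xpath_one(self, path, default):
        node = self._data
        for key in path.split('.'):
            if isinstance(node, dict) and key in node:
                node = node[key]
            else:
                return default
        return node


if __name__ == '__main__':
    json_obj = SimpleJson()
    json_obj.load_obj({'a': 1, 'metadata': {'data': 'warn'}})
    assert json_obj.xpath_one('a', -1) == 1
    assert json_obj.xpath_one('metadata.data', '') == 'warn'
    assert json_obj.xpath_one('missing', -1) == -1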
Example No. 5
    def object_copy_stat(self, storage_id, object_id, object_name,
                         object_relation_name):
        try:
            ds_object_stat = CFactory().give_me_db(
                self.get_mission_db_id()).one_row(
                    '''
                select sum(dodfilesize), max(dodfilemodifytime) from dm2_storage_obj_detail where dodobjectid = :object_id
                ''', {'object_id': object_id})

            object_size = None
            object_last_modify_time = None
            if not ds_object_stat.is_empty():
                object_size = ds_object_stat.value_by_index(0, 0, 0)
                object_last_modify_time = ds_object_stat.value_by_index(
                    0, 1, None)

            batch_root_relation_dir = CFactory().give_me_db(
                self.get_mission_db_id()).one_value(
                    '''
                select dsddirectory 
                from dm2_storage_directory 
                where dsdStorageid = :storage_id and position(dsddirectory || '{0}' in :directory) = 1 
                    and dsddirectory <> '' 
                order by dsddirectory 
                limit 1 
                '''.format(CFile.sep()), {
                        'storage_id': storage_id,
                        'directory': object_relation_name
                    }, object_relation_name)

            # update the current object's storage size and last modification time
            CFactory().give_me_db(self.get_mission_db_id()).execute(
                '''
                update dm2_storage_object 
                set dso_volumn_now = :object_size, dso_obj_lastmodifytime = :object_last_modify_time
                where dsoid = :object_id
                ''', {
                    'object_id': object_id,
                    'object_size': object_size,
                    'object_last_modify_time': object_last_modify_time
                })

            count_copy_same_filename_core = CFactory().give_me_db(
                self.get_mission_db_id()).one_value(
                    '''
                select count(dm2_storage_object.dsoid)
                from dm2_storage_object
                    left join dm2_storage_directory on dm2_storage_object.dsoid = dm2_storage_directory.dsd_object_id 
                    left join dm2_storage on dm2_storage_directory.dsdstorageid = dm2_storage.dstid 
                where 
                    dm2_storage.dstid is not null
                    and dm2_storage.dsttype = '{0}'
                    and dm2_storage_object.dsoobjectname = :object_name
                    and dm2_storage_object.dsoid <> :object_id
                    and dm2_storage_object.dsodatatype = '{1}'
                '''.format(self.Storage_Type_Core, self.FileType_Dir), {
                        'object_id': object_id,
                        'object_name': object_name
                    }, 0) + CFactory().give_me_db(
                        self.get_mission_db_id()).one_value(
                            '''
                select count(dm2_storage_object.dsoid)
                from dm2_storage_object
                    left join dm2_storage_file on dm2_storage_file.dsf_object_id = dm2_storage_object.dsoid
                    left join dm2_storage on dm2_storage_file.dsfstorageid = dm2_storage.dstid 
                where 
                    dm2_storage.dstid is not null
                    and dm2_storage.dsttype = '{0}'
                    and dm2_storage_object.dsoobjectname = :object_name
                    and dm2_storage_object.dsoid <> :object_id
                    and dm2_storage_object.dsodatatype = '{1}'
                '''.format(self.Storage_Type_Core, self.FileType_File), {
                                'object_id': object_id,
                                'object_name': object_name
                            }, 0)

            count_copy_same_filename_and_size_core = CFactory().give_me_db(
                self.get_mission_db_id()).one_value(
                    '''
                select count(dm2_storage_object.dsoid)
                from dm2_storage_object
                    left join dm2_storage_directory on dm2_storage_object.dsoid = dm2_storage_directory.dsd_object_id 
                    left join dm2_storage on dm2_storage_directory.dsdstorageid = dm2_storage.dstid 
                where 
                    dm2_storage.dstid is not null
                    and dm2_storage.dsttype = '{0}'
                    and dm2_storage_object.dso_volumn_now = :object_size
                    and dm2_storage_object.dsoobjectname = :object_name
                    and dm2_storage_object.dsoid <> :object_id
                    and dm2_storage_object.dsodatatype = '{1}'
                '''.format(self.Storage_Type_Core, self.FileType_Dir), {
                        'object_id': object_id,
                        'object_name': object_name,
                        'object_size': object_size
                    }, 0) + CFactory().give_me_db(
                        self.get_mission_db_id()).one_value(
                            '''
                select count(dm2_storage_object.dsoid)
                from dm2_storage_object
                    left join dm2_storage_file on dm2_storage_file.dsf_object_id = dm2_storage_object.dsoid
                    left join dm2_storage on dm2_storage_file.dsfstorageid = dm2_storage.dstid 
                where 
                    dm2_storage.dstid is not null
                    and dm2_storage.dsttype = '{0}'
                    and dm2_storage_object.dso_volumn_now = :object_size
                    and dm2_storage_object.dsoobjectname = :object_name
                    and dm2_storage_object.dsoid <> :object_id
                    and dm2_storage_object.dsodatatype = '{1}'
                '''.format(self.Storage_Type_Core, self.FileType_File), {
                                'object_id': object_id,
                                'object_name': object_name,
                                'object_size': object_size
                            }, 0)

            count_copy_same_filename_same_batch = CFactory().give_me_db(
                self.get_mission_db_id()).one_value(
                    '''
                select count(dm2_storage_object.dsoid)
                from dm2_storage_object
                    left join dm2_storage_directory on dm2_storage_object.dsoid = dm2_storage_directory.dsd_object_id 
                    left join dm2_storage on dm2_storage_directory.dsdstorageid = dm2_storage.dstid 
                where 
                    dm2_storage.dstid = :storage_id
                    and position(:directory in dm2_storage_directory.dsddirectory) = 1
                    and dm2_storage_object.dsoobjectname = :object_name
                    and dm2_storage_object.dsoid <> :object_id
                    and dm2_storage_object.dsodatatype = '{0}'
                '''.format(self.FileType_Dir), {
                        'storage_id': storage_id,
                        'object_id': object_id,
                        'object_name': object_name,
                        'directory': batch_root_relation_dir
                    }, 0) + CFactory().give_me_db(
                        self.get_mission_db_id()).one_value(
                            '''
                select count(dm2_storage_object.dsoid)
                from dm2_storage_object
                    left join dm2_storage_file on dm2_storage_file.dsf_object_id = dm2_storage_object.dsoid
                    left join dm2_storage on dm2_storage_file.dsfstorageid = dm2_storage.dstid 
                where 
                    dm2_storage.dstid = :storage_id
                    and position(:directory in dm2_storage_file.dsffilerelationname) = 1
                    and dm2_storage_object.dsoobjectname = :object_name
                    and dm2_storage_object.dsoid <> :object_id
                    and dm2_storage_object.dsodatatype = '{0}'
                '''.format(self.FileType_File), {
                                'storage_id': storage_id,
                                'object_id': object_id,
                                'object_name': object_name,
                                'directory': batch_root_relation_dir
                            }, 0)

            count_copy_same_filename_and_size_same_batch = CFactory(
            ).give_me_db(self.get_mission_db_id()).one_value(
                '''
                select count(dm2_storage_object.dsoid)
                from dm2_storage_object
                    left join dm2_storage_directory on dm2_storage_object.dsoid = dm2_storage_directory.dsd_object_id 
                    left join dm2_storage on dm2_storage_directory.dsdstorageid = dm2_storage.dstid 
                where 
                    dm2_storage.dstid = :storage_id
                    and position(:directory in dm2_storage_directory.dsddirectory) = 1
                    and dm2_storage_object.dso_volumn_now = :object_size
                    and dm2_storage_object.dsoobjectname = :object_name
                    and dm2_storage_object.dsoid <> :object_id
                    and dm2_storage_object.dsodatatype = '{0}'
                '''.format(self.FileType_Dir), {
                    'storage_id': storage_id,
                    'object_id': object_id,
                    'object_name': object_name,
                    'object_size': object_size,
                    'directory': object_relation_name
                }, 0) + CFactory().give_me_db(
                    self.get_mission_db_id()).one_value(
                        '''
                select count(dm2_storage_object.dsoid)
                from dm2_storage_object
                    left join dm2_storage_file on dm2_storage_file.dsf_object_id = dm2_storage_object.dsoid
                    left join dm2_storage on dm2_storage_file.dsfstorageid = dm2_storage.dstid 
                where 
                    dm2_storage.dstid = :storage_id
                    and position(:directory in dm2_storage_file.dsffilerelationname) = 1
                    and dm2_storage_object.dso_volumn_now = :object_size
                    and dm2_storage_object.dsoobjectname = :object_name
                    and dm2_storage_object.dsoid <> :object_id
                    and dm2_storage_object.dsodatatype = '{0}'
                '''.format(self.FileType_File), {
                            'storage_id': storage_id,
                            'object_id': object_id,
                            'object_name': object_name,
                            'object_size': object_size,
                            'directory': object_relation_name
                        }, 0)

            json_text = None
            if count_copy_same_filename_and_size_same_batch + \
                    count_copy_same_filename_and_size_core + \
                    count_copy_same_filename_same_batch + count_copy_same_filename_core > 0:
                json_obj = CJson()
                json_obj.load_obj({
                    self.Storage_Type_Core: {
                        self.Name_FileName:
                        count_copy_same_filename_core,
                        '{0}_{1}'.format(self.Name_FileName, self.Name_Size):
                        count_copy_same_filename_and_size_core
                    },
                    self.Storage_Type_InBound: {
                        self.Name_FileName:
                        count_copy_same_filename_same_batch,
                        '{0}_{1}'.format(self.Name_FileName, self.Name_Size):
                        count_copy_same_filename_and_size_same_batch
                    }
                })
                json_text = json_obj.to_json()

            CFactory().give_me_db(self.get_mission_db_id()).execute(
                '''
                update dm2_storage_object
                set dsocopystat = :copy_stat
                where dsoid = :dsoid
                ''', {
                    'dsoid': object_id,
                    'copy_stat': json_text
                })
            return CResult.merge_result(self.Success, 'Data volume statistics and duplicate-data analysis completed successfully!')
        except Exception as error:
            return CResult.merge_result(
                self.Failure,
                'An error occurred during data volume statistics and duplicate-data analysis. Details: {0}'.format(error.__str__()))
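object_copy_stat stores a small JSON summary of the duplicate counts in dm2_storage_object.dsocopystat. The sketch below mirrors that payload with the standard json module; Storage_Type_Core, Storage_Type_InBound, Name_FileName and Name_Size are class constants in the source, so the concrete key strings used here ('core', 'inbound', 'filename', 'filename_size') are assumptions:

import json


def build_copy_stat(core_name_hits, core_name_size_hits,
                    batch_name_hits, batch_name_size_hits):
    """Mirror the structure written into dm2_storage_object.dsocopystat."""
    if core_name_hits + core_name_size_hits + batch_name_hits + batch_name_size_hits == 0:
        return None  # no duplicates found: the column is left empty
    return json.dumps({
        'core': {                       # duplicates anywhere in the core storage
            'filename': core_name_hits,
            'filename_size': core_name_size_hits,
        },
        'inbound': {                    # duplicates inside the same inbound batch
            'filename': batch_name_hits,
            'filename_size': batch_name_size_hits,
        },
    })


if __name__ == '__main__':
    print(build_copy_stat(2, 1, 0, 0))
    print(build_copy_stat(0, 0, 0, 0))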
Example No. 6
    def access(self) -> str:
        try:
            # quality_info_xml = self._quality_info  # quality-check xml
            quality_summary = self._dataset.value_by_name(
                0, 'dso_quality_summary', '')
            quality_summary_json = CJson()
            quality_summary_json.load_obj(quality_summary)
            access_wait_flag = self.DB_False  # wait flag: True if any check item requires waiting
            access_forbid_flag = self.DB_False  # forbid flag: True if any check item forbids access
            message = ''

            # file and image quality-check section
            file_qa = quality_summary_json.xpath_one('total', '')
            image_qa = quality_summary_json.xpath_one('metadata.data', '')
            business_qa = quality_summary_json.xpath_one(
                'metadata.business', '')
            if CUtils.equal_ignore_case(file_qa, self.QA_Result_Error) \
                    or CUtils.equal_ignore_case(image_qa, self.QA_Result_Error) \
                    or CUtils.equal_ignore_case(business_qa, self.QA_Result_Error):
                message = message + '[The quality check of the data and its related files reports an error! Please fix it!]'
                access_forbid_flag = self.DB_True
            elif CUtils.equal_ignore_case(file_qa, self.QA_Result_Warn) \
                    or CUtils.equal_ignore_case(image_qa, self.QA_Result_Warn) \
                    or CUtils.equal_ignore_case(business_qa, self.QA_Result_Warn):
                message = message + '[The quality check of the data and its related files reports a warning! Please review it!]'
                access_wait_flag = self.DB_True
            else:
                pass

            # database section
            access_wait_flag, access_forbid_flag, message = \
                self.db_access_check(access_wait_flag, access_forbid_flag, message)

            # resolve the final access result from the flags
            access_flag = self.DataAccess_Pass
            if access_forbid_flag:
                access_flag = self.DataAccess_Forbid
            elif access_wait_flag:
                access_flag = self.DataAccess_Wait
            if CUtils.equal_ignore_case(message, ''):
                message = 'The module can proceed with the access!'

            result = CResult.merge_result(
                self.Success,
                'Module [{0}.{1}] has finished analyzing its access capability for object [{2}]! Result: {3}'.format(
                    CUtils.dict_value_by_name(self.information(), self.Name_ID,
                                              ''),
                    CUtils.dict_value_by_name(self.information(),
                                              self.Name_Title, ''),
                    self._obj_name, message))
            return CResult.merge_result_info(result, self.Name_Access,
                                             access_flag)
        except Exception as error:
            result = CResult.merge_result(
                self.Failure,
                'An exception occurred while module [{0}.{1}] analyzed its access capability for object [{2}]! Details: {3}!'.format(
                    CUtils.dict_value_by_name(self.information(), self.Name_ID,
                                              ''),
                    CUtils.dict_value_by_name(self.information(),
                                              self.Name_Title, ''),
                    self._obj_name, error.__str__()))
            return CResult.merge_result_info(result, self.Name_Access,
                                             self.DataAccess_Forbid)
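access() effectively keeps the worst of the individual QA summaries: any error forbids access, otherwise any warning puts the data into the wait state, otherwise it passes. A small, case-insensitive illustration of that aggregation (not part of the framework):

SEVERITY = {'pass': 0, 'warn': 1, 'error': 2}


def worst_verdict(*verdicts: str) -> str:
    """Return 'error' if any verdict is error, else 'warn' if any warns, else 'pass'."""
    worst = 'pass'
    for verdict in verdicts:
        normalized = (verdict or '').strip().lower()
        if SEVERITY.get(normalized, 0) > SEVERITY[worst]:
            worst = normalized
    return worst


if __name__ == '__main__':
    print(worst_verdict('pass', 'warn', 'pass'))   # warn
    print(worst_verdict('PASS', 'Error', 'warn'))  # error
    print(worst_verdict('', ''))                   # pass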
Example No. 7
    def process_main_table(self):
        object_table_id = self._obj_id  # object id (oid)
        object_table_data = self._dataset
        metadata_bus_dict = self.get_metadata_bus_dict()
        main_table_name = CUtils.dict_value_by_name(self.information(),
                                                    'main_table_name',
                                                    'ap_product')

        main_table = CTable()
        main_table.load_info(self._db_id, main_table_name)
        main_table.column_list.column_by_name('id').set_value(object_table_id)
        productname = CUtils.dict_value_by_name(metadata_bus_dict,
                                                'productname', None)
        if CUtils.equal_ignore_case(productname, ''):
            productname = object_table_data.value_by_name(
                0, 'dsoobjectname', None)
        main_table.column_list.column_by_name('productname').set_value(
            productname)
        main_table.column_list.column_by_name('producttype').set_value(
            CUtils.dict_value_by_name(metadata_bus_dict, 'producttype', None))
        main_table.column_list.column_by_name('regioncode').set_null()
        main_table.column_list.column_by_name('productattribute').set_value(
            CUtils.dict_value_by_name(metadata_bus_dict, 'productattribute',
                                      None))

        centerlatitude = CUtils.dict_value_by_name(metadata_bus_dict,
                                                   'centerlatitude', None)
        centerlongitude = CUtils.dict_value_by_name(metadata_bus_dict,
                                                    'centerlongitude', None)
        centerlonlat = '{0},{1}'.format(centerlongitude, centerlatitude)
        main_table.column_list.column_by_name('centerlonlat').set_value(
            centerlonlat)

        main_table.column_list.column_by_name('geomwkt').set_sql('''
            st_astext(
            (select dso_geo_wgs84 from dm2_storage_object where dsoid='{0}')
            )
            '''.format(object_table_id))
        main_table.column_list.column_by_name('geomobj').set_sql('''
            (select dso_geo_wgs84 from dm2_storage_object where dsoid='{0}')
            '''.format(object_table_id))
        main_table.column_list.column_by_name('browserimg').set_value(
            '{0}{1}'.format(
                CFile.sep(),
                object_table_data.value_by_name(0, 'dso_browser', None)))
        main_table.column_list.column_by_name('thumbimg').set_value(
            '{0}{1}'.format(
                CFile.sep(),
                object_table_data.value_by_name(0, 'dso_thumb', None)))
        main_table.column_list.column_by_name('publishdate').set_value(
            CUtils.dict_value_by_name(metadata_bus_dict, 'publishdate', None))
        main_table.column_list.column_by_name('copyright').set_value(
            CUtils.dict_value_by_name(metadata_bus_dict, 'copyright', None))

        dso_time = object_table_data.value_by_name(0, 'dso_time', None)
        dso_time_json = CJson()
        dso_time_json.load_obj(dso_time)
        imgdate = dso_time_json.xpath_one('time', None)
        if not CUtils.equal_ignore_case(imgdate, ''):
            main_table.column_list.column_by_name('imgdate').set_value(
                imgdate[0:4])
        else:
            main_table.column_list.column_by_name('imgdate').set_null()

        main_table.column_list.column_by_name('starttime').set_value(
            dso_time_json.xpath_one('start_time', None))
        main_table.column_list.column_by_name('endtime').set_value(
            dso_time_json.xpath_one('end_time', None))
        resolution = CUtils.any_2_str(
            CUtils.dict_value_by_name(metadata_bus_dict, 'resolution', None))
        if not CUtils.equal_ignore_case(resolution, ''):
            if '/' in resolution:
                resolution_list = resolution.split('/')
                temp_list = list()
                for resolution in resolution_list:
                    temp_list.append(CUtils.to_decimal(resolution, -1))
                temp_list = list(set(temp_list))  # deduplicate
                if -1 in temp_list:
                    temp_list.remove(-1)
                if len(temp_list) > 0:
                    main_table.column_list.column_by_name(
                        'resolution').set_value(min(temp_list))
                else:
                    main_table.column_list.column_by_name(
                        'resolution').set_value(0)
            else:
                main_table.column_list.column_by_name('resolution').set_value(
                    resolution)
        else:
            main_table.column_list.column_by_name('resolution').set_value(0)

        main_table.column_list.column_by_name('filesize').set_sql('''
            (select sum(dodfilesize) from dm2_storage_obj_detail where dodobjectid='{0}')
            '''.format(object_table_id))

        productid = CUtils.dict_value_by_name(metadata_bus_dict, 'productid',
                                              None)
        if CUtils.equal_ignore_case(productid, ''):
            object_type = object_table_data.value_by_name(0, 'dsodatatype', '')
            if CUtils.equal_ignore_case(object_type, self.Name_Dir):
                main_table.column_list.column_by_name('productid').set_value(
                    object_table_data.value_by_name(0, 'dsoobjectname', None))
            elif CUtils.equal_ignore_case(object_type, self.Name_File):
                main_table.column_list.column_by_name('productid').set_sql('''
                    (SELECT dsffilename FROM dm2_storage_file WHERE dsf_object_id = '{0}')
                    '''.format(object_table_id))
            else:
                main_table.column_list.column_by_name('productid').set_null()
        else:
            main_table.column_list.column_by_name('productid').set_value(
                productid)

        main_table.column_list.column_by_name('remark').set_value(
            CUtils.dict_value_by_name(metadata_bus_dict, 'remark', None))
        main_table.column_list.column_by_name('extent').set_sql('''
            (select dso_geo_bb_wgs84 from dm2_storage_object where dsoid='{0}')
            '''.format(object_table_id))
        main_table.column_list.column_by_name('proj').set_null()  # kept empty for raw data

        main_table.column_list.column_by_name('dataid').set_value(
            object_table_id)
        main_table.column_list.column_by_name('shplog').set_null()

        if not main_table.if_exists():
            now_time = CUtils.any_2_str(
                datetime.datetime.now().strftime('%F %T'))
            main_table.column_list.column_by_name('addtime').set_value(
                now_time)
            main_table.column_list.column_by_name('isdel').set_value(0)
            main_table.column_list.column_by_name('projectnames').set_value(
                'productname')
        result = main_table.save_data()

        return result
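# Side note on the resolution handling in process_main_table above: a '/'-separated
# resolution string is reduced to its smallest parsable value, and anything unparsable
# falls back to 0. A stand-alone sketch of that rule, using float() in place of the
# framework's CUtils.to_decimal (the single-value branch above, which keeps the raw
# string, is simplified away here):
def pick_resolution(resolution: str) -> float:
    """Return the smallest parsable value of a '/'-separated resolution string, else 0."""
    if not resolution:
        return 0
    values = []
    for part in resolution.split('/'):
        try:
            values.append(float(part))
        except ValueError:
            continue  # unparsable parts are dropped, mirroring the -1 filtering above
    return min(values) if values else 0


if __name__ == '__main__':
    print(pick_resolution('2/8/2'))   # 2.0
    print(pick_resolution('abc/16'))  # 16.0
    print(pick_resolution(''))        # 0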
    def process(self) -> str:
        """
        Extract the document's metadata here and store the metadata file under self.file_content.work_root_dir,
        with the fixed name self.FileName_MetaData. Note that the returned string carries the metadata format.
        Note: if memory leaks occur, extract the metadata in a new process, write it to a file, and parse it in this process!!!
        :return:
        """
        default_result = super().process()
        out_metadata_file_fullname = CFile.join_file(
            self.file_content.work_root_dir, self.FileName_MetaData)
        in_file_fullname = self.file_info.file_name_with_full_path

        if not settings.application.xpath_one(
                self.Path_Setting_Dependence_Tika_Enable, True):
            return default_result

        tika_dependence_mode = settings.application.xpath_one(
            self.Path_Setting_Dependence_Tika_Mode, self.Name_Server)
        if CUtils.equal_ignore_case(tika_dependence_mode, self.Name_Server):
            tika_server_url = settings.application.xpath_one(
                self.Path_Setting_Dependence_Tika_Server_Url, None)
            tika_server_connect_timeout = settings.application.xpath_one(
                self.Path_Setting_Dependence_Tika_Server_Timeout, 30)
            if CUtils.equal_ignore_case(tika_server_url, ''):
                return default_result

            try:
                parsed = TikaServer.from_file(
                    in_file_fullname,
                    tika_server_url,
                    requestOptions={'timeout': tika_server_connect_timeout})
                meta_data_dict = parsed["metadata"]
                json_obj = CJson()
                json_obj.load_obj(meta_data_dict)
                json_obj.to_file(out_metadata_file_fullname)
                return CResult.merge_result_info(
                    CResult.merge_result(
                        self.Success,
                        'Metadata of document [{0}] extracted successfully'.format(in_file_fullname)),
                    self.Name_Format, self.MetaDataFormat_Json)
            except Exception as error:
                return CResult.merge_result(
                    self.Failure, 'An error occurred while extracting metadata of document [{0}]. Details: [{1}]'.format(
                        in_file_fullname, error.__str__()))
        else:
            tika_application = settings.application.xpath_one(
                self.Path_Setting_Dependence_Tika_Client_App, None)
            if CUtils.equal_ignore_case(tika_application, ''):
                return default_result

            if not CFile.file_or_path_exist(tika_application):
                return CResult.merge_result(
                    self.Failure,
                    'Metadata of document [{0}] cannot be extracted. Reason: [dependency middleware file {1} does not exist; please fix it and retry!]'.format(
                        in_file_fullname, tika_application))

            try:
                tika_client = TikaApplication(file_jar=tika_application)
                meta_data_dict = tika_client.extract_only_metadata(
                    in_file_fullname)
                json_obj = CJson()
                json_obj.load_obj(meta_data_dict)
                json_obj.to_file(out_metadata_file_fullname)
                return CResult.merge_result_info(
                    CResult.merge_result(
                        self.Success,
                        'Metadata of document [{0}] extracted successfully'.format(in_file_fullname)),
                    self.Name_Format, self.MetaDataFormat_Json)
            except Exception as error:
                return CResult.merge_result(
                    self.Failure, 'An error occurred while extracting metadata of document [{0}]. Details: [{1}]'.format(
                        in_file_fullname, error.__str__()))

        # result = raster_mdreader.get_metadata_2_file(out_metadata_file_fullname)
        # result = CProcessUtils.processing_method(raster_mdreader.get_metadata_2_file, out_metadata_file_fullname)
        # process-invocation mode
        # p_one = Process(target=raster_mdreader.get_metadata_2_file, args=(out_metadata_file_fullname,))
        # p_one.start()
        # p_one.join()
        # Note: unreachable, since both branches of the if/else above return; 'result'
        # is not defined on this path (leftover from the commented-out flow above).
        return CResult.merge_result_info(result, self.Name_Format,
                                         self.MetaDataFormat_Json)
    def get_sync_xml_dict_list(self, insert_or_updata) -> list:
        """
        For insert_or_updata, self.DB_True means insert and self.DB_False means update.
        The layout of this method follows a strict convention: call add_value_to_sync_dict_list, where the
        first argument is the list, the second the field name, the third the field value, and the fourth any special configuration.
        """
        object_id = self._obj_id
        object_name = self._obj_name
        dsometadataxml_bus = self._dataset.value_by_name(
            0, 'dsometadataxml_bus', '')
        dso_time = self._dataset.value_by_name(0, 'dso_time', '')
        dso_time_json = CJson()  # time information json
        dso_time_json.load_obj(dso_time)
        metadataxml_bus_xml = CXml()  # business metadata xml
        metadataxml_bus_xml.load_xml(dsometadataxml_bus)

        sync_dict_list = self.get_sync_predefined_dict_list(insert_or_updata)
        self.add_value_to_sync_dict_list(sync_dict_list, 'aprndid', object_id)
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'aprnwid',
            self._dataset.value_by_name(0, 'dsoparentobjid', ''))
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'dataformat',
            self._dataset.value_by_name(0, 'dsodatatype', ''))
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'projinfo',
            self._dataset.value_by_name(0, 'dso_prj_project', ''))
        # sync_dict_list, 'createrorganize'  # left empty
        # sync_dict_list, 'submitorganize'  # left empty
        # sync_dict_list, 'copyrightorgnize'  # left empty
        # sync_dict_list, 'supplyorganize'  # left empty
        self.add_value_to_sync_dict_list(sync_dict_list, 'metafilename',
                                         '{0}_21at.xml'.format(object_name))
        # sync_dict_list, 'networksize'  # left empty
        # sync_dict_list, 'zonetype'  # left empty
        # sync_dict_list, 'centerline'  # left empty
        # sync_dict_list, 'zoneno'  # left empty
        # sync_dict_list, 'coordinateunit'  # left empty
        # sync_dict_list, 'demname'  # left empty
        # sync_dict_list, 'demstandard'  # left empty
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'dsometadatajson',
            self._dataset.value_by_name(0, 'dsometadataxml_bus', ''))
        # fields handled by the plugins
        self.add_value_to_sync_dict_list(sync_dict_list, 'datacount', 1)
        # sync_dict_list, 'secrecylevel'  # left empty
        # sync_dict['regioncode']  # left empty
        # sync_dict['regionname']  # left empty
        # sync_dict_list, 'resolution'  # left empty
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'imagedate',
            CUtils.to_day_format(dso_time_json.xpath_one('time', ''),
                                 dso_time_json.xpath_one('time', '')))
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'begdate',
            CUtils.to_day_format(dso_time_json.xpath_one('start_time', ''),
                                 dso_time_json.xpath_one('start_time', '')))
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'enddate',
            CUtils.to_day_format(dso_time_json.xpath_one('end_time', ''),
                                 dso_time_json.xpath_one('end_time', '')))
        return sync_dict_list
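Both get_sync_xml_dict_list here and get_sync_mdb_dict_list in Example No. 10 start from the shared field list built by get_sync_predefined_dict_list (Example No. 2) and then append table-specific fields on top. A structural sketch of that layering with stand-in names and values (the real classes, field sets and value sources live in the framework):

class BaseSync:
    """Stand-in for the class that owns get_sync_predefined_dict_list."""

    @staticmethod
    def add_value_to_sync_dict_list(fields, name, value, value_type='plain'):
        fields.append({'field': name, 'value': value, 'type': value_type})

    def get_sync_predefined_dict_list(self, insert_or_update):
        fields = []
        # shared fields that every product table receives
        self.add_value_to_sync_dict_list(fields, 'aprid', 'OBJ-001')
        self.add_value_to_sync_dict_list(fields, 'producttype', 'image')
        return fields


class XmlProductSync(BaseSync):
    def get_sync_xml_dict_list(self, insert_or_update):
        fields = self.get_sync_predefined_dict_list(insert_or_update)
        # table-specific fields layered on top of the shared list
        self.add_value_to_sync_dict_list(fields, 'aprndid', 'OBJ-001')
        self.add_value_to_sync_dict_list(fields, 'metafilename', 'OBJ-001_21at.xml')
        return fields


if __name__ == '__main__':
    for row in XmlProductSync().get_sync_xml_dict_list(-1):
        print(row)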
Example No. 10
    def get_sync_mdb_dict_list(self, insert_or_updata) -> list:
        """
        For insert_or_updata, self.DB_True means insert and self.DB_False means update.
        The layout of this method follows a strict convention: call add_value_to_sync_dict_list, where the
        first argument is the list, the second the field name, the third the field value, and the fourth any special configuration.
        """
        object_id = self._obj_id
        object_name = self._obj_name
        dsometadataxml_bus = self._dataset.value_by_name(
            0, 'dsometadataxml_bus', '')
        dso_time = self._dataset.value_by_name(0, 'dso_time', '')
        dso_time_json = CJson()  # time information json
        dso_time_json.load_obj(dso_time)
        metadataxml_bus_xml = CXml()  # business metadata xml
        metadataxml_bus_xml.load_xml(dsometadataxml_bus)

        sync_dict_list = self.get_sync_predefined_dict_list(insert_or_updata)
        self.add_value_to_sync_dict_list(sync_dict_list, 'aprsdid', object_id)
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'aprswid',
            self._dataset.value_by_name(0, 'dsoparentobjid', ''))
        # sync_dict['fname']   # left empty
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'fno',
            metadataxml_bus_xml.get_element_text_by_xpath_one(
                "//item[@name='th']"))
        '''
        object_name[0:1]    1:1,000,000 sheet row number (letter)
        object_name[1:3]    1:1,000,000 sheet column number (digits)
        object_name[3:4]    scale code (letter)
        object_name[4:7]    sheet row number (digits)
        object_name[7:10]   sheet column number (digits)
        '''
        if CUtils.text_is_alpha(object_name[0:1]):
            self.add_value_to_sync_dict_list(sync_dict_list, 'hrowno',
                                             object_name[0:1])
            self.add_value_to_sync_dict_list(sync_dict_list, 'hcolno',
                                             object_name[1:3])
            self.add_value_to_sync_dict_list(sync_dict_list, 'scalecode',
                                             object_name[3:4])
            self.add_value_to_sync_dict_list(sync_dict_list, 'rowno',
                                             object_name[4:7])
            self.add_value_to_sync_dict_list(sync_dict_list, 'colno',
                                             object_name[7:10])
        # sync_dict['expandextent']  # left empty
        # sync_dict['pupdatedate']  # left empty
        # sync_dict['pversion']  # left empty
        # sync_dict['publishdate']  # left empty
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'dataformat',
            metadataxml_bus_xml.get_element_text_by_xpath_one(
                "//item[@name='sjgs']"))
        # sync_dict['maindatasource']  # left empty
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'dsometadatajson',
            self._dataset.value_by_name(0, 'dsometadataxml_bus', ''))
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'createrorganize',
            metadataxml_bus_xml.get_element_text_by_xpath_one(
                "//item[@name='sjscdwm']"))
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'submitorganize',
            metadataxml_bus_xml.get_element_text_by_xpath_one(
                "//item[@name='sjbqdwm']"))
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'copyrightorgnize',
            metadataxml_bus_xml.get_element_text_by_xpath_one(
                "//item[@name='sjcbdwm']"))
        # sync_dict['supplyorganize']  # left empty
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'colormodel',
            metadataxml_bus_xml.get_element_text_by_xpath_one(
                "//item[@name='yxscms']"))
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'piexldepth',
            metadataxml_bus_xml.get_element_text_by_xpath_one(
                "//item[@name='xsws']"))
        # sync_dict['scale']  # left empty
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'mainrssource',
            metadataxml_bus_xml.get_element_text_by_xpath_one(
                "//item[@name='wxmc']"))
        # fields handled by the plugins
        self.add_value_to_sync_dict_list(sync_dict_list, 'datacount', 1)
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'secrecylevel',
            metadataxml_bus_xml.get_element_text_by_xpath_one(
                "//item[@name='mj']"))
        # sync_dict['regioncode']  # left empty
        # sync_dict['regionname']  # left empty
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'resolution',
            metadataxml_bus_xml.get_element_text_by_xpath_one(
                "//item[@name='dmfbl']"))
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'imagedate',
            CUtils.to_day_format(dso_time_json.xpath_one('time', ''),
                                 dso_time_json.xpath_one('time', '')))
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'begdate',
            CUtils.to_day_format(dso_time_json.xpath_one('start_time', ''),
                                 dso_time_json.xpath_one('start_time', '')))
        self.add_value_to_sync_dict_list(
            sync_dict_list, 'enddate',
            CUtils.to_day_format(dso_time_json.xpath_one('end_time', ''),
                                 dso_time_json.xpath_one('end_time', '')))
        return sync_dict_list
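The slicing above follows the sheet-name layout documented in the docstring: a letter row and two-digit column of the 1:1,000,000 sheet, a one-letter scale code, then three-digit row and column numbers. A stand-alone sketch of that decomposition (the sample designation 'J50B001002' is only illustrative):

def split_sheet_name(object_name: str) -> dict:
    """Split a map-sheet designation such as 'J50B001002' into its parts."""
    if not object_name[0:1].isalpha():
        return {}  # not a standard sheet designation, mirroring the guard above
    return {
        'hrowno': object_name[0:1],     # 1:1,000,000 sheet row (letter)
        'hcolno': object_name[1:3],     # 1:1,000,000 sheet column (digits)
        'scalecode': object_name[3:4],  # scale code (letter)
        'rowno': object_name[4:7],      # sheet row number (digits)
        'colno': object_name[7:10],     # sheet column number (digits)
    }


if __name__ == '__main__':
    print(split_sheet_name('J50B001002'))
    print(split_sheet_name('123456'))  # {}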