def patch_after_feature_change(self, workspace, layer):
    """Refresh the GeoServer WMS representation of a layer after its features changed.

    Recomputes the layer bbox (native and lat/lon) and patches the proper
    GeoServer object depending on style type, then clears the WMS cache.
    Raises AbortedException when the task is aborted before or after the work.
    """
    if self.is_aborted():
        raise AbortedException
    file_type = layman_util.get_publication_info(
        workspace, LAYER_TYPE, layer, context={'keys': ['file']}
    )['file']['file_type']
    if file_type == settings.FILE_TYPE_VECTOR:
        bbox = geoserver.get_layer_bbox(workspace, layer)
        geoserver_workspace = wms.get_geoserver_workspace(workspace)
        info = layman_util.get_publication_info(
            workspace, LAYER_TYPE, layer,
            context={'keys': ['style_type', 'native_crs']})
        crs = info['native_crs']
        lat_lon_bbox = bbox_util.transform(bbox, crs, crs_def.EPSG_4326)
        # SLD-styled layers are GeoServer feature types; QML-styled ones are
        # cascaded WMS layers — each is patched through its own endpoint.
        patcher = {
            'sld': gs_util.patch_feature_type,
            'qml': gs_util.patch_wms_layer,
        }.get(info['style_type'])
        if patcher is not None:
            patcher(geoserver_workspace, layer, auth=settings.LAYMAN_GS_AUTH,
                    bbox=bbox, crs=crs, lat_lon_bbox=lat_lon_bbox)
    elif file_type != settings.FILE_TYPE_RASTER:
        raise NotImplementedError(f"Unknown file type: {file_type}")
    wms.clear_cache(workspace)
    if self.is_aborted():
        raise AbortedException
def refresh_input_chunk(self, workspace, layername, check_crs=True):
    """Wait for all chunked upload parts of the layer, then validate the uploaded files.

    Polls chunk status every 0.5 s. The upload is deleted and LaymanError(22)
    raised if no new chunk arrives within UPLOAD_MAX_INACTIVITY_TIME; the
    upload is also deleted when the task is aborted during the wait.
    Raises LaymanError(48) for raster files combined with a QML style.
    """
    if self.is_aborted():
        raise AbortedException
    last_change = time.time()
    num_files_saved = 0
    num_chunks_saved = 0
    # chunk_info is a tuple: (all_chunks_uploaded, files_saved, chunks_saved) — TODO confirm
    chunk_info = input_chunk.layer_file_chunk_info(workspace, layername)
    logger.debug(f'chunk_info {str(chunk_info)}')
    while not chunk_info[0]:
        # Inactivity timeout: counted since the last observed progress, not since start.
        if time.time() - last_change > settings.UPLOAD_MAX_INACTIVITY_TIME:
            logger.info(
                f'UPLOAD_MAX_INACTIVITY_TIME reached {workspace}.{layername}')
            input_file.delete_layer(workspace, layername)
            raise LaymanError(22)
        time.sleep(0.5)
        if self.is_aborted():
            # Clean up the partial upload before propagating the abort.
            logger.info(f'Aborting for layer {workspace}.{layername}')
            input_file.delete_layer(workspace, layername)
            logger.info(f'Aborted for layer {workspace}.{layername}')
            raise AbortedException
        chunk_info = input_chunk.layer_file_chunk_info(workspace, layername)
        logger.debug(f'chunk_info {str(chunk_info)}')
        # Any progress (more files or chunks saved) resets the inactivity timer.
        if num_files_saved != chunk_info[1] \
                or num_chunks_saved != chunk_info[2]:
            last_change = time.time()
            num_files_saved = chunk_info[1]
            num_chunks_saved = chunk_info[2]
    logger.info(f'Layer chunks uploaded {workspace}.{layername}')
    # All chunks are in — validate filenames and the main data file.
    input_files = input_file.get_layer_input_files(workspace, layername)
    input_file.check_filenames(workspace, layername, input_files, check_crs, ignore_existing_files=True)
    main_filepath = layman_util.get_publication_info(workspace, LAYER_TYPE, layername, context={
        'keys': ['file']
    })['_file']['gdal_path']
    input_file.check_main_file(main_filepath, check_crs=check_crs)
    file_type = input_file.get_file_type(
        input_files.raw_or_archived_main_file_path)
    style_type_for_check = layman_util.get_publication_info(
        workspace, LAYER_TYPE, layername, context={'keys': ['style_type']})['style_type']
    # QML (QGIS) styles only make sense for vector data.
    if file_type == settings.FILE_TYPE_RASTER and style_type_for_check == 'qml':
        raise LaymanError(48, f'Raster layers are not allowed to have QML style.')
def assert_all_sources_bbox(workspace, layer, expected_bbox_3857, *, expected_native_bbox=None, expected_native_crs=None):
    """Assert that all sources (DB info, WFS, WMS, CSW metadata) agree on the layer bbox.

    :param expected_bbox_3857: expected bounding box in EPSG:3857
    :param expected_native_bbox: optional expected bbox in the native CRS
    :param expected_native_crs: native CRS of expected_native_bbox
    """
    with app.app_context():
        # Bug fix: the context parameter name is 'keys', not 'key' (as used by
        # every other get_publication_info call); 'key' was silently ignored.
        info = layman_util.get_publication_info(workspace, LAYER_TYPE, layer,
                                                context={'keys': ['bounding_box', 'native_bounding_box', 'native_crs']})
    bbox_3857 = tuple(info['bounding_box'])
    native_bbox = tuple(info['native_bounding_box'])
    native_crs = info['native_crs']
    assert_same_bboxes(expected_bbox_3857, bbox_3857, 0.00001)
    if expected_native_bbox is not None:
        # Native bbox is stored verbatim, so exact equality is expected.
        assert_same_bboxes(expected_native_bbox, native_bbox, 0)
        assert expected_native_crs == native_crs
    assert_wfs_bbox(workspace, layer, expected_bbox_3857)
    assert_wms_bbox(workspace, layer, expected_bbox_3857)
    if expected_native_bbox is not None:
        assert_wfs_bbox(workspace, layer, expected_native_bbox, expected_bbox_crs=expected_native_crs)
        assert_wms_bbox(workspace, layer, expected_native_bbox, expected_bbox_crs=expected_native_crs)
    with app.app_context():
        expected_bbox_4326 = bbox_util.transform(expected_bbox_3857, crs_from=crs_def.EPSG_3857,
                                                 crs_to=crs_def.EPSG_4326, )
        md_comparison = get_workspace_layer_metadata_comparison(workspace, layer)
        csw_prefix = settings.CSW_PROXY_URL
        csw_src_key = get_source_key_from_metadata_comparison(md_comparison, csw_prefix)
        assert csw_src_key is not None
        prop_key = 'extent'
        md_props = md_comparison['metadata_properties']
        assert md_props[prop_key]['equal'] is True, md_props[prop_key]
        assert md_props[prop_key]['equal_or_null'] is True, md_props[prop_key]
        csw_bbox_4326 = tuple(md_props[prop_key]['values'][csw_src_key])
        # CSW stores the extent in EPSG:4326 with limited precision.
        assert_same_bboxes(expected_bbox_4326, csw_bbox_4326, 0.001)
def patch_after_feature_change(self, workspace, layer):
    """Refresh the GeoServer WFS feature type bbox after layer features changed.

    Raster layers have no WFS representation and are skipped; unknown file
    types raise NotImplementedError.
    """
    if self.is_aborted():
        raise AbortedException
    info = layman_util.get_publication_info(workspace, LAYER_TYPE, layer,
                                            context={'keys': ['file', 'native_crs']})
    file_type = info['file']['file_type']
    if file_type == settings.FILE_TYPE_RASTER:
        # Nothing to do: rasters are not served through WFS.
        return
    if file_type != settings.FILE_TYPE_VECTOR:
        raise NotImplementedError(f"Unknown file type: {file_type}")
    layer_bbox = geoserver.get_layer_bbox(workspace, layer)
    gs_util.patch_feature_type(workspace, layer, auth=settings.LAYMAN_GS_AUTH,
                               bbox=layer_bbox, crs=info['native_crs'])
    wfs.clear_cache(workspace)
    if self.is_aborted():
        raise AbortedException
def patch_after_feature_change(self, username, layername):
    """Recompute the layer bbox from the database and persist it after a feature change.

    Only vector layers are expected here (asserted).
    """
    if self.is_aborted():
        raise AbortedException
    info = layman_util.get_publication_info(username, LAYER_TYPE, layername,
                                            context={'keys': ['file', 'native_crs']})
    crs = info['native_crs']
    # Feature changes only happen on vector layers.
    assert info['file']['file_type'] == settings.FILE_TYPE_VECTOR
    bbox = db_get_bbox(username, layername)
    if self.is_aborted():
        raise AbortedException
    set_bbox(username, LAYER_TYPE, layername, bbox, crs)
    if self.is_aborted():
        raise AbortedException
def get_layer_info(workspace, layername, context=None):
    """Return layer info, overlaying celery task statuses onto the relevant info keys.

    When the processing chain exists and is not fully successful, each info key
    produced by a pending/failed task is replaced by its status dict; tasks
    after the first failure report 'NOT_AVAILABLE'.
    """
    partial_info = layman_util.get_publication_info(workspace, LAYER_TYPE, layername, context)
    chain_info = get_layer_chain(workspace, layername)
    if chain_info is None or celery_util.is_chain_successful(chain_info):
        return partial_info
    failed = False
    for res in chain_info['by_order']:
        task_name = next(name for name, result in chain_info['by_name'].items() if result == res)
        source_state = {'status': 'NOT_AVAILABLE' if failed else res.state}
        if res.failed():
            failed = True
            res_exc = res.get(propagate=False)
            # current_app.logger.info(f"Exception catched: {str(res_exc)}")
            if isinstance(res_exc, LaymanError):
                source_state['error'] = res_exc.to_dict()
        if task_name in TASKS_TO_LAYER_INFO_KEYS:
            for layerinfo_key in TASKS_TO_LAYER_INFO_KEYS[task_name]:
                # Overwrite missing keys always; present keys only when the task failed.
                if layerinfo_key not in partial_info or not res.successful():
                    partial_info[layerinfo_key] = source_state
    return partial_info
def get_map_info(workspace, mapname, context=None):
    """Return map info, overlaying celery task statuses onto the relevant info keys.

    Mirrors get_layer_info: info keys produced by pending/failed tasks are
    replaced by their status dict; tasks after the first failure report
    'NOT_AVAILABLE'.
    """
    partial_info = layman_util.get_publication_info(workspace, MAP_TYPE, mapname, context)
    chain_info = get_map_chain(workspace, mapname)
    if chain_info is None or celery_util.is_chain_successful(chain_info):
        return partial_info
    failed = False
    for res in chain_info['by_order']:
        task_name = next(name for name, result in chain_info['by_name'].items() if result == res)
        source_state = {'status': 'NOT_AVAILABLE' if failed else res.state}
        if res.failed():
            failed = True
            res_exc = res.get(propagate=False)
            if isinstance(res_exc, LaymanError):
                source_state['error'] = res_exc.to_dict()
        if task_name in TASKS_TO_MAP_INFO_KEYS:
            for mapinfo_key in TASKS_TO_MAP_INFO_KEYS[task_name]:
                # Overwrite missing keys always; present keys only when the task failed.
                if mapinfo_key not in partial_info or not res.successful():
                    partial_info[mapinfo_key] = source_state
    return partial_info
def publish_layer_from_qgis(workspace, layer, description, title, *, geoserver_workspace=None):
    """Cascade a QGIS-served layer into GeoServer as a WMS store plus WMS layer.

    :param geoserver_workspace: target GS workspace; defaults to *workspace*
    """
    gs_workspace = geoserver_workspace or workspace
    store_name = wms.get_qgis_store_name(layer)
    info = layman_util.get_publication_info(workspace, LAYER_TYPE, layer,
                                            context={'keys': ['wms', 'native_crs']})
    capabilities_url = info['_wms']['qgis_capabilities_url']
    crs = info['native_crs']
    # The WMS store cascades the QGIS server advertised by its capabilities URL.
    gs_util.create_wms_store(gs_workspace, settings.LAYMAN_GS_AUTH, store_name, capabilities_url)
    bbox = get_layer_bbox(workspace, layer)
    lat_lon_bbox = bbox_util.transform(bbox, crs, crs_def.EPSG_4326)
    gs_util.post_wms_layer(gs_workspace, layer, store_name, title, description, bbox, crs,
                           settings.LAYMAN_GS_AUTH, lat_lon_bbox=lat_lon_bbox)
def set_security_rules(workspace, layer, access_rights, auth, geoserver_workspace):
    """Ensure GeoServer read/write security rules for the layer.

    Rights missing from *access_rights* fall back to the layer's stored
    access rights.
    """
    geoserver_workspace = geoserver_workspace or workspace
    layer_info = None
    # Fetch stored rights only when the request does not supply both of them.
    if not (access_rights and access_rights.get('read') and access_rights.get('write')):
        layer_info = layman_util.get_publication_info(workspace, LAYER_TYPE, layer,
                                                      context={'keys': ['access_rights']})
    for right_name, rule_flag in (('read', 'r'), ('write', 'w')):
        roles = access_rights.get(right_name) if access_rights and access_rights.get(right_name) \
            else layer_info['access_rights'][right_name]
        security_roles = gs_common.layman_users_to_geoserver_roles(roles)
        gs_util.ensure_layer_security_roles(geoserver_workspace, layer, security_roles, rule_flag, auth)
def patch_publication_by_soap(workspace, publ_type, publ_name, metadata_properties_to_refresh, actor_name, access_rights, csw_source, csw_patch_method, soap_insert_method):
    """Patch the publication's CSW metadata record, using SOAP when necessary.

    Inserts the record via SOAP if it is missing from CSW, re-inserts it via
    SOAP when its visibility must change, and otherwise performs a plain CSW
    patch of the requested metadata properties.
    """
    publ_info = get_publication_info(workspace, publ_type, publ_name, context={
        'keys': ['access_rights'],
    })
    uuid = publ_info.get('uuid')
    csw_instance = create_csw()
    # Nothing to do without a publication UUID or a reachable CSW instance.
    if uuid is None or csw_instance is None:
        return
    muuid = csw_source.get_metadata_uuid(uuid)
    num_records = get_number_of_records(muuid, True)
    if num_records == 0:
        # Record does not exist in CSW yet: insert it with complete access rights.
        full_access_rights = authz.complete_access_rights(
            access_rights, publ_info['access_rights'])
        soap_insert_method(workspace, publ_name, full_access_rights, actor_name)
    else:
        use_soap = is_soap_visibility_change_needed(muuid, access_rights)
        if use_soap:
            # Visibility change requires delete + re-insert through SOAP.
            csw_delete(muuid)
            # Give CSW time to process the delete before re-inserting — TODO confirm necessity.
            time.sleep(1)
            soap_insert_method(workspace, publ_name, access_rights, actor_name)
        else:
            csw_patch_method(workspace, publ_name, metadata_properties_to_refresh, actor_name)
def generate_layer_thumbnail(workspace, layername):
    """Render the layer through WMS and save the result as its thumbnail PNG.

    Raises LaymanError when the WMS response is not a PNG image (i.e. the
    server returned an error document instead of the rendered map).
    """
    headers = {
        settings.LAYMAN_GS_AUTHN_HTTP_HEADER_ATTRIBUTE: settings.LAYMAN_GS_USER,
    }
    layer_info = get_publication_info(workspace, LAYER_TYPE, layername, context={'keys': ['wms', 'native_bounding_box', 'native_crs', ]})
    wms_url = layer_info['_wms']['url']
    native_bbox = layer_info['native_bounding_box']
    native_crs = layer_info['native_crs']
    # Empty bbox (layer without data) falls back to the CRS default bbox.
    raw_bbox = native_bbox if not bbox_util.is_empty(native_bbox) else crs_def.CRSDefinitions[native_crs].default_bbox
    # Pad degenerate (zero-area) bboxes so there is something to render.
    bbox = bbox_util.ensure_bbox_with_area(raw_bbox, crs_def.CRSDefinitions[native_crs].no_area_bbox_padding)
    tn_bbox = gs_util.get_square_bbox(bbox)
    # OWSLib getmap is deliberately not used here.
    # Reason: https://github.com/geopython/OWSLib/issues/709
    # tn_img = wms.getmap(
    #     layers=[layername],
    #     srs='EPSG:3857',
    #     bbox=tn_bbox,
    #     size=(300, 300),
    #     format='image/png',
    #     transparent=True,
    # )
    ensure_layer_thumbnail_dir(workspace, layername)
    tn_path = get_layer_thumbnail_path(workspace, layername)
    # out = open(tn_path, 'wb')
    # out.write(tn_img.read())
    # out.close()
    # NOTE(review): local import — presumably to avoid a circular import; confirm.
    from layman.layer.geoserver.wms import VERSION
    response = gs_util.get_layer_thumbnail(wms_url, layername, tn_bbox, native_crs, headers=headers, wms_version=VERSION)
    # Non-PNG content type means WMS returned an error document, not an image.
    if "png" not in response.headers['content-type'].lower():
        raise LaymanError("Thumbnail rendering failed", data=response.content)
    response.raise_for_status()
    with open(tn_path, "wb") as out_file:
        out_file.write(response.content)
def refresh_bbox(self, username, layername):
    """Recompute the layer bbox and CRS from its data source and persist them.

    Vector layers are measured from the database, rasters via GDAL.
    Raises AbortedException at each abort checkpoint.
    """
    if self.is_aborted():
        raise AbortedException
    file_type = layman_util.get_publication_info(username, LAYER_TYPE, layername,
                                                 context={'keys': ['file']})['file']['file_type']
    if file_type == settings.FILE_TYPE_VECTOR:
        bbox = db_get_bbox(username, layername)
        crs = db_get_crs(username, layername)
    elif file_type == settings.FILE_TYPE_RASTER:
        bbox = gdal_get_bbox(username, layername)
        crs = gdal_get_crs(username, layername)
    else:
        raise NotImplementedError(f"Unknown file type: {file_type}")
    if self.is_aborted():
        raise AbortedException
    set_bbox(username, LAYER_TYPE, layername, bbox, crs)
    if self.is_aborted():
        raise AbortedException
def get_template_path_and_values(username, mapname, http_method=None, actor_name=None):
    """Return (template_path, prop_values) for rendering the map's metadata XML record.

    :param http_method: common.REQUEST_METHOD_POST or _PATCH; POST drops revision_date
    :param actor_name: editor recorded into the operates-on links
    """
    assert http_method in [
        common.REQUEST_METHOD_POST, common.REQUEST_METHOD_PATCH
    ]
    uuid_file_path = get_publication_uuid_file(MAP_TYPE, username, mapname)
    # Publication date = mtime of the uuid file (i.e. publication creation time).
    publ_datetime = datetime.fromtimestamp(os.path.getmtime(uuid_file_path))
    revision_date = datetime.now()
    map_json = get_map_json(username, mapname)
    operates_on = map_json_to_operates_on(map_json, editor=actor_name)
    publ_info = get_publication_info(
        username, MAP_TYPE, mapname, context={
            'keys': ['title', 'bounding_box', 'description'],
        })
    bbox_3857 = publ_info.get('bounding_box')
    if bbox_util.is_empty(bbox_3857):
        bbox_3857 = settings.LAYMAN_DEFAULT_OUTPUT_BBOX
    # Metadata extent is expressed in EPSG:4326.
    extent = bbox_util.transform(tuple(bbox_3857), epsg_from=3857, epsg_to=4326)
    title = publ_info['title']
    abstract = publ_info.get('description')
    # Guess the metadata language from title + abstract; None if undetectable.
    md_language = next(
        iter(
            common_language.get_languages_iso639_2(' '.join([
                title or '',
                abstract or '',
            ]))),
        None)
    prop_values = _get_property_values(
        username=username,
        mapname=mapname,
        uuid=get_map_uuid(username, mapname),
        title=title,
        abstract=abstract or None,
        publication_date=publ_datetime.strftime('%Y-%m-%d'),
        revision_date=revision_date.strftime('%Y-%m-%d'),
        md_date_stamp=date.today().strftime('%Y-%m-%d'),
        identifier=url_for('rest_workspace_map.get', workspace=username, mapname=mapname),
        identifier_label=mapname,
        extent=extent,
        epsg_codes=map_json_to_epsg_codes(map_json),
        md_organisation_name=None,
        organisation_name=None,
        operates_on=operates_on,
        md_language=md_language,
    )
    if http_method == common.REQUEST_METHOD_POST:
        # Freshly created records have no revision yet.
        prop_values.pop('revision_date', None)
    template_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'record-template.xml')
    return template_path, prop_values
def get_map_info(workspace, mapname, context=None):
    """Return map info with celery chain task statuses filled into the info keys."""
    partial_info = layman_util.get_publication_info(workspace, MAP_TYPE, mapname, context)
    chain_info = get_map_chain(workspace, mapname)
    return fill_in_partial_info_statuses(partial_info, chain_info)
def does_not_exist(workspace, publ_type, name):
    """Assert that the publication has no info record at all."""
    with app.app_context():
        info = layman_util.get_publication_info(workspace, publ_type, name)
        assert not info, info
def get_layer_info(workspace, layername, context=None):
    """Return layer info with celery chain task statuses filled into the info keys."""
    partial_info = layman_util.get_publication_info(workspace, LAYER_TYPE, layername, context)
    chain_info = get_layer_chain(workspace, layername)
    return fill_in_partial_info_statuses(partial_info, chain_info)
def get_publication_writer(publication):
    """Return the first user holding write access to the publication."""
    with app.app_context():
        access_rights = layman_util.get_publication_info(
            publication.workspace, publication.type, publication.name,
            context={'keys': ['access_rights']})['access_rights']
        return access_rights['write'][0]
def get_layer_native_bbox(workspace, layer):
    """Return the layer bbox as a GeoServer bbox dict in the layer's native CRS."""
    bbox = get_layer_bbox(workspace, layer)
    native_crs = layman_util.get_publication_info(workspace, LAYER_TYPE, layer,
                                                  context={'keys': ['native_crs']})['native_crs']
    return gs_util.bbox_to_dict(bbox, native_crs)
def source_has_its_key_or_it_is_empty(workspace, publ_type, name):
    """For every internal-source info key, assert the key is present in its filtered info, or the info is empty."""
    with app.app_context():
        source_defs = layman_util.get_publication_types()[publ_type]['internal_sources'].values()
        for source_def in source_defs:
            for key in source_def.info_items:
                info = layman_util.get_publication_info(
                    workspace, publ_type, name, {'keys': [key]})
                assert key in info or not info, info
def get_layer_bbox(workspace, layer):
    """Return the layer's stored bbox, defaulted/padded so GeoServer gets a usable area."""
    db_bbox = layman_util.get_publication_info(workspace, LAYER_TYPE, layer,
                                               context={'keys': ['bounding_box']})['bounding_box']
    if bbox_util.is_empty(db_bbox):
        # No data yet: fall back to the configured default output bbox.
        return settings.LAYMAN_DEFAULT_OUTPUT_BBOX
    # GeoServer is not working good with degradeted (zero-area) bboxes, so pad them.
    return bbox_util.ensure_bbox_with_area(db_bbox, settings.NO_AREA_BBOX_PADDING)
def test_auth_get_publication(workspace, publ_type, publication):
    """Readers get the publication info; non-readers get an empty dict."""
    ensure_publication(workspace, publ_type, publication)
    auth_info = util.get_users_and_headers_for_publication(workspace, publ_type, publication)
    readers = auth_info['read'][util.KEY_AUTH][util.KEY_USERS]
    non_readers = auth_info['read'][util.KEY_NOT_AUTH][util.KEY_USERS]
    for user in readers:
        with app.app_context():
            pub_info = layman_util.get_publication_info(
                workspace, publ_type, publication, {'actor_name': user})
            assert pub_info['name'] == publication, f'pub_info={pub_info}'
            assert pub_info['type'] == publ_type, f'pub_info={pub_info}'
    for user in non_readers:
        with app.app_context():
            pub_info = layman_util.get_publication_info(
                workspace, publ_type, publication, {'actor_name': user})
            assert pub_info == dict(), pub_info
def get_metadata_comparison(workspace, layername):
    """Harvest WFS capabilities properties of a vector layer for metadata comparison.

    Returns a dict mapping the workspace capabilities URL to the harvested
    properties; returns an empty dict for raster/unknown layers or when the
    WFS endpoint is unreachable.

    :raises NotImplementedError: for an unrecognized file type
    """
    info = layman_util.get_publication_info(workspace, LAYER_TYPE, layername, context={'keys': ['file', ]})
    file_type = info['file']['file_type']
    if file_type in (settings.FILE_TYPE_RASTER, settings.FILE_TYPE_UNKNOWN):
        return dict()
    if file_type != settings.FILE_TYPE_VECTOR:
        raise NotImplementedError(f"Unknown file type: {file_type}")
    wfs = get_wfs_direct(workspace)
    if wfs is None:
        return {}
    cap_op = wfs.getOperationByName('GetCapabilities')
    # Bug fix: a capabilities method without a "type" entry used to raise
    # AttributeError (None.lower()); treat missing "type" as a non-match.
    wfs_url = next(
        (
            m.get("url")
            for m in (cap_op.methods if cap_op else [])
            if (m.get("type") or '').lower() == 'get'
        ),
        None
    )
    wfs_layername = f"{workspace}:{layername}"
    wfs_layer = wfs.contents.get(wfs_layername, None)
    # Each property is best-effort: any failure yields None for that property.
    try:
        title = wfs_layer.title
    except BaseException:
        title = None
    try:
        abstract = wfs_layer.abstract
    except BaseException:
        abstract = None
    try:
        extent = wfs_layer.boundingBox[:-1]
    except BaseException:
        extent = None
    try:
        crs_list = [int(crs.getcode().split(':')[-1]) for crs in wfs_layer.crsOptions]
        crs_list.append(4326)
        reference_system = sorted(set(crs_list))
    except BaseException as exception:
        current_app.logger.error(exception)
        reference_system = None
    props = {
        'wfs_url': wfs_url,
        'title': title,
        'abstract': abstract,
        'extent': extent,
        'reference_system': reference_system,
    }
    # current_app.logger.info(f"props:\n{json.dumps(props, indent=2)}")
    url = get_capabilities_url(workspace)
    return {
        f"{url}": props
    }
def mandatory_keys_in_all_sources(workspace, publ_type, name):
    """Assert the full publication info contains every mandatory info key."""
    mandatory_keys = {'name', 'title', 'access_rights', 'uuid', 'metadata', 'file'}
    with app.app_context():
        pub_info = layman_util.get_publication_info(workspace, publ_type, name)
        assert mandatory_keys.issubset(set(pub_info)), pub_info
def nodata_preserved_in_normalized_raster(workspace, publ_type, name):
    """For raster layers, assert normalization kept the original nodata value."""
    with app.app_context():
        publ_info = layman_util.get_publication_info(workspace, publ_type, name, {'keys': ['file']})
        if publ_info['file']['file_type'] == settings.FILE_TYPE_RASTER:
            input_nodata = gdal.get_nodata_value(publ_info['_file']['gdal_path'])
            normalized_nodata = gdal.get_nodata_value(publ_info['_file']['normalized_file']['path'])
            # Compare with tight relative tolerance to absorb float round-trips.
            assert normalized_nodata == pytest.approx(input_nodata, 0.000000001)
def get_template_path_and_values(workspace, layername, http_method=None):
    """Return (template_path, prop_values) for rendering the layer's metadata XML record.

    :param http_method: common.REQUEST_METHOD_POST or _PATCH; POST drops revision_date
    :return: tuple of the record template path and the property values dict
    """
    assert http_method in [common.REQUEST_METHOD_POST, common.REQUEST_METHOD_PATCH]
    publ_info = get_publication_info(workspace, LAYER_TYPE, layername, context={
        'keys': ['title', 'bounding_box', 'description'],
    })
    title = publ_info['title']
    abstract = publ_info.get('description')
    bbox_3857 = publ_info.get('bounding_box')
    if bbox_util.is_empty(bbox_3857):
        bbox_3857 = settings.LAYMAN_DEFAULT_OUTPUT_BBOX
    # Metadata extent is expressed in EPSG:4326.
    extent = bbox_util.transform(tuple(bbox_3857), epsg_from=3857, epsg_to=4326)
    # Publication date = mtime of the uuid file (i.e. publication creation time).
    uuid_file_path = get_publication_uuid_file(LAYER_TYPE, workspace, layername)
    publ_datetime = datetime.fromtimestamp(os.path.getmtime(uuid_file_path))
    revision_date = datetime.now()
    # Guess the metadata language from title + abstract; None if undetectable.
    md_language = next(iter(common_language.get_languages_iso639_2(' '.join([
        title or '',
        abstract or ''
    ]))), None)
    # Both DB probes are best-effort; failures degrade to empty/None values.
    try:
        languages = db.get_text_languages(workspace, layername)
    except LaymanError:
        languages = []
    try:
        scale_denominator = db.guess_scale_denominator(workspace, layername)
    except LaymanError:
        scale_denominator = None
    prop_values = _get_property_values(
        workspace=workspace,
        layername=layername,
        uuid=get_layer_uuid(workspace, layername),
        title=title,
        abstract=abstract or None,
        publication_date=publ_datetime.strftime('%Y-%m-%d'),
        revision_date=revision_date.strftime('%Y-%m-%d'),
        md_date_stamp=date.today().strftime('%Y-%m-%d'),
        identifier=url_for('rest_workspace_layer.get', workspace=workspace, layername=layername),
        identifier_label=layername,
        extent=extent,
        wms_url=wms.get_wms_url(workspace, external_url=True),
        wfs_url=wfs.get_wfs_url(workspace, external_url=True),
        md_organisation_name=None,
        organisation_name=None,
        md_language=md_language,
        languages=languages,
        scale_denominator=scale_denominator,
        epsg_codes=settings.LAYMAN_OUTPUT_SRS_LIST,
    )
    if http_method == common.REQUEST_METHOD_POST:
        # Freshly created records have no revision yet.
        prop_values.pop('revision_date', None)
    template_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'record-template.xml')
    # Bug fix: the return statement was broken across lines, so the function
    # returned None and the values were an unreachable expression statement.
    return template_path, prop_values
def same_title_in_source_and_rest_multi(workspace, publ_type, name, headers):
    """Title from internal info equals the title in the multi-publication REST response."""
    with app.app_context():
        title = layman_util.get_publication_info(
            workspace, publ_type, name, context={'keys': ['title']})['title']
    infos = process_client.get_workspace_publications(publ_type, workspace, headers=headers)
    publication_infos = [info for info in infos if info['name'] == name]
    rest_info = next(iter(publication_infos))
    assert rest_info['title'] == title, f'publication_infos={publication_infos}'
def patch_layer(workspace, layername, title, description, access_rights=None):
    """Patch the GeoServer WMS representation (title/description) and security rules of a layer.

    No-op when the layer info does not exist. Vector layers are patched as
    feature types (SLD style) or cascaded WMS layers (QML style); raster
    layers as coverages.
    """
    if not get_layer_info(workspace, layername):
        return
    geoserver_workspace = get_geoserver_workspace(workspace)
    info = layman_util.get_publication_info(workspace, LAYER_TYPE, layername,
                                            context={'keys': ['style_type', 'file']})
    file_type = info['file']['file_type']
    if file_type == settings.FILE_TYPE_VECTOR:
        style_type = info['style_type']
        if style_type == 'sld':
            gs_util.patch_feature_type(geoserver_workspace, layername, title=title,
                                       description=description, auth=settings.LAYMAN_GS_AUTH)
        if style_type == 'qml':
            gs_util.patch_wms_layer(geoserver_workspace, layername, title=title,
                                    description=description, auth=settings.LAYMAN_GS_AUTH)
    elif file_type == settings.FILE_TYPE_RASTER:
        store = get_geotiff_store_name(layername)
        gs_util.patch_coverage(geoserver_workspace, layername, store, title=title,
                               description=description, auth=settings.LAYMAN_GS_AUTH)
    else:
        raise NotImplementedError(f"Unknown file type: {file_type}")
    clear_cache(workspace)
    # Update security rules only for the rights explicitly provided.
    if access_rights:
        for right_name, rule_flag in (('read', 'r'), ('write', 'w')):
            roles = access_rights.get(right_name)
            if roles:
                security_roles = gs_common.layman_users_to_geoserver_roles(roles)
                gs_util.ensure_layer_security_roles(geoserver_workspace, layername,
                                                    security_roles, rule_flag,
                                                    settings.LAYMAN_GS_AUTH)
def thumbnail_equals(workspace, publ_type, name, exp_thumbnail):
    """Assert the stored thumbnail differs from the expected image by fewer than 500 pixels."""
    with app.app_context():
        pub_info = layman_util.get_publication_info(workspace, publ_type, name, {'keys': ['thumbnail']})
        thumbnail_path = pub_info['_thumbnail']['path']
        diffs = test_util.compare_images(exp_thumbnail, thumbnail_path)
        assert diffs < 500
def patch_after_feature_change(self, workspace, layer,):
    """Refresh the 'extent' property of the layer's SOAP/CSW metadata record after a feature change.

    Also cleans up the metadata record if the publication was deleted while
    this task was running (see the race-condition note below).
    """
    if self.is_aborted():
        raise AbortedException
    uuid = layman_util.get_publication_info(workspace, LAYER_TYPE, layer, context={'keys': ['uuid']})['uuid']
    soap.patch_layer(workspace, layer, metadata_properties_to_refresh=['extent'])
    # Sometimes, when delete request run just after other request for the same publication (for example WFS-T),
    # the aborted task keep running and finish after end of delete task for the same source. This part make sure,
    # that in that case we delete it.
    info = layman_util.get_publication_info(workspace, LAYER_TYPE, layer, context={'keys': ['name']})
    if not info:
        logger.warning(f"layman.layer.micka.soap.patch_after_feature_change: workspace={workspace}, "
                       f"layer={layer}, uuid={uuid} Publication does not exist, so we delete it")
        soap.delete_layer(workspace, layer, backup_uuid=uuid)
    if self.is_aborted():
        raise AbortedException
def metadata_key_sources_do_not_contain_other_keys(workspace, publ_type, name):
    """Info filtered to 'metadata' contains that key and none of the unrelated ones."""
    unrelated_keys = {'name', 'title', 'access_rights', 'uuid', 'file'}
    with app.app_context():
        pub_info = layman_util.get_publication_info(workspace, publ_type, name, {'keys': ['metadata']})
        assert {'metadata'}.issubset(set(pub_info)), pub_info
        assert all(key not in pub_info for key in unrelated_keys), pub_info