def test_patch_layer_style(client):
    with app.app_context():
        workspace = 'testuser1'
        layername = 'ne_110m_admin_0_countries'
        rest_path = url_for('rest_workspace_layer.patch', workspace=workspace, layername=layername)
        sld_path = 'sample/style/generic-blue_sld.xml'
        assert os.path.isfile(sld_path)
        response = client.patch(rest_path, data={
            'style': (open(sld_path, 'rb'), os.path.basename(sld_path)),
            'title': 'countries in blue'
        })
        assert response.status_code == 200

        # last_task = util._get_layer_task(workspace, layername)
        # Time to generate testing thumbnail is probably shorter than getting & parsing WMS/WFS capabilities documents,
        # so it's finished before PATCH request is completed
        # assert last_task is not None and not util._is_task_ready(last_task)
        # resp_json = rv.get_json()
        # keys_to_check = ['thumbnail']
        # for key_to_check in keys_to_check:
        #     assert 'status' in resp_json[key_to_check]
        flask_client.wait_till_layer_ready(workspace, layername)
        # last_task['last'].get()

        resp_json = response.get_json()
        assert resp_json['title'] == "countries in blue"

        wms_url = geoserver_wms.get_wms_url(workspace)
        wms = wms_proxy(wms_url)
        assert layername in wms.contents
        assert wms[layername].title == 'countries in blue'
        assert wms[layername].styles[
            workspace + '_wms:' + layername]['title'] == 'Generic Blue'

        uuid.check_redis_consistency(expected_publ_num_by_type={
            f'{LAYER_TYPE}': publication_counter.get()
        })

        expected_md_values = {
            'abstract': "and new description",
            'extent': [-180.0, -85.60903859383285, 180.0, 83.64513109859944],
            'graphic_url': url_for_external('rest_workspace_layer_thumbnail.get', workspace=workspace, layername=layername),
            'identifier': {
                'identifier': url_for_external('rest_workspace_layer.get', workspace=workspace, layername=layername),
                'label': 'ne_110m_admin_0_countries'
            },
            'language': ['eng'],
            'layer_endpoint': url_for_external('rest_workspace_layer.get', workspace=workspace, layername=layername),
            'organisation_name': None,
            'publication_date': TODAY_DATE,
            'reference_system': EXP_REFERENCE_SYSTEMS,
            'revision_date': TODAY_DATE,
            'spatial_resolution': {
                'scale_denominator': 100000000,
            },
            'title': 'countries in blue',
        }
        check_metadata(client, workspace, layername, METADATA_PROPERTIES_EQUAL, expected_md_values)
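# The tests in this module call a wms_proxy() helper that is not defined in this
# section. A minimal sketch of what it is assumed to do, namely parse the workspace
# WMS capabilities with OWSLib so that `wms.contents`, `wms[layername].title` and
# `wms[layername].styles` are available to the assertions; the version value and the
# lack of authentication are assumptions, not the project's actual implementation.
# from owslib.wms import WebMapService
#
# def wms_proxy(wms_url, version='1.3.0'):
#     # hypothetical: return parsed GetCapabilities of the workspace WMS endpoint
#     return WebMapService(wms_url, version=version)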
def assert_wms_layer(workspace, layername, exp_title):
    wms = WebMapService(gs_wms.get_wms_url(workspace), gs_wms.VERSION)
    assert layername in wms.contents
    wms_layer = wms[layername]
    assert wms_layer.title == exp_title
    assert_non_empty_bbox(wms_layer.boundingBox)
    assert_non_empty_bbox(wms_layer.boundingBoxWGS84)
    return wms_layer
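# assert_non_empty_bbox() used above is not defined in this section. A minimal
# sketch of the check it is assumed to perform (a non-degenerate extent in both
# axes); the OWSLib bbox layout (min_x, min_y, max_x, max_y[, crs]) is an assumption.
def assert_non_empty_bbox(bbox):
    min_x, min_y, max_x, max_y = bbox[:4]
    assert min_x < max_x, bbox
    assert min_y < max_y, bbox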
def get_template_path_and_values(workspace, layername, http_method=None):
    assert http_method in [common.REQUEST_METHOD_POST, common.REQUEST_METHOD_PATCH]
    publ_info = get_publication_info(workspace, LAYER_TYPE, layername, context={
        'keys': ['title', 'bounding_box', 'description'],
    })
    title = publ_info['title']
    abstract = publ_info.get('description')
    bbox_3857 = publ_info.get('bounding_box')
    if bbox_util.is_empty(bbox_3857):
        bbox_3857 = settings.LAYMAN_DEFAULT_OUTPUT_BBOX
    extent = bbox_util.transform(tuple(bbox_3857), epsg_from=3857, epsg_to=4326)

    uuid_file_path = get_publication_uuid_file(LAYER_TYPE, workspace, layername)
    publ_datetime = datetime.fromtimestamp(os.path.getmtime(uuid_file_path))
    revision_date = datetime.now()
    md_language = next(iter(common_language.get_languages_iso639_2(' '.join([
        title or '',
        abstract or ''
    ]))), None)

    try:
        languages = db.get_text_languages(workspace, layername)
    except LaymanError:
        languages = []

    try:
        scale_denominator = db.guess_scale_denominator(workspace, layername)
    except LaymanError:
        scale_denominator = None

    prop_values = _get_property_values(
        workspace=workspace,
        layername=layername,
        uuid=get_layer_uuid(workspace, layername),
        title=title,
        abstract=abstract or None,
        publication_date=publ_datetime.strftime('%Y-%m-%d'),
        revision_date=revision_date.strftime('%Y-%m-%d'),
        md_date_stamp=date.today().strftime('%Y-%m-%d'),
        identifier=url_for('rest_workspace_layer.get', workspace=workspace, layername=layername),
        identifier_label=layername,
        extent=extent,
        wms_url=wms.get_wms_url(workspace, external_url=True),
        wfs_url=wfs.get_wfs_url(workspace, external_url=True),
        md_organisation_name=None,
        organisation_name=None,
        md_language=md_language,
        languages=languages,
        scale_denominator=scale_denominator,
        epsg_codes=settings.LAYMAN_OUTPUT_SRS_LIST,
    )
    if http_method == common.REQUEST_METHOD_POST:
        prop_values.pop('revision_date', None)
    template_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'record-template.xml')
    return template_path, prop_values
def test_post_layers_shp(client):
    workspace = 'testuser1'
    layername = 'ne_110m_admin_0_countries_shp'
    rest_path = url_for('rest_workspace_layers.post', workspace=workspace)
    file_paths = [
        'tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.cpg',
        'tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.dbf',
        'tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.prj',
        'tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.README.html',
        'tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.shp',
        'tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.shx',
        'tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.VERSION.txt',
    ]
    for file_path in file_paths:
        assert os.path.isfile(file_path)
    files = []
    try:
        files = [(open(fp, 'rb'), os.path.basename(fp)) for fp in file_paths]
        response = client.post(rest_path, data={
            'file': files,
            'name': layername,
        })
        assert response.status_code == 200
    finally:
        for file_path in files:
            file_path[0].close()

    chain_info = util.get_layer_chain(workspace, layername)
    assert chain_info is not None and not celery_util.is_chain_ready(chain_info)
    flask_client.wait_till_layer_ready(workspace, layername)
    # last_task['last'].get()

    wms_url = geoserver_wms.get_wms_url(workspace)
    wms = wms_proxy(wms_url)
    assert 'ne_110m_admin_0_countries_shp' in wms.contents

    publication_counter.increase()
    uuid.check_redis_consistency(expected_publ_num_by_type={
        f'{LAYER_TYPE}': publication_counter.get()
    })
def migrate_layer_metadata(workspace_filter=None):
    logger.info(f'  Starting - migrate layer metadata records')
    query = f'''
    select w.name, p.name
    from {DB_SCHEMA}.publications p
        inner join {DB_SCHEMA}.workspaces w on w.id = p.id_workspace
    where p.type = %s
    '''
    params = (LAYER_TYPE,)
    if workspace_filter:
        query = query + ' AND w.name = %s'
        params = params + (workspace_filter,)
    publications = db_util.run_query(query, params)
    for (workspace, layer) in publications:
        logger.info(f'    Migrate layer {workspace}.{layer}')
        try:
            muuid = layer_csw.patch_layer(workspace, layer, ['wms_url', 'wfs_url'],
                                          create_if_not_exists=False, timeout=2)
            if not muuid:
                logger.warning(f'      Metadata record of layer was not migrated, because the record does not exist.')
        except requests.exceptions.ReadTimeout:
            md_props = list(layer_csw.get_metadata_comparison(workspace, layer).values())
            md_wms_url = md_props[0]['wms_url'] if md_props else None
            base_wms_url = wms.add_capabilities_params_to_url(wms.get_wms_url(workspace, external_url=True))
            exp_wms_url = f"{base_wms_url}&LAYERS={layer}"
            if md_wms_url != exp_wms_url:
                logger.exception(f'      WMS URL was not migrated (should be {exp_wms_url}, but is {md_wms_url})!')
        time.sleep(0.5)
    logger.info(f'  DONE - migrate layer metadata records')
def test_post_layers_sld_1_1_0(client):
    workspace = 'testuser1'
    layername = 'countries_sld_1_1_0'
    rest_path = url_for('rest_workspace_layers.post', workspace=workspace, layername=layername)
    file_paths = [
        'sample/data/test_layer4.geojson',
    ]
    for file_path in file_paths:
        assert os.path.isfile(file_path)
    files = []
    sld_path = 'sample/style/sld_1_1_0.xml'
    assert os.path.isfile(sld_path)
    try:
        files = [(open(fp, 'rb'), os.path.basename(fp)) for fp in file_paths]
        response = client.post(rest_path, data={
            'file': files,
            'name': layername,
            'style': (open(sld_path, 'rb'), os.path.basename(sld_path)),
        })
        assert response.status_code == 200
        resp_json = response.get_json()
        # print(resp_json)
        assert layername == resp_json[0]['name']
    finally:
        for file_path in files:
            file_path[0].close()

    layer_info = util.get_layer_info(workspace, layername)
    while ('status' in layer_info['wms'] and layer_info['wms']['status'] in ['PENDING', 'STARTED']) \
            or ('status' in layer_info['style'] and layer_info['style']['status'] in ['PENDING', 'STARTED']):
        time.sleep(0.1)
        layer_info = util.get_layer_info(workspace, layername)

    wms_url = geoserver_wms.get_wms_url(workspace)
    wms = wms_proxy(wms_url)
    assert layername in wms.contents
    assert wms[layername].title == 'countries_sld_1_1_0'

    style_url = geoserver_sld.get_workspace_style_url(workspace, layername)
    response = requests.get(style_url + '.sld',
                            auth=settings.LAYMAN_GS_AUTH
                            )
    response.raise_for_status()
    sld_file = io.BytesIO(response.content)
    tree = ET.parse(sld_file)
    root = tree.getroot()
    assert root.attrib['version'] == '1.1.0'
    assert root[0][1][1][0][1][0][0].text == '#e31a1c'
    # assert wms[layername].styles[
    #     username + ':' + layername]['title'] == 'test_layer2'

    publication_counter.increase()
    uuid.check_redis_consistency(expected_publ_num_by_type={
        f'{LAYER_TYPE}': publication_counter.get()
    })

    rest_path = url_for('rest_workspace_layer.delete_layer', workspace=workspace, layername=layername)
    response = client.delete(rest_path)
    assert response.status_code == 200

    publication_counter.decrease()
    uuid.check_redis_consistency(expected_publ_num_by_type={
        f'{LAYER_TYPE}': publication_counter.get()
    })
def test_post_layers_complex(client):
    with app.app_context():
        workspace = 'testuser2'
        rest_path = url_for('rest_workspace_layers.post', workspace=workspace)
        file_paths = [
            'tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.geojson',
        ]
        for file_path in file_paths:
            assert os.path.isfile(file_path)
        files = []
        sld_path = 'sample/style/generic-blue_sld.xml'
        assert os.path.isfile(sld_path)
        layername = ''
        try:
            files = [(open(fp, 'rb'), os.path.basename(fp)) for fp in file_paths]
            response = client.post(rest_path, data={
                'file': files,
                'name': 'countries',
                'title': 'staty',
                'description': 'popis států',
                'style': (open(sld_path, 'rb'), os.path.basename(sld_path)),
            })
            assert response.status_code == 200
            resp_json = response.get_json()
            # print(resp_json)
            layername = resp_json[0]['name']
        finally:
            for file_path in files:
                file_path[0].close()

        chain_info = util.get_layer_chain(workspace, layername)
        assert chain_info is not None and not celery_util.is_chain_ready(chain_info)
        flask_client.wait_till_layer_ready(workspace, layername)
        # last_task['last'].get()
        assert celery_util.is_chain_ready(chain_info)

        wms_url = geoserver_wms.get_wms_url(workspace)
        wms = wms_proxy(wms_url)
        assert 'countries' in wms.contents
        assert wms['countries'].title == 'staty'
        assert wms['countries'].abstract == 'popis států'
        assert wms['countries'].styles[workspace + '_wms:countries']['title'] == 'Generic Blue'

        assert layername != ''
        rest_path = url_for('rest_workspace_layer.get', workspace=workspace, layername=layername)
        response = client.get(rest_path)
        assert 200 <= response.status_code < 300
        resp_json = response.get_json()
        # print(resp_json)
        assert resp_json['title'] == 'staty'
        assert resp_json['description'] == 'popis států'
        for source in [
            'wms',
            'wfs',
            'thumbnail',
            'file',
            'db_table',
            'metadata',
        ]:
            assert 'status' not in resp_json[source]

        style_url = geoserver_sld.get_workspace_style_url(workspace, layername)
        response = requests.get(style_url + '.sld',
                                auth=settings.LAYMAN_GS_AUTH
                                )
        response.raise_for_status()
        sld_file = io.BytesIO(response.content)
        tree = ET.parse(sld_file)
        root = tree.getroot()
        assert root.attrib['version'] == '1.0.0'

        feature_type = get_feature_type(workspace, 'postgresql', layername)
        attributes = feature_type['attributes']['attribute']
        assert next((
            a for a in attributes if a['name'] == 'sovereignt'
        ), None) is not None

        publication_counter.increase()
        uuid.check_redis_consistency(expected_publ_num_by_type={
            f'{LAYER_TYPE}': publication_counter.get()
        })

    with app.app_context():
        expected_md_values = {
            'abstract': "popis st\u00e1t\u016f",
            'extent': [-180.0, -85.60903859383285, 180.0, 83.64513109859944],
            'graphic_url': url_for_external('rest_workspace_layer_thumbnail.get', workspace=workspace, layername=layername),
            'identifier': {
                "identifier": url_for_external('rest_workspace_layer.get', workspace=workspace, layername=layername),
                "label": "countries"
            },
            'language': ["eng"],
            'layer_endpoint': url_for_external('rest_workspace_layer.get', workspace=workspace, layername=layername),
            'organisation_name': None,
            'publication_date': TODAY_DATE,
            'reference_system': EXP_REFERENCE_SYSTEMS,
            'revision_date': None,
            'spatial_resolution': {
                'scale_denominator': 100000000,
            },
            'title': "staty",
        }
        check_metadata(client, workspace, layername, METADATA_PROPERTIES_EQUAL, expected_md_values)
def test_post_layers_simple(client):
    with app.app_context():
        workspace = 'testuser1'
        rest_path = url_for('rest_workspace_layers.post', workspace=workspace)
        file_paths = [
            'tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.geojson',
        ]
        for file_path in file_paths:
            assert os.path.isfile(file_path)
        files = []
        try:
            files = [(open(fp, 'rb'), os.path.basename(fp)) for fp in file_paths]
            response = client.post(rest_path, data={
                'file': files,
            })
            assert response.status_code == 200
        finally:
            for file_path in files:
                file_path[0].close()

        layername = 'ne_110m_admin_0_countries'

        chain_info = util.get_layer_chain(workspace, layername)
        assert chain_info is not None and not celery_util.is_chain_ready(chain_info)
        layer_info = util.get_layer_info(workspace, layername)
        keys_to_check = ['db_table', 'wms', 'wfs', 'thumbnail', 'metadata']
        for key_to_check in keys_to_check:
            assert 'status' in layer_info[key_to_check]

        # For some reason this hangs forever on get() if run (either with
        # src/layman/authz/read_everyone_write_owner_auth2_test.py::test_authn_map_access_rights
        # or src/layman/authn/oauth2_test.py::test_patch_current_user_without_username)
        # and with src/layman/common/metadata/util.csw_insert
        # last_task['last'].get()
        # e.g. python3 -m pytest -W ignore::DeprecationWarning -xsvv src/layman/authn/oauth2_test.py::test_patch_current_user_without_username src/layman/layer/rest_workspace_test.py::test_post_layers_simple
        # this can badly affect also .get(propagate=False) in layman.celery.abort_task_chain
        # but hopefully this is only related to magic flask&celery test suite
        flask_client.wait_till_layer_ready(workspace, layername)

        layer_info = util.get_layer_info(workspace, layername)
        for key_to_check in keys_to_check:
            assert isinstance(layer_info[key_to_check], str) \
                or 'status' not in layer_info[key_to_check]

        wms_url = geoserver_wms.get_wms_url(workspace)
        wms = wms_proxy(wms_url)
        assert layername in wms.contents

        from layman.layer import get_layer_type_def
        from layman.common.filesystem import uuid as common_uuid
        uuid_filename = common_uuid.get_publication_uuid_file(
            get_layer_type_def()['type'], workspace, layername)
        assert os.path.isfile(uuid_filename)
        uuid_str = None
        with open(uuid_filename, "r") as file:
            uuid_str = file.read().strip()
        assert uuid.is_valid_uuid(uuid_str)
        assert settings.LAYMAN_REDIS.sismember(uuid.UUID_SET_KEY, uuid_str)
        assert settings.LAYMAN_REDIS.exists(uuid.get_uuid_metadata_key(uuid_str))
        assert settings.LAYMAN_REDIS.hexists(
            uuid.get_workspace_type_names_key(workspace, '.'.join(__name__.split('.')[:-1])),
            layername
        )

        layer_info = client.get(url_for('rest_workspace_layer.get', workspace=workspace, layername=layername)).get_json()
        assert set(layer_info['metadata'].keys()) == {'identifier', 'csw_url', 'record_url', 'comparison_url'}
        assert layer_info['metadata']['identifier'] == f"m-{uuid_str}"
        assert layer_info['metadata']['csw_url'] == settings.CSW_PROXY_URL
        md_record_url = f"http://micka:80/record/basic/m-{uuid_str}"
        assert layer_info['metadata']['record_url'].replace("http://localhost:3080", "http://micka:80") == md_record_url
        assert layer_info['metadata']['comparison_url'] == url_for_external('rest_workspace_layer_metadata_comparison.get',
                                                                            workspace=workspace, layername=layername)
        assert 'id' not in layer_info.keys()
        assert 'type' not in layer_info.keys()

        response = requests.get(md_record_url, auth=settings.CSW_BASIC_AUTHN)
        response.raise_for_status()
        assert layername in response.text

        publication_counter.increase()
        uuid.check_redis_consistency(expected_publ_num_by_type={
            f'{LAYER_TYPE}': publication_counter.get()
        })

    with app.app_context():
        expected_md_values = {
            'abstract': None,
            'extent': [-180.0, -85.60903859383285, 180.0, 83.64513109859944],
            'graphic_url': url_for_external('rest_workspace_layer_thumbnail.get', workspace=workspace, layername=layername),
            'identifier': {
                'identifier': url_for_external('rest_workspace_layer.get', workspace=workspace, layername=layername),
                'label': 'ne_110m_admin_0_countries'
            },
            'language': ['eng'],
            'layer_endpoint': url_for_external('rest_workspace_layer.get', workspace=workspace, layername=layername),
            'organisation_name': None,
            'publication_date': TODAY_DATE,
            'reference_system': EXP_REFERENCE_SYSTEMS,
            'revision_date': None,
            'spatial_resolution': {
                'scale_denominator': 100000000,
            },
            'title': 'ne_110m_admin_0_countries',
        }
        check_metadata(client, workspace, layername, METADATA_PROPERTIES_EQUAL, expected_md_values)
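# flask_client.wait_till_layer_ready() is used throughout the tests above; a hedged
# sketch of the polling it is assumed to perform against util.get_layer_info(),
# mirroring the explicit while-loop in test_post_layers_sld_1_1_0. The function name,
# signature and interval are assumptions, not the project's actual helper.
def wait_till_layer_ready(workspace, layername, interval=0.1):
    layer_info = util.get_layer_info(workspace, layername)
    while any(isinstance(value, dict) and value.get('status') in ('PENDING', 'STARTED')
              for value in layer_info.values()):
        time.sleep(interval)
        layer_info = util.get_layer_info(workspace, layername)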
def migrate_metadata_records(workspace_filter=None):
    logger.info(f'  Starting - migrate publication metadata records')
    query = f'''
    select w.name, p.name
    from {db_schema}.publications p
        inner join {db_schema}.workspaces w on w.id = p.id_workspace
    where p.type = %s
    '''
    params = (LAYER_TYPE,)
    if workspace_filter:
        query = query + ' AND w.name = %s'
        params = params + (workspace_filter,)
    publications = db_util.run_query(query, params)
    for (workspace, layer) in publications:
        wms.clear_cache(workspace)
        logger.info(f'    Migrate layer {workspace}.{layer}')
        try:
            muuid = layer_csw.patch_layer(workspace, layer, [
                'wms_url',
                'graphic_url',
                'identifier',
                'layer_endpoint',
            ], create_if_not_exists=False, timeout=2)
            if not muuid:
                logger.warning(f'      Metadata record of layer was not migrated, because the record does not exist.')
        except requests.exceptions.ReadTimeout:
            md_props = list(layer_csw.get_metadata_comparison(workspace, layer).values())
            md_wms_url = md_props[0]['wms_url'] if md_props else None
            exp_wms_url = wms.add_capabilities_params_to_url(wms.get_wms_url(workspace, external_url=True))
            if md_wms_url != exp_wms_url:
                logger.exception(f'      WMS URL was not migrated (should be {exp_wms_url}, but is {md_wms_url})!')
        time.sleep(0.5)

    query = f'''
    select w.name, p.name
    from {db_schema}.publications p
        inner join {db_schema}.workspaces w on w.id = p.id_workspace
    where p.type = %s
    '''
    params = (MAP_TYPE,)
    if workspace_filter:
        query = query + ' AND w.name = %s'
        params = params + (workspace_filter,)
    publications = db_util.run_query(query, params)
    for (workspace, map) in publications:
        logger.info(f'    Migrate map {workspace}.{map}')
        try:
            muuid = map_csw.patch_map(workspace, map, metadata_properties_to_refresh=[
                'graphic_url',
                'identifier',
                'map_endpoint',
                'map_file_endpoint',
            ], create_if_not_exists=False, timeout=2)
            if not muuid:
                logger.warning(f'      Metadata record of the map was not migrated, because the record does not exist.')
        except requests.exceptions.ReadTimeout:
            md_props = list(map_csw.get_metadata_comparison(workspace, map).values())
            md_map_endpoint = md_props[0]['map_endpoint'] if md_props else None
            exp_map_endpoint = util.url_for('rest_workspace_map.get', workspace=workspace, mapname=map)
            if md_map_endpoint != exp_map_endpoint:
                logger.exception(f'      Map endpoint was not migrated (should be {exp_map_endpoint}, but is {md_map_endpoint})!')
        time.sleep(0.5)
    logger.info(f'  DONE - migrate publication metadata records')
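# Hypothetical invocation of the migration entry points defined above, e.g. from an
# upgrade step; the workspace name is only an example value, and the two functions
# may live in different upgrade modules in the actual project.
# migrate_layer_metadata()                                   # all workspaces
# migrate_metadata_records(workspace_filter='testuser1')     # a single workspace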