def get_map_info(workspace, mapname):
    """Return partial info about a map based on its files on disk.

    :param workspace: workspace name
    :param mapname: map name
    :return: dict with 'file', '_file', 'title' and 'description' keys when
        the map composition file exists; {'name': mapname} when only the map
        directory exists; {} otherwise
    """
    map_file_path = get_map_file(workspace, mapname)
    result = {}
    if os.path.exists(map_file_path):
        with open(map_file_path, 'r') as map_file:
            map_json = json.load(map_file)
        # report the file path relative to the workspace directory
        map_file_path = os.path.relpath(map_file_path, common_util.get_workspace_dir(workspace))
        result = {
            'file': {
                'path': map_file_path,
                'url': url_for('rest_workspace_map_file.get', mapname=mapname, workspace=workspace),
            },
            '_file': {
                'url': url_for('rest_workspace_map_file.get', mapname=mapname, workspace=workspace, internal=True),
            },
            # tolerate missing or null keys in the user-supplied map JSON
            # (plain indexing would raise KeyError on incomplete compositions)
            'title': map_json.get('title') or '',
            'description': map_json.get('abstract') or '',
        }
    elif os.path.exists(util.get_map_dir(workspace, mapname)):
        result = {'name': mapname}
    return result
def _get_property_values(
        workspace='browser',
        layername='layer',
        uuid='ca238200-8200-1a23-9399-42c9fca53542',
        title='CORINE - Krajinný pokryv CLC 90',
        abstract=None,
        md_organisation_name=None,
        organisation_name=None,
        publication_date='2007-05-25',
        revision_date='2008-05-25',
        md_date_stamp='2007-05-25',
        identifier='http://www.env.cz/data/corine/1990',
        identifier_label='MZP-CORINE',
        extent=None,  # w, s, e, n
        wms_url="http://www.env.cz/corine/data/download.zip",
        wfs_url="http://www.env.cz/corine/data/download.zip",
        epsg_codes=None,
        scale_denominator=None,
        languages=None,
        md_language=None,
):
    """Assemble the metadata property values of a layer for its CSW record."""
    epsg_codes = epsg_codes if epsg_codes else [3857, 4326]
    languages = languages if languages else []
    west, south, east, north = extent if extent else [11.87, 48.12, 19.13, 51.59]
    # clamp the extent to valid WGS84 bounds
    clamped_extent = [
        max(west, -180),
        max(south, -90),
        min(east, 180),
        min(north, 90),
    ]
    return {
        'md_file_identifier': get_metadata_uuid(uuid),
        'md_language': md_language,
        'md_date_stamp': md_date_stamp,
        'reference_system': epsg_codes,
        'title': title,
        'publication_date': publication_date,
        'revision_date': revision_date,
        'identifier': {
            'identifier': identifier,
            'label': identifier_label,
        },
        'abstract': abstract,
        'graphic_url': url_for('rest_workspace_layer_thumbnail.get', workspace=workspace, layername=layername),
        'extent': clamped_extent,
        # capabilities URLs are narrowed down to this single layer
        'wms_url': f"{wms.add_capabilities_params_to_url(wms_url)}&LAYERS={layername}",
        'wfs_url': f"{wfs.add_capabilities_params_to_url(wfs_url)}&LAYERS={layername}",
        'layer_endpoint': url_for('rest_workspace_layer.get', workspace=workspace, layername=layername),
        'scale_denominator': scale_denominator,
        'language': languages,
        'md_organisation_name': md_organisation_name,
        'organisation_name': organisation_name,
    }
def get_map_info(workspace, mapname):
    """Return CSW metadata info of the map, or {} when unavailable."""
    uuid = get_map_uuid(workspace, mapname)
    try:
        csw = common_util.create_csw()
        if uuid is None or csw is None:
            return {}
        muuid = get_metadata_uuid(uuid)
        csw.getrecordbyid(id=[muuid], esn='brief')
    except (HTTPError, ConnectionError):
        # CSW is unreachable; treat metadata as missing
        current_app.logger.info(traceback.format_exc())
        return {}
    if muuid not in csw.records:
        return {}
    return {
        'metadata': {
            'identifier': muuid,
            'csw_url': settings.CSW_PROXY_URL,
            'record_url': settings.CSW_RECORD_URL.format(identifier=muuid),
            'comparison_url': url_for('rest_workspace_map_metadata_comparison.get', workspace=workspace, mapname=mapname),
        }
    }
def get_metadata_comparison(workspace, mapname):
    """Compare metadata properties of the map across all available sources.

    Collects metadata properties from Layman's own (request-cached) map info,
    from every registered source module, and from the map composition file,
    then transforms them into one comparison structure keyed by source URL.
    """
    layman_info = get_complete_map_info(cached=True)
    layman_props = map_info_to_metadata_properties(layman_info)
    # each property set is keyed by the URL it was obtained from
    all_props = {
        f"{layman_props['map_endpoint']}": layman_props,
    }
    sources = get_sources()
    partial_infos = call_modules_fn(sources, 'get_metadata_comparison', [workspace, mapname])
    for partial_info in partial_infos.values():
        if partial_info is not None:
            all_props.update(partial_info)
    map_json = get_map_file_json(workspace, mapname)
    if map_json:
        # take operates_on links from the CSW source module's record, if any
        soap_operates_on = next(iter(partial_infos[csw].values()))['operates_on'] if partial_infos[csw] else []
        operates_on_muuids_filter = micka_util.operates_on_values_to_muuids(soap_operates_on)
        layman_file_props = map_file_to_metadata_properties(map_json, operates_on_muuids_filter)
        map_file_url = url_for('rest_workspace_map_file.get', mapname=mapname, workspace=workspace)
        all_props[map_file_url] = layman_file_props
    return metadata_common.transform_metadata_props_to_comparison(all_props)
def get_complete_map_info(username=None, mapname=None, cached=False):
    """Return the full info dict of a map, optionally from the request cache.

    :raises LaymanError: 26 when the map does not exist
    """
    assert cached or (username is not None and mapname is not None)
    if cached:
        return g.get(FLASK_INFO_KEY)
    partial_info = get_map_info(username, mapname)
    if not any(partial_info):
        raise LaymanError(26, {'mapname': mapname})
    item_keys = ['file', 'thumbnail', 'metadata', ]
    # start with defaults, mark every item as not available, then overlay real info
    complete_info = {
        'name': mapname,
        'url': url_for('rest_workspace_map.get', mapname=mapname, workspace=username),
        'title': mapname,
        'description': '',
    }
    complete_info.update({key: {'status': 'NOT_AVAILABLE'} for key in item_keys})
    complete_info.update(partial_info)
    complete_info['layman_metadata'] = {
        'publication_status': layman_util.get_publication_status(username, MAP_TYPE, mapname, complete_info, item_keys),
    }
    return clear_publication_info(complete_info)
def get_template_path_and_values(username, mapname, http_method=None, actor_name=None):
    """Return the path of the map metadata XML template and its property values.

    :param username: workspace of the map
    :param mapname: map name
    :param http_method: common.REQUEST_METHOD_POST or common.REQUEST_METHOD_PATCH;
        on POST, 'revision_date' is dropped from the values
    :param actor_name: user editing the map; passed to operates_on resolution
    :return: (template_path, prop_values) tuple
    """
    assert http_method in [common.REQUEST_METHOD_POST, common.REQUEST_METHOD_PATCH]
    uuid_file_path = get_publication_uuid_file(MAP_TYPE, username, mapname)
    # publication date = mtime of the uuid file; revision date = now
    publ_datetime = datetime.fromtimestamp(os.path.getmtime(uuid_file_path))
    revision_date = datetime.now()
    map_json = get_map_json(username, mapname)
    operates_on = map_json_to_operates_on(map_json, editor=actor_name)
    publ_info = get_publication_info(username, MAP_TYPE, mapname, context={
        'keys': ['title', 'bounding_box', 'description'],
    })
    bbox_3857 = publ_info.get('bounding_box')
    if bbox_util.is_empty(bbox_3857):
        bbox_3857 = settings.LAYMAN_DEFAULT_OUTPUT_BBOX
    # metadata extent is expressed in EPSG:4326
    extent = bbox_util.transform(tuple(bbox_3857), epsg_from=3857, epsg_to=4326)
    title = publ_info['title']
    abstract = publ_info.get('description')
    # guess the metadata language from title and abstract text
    md_language = next(iter(common_language.get_languages_iso639_2(' '.join([
        title or '',
        abstract or '',
    ]))), None)
    prop_values = _get_property_values(
        username=username,
        mapname=mapname,
        uuid=get_map_uuid(username, mapname),
        title=title,
        abstract=abstract or None,
        publication_date=publ_datetime.strftime('%Y-%m-%d'),
        revision_date=revision_date.strftime('%Y-%m-%d'),
        md_date_stamp=date.today().strftime('%Y-%m-%d'),
        identifier=url_for('rest_workspace_map.get', workspace=username, mapname=mapname),
        identifier_label=mapname,
        extent=extent,
        epsg_codes=map_json_to_epsg_codes(map_json),
        md_organisation_name=None,
        organisation_name=None,
        operates_on=operates_on,
        md_language=md_language,
    )
    if http_method == common.REQUEST_METHOD_POST:
        # a freshly created record has no revision yet
        prop_values.pop('revision_date', None)
    template_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'record-template.xml')
    return template_path, prop_values
def get_template_path_and_values(workspace, layername, http_method=None):
    """Return the path of the layer metadata XML template and its property values.

    :param workspace: workspace of the layer
    :param layername: layer name
    :param http_method: common.REQUEST_METHOD_POST or common.REQUEST_METHOD_PATCH;
        on POST, 'revision_date' is dropped from the values
    :return: (template_path, prop_values) tuple
    """
    assert http_method in [common.REQUEST_METHOD_POST, common.REQUEST_METHOD_PATCH]
    publ_info = get_publication_info(workspace, LAYER_TYPE, layername, context={
        'keys': ['title', 'bounding_box', 'description'],
    })
    title = publ_info['title']
    abstract = publ_info.get('description')
    bbox_3857 = publ_info.get('bounding_box')
    if bbox_util.is_empty(bbox_3857):
        bbox_3857 = settings.LAYMAN_DEFAULT_OUTPUT_BBOX
    # metadata extent is expressed in EPSG:4326
    extent = bbox_util.transform(tuple(bbox_3857), epsg_from=3857, epsg_to=4326)
    uuid_file_path = get_publication_uuid_file(LAYER_TYPE, workspace, layername)
    # publication date = mtime of the uuid file; revision date = now
    publ_datetime = datetime.fromtimestamp(os.path.getmtime(uuid_file_path))
    revision_date = datetime.now()
    # guess the metadata language from title and abstract text
    md_language = next(iter(common_language.get_languages_iso639_2(' '.join([
        title or '',
        abstract or ''
    ]))), None)
    try:
        languages = db.get_text_languages(workspace, layername)
    except LaymanError:
        # language detection is best-effort only
        languages = []
    try:
        scale_denominator = db.guess_scale_denominator(workspace, layername)
    except LaymanError:
        # scale guessing is best-effort only
        scale_denominator = None
    prop_values = _get_property_values(
        workspace=workspace,
        layername=layername,
        uuid=get_layer_uuid(workspace, layername),
        title=title,
        abstract=abstract or None,
        publication_date=publ_datetime.strftime('%Y-%m-%d'),
        revision_date=revision_date.strftime('%Y-%m-%d'),
        md_date_stamp=date.today().strftime('%Y-%m-%d'),
        identifier=url_for('rest_workspace_layer.get', workspace=workspace, layername=layername),
        identifier_label=layername,
        extent=extent,
        wms_url=wms.get_wms_url(workspace, external_url=True),
        wfs_url=wfs.get_wfs_url(workspace, external_url=True),
        md_organisation_name=None,
        organisation_name=None,
        md_language=md_language,
        languages=languages,
        scale_denominator=scale_denominator,
        epsg_codes=settings.LAYMAN_OUTPUT_SRS_LIST,
    )
    if http_method == common.REQUEST_METHOD_POST:
        # a freshly created record has no revision yet
        prop_values.pop('revision_date', None)
    template_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'record-template.xml')
    return template_path, prop_values
def get_layer_info(workspace, layername):
    """Return thumbnail info of the layer, or {} when no thumbnail exists."""
    path = get_layer_thumbnail_path(workspace, layername)
    if not os.path.exists(path):
        return {}
    relative_path = os.path.relpath(path, common_util.get_workspace_dir(workspace))
    thumbnail_url = url_for('rest_workspace_layer_thumbnail.get', workspace=workspace, layername=layername)
    return {
        'thumbnail': {
            'url': thumbnail_url,
            'path': relative_path,
        },
        '_thumbnail': {
            'path': path,
        },
    }
def get_map_info(workspace, mapname):
    """Return thumbnail info of the map, or {} when no thumbnail exists."""
    path = get_map_thumbnail_path(workspace, mapname)
    if not os.path.exists(path):
        return {}
    relative_path = os.path.relpath(path, common_util.get_workspace_dir(workspace))
    thumbnail_url = url_for('rest_workspace_map_thumbnail.get', workspace=workspace, mapname=mapname)
    return {
        'thumbnail': {
            'url': thumbnail_url,
            'path': relative_path,
        }
    }
def get_complete_layer_info(username=None, layername=None, cached=False):
    """Return the full info dict of a layer, optionally from the request cache.

    :raises LaymanError: 15 when the layer does not exist
    """
    assert cached or (username is not None and layername is not None)
    if cached:
        return g.get(FLASK_INFO_KEY)
    partial_info = get_layer_info(username, layername)
    if not any(partial_info):
        raise LaymanError(15, {'layername': layername})
    item_keys = ['wms', 'wfs', 'thumbnail', 'file', 'db_table', 'metadata', 'style', ]
    # start with defaults, mark every item as not available, then overlay real info
    complete_info = {
        'name': layername,
        'url': url_for('rest_workspace_layer.get', layername=layername, workspace=username),
        'title': layername,
        'description': '',
    }
    complete_info.update({key: {'status': 'NOT_AVAILABLE'} for key in item_keys})
    complete_info.update(partial_info)
    complete_info['layman_metadata'] = {
        'publication_status': layman_util.get_publication_status(username, LAYER_TYPE, layername, complete_info, item_keys)
    }
    complete_info = clear_publication_info(complete_info)
    # 'sld' is kept as a backward-compatible alias of 'style'
    complete_info['sld'] = complete_info['style']
    return complete_info
def get_layer_info(workspace, layername):
    """Return QGIS style info of the layer, or {} when no QGIS directory exists."""
    layer_dir = qgis.get_layer_dir(workspace, layername)
    if not os.path.exists(layer_dir):
        return {}
    style_url = layman_util.url_for('rest_workspace_layer_style.get', workspace=workspace, layername=layername)
    return {
        'name': layername,
        'style': {
            'url': style_url,
            'type': 'qml',
        },
        '_wms': {
            'qgis_capabilities_url': get_layer_capabilities_url(workspace, layername),
        }
    }
def get_complete_layer_info(workspace=None, layername=None, cached=False):
    """Return the full info dict of a layer, optionally from the request cache.

    :raises LaymanError: 15 when the layer does not exist
    """
    assert cached or (workspace is not None and layername is not None)
    if cached:
        return g.get(FLASK_INFO_KEY)
    partial_info = get_layer_info(workspace, layername)
    if not any(partial_info):
        raise LaymanError(15, {'layername': layername})
    # the set of expected info items depends on the layer's file type
    file_type = partial_info['file']['file_type']
    item_keys = get_layer_info_keys(file_type)
    complete_info = {key: {'status': 'NOT_AVAILABLE'} for key in item_keys}
    complete_info.update({
        'name': layername,
        'url': url_for('rest_workspace_layer.get', layername=layername, workspace=workspace),
        'title': layername,
        'description': '',
    })
    complete_info.update(partial_info)
    # 'sld' is kept as a backward-compatible alias of 'style'
    complete_info['sld'] = complete_info['style']
    complete_info = clear_publication_info(complete_info)
    # recompute publication status from the cleared info
    complete_info.pop('layman_metadata')
    complete_info['layman_metadata'] = {
        'publication_status': layman_util.get_publication_status(workspace, LAYER_TYPE, layername, complete_info, item_keys)
    }
    return complete_info
def get_workspace_publication_url(workspace, publication_name):
    """Return the REST URL of the layer within the given workspace."""
    layer_url = url_for('rest_workspace_layer.get', layername=publication_name, workspace=workspace)
    return layer_url
def get_workspace_publication_url(workspace, publication_name):
    """Return the REST URL of the map within the given workspace."""
    map_url = url_for('rest_workspace_map.get', mapname=publication_name, workspace=workspace)
    return map_url
def migrate_metadata_records(workspace_filter=None):
    """Refresh CSW metadata records of all layers and maps.

    For each layer, the metadata properties 'wms_url', 'graphic_url',
    'identifier' and 'layer_endpoint' are patched; for each map,
    'graphic_url', 'identifier', 'map_endpoint' and 'map_file_endpoint'.
    Records that do not exist are skipped with a warning. On CSW timeout the
    record is read back to check whether the key property was migrated anyway.

    :param workspace_filter: if set, migrate only publications of this workspace
    """
    logger.info(' Starting - migrate publication metadata records')

    def _list_publications(publ_type):
        # fetch (workspace name, publication name) pairs of the given type
        query = f'''
select w.name, p.name
from {db_schema}.publications p inner join
     {db_schema}.workspaces w on w.id = p.id_workspace
where p.type = %s
'''
        params = (publ_type, )
        if workspace_filter:
            query = query + ' AND w.name = %s'
            params = params + (workspace_filter, )
        return db_util.run_query(query, params)

    for (workspace, layer) in _list_publications(LAYER_TYPE):
        wms.clear_cache(workspace)
        logger.info(f' Migrate layer {workspace}.{layer}')
        try:
            muuid = layer_csw.patch_layer(workspace, layer,
                                          ['wms_url', 'graphic_url', 'identifier', 'layer_endpoint', ],
                                          create_if_not_exists=False, timeout=2)
            if not muuid:
                logger.warning(' Metadata record of layer was not migrated, because the record does not exist.')
        except requests.exceptions.ReadTimeout:
            # Micka sometimes times out even though the record was patched;
            # read the record back and verify the WMS URL was migrated.
            md_props = list(layer_csw.get_metadata_comparison(workspace, layer).values())
            md_wms_url = md_props[0]['wms_url'] if md_props else None
            exp_wms_url = wms.add_capabilities_params_to_url(wms.get_wms_url(workspace, external_url=True))
            if md_wms_url != exp_wms_url:
                logger.exception(f' WMS URL was not migrated (should be {exp_wms_url}, but is {md_wms_url})!')
        # throttle requests to the CSW server
        time.sleep(0.5)

    # loop variable renamed from `map` to avoid shadowing the builtin
    for (workspace, map_name) in _list_publications(MAP_TYPE):
        logger.info(f' Migrate map {workspace}.{map_name}')
        try:
            muuid = map_csw.patch_map(workspace, map_name,
                                      metadata_properties_to_refresh=['graphic_url', 'identifier', 'map_endpoint', 'map_file_endpoint', ],
                                      create_if_not_exists=False, timeout=2)
            if not muuid:
                logger.warning(' Metadata record of the map was not migrated, because the record does not exist.')
        except requests.exceptions.ReadTimeout:
            # same timeout fallback as for layers, checking the map endpoint
            md_props = list(map_csw.get_metadata_comparison(workspace, map_name).values())
            md_map_endpoint = md_props[0]['map_endpoint'] if md_props else None
            exp_map_endpoint = util.url_for('rest_workspace_map.get', workspace=workspace, mapname=map_name)
            if md_map_endpoint != exp_map_endpoint:
                logger.exception(f' Map endpoint was not migrated (should be {exp_map_endpoint}, but is {md_map_endpoint})!')
        time.sleep(0.5)
    logger.info(' DONE - migrate publication metadata records')
def get_template_path_and_values(workspace, mapname, http_method=None, actor_name=None):
    """Return the path of the map metadata XML template and its property values.

    :param workspace: workspace of the map
    :param mapname: map name
    :param http_method: common.REQUEST_METHOD_POST or common.REQUEST_METHOD_PATCH;
        on POST, 'revision_date' is dropped from the values
    :param actor_name: user editing the map; passed to operates_on resolution
    :return: (template_path, prop_values) tuple
    """
    assert http_method in [common.REQUEST_METHOD_POST, common.REQUEST_METHOD_PATCH]
    uuid_file_path = get_publication_uuid_file(MAP_TYPE, workspace, mapname)
    # publication date = mtime of the uuid file; revision date = now
    publ_datetime = datetime.fromtimestamp(os.path.getmtime(uuid_file_path))
    revision_date = datetime.now()
    map_json = get_map_json(workspace, mapname)
    operates_on = map_json_to_operates_on(map_json, editor=actor_name)
    publ_info = get_publication_info(workspace, MAP_TYPE, mapname, context={
        'keys': ['title', 'native_bounding_box', 'description', 'native_crs'],
    })
    native_bbox = publ_info.get('native_bounding_box')
    crs = publ_info.get('native_crs')
    if bbox_util.is_empty(native_bbox):
        # fall back to the default bbox of the map's native CRS
        native_bbox = crs_def.CRSDefinitions[crs].default_bbox
    # metadata extent is expressed in EPSG:4326
    extent = bbox_util.transform(native_bbox, crs_from=publ_info.get('native_crs'), crs_to=crs_def.EPSG_4326)
    title = publ_info['title']
    abstract = publ_info.get('description')
    # guess the metadata language from title and abstract text
    md_language = next(iter(common_language.get_languages_iso639_2(' '.join([
        title or '',
        abstract or '',
    ]))), None)
    prop_values = _get_property_values(
        workspace=workspace,
        mapname=mapname,
        uuid=get_map_uuid(workspace, mapname),
        title=title,
        abstract=abstract or None,
        publication_date=publ_datetime.strftime('%Y-%m-%d'),
        revision_date=revision_date.strftime('%Y-%m-%d'),
        md_date_stamp=date.today().strftime('%Y-%m-%d'),
        identifier=url_for('rest_workspace_map.get', workspace=workspace, mapname=mapname),
        identifier_label=mapname,
        extent=extent,
        crs_list=[crs],
        md_organisation_name=None,
        organisation_name=None,
        operates_on=operates_on,
        md_language=md_language,
    )
    if http_method == common.REQUEST_METHOD_POST:
        # a freshly created record has no revision yet
        prop_values.pop('revision_date', None)
    template_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'record-template.xml')
    return template_path, prop_values
def _get_property_values(
        username='******',
        mapname='map',
        uuid='af238200-8200-1a23-9399-42c9fca53543',
        title='Administrativní členění Libereckého kraje',
        abstract=None,
        md_organisation_name=None,
        organisation_name=None,
        publication_date='2007-05-25',
        revision_date='2008-05-25',
        md_date_stamp='2007-05-25',
        identifier='http://www.env.cz/data/liberec/admin-cleneni',
        identifier_label='Liberec-AdminUnits',
        extent=None,  # west, south, east, north
        epsg_codes=None,
        operates_on=None,
        md_language=None,
):
    """Assemble the metadata property values of a map for its CSW record."""
    epsg_codes = epsg_codes if epsg_codes else ['3857']
    west, south, east, north = extent if extent else [14.62, 50.58, 15.42, 50.82]
    # clamp the extent to valid WGS84 bounds
    clamped_extent = [max(west, -180), max(south, -90), min(east, 180), min(north, 90)]
    # operates_on is a list of dictionaries; keep only the recognized keys
    allowed_keys = ('xlink:title', 'xlink:href', 'uuidref')
    operates_on = [
        {key: value for key, value in item.items() if key in allowed_keys}
        for item in (operates_on or [])
    ]
    return {
        'md_file_identifier': get_metadata_uuid(uuid),
        'md_language': md_language,
        'md_date_stamp': md_date_stamp,
        'reference_system': epsg_codes,
        'title': title,
        'publication_date': publication_date,
        'revision_date': revision_date,
        'identifier': {
            'identifier': identifier,
            'label': identifier_label,
        },
        'abstract': abstract,
        'graphic_url': url_for('rest_workspace_map_thumbnail.get', workspace=username, mapname=mapname),
        'extent': clamped_extent,
        # endpoints are XML-escaped because they are substituted into the record
        'map_endpoint': escape(url_for('rest_workspace_map.get', workspace=username, mapname=mapname)),
        'map_file_endpoint': escape(url_for('rest_workspace_map_file.get', workspace=username, mapname=mapname)),
        'operates_on': operates_on,
        'md_organisation_name': md_organisation_name,
        'organisation_name': organisation_name,
    }
def post(workspace): app.logger.info(f"POST Maps, user={g.user}") # FILE if 'file' in request.files and not request.files['file'].filename == '': file = request.files["file"] else: raise LaymanError(1, {'parameter': 'file'}) file_json = util.check_file(file) # NAME unsafe_mapname = request.form.get('name', '') if len(unsafe_mapname) == 0: unsafe_mapname = input_file.get_unsafe_mapname(file_json) mapname = util.to_safe_map_name(unsafe_mapname) util.check_mapname(mapname) info = util.get_map_info(workspace, mapname) if info: raise LaymanError(24, {'mapname': mapname}) # TITLE if len(request.form.get('title', '')) > 0: title = request.form['title'] elif len(file_json.get('title', '')) > 0: title = file_json['title'] else: title = mapname # DESCRIPTION if len(request.form.get('description', '')) > 0: description = request.form['description'] else: description = file_json.get('abstract', '') mapurl = url_for('rest_workspace_map.get', mapname=mapname, workspace=workspace) redis_util.create_lock(workspace, MAP_TYPE, mapname, request.method) try: map_result = { 'name': mapname, 'url': mapurl, } actor_name = authn.get_authn_username() kwargs = { 'title': title, 'description': description, 'actor_name': actor_name } rest_common.setup_post_access_rights(request.form, kwargs, actor_name) util.pre_publication_action_check( workspace, mapname, kwargs, ) # register map uuid uuid_str = uuid.assign_map_uuid(workspace, mapname) kwargs['uuid'] = uuid_str map_result.update({ 'uuid': uuid_str, }) file = FileStorage(io.BytesIO(json.dumps(file_json).encode()), file.filename) input_file.save_map_files(workspace, mapname, [file]) util.post_map(workspace, mapname, kwargs, 'layman.map.filesystem.input_file') except Exception as exception: try: if util.is_map_chain_ready(workspace, mapname): redis_util.unlock_publication(workspace, MAP_TYPE, mapname) finally: redis_util.unlock_publication(workspace, MAP_TYPE, mapname) raise exception # app.logger.info('uploaded map '+mapname) return 
jsonify([map_result]), 200
def post(workspace):
    """Handle POST /rest/<workspace>/layers — publish a new layer.

    Accepts either direct file uploads in request.files or a list of file
    names in request.form (chunk upload mode); optional form fields 'name',
    'title', 'description', 'crs', 'style'/'sld' and access rights.

    :return: (jsonify([{'name', 'url', 'uuid', ...}]), 200)
    :raises LaymanError: 1 when files are missing, 17 when the layer already
        exists, 2 when the CRS is unsupported
    """
    app.logger.info(f"POST Layers, user={g.user}")
    # FILE
    use_chunk_upload = False
    files = []
    if 'file' in request.files:
        files = [
            f for f in request.files.getlist("file")
            if len(f.filename) > 0
        ]
    if len(files) == 0 and len(request.form.getlist('file')) > 0:
        # only file names were sent; the contents will arrive via chunk upload
        files = [
            filename for filename in request.form.getlist('file')
            if len(filename) > 0
        ]
        if len(files) > 0:
            use_chunk_upload = True
    if len(files) == 0:
        raise LaymanError(1, {'parameter': 'file'})
    # NAME
    unsafe_layername = request.form.get('name', '')
    if len(unsafe_layername) == 0:
        # fall back to the name derived from the uploaded file names
        unsafe_layername = input_file.get_unsafe_layername(files)
    layername = util.to_safe_layer_name(unsafe_layername)
    util.check_layername(layername)
    info = util.get_layer_info(workspace, layername)
    if info:
        raise LaymanError(17, {'layername': layername})
    util.check_new_layername(workspace, layername)
    # CRS
    crs_id = None
    if len(request.form.get('crs', '')) > 0:
        crs_id = request.form['crs']
        if crs_id not in settings.INPUT_SRS_LIST:
            raise LaymanError(2, {'parameter': 'crs', 'supported_values': settings.INPUT_SRS_LIST})
    # CRS must be detected from the files only when it was not given explicitly
    check_crs = crs_id is None
    # TITLE
    if len(request.form.get('title', '')) > 0:
        title = request.form['title']
    else:
        title = layername
    # DESCRIPTION
    description = request.form.get('description', '')
    # Style
    style_file = None
    if 'style' in request.files and not request.files['style'].filename == '':
        style_file = request.files['style']
    elif 'sld' in request.files and not request.files['sld'].filename == '':
        # 'sld' is the legacy form-field name for the style file
        style_file = request.files['sld']
    style_type = input_style.get_style_type_from_file_storage(style_file)
    actor_name = authn.get_authn_username()
    task_options = {
        'crs_id': crs_id,
        'description': description,
        'title': title,
        'ensure_user': True,
        'check_crs': False,
        'actor_name': actor_name,
        'style_type': style_type,
        'store_in_geoserver': style_type.store_in_geoserver,
    }
    rest_common.setup_post_access_rights(request.form, task_options, actor_name)
    util.pre_publication_action_check(workspace,
                                      layername,
                                      task_options,
                                      )
    layerurl = url_for('rest_workspace_layer.get', layername=layername, workspace=workspace)
    layer_result = {
        'name': layername,
        'url': layerurl,
    }
    # FILE NAMES
    if use_chunk_upload:
        filenames = files
    else:
        filenames = [f.filename for f in files]
    input_file.check_filenames(workspace, layername, filenames, check_crs)
    # lock the publication for the duration of the asynchronous chain
    redis_util.create_lock(workspace, LAYER_TYPE, layername, request.method)
    try:
        # register layer uuid
        uuid_str = uuid.assign_layer_uuid(workspace, layername)
        layer_result.update({
            'uuid': uuid_str,
        })
        task_options.update({'uuid': uuid_str, })
        # save files
        input_style.save_layer_file(workspace, layername, style_file, style_type)
        if use_chunk_upload:
            files_to_upload = input_chunk.save_layer_files_str(
                workspace, layername, files, check_crs)
            layer_result.update({
                'files_to_upload': files_to_upload,
            })
            task_options.update({
                'check_crs': check_crs,
            })
        else:
            input_file.save_layer_files(
                workspace, layername, files, check_crs)
        util.post_layer(
            workspace,
            layername,
            task_options,
            'layman.layer.filesystem.input_chunk' if use_chunk_upload else 'layman.layer.filesystem.input_file'
        )
    except Exception as exc:
        # on failure always release the lock before re-raising
        try:
            if util.is_layer_chain_ready(workspace, layername):
                redis_util.unlock_publication(workspace, LAYER_TYPE, layername)
        finally:
            redis_util.unlock_publication(workspace, LAYER_TYPE, layername)
        raise exc
    # app.logger.info('uploaded layer '+layername)
    return jsonify([layer_result]), 200