def base_insert(xml_str):
    """POST a raw CSW transaction (insert) request to the Micka CSW endpoint.

    :param xml_str: complete CSW request body as an XML string
    :return: tuple (parsed XML root element of the response, requests.Response)
    :raises LaymanError: 36 when the record already exists (with exception
        details from the OWS report), 37 on any other OWS exception report
    :raises requests.HTTPError: on non-2xx HTTP status
    """
    # print(f"Micka insert=\n{xml_str}")
    response = requests.post(
        settings.CSW_URL,
        auth=settings.CSW_BASIC_AUTHN,
        data=xml_str.encode('utf-8'),
        timeout=5,
    )
    # print(f"Micka insert response=\n{response.text}")
    response.raise_for_status()
    root_el = ET.fromstring(response.content)
    # the server reports application-level errors as an <ows:ExceptionReport>
    # even with a successful HTTP status
    if root_el.tag == nspath_eval('ows:ExceptionReport', NAMESPACES):
        if is_record_exists_exception(root_el):
            # duplicate record: surface the exception details to the caller
            raise LaymanError(36, data={
                'exception_code': root_el[0].attrib["exceptionCode"],
                'locator': root_el[0].attrib["locator"],
                'text': root_el[0][0].text,
            })
        raise LaymanError(37, data={'response': response.text})
    return root_el, response
def check_file(file):
    """Parse and validate an uploaded JSON file against the bundled
    draft-07 schema (layman/map/schema.draft-07.json).

    :param file: file-like object with the uploaded JSON content
    :return: parsed JSON of the file
    :raises LaymanError: 2 when the file is not valid JSON or does not
        conform to the schema
    """
    # Keep the try body limited to parsing the *user's* file: previously the
    # schema loading/validation was inside this try, so an unrelated
    # ValueError would be mis-reported as invalid JSON syntax of the upload.
    try:
        file_json = json.load(file)
    except ValueError as exc:
        raise LaymanError(2, {
            'parameter': 'file',
            'reason': 'Invalid JSON syntax'
        }) from exc
    schema_path = os.path.join(
        os.path.dirname(os.path.realpath(__file__)),
        'schema.draft-07.json'
    )
    with open(schema_path) as schema_file:
        schema_json = json.load(schema_file)
    validator = Draft7Validator(schema_json)
    if not validator.is_valid(file_json):
        # collect all validation errors, not just the first one
        errors = [
            {
                'message': e.message,
                'absolute_path': list(e.absolute_path),
            }
            for e in validator.iter_errors(file_json)
        ]
        raise LaymanError(2, {
            'parameter': 'file',
            'reason': 'JSON not valid against schema layman/map/schema.draft-07.json',
            'validation-errors': errors,
        })
    validate(instance=file_json, schema=schema_json)
    return file_json
def solve_locks(workspace, publication_type, publication_name, requested_lock):
    """Decide whether `requested_lock` may be acquired on a publication,
    given the lock currently held by a running operation (if any).

    :raises LaymanError: 49 when the requested operation conflicts with the
        current lock; for a feature change blocked by a running DELETE the
        error carries private_data={'can_run_later': True}
    :raises Exception: on unknown requested or current lock values
    """
    current_lock = get_publication_lock(
        workspace,
        publication_type,
        publication_name,
    )
    if current_lock is None:
        # nothing is running on the publication, any lock may be acquired
        return
    if requested_lock not in [common.PUBLICATION_LOCK_PATCH,
                              common.PUBLICATION_LOCK_DELETE,
                              common.PUBLICATION_LOCK_FEATURE_CHANGE,
                              ]:
        raise Exception(f"Unknown method to check: {requested_lock}")
    if current_lock not in [common.PUBLICATION_LOCK_PATCH,
                            common.PUBLICATION_LOCK_DELETE,
                            common.PUBLICATION_LOCK_POST,
                            common.PUBLICATION_LOCK_FEATURE_CHANGE,
                            ]:
        raise Exception(f"Unknown current lock: {current_lock}")
    if current_lock in [common.PUBLICATION_LOCK_PATCH,
                        common.PUBLICATION_LOCK_POST,
                        ]:
        if requested_lock in [common.PUBLICATION_LOCK_PATCH,
                              common.PUBLICATION_LOCK_POST,
                              ]:
            # two write operations must not run concurrently
            raise LaymanError(49)
    elif current_lock in [common.PUBLICATION_LOCK_DELETE,
                          ]:
        if requested_lock in [common.PUBLICATION_LOCK_PATCH,
                              common.PUBLICATION_LOCK_POST,
                              ]:
            raise LaymanError(49)
        if requested_lock not in [common.PUBLICATION_LOCK_DELETE,
                                  ]:
            if requested_lock == common.PUBLICATION_LOCK_FEATURE_CHANGE:
                # a feature change can be retried after the delete finishes
                raise LaymanError(49, private_data={'can_run_later': True})
    if current_lock == common.PUBLICATION_LOCK_FEATURE_CHANGE and requested_lock in [common.REQUEST_METHOD_PATCH,
                                                                                     common.REQUEST_METHOD_POST,
                                                                                     ]:
        # NOTE(review): requested_lock was validated against PUBLICATION_LOCK_*
        # values above, but is compared here to REQUEST_METHOD_* constants —
        # presumably those constants share values in `common`; confirm.
        # A PATCH/POST interrupts a running feature-change chain and schedules
        # patch_after_feature_change to run once the new chain finishes.
        celery_util.abort_publication_chain(workspace, publication_type, publication_name)
        celery_util.push_step_to_run_after_chain(workspace, publication_type, publication_name,
                                                 'layman.util::patch_after_feature_change')
def layer_file_chunk_exists(username, layername, parameter_name, filename, chunk_number):
    """Return True when the given chunk — or the already assembled target
    file — exists on disk for a resumable layer-file upload.

    :raises LaymanError: 20 when no upload is in progress (info.json missing),
        21 when the filename/parameter pair is not part of the upload
    """
    resumable_dir = get_layer_resumable_dir(username, layername)
    info_path = os.path.join(resumable_dir, 'info.json')
    if not os.path.isfile(info_path):
        # no running upload for this layer
        raise LaymanError(20)
    with open(info_path, 'r') as info_file:
        upload_info = json.load(info_file)
    matching_info = next(
        (
            item for item in upload_info['files_to_upload']
            if item['input_file'] == filename and item['layman_original_parameter'] == parameter_name
        ),
        None,
    )
    if matching_info is None:
        raise LaymanError(21, {
            'file': filename,
            'layman_original_parameter': parameter_name,
        })
    target_filepath = matching_info['target_file']
    chunk_path = os.path.join(
        resumable_dir, 'chunks',
        _get_chunk_name(os.path.basename(target_filepath), chunk_number),
    )
    return os.path.exists(chunk_path) or os.path.exists(target_filepath)
def save_layer_file_chunk(username, layername, parameter_name, filename, chunk, chunk_number, total_chunks):
    """Persist one uploaded chunk of a resumable layer file and record the
    expected total number of chunks in Redis.

    :raises LaymanError: 20 when no upload is in progress (info.json missing),
        21 when the filename/parameter pair is not part of the upload
    """
    resumable_dir = get_layer_resumable_dir(username, layername)
    info_path = os.path.join(resumable_dir, 'info.json')
    if not os.path.isfile(info_path):
        raise LaymanError(20)
    with open(info_path, 'r') as info_file:
        upload_info = json.load(info_file)
    file_info = next(
        (
            item for item in upload_info['files_to_upload']
            if item['input_file'] == filename and item['layman_original_parameter'] == parameter_name
        ),
        None,
    )
    if file_info is None:
        raise LaymanError(21, {
            'file': filename,
            'layman_original_parameter': parameter_name,
        })
    # remember how many chunks make up this file so that assembly can
    # later detect completion
    settings.LAYMAN_REDIS.hset(
        get_layer_redis_total_chunks_key(username, layername),
        f'{parameter_name}:{file_info["target_file"]}',
        total_chunks)
    chunk_path = os.path.join(
        resumable_dir, 'chunks',
        _get_chunk_name(os.path.basename(file_info['target_file']), chunk_number),
    )
    chunk.save(chunk_path)
    current_app.logger.info('Resumable chunk saved to: %s', chunk_path)
def register_publication_uuid(workspace, publication_type, publication_name, uuid_str=None, ignore_duplicate=False):
    """Atomically register a publication's UUID and metadata in Redis.

    Uses an optimistic WATCH/MULTI/EXEC loop: if any watched key is modified
    by another client between WATCH and EXEC, the transaction raises
    WatchError and is retried.

    :param uuid_str: UUID to register; a fresh one is generated when None
    :param ignore_duplicate: when True, skip the duplicate checks
    :return: the registered UUID string
    :raises LaymanError: 23 when the UUID or the publication is already registered
    """
    if uuid_str is None:
        uuid_str = generate_uuid()
    workspace_type_names_key = get_workspace_type_names_key(
        workspace, publication_type)
    uuid_metadata_key = get_uuid_metadata_key(uuid_str)
    with settings.LAYMAN_REDIS.pipeline() as pipe:
        while True:
            try:
                # watch every key read by the checks below, so EXEC fails
                # if any of them changes concurrently
                pipe.watch(UUID_SET_KEY, uuid_metadata_key, workspace_type_names_key)
                if not ignore_duplicate:
                    if pipe.sismember(UUID_SET_KEY, uuid_str):
                        raise LaymanError(23, {
                            'message': f'Redis already contains UUID {uuid_str}'
                        })
                    if pipe.exists(uuid_metadata_key):
                        raise LaymanError(
                            23, {
                                'message': f'Redis already contains metadata of UUID {uuid_str}'
                            })
                    if pipe.hexists(workspace_type_names_key, publication_name):
                        raise LaymanError(
                            23, {
                                'message': f'Redis already contains publication type/workspace/name {publication_type}/{workspace}/{publication_name}'
                            })
                # switch the pipeline to buffered transaction mode
                pipe.multi()
                pipe.sadd(UUID_SET_KEY, uuid_str)
                # NOTE(review): hmset is deprecated in recent redis-py in
                # favour of hset(key, mapping=...); kept as-is here
                pipe.hmset(
                    uuid_metadata_key, {
                        'workspace': workspace,
                        'publication_type': publication_type,
                        'publication_name': publication_name,
                    })
                pipe.hset(workspace_type_names_key, publication_name, uuid_str)
                pipe.execute()
                break
            except WatchError:
                # another client touched a watched key; retry the whole check+write
                continue
    return uuid_str
def csw_delete(muuid):
    """Delete the CSW metadata record with the given metadata UUID.

    A "record does not exist" exception report from the server is treated
    as success (idempotent delete).

    :raises LaymanError: 37 on any other OWS exception report
    :raises requests.HTTPError: on non-2xx HTTP status
    """
    template_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'csw-delete-template.xml')
    template_values = {'muuid': muuid}
    xml_str = fill_template_as_str(template_path, template_values)
    # print(f"CSW delete request=\n{xml_str}")
    response = requests.post(
        settings.CSW_URL,
        auth=settings.CSW_BASIC_AUTHN,
        data=xml_str.encode('utf-8'),
        timeout=5,
    )
    # print(f"CSW delete response=\n{response.text}")
    response.raise_for_status()
    root_el = ET.fromstring(response.content)
    if root_el.tag == nspath_eval('ows:ExceptionReport', NAMESPACES):
        if is_record_does_not_exist_exception(root_el):
            # deleting a record that is already gone is not an error
            return
        raise LaymanError(37, data={'response': response.text})
    # sanity check: the transaction must report exactly one deleted record
    assert root_el.tag == nspath_eval('csw:TransactionResponse', NAMESPACES), response.content
    assert root_el.find(
        nspath_eval('csw:TransactionSummary/csw:totalDeleted', NAMESPACES)).text == "1", response.content
def post(workspace, layername):
    """Handle POST of a single resumable-upload chunk for a layer file.

    Reads resumable.js form fields, validates the chunk count, and stores
    the chunk via input_chunk.save_layer_file_chunk.

    :raises LaymanError: 2 when resumableTotalChunks exceeds 999
    :return: JSON confirmation with HTTP 200
    """
    app.logger.info(f"POST Layer Chunk, actor={g.user}")
    # default=1 guards against a missing or non-integer resumableTotalChunks
    # field, which previously yielded None and made the comparison below
    # raise TypeError instead of a proper LaymanError
    total_chunks = request.form.get('resumableTotalChunks', default=1, type=int)
    if total_chunks > 999:
        raise LaymanError(
            2, {
                'parameter': 'resumableTotalChunks',
                'expected value': 'number from 0 to 999',
            })
    chunk_number = request.form.get('resumableChunkNumber', default=1, type=int)
    filename = request.form.get('resumableFilename', default='error', type=str)
    parameter_name = request.form.get('layman_original_parameter', default='error', type=str)
    chunk = request.files['file']
    input_chunk.save_layer_file_chunk(workspace, layername, parameter_name,
                                      filename, chunk, chunk_number, total_chunks)
    # time.sleep(5)
    return jsonify({'message': 'Chunk saved.'}), 200
def check_publication_info(workspace_name, info):
    """Validate the access rights of a publication against the rights axioms.

    Side effect: sets info["owner"] to the workspace owner's username
    (falsy when the workspace has no user).

    :raises LaymanError: 43 with full context when the rights are invalid
    """
    owner_info = users.get_user_infos(workspace_name).get(workspace_name)
    info["owner"] = owner_info and owner_info["username"]
    access_rights = info['access_rights']
    try:
        check_rights_axioms(
            access_rights.get('read'),
            access_rights.get('write'),
            info["actor_name"],
            info["owner"],
            access_rights.get('read_old'),
            access_rights.get('write_old'),
        )
    except LaymanError as exc_info:
        raise LaymanError(43, {
            'workspace_name': workspace_name,
            'publication_name': info.get("name"),
            'access_rights': {
                'read': access_rights.get('read'),
                'write': access_rights.get('write'),
            },
            'actor_name': info.get("actor_name"),
            'owner': info["owner"],
            'message': exc_info.data,
        }) from exc_info
def get_complete_map_info(username=None, mapname=None, cached=False):
    """Return the full REST info of a map, merging defaults with partial info.

    With cached=True, returns the info previously stored in flask.g and
    ignores the other arguments.

    :raises LaymanError: 26 when the map does not exist
    """
    assert cached or (username is not None and mapname is not None)
    if cached:
        return g.get(FLASK_INFO_KEY)
    partial_info = get_map_info(username, mapname)
    if not any(partial_info):
        raise LaymanError(26, {'mapname': mapname})
    item_keys = ['file', 'thumbnail', 'metadata', ]
    # start with defaults; every item is NOT_AVAILABLE until partial_info says otherwise
    complete_info = {
        'name': mapname,
        'url': url_for('rest_workspace_map.get', mapname=mapname, workspace=username),
        'title': mapname,
        'description': '',
        **{key: {'status': 'NOT_AVAILABLE'} for key in item_keys},
    }
    complete_info.update(partial_info)
    complete_info['layman_metadata'] = {
        'publication_status': layman_util.get_publication_status(
            username, MAP_TYPE, mapname, complete_info, item_keys)
    }
    return clear_publication_info(complete_info)
def generate_layer_thumbnail(workspace, layername):
    """Render a PNG thumbnail of the layer via GeoServer WMS and save it
    into the layer's thumbnail directory.
    """
    headers = {
        settings.LAYMAN_GS_AUTHN_HTTP_HEADER_ATTRIBUTE: settings.LAYMAN_GS_USER,
    }
    layer_info = get_publication_info(workspace, LAYER_TYPE, layername,
                                      context={'keys': ['wms', 'native_bounding_box', 'native_crs', ]})
    wms_url = layer_info['_wms']['url']
    native_bbox = layer_info['native_bounding_box']
    native_crs = layer_info['native_crs']
    # fall back to the CRS default bbox when the layer has no data extent,
    # then pad zero-area bboxes so the WMS request has a real area
    raw_bbox = native_bbox if not bbox_util.is_empty(native_bbox) else crs_def.CRSDefinitions[native_crs].default_bbox
    bbox = bbox_util.ensure_bbox_with_area(raw_bbox, crs_def.CRSDefinitions[native_crs].no_area_bbox_padding)
    tn_bbox = gs_util.get_square_bbox(bbox)
    # OWSLib getmap is not used on purpose.
    # Reason: https://github.com/geopython/OWSLib/issues/709
    # tn_img = wms.getmap(
    #     layers=[layername],
    #     srs='EPSG:3857',
    #     bbox=tn_bbox,
    #     size=(300, 300),
    #     format='image/png',
    #     transparent=True,
    # )
    ensure_layer_thumbnail_dir(workspace, layername)
    tn_path = get_layer_thumbnail_path(workspace, layername)
    # out = open(tn_path, 'wb')
    # out.write(tn_img.read())
    # out.close()
    # NOTE(review): local import — presumably avoids a circular import; confirm
    from layman.layer.geoserver.wms import VERSION
    response = gs_util.get_layer_thumbnail(wms_url, layername, tn_bbox, native_crs, headers=headers, wms_version=VERSION)
    # NOTE(review): LaymanError is raised with a string first argument here,
    # unlike the integer codes used elsewhere in this codebase — confirm intended.
    # Also note the content-type check runs before raise_for_status.
    if "png" not in response.headers['content-type'].lower():
        raise LaymanError("Thumbnail rendering failed", data=response.content)
    response.raise_for_status()
    with open(tn_path, "wb") as out_file:
        out_file.write(response.content)
def get_source_type(db_types, qml_geometry):
    """Map a QML geometry category plus the set of PostGIS geometry types
    found in the DB table to a single QGIS source geometry type.

    :param db_types: collection of PostGIS type names (e.g. "ST_Point")
    :param qml_geometry: one of "Point", "Line", "Polygon", "Unknown geometry"
    :return: QGIS geometry type name
    :raises LaymanError: 47 for an unsupported combination
    """
    source_type = None
    if qml_geometry == "Point":
        # a Multi type in the table forces the Multi variant
        if "ST_MultiPoint" in db_types:
            source_type = "MultiPoint"
        elif "ST_Point" in db_types:
            source_type = "Point"
    elif qml_geometry in ("Line", "Polygon"):
        single_db, multi_db, mixed_result = {
            "Line": ("ST_LineString", "ST_MultiLineString", "MultiCurve"),
            "Polygon": ("ST_Polygon", "ST_MultiPolygon", "MultiSurface"),
        }[qml_geometry]
        has_single = single_db in db_types
        has_multi = multi_db in db_types
        if has_single and has_multi:
            # mixed single/multi tables need the general curve/surface type
            source_type = mixed_result
        elif has_single:
            source_type = single_db[3:]  # strip "ST_" prefix
        elif has_multi:
            source_type = multi_db[3:]
    elif qml_geometry == "Unknown geometry":
        if "ST_GeometryCollection" in db_types:
            source_type = "GeometryCollection"
    if source_type is None:
        raise LaymanError(
            47,
            data=f'Unknown combination of QML geometry "{qml_geometry}" and DB geometry types '
                 f'{db_types}')
    return source_type
def ensure_attributes_in_qml(qml, attrs_to_ensure):
    """Append a <field> definition to the QML for every attribute in
    attrs_to_ensure that the QML does not mention yet.

    Only 'character varying' attributes can be added automatically.

    :param qml: parsed QML document (lxml tree)
    :param attrs_to_ensure: attribute descriptors with .name and .data_type
    :raises LaymanError: 47 when a missing attribute has an unsupported type
    """
    existing_attr_names = get_attribute_names_from_qml(qml)
    parser = ET.XMLParser(remove_blank_text=True)
    field_template = """
    <field configurationFlags="None" name="{field_name}">
      <editWidget type="TextEdit">
        <config>
          <Option/>
        </config>
      </editWidget>
    </field>
    """
    fields_el = qml.xpath('/qgis/fieldConfiguration')[0]
    for attr in attrs_to_ensure:
        if attr.name in existing_attr_names:
            continue
        if attr.data_type != 'character varying':
            raise LaymanError(
                47,
                data=f'Attribute "{attr.name}" can not be automatically added to QML, because of its '
                     f'unsupported data type "{attr.data_type}". This is probably caused by '
                     f'inconsistency between attributes used in QML style and attributes in data '
                     f'file. You can fix this by uploading QML style listing all data attributes.'
            )
        field_el = ET.fromstring(field_template.format(field_name=attr.name), parser=parser)
        fields_el.append(field_el)
def only_valid_names(users_list):
    """Check that every name in users_list (except the EVERYONE role)
    belongs to an existing user.

    :raises LaymanError: 43 on the first non-existing username
    """
    names_to_check = set(users_list)
    names_to_check.discard(ROLE_EVERYONE)
    for name in names_to_check:
        if not users.get_user_infos(name):
            raise LaymanError(43, f'Not existing user. Username={name}')
def owner_can_still_write(
        owner,
        can_write,
):
    """Ensure the workspace owner keeps write access after a rights change.

    No-op when there is no owner, when everyone can write, or when the
    owner is listed explicitly.

    :raises LaymanError: 43 when the owner would lose write access
    """
    if not owner:
        # publication without a personal-workspace owner: nothing to enforce
        return
    if ROLE_EVERYONE in can_write or owner in can_write:
        return
    raise LaymanError(43, 'Owner of the personal workspace have to keep write right.')
def check_file(file):
    """Parse and validate an uploaded map-composition JSON file.

    Checks JSON syntax, presence of `describedBy`, conformance to the
    referenced composition schema, and that the map CRS is supported.

    :return: parsed JSON of the file
    :raises LaymanError: 2 on syntax/schema problems, 4 on unsupported CRS
    """
    try:
        file_json = json.load(file)
    except ValueError as exc:
        raise LaymanError(2, {
            'parameter': 'file',
            'reason': 'Invalid JSON syntax'
        }) from exc
    try:
        schema_url = file_json['describedBy']
    except KeyError as exc:
        raise LaymanError(2, {
            'parameter': 'file',
            'reason': 'Missing key `describedBy`',
            'expected': 'JSON file according schema `https://github.com/hslayers/map-compositions`, version ' + _ACCEPTED_SCHEMA_MAJOR_VERSION,
        }) from exc
    schema_json = get_composition_schema(schema_url)
    validator = Draft7Validator(schema_json)
    if not validator.is_valid(file_json):
        # report every schema violation, not just the first
        validation_errors = [
            {
                'message': err.message,
                'absolute_path': list(err.absolute_path),
            }
            for err in validator.iter_errors(file_json)
        ]
        raise LaymanError(2, {
            'parameter': 'file',
            'reason': f'JSON not valid against schema {schema_url}',
            'validation-errors': validation_errors,
        })
    validate(instance=file_json, schema=schema_json)
    map_crs = get_crs_from_json(file_json)
    if map_crs not in settings.INPUT_SRS_LIST:
        raise LaymanError(4, {
            'found': map_crs,
            'supported_values': settings.INPUT_SRS_LIST
        })
    return file_json
def reserve_username(username, adjust=False):
    """Reserve a workspace/username for the currently authenticated user.

    With adjust=False, the exact `username` must be valid and free.
    With adjust=True, candidate names are derived from `username` and the
    user's OpenID claims (slugified, made safe), and numeric suffixes are
    appended until a free name is successfully reserved.

    :raises LaymanError: 34 when the current user already has a username,
        35 when the exact requested name is taken (adjust=False)
    """
    current_username = authn.get_authn_username()
    if is_user_with_name(current_username):
        # one authenticated user may hold only one username
        raise LaymanError(34, {'username': current_username})
    if adjust is not True:
        # strict mode: the requested name must be valid and unused
        check_workspace_name(username)
        workspaces = get_workspaces()
        if username in workspaces:
            raise LaymanError(35)
        try:
            ensure_whole_user(username)
            claims = get_open_id_claims()
            _save_reservation(username, claims)
        except LaymanError as exc:
            # roll back the partially created user structures
            delete_whole_user(username)
            raise exc
        return
    claims = get_open_id_claims()
    suggestions = [username] + get_username_suggestions_from_claims(claims)
    suggestions = [
        slugify(s) for s in suggestions if s is not None and len(s) > 0
    ]
    suggestions = to_safe_names(suggestions, 'user')
    workspaces = get_workspaces()
    username = None
    idx = 0
    while True:
        for suggestion in suggestions:
            if idx > 0:
                # second and later rounds append a numeric suffix
                suggestion = f"{suggestion}{idx}"
            try:
                check_workspace_name(suggestion)
            except LaymanError as exc:
                # NOTE(review): codes 2 (invalid name) and 35 (name taken) are
                # tolerated here and the candidate is still tried below —
                # presumably caught by the `workspaces` check or by
                # ensure_whole_user failing; confirm this is intended
                if not (exc.code == 2 or exc.code == 35):
                    raise exc
            if suggestion in workspaces:
                continue
            try:
                ensure_whole_user(suggestion)
                username = suggestion
                _save_reservation(username, claims)
                break
            except LaymanError:
                # candidate failed mid-way; clean up and try the next one
                delete_whole_user(suggestion)
        if username is not None:
            break
        idx += 1
def get(workspace, mapname):
    """Return the stored map-composition JSON of the map.

    :raises LaymanError: 27 when the map file does not exist
    """
    app.logger.info(f"GET Map File, actor={g.user}")
    map_json = util.get_map_file_json(workspace, mapname)
    if map_json is None:
        raise LaymanError(27, {'mapname': mapname})
    return jsonify(map_json), 200
def layer_file_chunk_info(workspace, layername):
    """Assemble finished resumable uploads and report upload progress.

    For every expected file whose chunks are all present on disk, the chunks
    are concatenated (in order) into the target file and deleted, and the
    file's total-chunks entry is removed from Redis. When all target files
    exist, the resumable upload directory is cleaned up via delete_layer.

    :return: tuple (all_files_saved, num_files_saved, num_chunks_saved)
    :raises LaymanError: 20 when no upload is in progress (info.json missing)
    """
    resumable_dir = get_layer_resumable_dir(workspace, layername)
    info_path = os.path.join(resumable_dir, 'info.json')
    chunk_dir = os.path.join(resumable_dir, 'chunks')
    if not os.path.isfile(info_path):
        raise LaymanError(20)
    with open(info_path, 'r') as info_file:
        info = json.load(info_file)
    files_to_upload = info['files_to_upload']
    r_key = get_layer_redis_total_chunks_key(workspace, layername)
    for file in files_to_upload:
        rh_key = f'{file["layman_original_parameter"]}:{file["target_file"]}'
        total_chunks = settings.LAYMAN_REDIS.hget(r_key, rh_key)
        # the total-chunks entry appears in Redis only after the first chunk
        # of this file arrives; skip files whose upload has not started yet
        if total_chunks is None:
            continue
        total_chunks = int(total_chunks)
        target_fn = os.path.basename(file['target_file'])
        chunk_paths = [
            os.path.join(chunk_dir, _get_chunk_name(target_fn, x))
            for x in range(1, total_chunks + 1)
        ]
        file_upload_complete = all(os.path.exists(p) for p in chunk_paths)
        if file_upload_complete:
            current_app.logger.info('file_upload_complete ' + target_fn)
            target_fp = file['target_file']
            input_file.ensure_layer_input_file_dir(workspace, layername)
            with open(target_fp, "ab") as target_file:
                for chunk_path in chunk_paths:
                    # context manager guarantees the chunk file is closed
                    # even if the write fails (it was leaked on error before)
                    with open(chunk_path, 'rb') as stored_chunk_file:
                        target_file.write(stored_chunk_file.read())
                    os.unlink(chunk_path)
            settings.LAYMAN_REDIS.hdel(r_key, rh_key)
            current_app.logger.info('Resumable file saved to: %s', target_fp)
    num_files_saved = len([
        fi for fi in files_to_upload
        if os.path.exists(fi['target_file'])
    ])
    all_files_saved = num_files_saved == len(files_to_upload)
    if all_files_saved:
        delete_layer(workspace, layername)
        num_chunks_saved = 0
    else:
        num_chunks_saved = len(os.listdir(chunk_dir))
    return all_files_saved, num_files_saved, num_chunks_saved
def get_qml_geometry_from_qml(qml):
    """Derive the geometry category ('Point', 'Line' or 'Polygon') from the
    symbol types declared in a QML style document.

    :param qml: parsed QML document supporting xpath()
    :raises LaymanError: 47 when no, mixed, or unknown symbol types are found
    """
    geometry_by_symbol = {
        'marker': 'Point',
        'line': 'Line',
        'fill': 'Polygon',
    }
    found_types = {
        str(value)
        for value in qml.xpath('/qgis/renderer-v2/symbols/symbol/@type')
    }
    if not found_types:
        raise LaymanError(47, data='Symbol type not found in QML.')
    if len(found_types) > 1:
        raise LaymanError(47, data=f'Mixed symbol types in QML: {found_types}')
    (symbol_type,) = found_types
    if symbol_type not in geometry_by_symbol:
        raise LaymanError(47, data=f'Unknown QGIS symbol type "{symbol_type}".')
    return geometry_by_symbol[symbol_type]
def fill_layer_template(workspace, layer, uuid, native_bbox, qml_xml, source_type, attrs_to_ensure):
    """Build a QGIS <maplayer> XML string for the layer by filling the layer
    template with DB connection values and merging in elements from the
    uploaded QML style.

    :param qml_xml: parsed QML document (lxml tree); it is modified in place
        (attribute names laundered, missing attributes added)
    :return: pretty-printed <maplayer> XML string
    :raises LaymanError: 47 when a QML element collides with a template
        element that is not allowed to be rewritten
    """
    db_schema = workspace
    layer_name = layer
    wkb_type = source_type
    qml_geometry = get_qml_geometry_from_qml(qml_xml)
    db_table = layer
    template_path = get_layer_template_path()
    with open(template_path, 'r') as template_file:
        template_str = template_file.read()
    skeleton_xml_str = template_str.format(
        db_name=settings.LAYMAN_PG_DBNAME,
        db_host=settings.LAYMAN_PG_HOST,
        db_port=settings.LAYMAN_PG_PORT,
        db_user=settings.LAYMAN_PG_USER,
        db_password=settings.LAYMAN_PG_PASSWORD,
        source_type=source_type,
        db_schema=db_schema,
        db_table=db_table,
        layer_name=layer_name,
        layer_uuid=uuid,
        wkb_type=wkb_type,
        qml_geometry=qml_geometry,
        extent=extent_to_xml_string(native_bbox),
        default_action_canvas_value='{00000000-0000-0000-0000-000000000000}')
    launder_attribute_names(qml_xml)
    ensure_attributes_in_qml(qml_xml, attrs_to_ensure)
    parser = ET.XMLParser(remove_blank_text=True)
    layer_xml = ET.fromstring(skeleton_xml_str.encode('utf-8'), parser=parser)
    layer_el_tags = [el.tag for el in layer_xml.xpath('/maplayer/*')]
    # merge QML elements into the template: replace the elements the template
    # allows to be rewritten, append elements the template does not have
    for qml_el in qml_xml.xpath('/qgis/*'):
        # print(f"qml_el={qml_el.tag}")
        tag = qml_el.tag
        if tag in layer_el_tags:
            if tag in ELEMENTS_TO_REWRITE:
                layer_el = layer_xml.xpath(f'/maplayer/{tag}')[0]
                layer_el.getparent().replace(layer_el, copy.deepcopy(qml_el))
            else:
                raise LaymanError(
                    47,
                    data=f'Element {tag} already present in layer template.')
        else:
            layer_xml.append(copy.deepcopy(qml_el))
    # copy root attributes of the QML onto the <maplayer> root, except
    # the QML version attribute
    skip_attrs = ['version']
    qml_root = qml_xml.getroot()
    for attr_name, attr_value in qml_root.attrib.items():
        if attr_name in skip_attrs:
            continue
        layer_xml.attrib[attr_name] = attr_value
    full_xml_str = ET.tostring(layer_xml, encoding='unicode', pretty_print=True)
    return full_xml_str
def delete_map(workspace, mapname):
    """Delete the CSW metadata record of the map; no-op when the map has
    no metadata record.

    :raises LaymanError: 38 when the CSW service is unreachable or errors out
    """
    uuid = get_map_uuid(workspace, mapname)
    muuid = get_metadata_uuid(uuid)
    if muuid is None:
        # nothing was ever published to CSW for this map
        return
    try:
        common_util.csw_delete(muuid)
    except (HTTPError, ConnectionError) as exc:
        current_app.logger.info(traceback.format_exc())
        raise LaymanError(38) from exc
def get_integer_from_param(request_args, param_name, negative=True, zero=True, positive=True):
    """Parse an optional integer request parameter with sign constraints.

    :param request_args: dict-like request arguments
    :param negative, zero, positive: which sign classes are accepted
    :return: the parsed integer, or None when the parameter is absent/empty
    :raises LaymanError: 2 when the value is not an integer or violates
        the sign constraints
    """
    assert negative or zero or positive
    if not request_args.get(param_name):
        return None
    match = re.match(consts.INTEGER_PATTERN, request_args[param_name])
    if not match:
        raise LaymanError(2, {
            'parameter': param_name,
            'expected': {
                'text': 'Integer with optional sign',
                'regular_expression': consts.INTEGER_PATTERN,
            }
        })
    integer = int(match.groups()[0])
    if integer < 0 and not negative:
        comparison = '>' + ('=' if zero else '')
        raise LaymanError(2, {
            'parameter': param_name,
            'expected': f'value {comparison} 0'
        })
    if integer == 0 and not zero:
        comparison = ('<' if negative else '') + ('>' if positive else '')
        raise LaymanError(2, {
            'parameter': param_name,
            'expected': f'value {comparison} 0'
        })
    if integer > 0 and not positive:
        comparison = '<' + ('=' if zero else '')
        raise LaymanError(2, {
            'parameter': param_name,
            'expected': f'value {comparison} 0'
        })
    return integer
def get(workspace, layername):
    """Return the layer thumbnail as a PNG file.

    :raises LaymanError: 16 when the thumbnail does not exist yet
    """
    # log message unified with sibling endpoints, which report `actor=`
    app.logger.info(f"GET Layer Thumbnail, actor={g.user}")
    thumbnail_info = thumbnail.get_layer_info(workspace, layername)
    if thumbnail_info:
        # local naming unified with the map-thumbnail endpoint
        workspace_dir = get_workspace_dir(workspace)
        thumbnail_path = thumbnail_info['thumbnail']['path']
        thumbnail_path = os.path.join(workspace_dir, thumbnail_path)
        return send_file(thumbnail_path, mimetype='image/png')
    raise LaymanError(16, {'layername': layername})
def get(workspace, mapname):
    """Return the map thumbnail as a PNG file.

    :raises LaymanError: 16 when the thumbnail does not exist yet
    """
    app.logger.info(f"GET Map Thumbnail, actor={g.user}")
    thumbnail_info = thumbnail.get_map_info(workspace, mapname)
    if not thumbnail_info:
        raise LaymanError(16, {'mapname': mapname})
    full_path = os.path.join(
        get_workspace_dir(workspace),
        thumbnail_info['thumbnail']['path'],
    )
    return send_file(full_path, mimetype='image/png')
def csw_insert(workspace, layername):
    """Insert a CSW metadata record for the layer and return its metadata uuid.

    :raises LaymanError: 38 when the CSW service is unreachable or errors out
    """
    template_path, prop_values = get_template_path_and_values(workspace, layername, http_method='post')
    record = common_util.fill_xml_template_as_pretty_str(template_path, prop_values, METADATA_PROPERTIES)
    try:
        return common_util.csw_insert({'record': record})
    except (HTTPError, ConnectionError) as exc:
        current_app.logger.info(traceback.format_exc())
        raise LaymanError(38) from exc
def layer_file_chunk_exists(workspace, layername, parameter_name, filename, chunk_number):
    """Return True when the given chunk — or the already assembled target
    file — exists on disk for a resumable layer-file upload.

    :raises LaymanError: 20 when no upload is in progress,
        21 when the filename/parameter pair is not part of the upload
    """
    info = get_info_json(workspace, layername)
    chunk_dir = os.path.join(get_layer_resumable_dir(workspace, layername), 'chunks')
    if not info:
        raise LaymanError(20)
    matching_info = next(
        (
            item for item in info['files_to_upload']
            if item['input_file'] == filename and item['layman_original_parameter'] == parameter_name
        ),
        None,
    )
    if matching_info is None:
        raise LaymanError(21, {
            'file': filename,
            'layman_original_parameter': parameter_name,
        })
    target_filepath = matching_info['target_file']
    chunk_path = os.path.join(
        chunk_dir,
        _get_chunk_name(os.path.basename(target_filepath), chunk_number),
    )
    return os.path.exists(chunk_path) or os.path.exists(target_filepath)
def soap_insert_record(record, is_public):
    """Insert a metadata record through the SOAP API and return its uuid.

    :param is_public: whether the record is publicly readable
    :raises LaymanError: 38 when the service is unreachable or errors out
    """
    try:
        csw_user = settings.CSW_BASIC_AUTHN[0]
        return soap_insert({
            'public': '1' if is_public else '0',
            'record': record,
            'edit_user': csw_user,
            'read_user': csw_user,
        })
    except (HTTPError, ConnectionError) as exc:
        current_app.logger.info(traceback.format_exc())
        raise LaymanError(38) from exc
def delete_layer(workspace, layername, *, backup_uuid=None):
    """Delete the CSW metadata record of the layer; no-op when the layer
    has no metadata record.

    :param backup_uuid: uuid to fall back to when the layer's own uuid is
        no longer available; must agree with it when both exist
    :raises LaymanError: 38 when the CSW service is unreachable or errors out
    """
    layer_uuid = get_layer_uuid(workspace, layername) or backup_uuid
    if backup_uuid and layer_uuid:
        assert backup_uuid == layer_uuid
    muuid = get_metadata_uuid(layer_uuid)
    if muuid is None:
        return
    try:
        common_util.csw_delete(muuid)
    except (HTTPError, ConnectionError) as exc:
        current_app.logger.info(traceback.format_exc())
        raise LaymanError(38) from exc
def open_raster_file(filepath, mode=gdal.GA_ReadOnly):
    """Open a raster file with GDAL.

    :param filepath: path to the raster file
    :param mode: GDAL access mode (read-only by default)
    :return: the opened gdal dataset
    :raises LaymanError: 2 when GDAL cannot open the file
    """
    dataset = gdal.Open(filepath, mode)
    if dataset:
        return dataset
    raise LaymanError(2, {
        'parameter': 'file',
        'message': "Unable to open raster file.",
        'expected': "At least one file with any of extensions: .geojson, .shp, .tiff, .tif, .jp2, .png, .jpg, .jpeg; or one of them in single .zip file.",
        'file': filepath,
    })