def get_workspaces(conn_cur=None):
    """Return names of all user workspace schemas in the database.

    A workspace schema is any schema owned by ``settings.LAYMAN_PG_USER``
    whose name is not among the reserved non-user schemas.

    :param conn_cur: optional ``(connection, cursor)`` tuple; a new one is
        created when not given
    :return: list of schema names
    :raises LaymanError: code 7 when the database query fails
    """
    if conn_cur is None:
        conn_cur = db_util.get_connection_cursor()
    _, cur = conn_cur
    try:
        # Parameterized query instead of f-string interpolation to avoid
        # quoting problems / SQL injection; psycopg2 adapts the tuple
        # into a parenthesized value list for the IN clause.
        cur.execute(
            """select schema_name
            from information_schema.schemata
            where schema_name NOT IN %s AND schema_owner = %s""",
            (tuple(settings.PG_NON_USER_SCHEMAS), settings.LAYMAN_PG_USER),
        )
    except BaseException as exc:
        logger.error('get_workspaces ERROR')
        raise LaymanError(7) from exc
    rows = cur.fetchall()
    return [r[0] for r in rows]
def raise_layman_error(response, status_codes_to_skip=None):
    """Validate an HTTP response, raising LaymanError for error statuses.

    Status code 200 is always accepted; any additional acceptable codes are
    passed in *status_codes_to_skip*.

    :param response: a ``requests``-like response object
    :param status_codes_to_skip: optional collection of extra status codes
        that are not treated as errors; the caller's collection is never
        modified
    :raises LaymanError: for 4xx responses, rebuilt from the JSON body
        (``code``, ``detail``, ``sub_code``)
    """
    # Copy before mutating: the original added 200 directly into the
    # caller's set, silently changing the argument object.
    status_codes_to_skip = set(status_codes_to_skip) if status_codes_to_skip else set()
    status_codes_to_skip.add(200)
    if 400 <= response.status_code < 500 and response.status_code not in status_codes_to_skip:
        # Propagate the server-side LaymanError to the caller.
        details = json.loads(response.text)
        raise LaymanError(details['code'],
                          details.get('detail'),
                          http_code=response.status_code,
                          sub_code=details.get('sub_code'))
    if response.status_code not in status_codes_to_skip:
        logger.error(
            f'raise_layman_error: response.status_code={response.status_code}, response.text={response.text}'
        )
        response.raise_for_status()
    assert response.status_code in status_codes_to_skip, f"response.status_code={response.status_code}\nresponse.text={response.text}"
    assert 'Deprecation' not in response.headers, f'This is deprecated URL! Use new one. headers={response.headers}'
def get_text_column_names(username, layername, conn_cur=None):
    """Return quoted names of all text-like columns of a layer's table.

    :param username: workspace (database schema) name
    :param layername: layer (database table) name
    :param conn_cur: optional ``(connection, cursor)`` tuple
    :return: list of column names, each quoted via QUOTE_IDENT
    :raises LaymanError: code 7 when the database query fails
    """
    _, cur = conn_cur or db_util.get_connection_cursor()
    try:
        # Parameterized to avoid SQL injection through schema/table names.
        cur.execute("""
SELECT QUOTE_IDENT(column_name) AS column_name
FROM information_schema.columns
WHERE table_schema = %s
AND table_name = %s
AND data_type IN ('character varying', 'varchar', 'character', 'char', 'text')
""", (username, layername))
    except BaseException as exc:
        logger.error('get_text_column_names ERROR')
        raise LaymanError(7) from exc
    rows = cur.fetchall()
    return [r[0] for r in rows]
def finish_gdal_process(process):
    # Finalize a GDAL subprocess: abort and clean up if the surrounding
    # task was cancelled, otherwise fail when the process exited non-zero.
    # NOTE(review): `self`, `workspace`, `layername`, `gdal` and
    # `AbortedException` come from the enclosing scope (this is a closure
    # defined inside a task method) — confirm against the full file.
    if self.is_aborted():
        logger.info(
            f'terminating GDAL process workspace.layer={workspace}.{layername}'
        )
        process.terminate()
        logger.info(
            f'terminated GDAL process workspace.layer={workspace}.{layername}'
        )
        # Remove the partially created layer before signalling the abort.
        gdal.delete_layer(workspace, layername)
        raise AbortedException
    # poll() returns the exit code of a finished process; non-zero means
    # GDAL failed — surface its output as LaymanError 50.
    return_code = process.poll()
    if return_code != 0:
        gdal_error = str(process.stdout.read())
        logger.error(f"STDOUT: {gdal_error}")
        raise LaymanError(50, private_data=gdal_error)
def refresh_table(
        self,
        workspace,
        layername,
        crs_id=None,
):
    """(Re)import a vector layer's data file into its database table.

    Raster layers are skipped; unknown file types raise NotImplementedError.
    The import runs as a subprocess that is polled until completion and is
    terminated (with layer cleanup) if the task gets aborted.

    :param workspace: workspace name
    :param layername: layer name
    :param crs_id: optional CRS identifier forced for the import
    :raises AbortedException: when the task is aborted before or during import
    :raises LaymanError: code 28 (bad identifier) or 11 on import failure
    """
    db.ensure_workspace(workspace)
    if self.is_aborted():
        raise AbortedException
    publ_info = layman_util.get_publication_info(workspace, LAYER_TYPE, layername, context={'keys': ['file']})
    file_type = publ_info['file']['file_type']
    if file_type == settings.FILE_TYPE_RASTER:
        # Raster data is not stored in DB tables; nothing to do.
        return
    if file_type != settings.FILE_TYPE_VECTOR:
        raise NotImplementedError(f"Unknown file type: {file_type}")
    if self.is_aborted():
        raise AbortedException
    main_filepath = publ_info['_file']['gdal_path']
    process = db.import_layer_vector_file_async(workspace, layername, main_filepath, crs_id)
    # Busy-wait until the subprocess exits or the task is aborted.
    while process.poll() is None and not self.is_aborted():
        pass
    if self.is_aborted():
        logger.info(f'terminating {workspace} {layername}')
        process.terminate()
        # Fixed copy-paste defect: this log previously repeated
        # 'terminating'; it reports completion of the termination
        # (consistent with finish_gdal_process).
        logger.info(f'terminated {workspace} {layername}')
        table.delete_layer(workspace, layername)
        raise AbortedException
    return_code = process.poll()
    output = process.stdout.read()
    if return_code != 0 or output:
        # The importer may emit warnings yet still create the table, so
        # only fail when the table is really missing.
        info = table.get_layer_info(workspace, layername)
        if not info:
            pg_error = str(output)
            logger.error(f"STDOUT: {pg_error}")
            if "ERROR:  zero-length delimited identifier at or near" in pg_error:
                err_code = 28
            else:
                err_code = 11
            raise LaymanError(err_code, private_data=pg_error)
    crs = db.get_crs(workspace, layername)
    if crs_def.CRSDefinitions[crs].srid:
        table.set_layer_srid(workspace, layername, crs_def.CRSDefinitions[crs].srid)
def get_layer_info(workspace, layername, conn_cur=None):
    """Return basic DB info about a layer's table, or {} if it is absent.

    The table must live in the *workspace* schema and be owned by
    ``settings.LAYMAN_PG_USER``.

    :param workspace: workspace (database schema) name
    :param layername: layer (database table) name
    :param conn_cur: optional ``(connection, cursor)`` tuple
    :return: ``{'db_table': {'name': layername}}`` when the table exists,
        empty dict otherwise
    :raises LaymanError: code 7 when the database query fails
    """
    if conn_cur is None:
        conn_cur = db_util.get_connection_cursor()
    _, cur = conn_cur
    try:
        # Parameterized query instead of f-string interpolation to avoid
        # quoting problems / SQL injection.
        cur.execute("""
SELECT schemaname, tablename, tableowner
FROM pg_tables
WHERE schemaname = %s
AND tablename = %s
AND tableowner = %s
""", (workspace, layername, settings.LAYMAN_PG_USER))
    except BaseException as exc:
        raise LaymanError(7) from exc
    rows = cur.fetchall()
    result = {}
    if len(rows) > 0:
        result = {
            'db_table': {
                'name': layername,
            },
        }
    return result
def get_most_frequent_lower_distance2(workspace, layername, conn_cur=None):
    """Return the most frequent lower distance of a layer, or None.

    The helper query's first row yields (distance, frequency,
    number_of_distances); the distance is returned only when its frequency
    exceeds 3 % of all measured distances.

    :raises LaymanError: code 7 when the database query fails
    """
    _, cursor = conn_cur or db_util.get_connection_cursor()
    sql = get_most_frequent_lower_distance_query(workspace, layername, ['st_area', 'Box2D'])
    try:
        cursor.execute(sql)
    except BaseException as exc:
        logger.error('get_most_frequent_lower_distance2 ERROR')
        raise LaymanError(7) from exc
    fetched = cursor.fetchall()
    if not fetched:
        return None
    distance, frequency, total = fetched[0]
    return distance if frequency / total > 0.03 else None
def patch(workspace, layername):
    """REST PATCH endpoint updating an existing layer (older variant).

    Accepts optional data files (multipart streams or chunk-upload file
    names), CRS, title, description and a style file; deletes the replaced
    source, saves new inputs and triggers the async patch chain.
    Returns the complete layer info as JSON with HTTP 200.
    """
    app.logger.info(f"PATCH Layer, user={g.user}")
    info = util.get_complete_layer_info(cached=True)
    # Defaults are taken from the current publication so an omitted form
    # field keeps its previous value.
    kwargs = {
        'title': info.get('title', info['name']),
        'description': info.get('description', ''),
    }
    # FILE
    use_chunk_upload = False
    files = []
    if 'file' in request.files:
        files = [
            f for f in request.files.getlist("file")
            if len(f.filename) > 0
        ]
    if len(files) == 0 and len(request.form.getlist('file')) > 0:
        # Only file *names* were sent — the data itself will arrive later
        # via chunk upload.
        files = [
            filename for filename in request.form.getlist('file')
            if len(filename) > 0
        ]
        if len(files) > 0:
            use_chunk_upload = True
    # CRS
    crs_id = None
    if len(files) > 0 and len(request.form.get('crs', '')) > 0:
        crs_id = request.form['crs']
        if crs_id not in settings.INPUT_SRS_LIST:
            raise LaymanError(2, {'parameter': 'crs', 'supported_values': settings.INPUT_SRS_LIST})
    check_crs = crs_id is None
    # TITLE
    if len(request.form.get('title', '')) > 0:
        kwargs['title'] = request.form['title']
    # DESCRIPTION
    if len(request.form.get('description', '')) > 0:
        kwargs['description'] = request.form['description']
    # SLD
    style_file = None
    if 'style' in request.files and not request.files['style'].filename == '':
        style_file = request.files['style']
    elif 'sld' in request.files and not request.files['sld'].filename == '':
        # 'sld' is the legacy parameter name for the style file.
        style_file = request.files['sld']
    delete_from = None
    if style_file:
        style_type = input_style.get_style_type_from_file_storage(style_file)
        kwargs['style_type'] = style_type
        kwargs['store_in_geoserver'] = style_type.store_in_geoserver
        delete_from = 'layman.layer.qgis.wms'
    if len(files) > 0:
        delete_from = 'layman.layer.filesystem.input_file'
    # FILE NAMES
    if delete_from == 'layman.layer.filesystem.input_file':
        if use_chunk_upload:
            filenames = files
        else:
            filenames = [f.filename for f in files]
        input_file.check_filenames(workspace, layername, filenames,
                                   check_crs, ignore_existing_files=True)
    props_to_refresh = util.get_same_or_missing_prop_names(workspace, layername)
    kwargs['metadata_properties_to_refresh'] = props_to_refresh
    layer_result = {}
    if delete_from is not None:
        request_method = request.method.lower()
        deleted = util.delete_layer(workspace, layername, source=delete_from, http_method=request_method)
        if style_file is None:
            # Re-use the style recovered from the deleted source, if any.
            try:
                style_file = deleted['style']['file']
            except KeyError:
                pass
        style_type = input_style.get_style_type_from_file_storage(style_file)
        kwargs['style_type'] = style_type
        kwargs['store_in_geoserver'] = style_type.store_in_geoserver
        if style_file:
            input_style.save_layer_file(workspace, layername, style_file, style_type)
        kwargs.update({
            'crs_id': crs_id,
            'ensure_user': False,
            'http_method': request_method,
            'metadata_properties_to_refresh': props_to_refresh,
        })
        if delete_from == 'layman.layer.filesystem.input_file':
            if use_chunk_upload:
                files_to_upload = input_chunk.save_layer_files_str(
                    workspace, layername, files, check_crs)
                layer_result.update({
                    'files_to_upload': files_to_upload,
                })
                kwargs.update({
                    'check_crs': check_crs,
                })
            else:
                input_file.save_layer_files(
                    workspace, layername, files, check_crs)
    kwargs.update({'actor_name': authn.get_authn_username()})
    rest_util.setup_patch_access_rights(request.form, kwargs)
    util.pre_publication_action_check(workspace,
                                      layername,
                                      kwargs,
                                      )
    util.patch_layer(
        workspace,
        layername,
        kwargs,
        delete_from,
        'layman.layer.filesystem.input_chunk' if use_chunk_upload else delete_from
    )
    app.logger.info('PATCH Layer changes done')
    info = util.get_complete_layer_info(workspace, layername)
    info.update(layer_result)
    return jsonify(info), 200
def migrate_users_and_publications():
    """One-time migration: register workspaces, users and publications in DB.

    For every filesystem workspace, ensures a workspace row; workspaces with
    authn info become personal (user) workspaces, all others become public
    (everyone can write). Every known layer and map is inserted into the
    publications table.

    :raises LaymanError: when two workspaces claim the same OAuth2 identity
    """
    workspace_names = global_get_workspaces(use_cache=False)
    layer_context = {
        'sources_filter': 'layman.layer.filesystem.uuid, layman.layer.filesystem.input_chunk, '
                          'layman.layer.filesystem.input_file, layman.layer.filesystem.input_style, layman.layer.db.table, '
                          'layman.layer.qgis.wms, layman.layer.geoserver.wfs, layman.layer.geoserver.wms, '
                          'layman.layer.geoserver.sld, layman.layer.filesystem.thumbnail, layman.layer.micka.soap'
    }
    map_context = {
        'sources_filter': 'layman.map.filesystem.uuid, layman.map.filesystem.input_file, layman.map.filesystem.thumbnail, '
                          'layman.map.micka.soap'
    }
    for workspace_name in workspace_names:
        userinfo = get_authn_info(workspace_name)
        id_workspace = workspaces.ensure_workspace(workspace_name)
        if userinfo:
            # It is personal workspace
            iss_sub_infos = users.get_user_infos(iss_sub={
                'issuer_id': userinfo["iss_id"],
                'sub': userinfo["sub"]
            })
            if iss_sub_infos:
                # The same (issuer, sub) identity is already bound to another
                # workspace — cannot decide automatically which one wins.
                username_in_conflict, iss_sub_info = iss_sub_infos.popitem()
                raise LaymanError(
                    f"Two workspaces are registered as private workspaces of the same user. To migrate successfully, "
                    f"choose which workspace should be the only private workspace of the user, delete authn.txt file "
                    f"from the other workspace, and restart layman. The other workspace becomes public.",
                    data={
                        'user': iss_sub_info,
                        'workspaces': [workspace_name, username_in_conflict]
                    })
            userinfo['issuer_id'] = userinfo['iss_id']
            users.ensure_user(id_workspace, userinfo)
            everyone_can_write = False
        else:
            # It is public workspace, so all publications are available to everybody
            everyone_can_write = True
        for (publ_type, infos_method, context) in [(LAYER_TYPE, get_layer_infos, layer_context),
                                                   (MAP_TYPE, get_map_infos, map_context)]:
            publications = infos_method(workspace_name)
            for name in publications:
                info = layman_util.get_publication_info(workspace_name, publ_type, name, context)
                # everyone_can_read is always True during this migration.
                insert_publications_sql = f'''insert into {DB_SCHEMA}.publications as p
        (id_workspace, name, title, type, uuid, everyone_can_read, everyone_can_write) values
        (%s, %s, %s, %s, %s, %s, %s)
returning id
;'''
                data = (id_workspace,
                        name,
                        info.get("title", name),
                        publ_type,
                        info["uuid"],
                        True,
                        everyone_can_write,
                        )
                db_util.run_statement(insert_publications_sql, data)
def check_username(username):
    """Reject usernames that collide with reserved non-user DB schemas.

    :raises LaymanError: code 35 when *username* is a reserved schema name
    """
    reserved_schemas = settings.PG_NON_USER_SCHEMAS
    if username not in reserved_schemas:
        return
    raise LaymanError(35, {'reserved_by': __name__, 'schema': username})
def post(workspace):
    """REST POST endpoint creating a new layer in *workspace*.

    Accepts data files (multipart streams or chunk-upload file names),
    optional name, CRS, title, description and style file; registers the
    layer UUID, saves inputs under a redis lock and triggers the async
    publication chain. Returns [{'name', 'url', 'uuid', ...}] with HTTP 200.
    """
    app.logger.info(f"POST Layers, user={g.user}")
    # FILE
    use_chunk_upload = False
    files = []
    if 'file' in request.files:
        files = [
            f for f in request.files.getlist("file")
            if len(f.filename) > 0
        ]
    if len(files) == 0 and len(request.form.getlist('file')) > 0:
        # Only file *names* were sent — the data itself will arrive later
        # via chunk upload.
        files = [
            filename for filename in request.form.getlist('file')
            if len(filename) > 0
        ]
        if len(files) > 0:
            use_chunk_upload = True
    if len(files) == 0:
        raise LaymanError(1, {'parameter': 'file'})
    # NAME
    unsafe_layername = request.form.get('name', '')
    if len(unsafe_layername) == 0:
        unsafe_layername = input_file.get_unsafe_layername(files)
    layername = util.to_safe_layer_name(unsafe_layername)
    util.check_layername(layername)
    info = util.get_layer_info(workspace, layername)
    if info:
        # Layer already exists — POST must not overwrite it.
        raise LaymanError(17, {'layername': layername})
    util.check_new_layername(workspace, layername)
    # CRS
    crs_id = None
    if len(request.form.get('crs', '')) > 0:
        crs_id = request.form['crs']
        if crs_id not in settings.INPUT_SRS_LIST:
            raise LaymanError(2, {'parameter': 'crs', 'supported_values': settings.INPUT_SRS_LIST})
    check_crs = crs_id is None
    # TITLE
    if len(request.form.get('title', '')) > 0:
        title = request.form['title']
    else:
        title = layername
    # DESCRIPTION
    description = request.form.get('description', '')
    # Style
    style_file = None
    if 'style' in request.files and not request.files['style'].filename == '':
        style_file = request.files['style']
    elif 'sld' in request.files and not request.files['sld'].filename == '':
        # 'sld' is the legacy parameter name for the style file.
        style_file = request.files['sld']
    style_type = input_style.get_style_type_from_file_storage(style_file)
    actor_name = authn.get_authn_username()
    task_options = {
        'crs_id': crs_id,
        'description': description,
        'title': title,
        'ensure_user': True,
        'check_crs': False,
        'actor_name': actor_name,
        'style_type': style_type,
        'store_in_geoserver': style_type.store_in_geoserver,
    }
    rest_common.setup_post_access_rights(request.form, task_options, actor_name)
    util.pre_publication_action_check(workspace,
                                      layername,
                                      task_options,
                                      )
    layerurl = url_for('rest_workspace_layer.get', layername=layername, workspace=workspace)
    layer_result = {
        'name': layername,
        'url': layerurl,
    }
    # FILE NAMES
    if use_chunk_upload:
        filenames = files
    else:
        filenames = [f.filename for f in files]
    input_file.check_filenames(workspace, layername, filenames, check_crs)
    redis_util.create_lock(workspace, LAYER_TYPE, layername, request.method)
    try:
        # register layer uuid
        uuid_str = uuid.assign_layer_uuid(workspace, layername)
        layer_result.update({
            'uuid': uuid_str,
        })
        task_options.update({'uuid': uuid_str, })
        # save files
        input_style.save_layer_file(workspace, layername, style_file, style_type)
        if use_chunk_upload:
            files_to_upload = input_chunk.save_layer_files_str(
                workspace, layername, files, check_crs)
            layer_result.update({
                'files_to_upload': files_to_upload,
            })
            task_options.update({
                'check_crs': check_crs,
            })
        else:
            input_file.save_layer_files(
                workspace, layername, files, check_crs)
        util.post_layer(
            workspace,
            layername,
            task_options,
            'layman.layer.filesystem.input_chunk' if use_chunk_upload else 'layman.layer.filesystem.input_file'
        )
    except Exception as exc:
        # On failure release the lock; the inner try/finally guarantees the
        # unlock happens even if is_layer_chain_ready itself raises.
        try:
            if util.is_layer_chain_ready(workspace, layername):
                redis_util.unlock_publication(workspace, LAYER_TYPE, layername)
        finally:
            redis_util.unlock_publication(workspace, LAYER_TYPE, layername)
        raise exc
    # app.logger.info('uploaded layer '+layername)
    return jsonify([layer_result]), 200
def authenticate():
    """Authenticate the current request via OAuth2 bearer token.

    Reads the issuer-URL and token HTTP headers; when neither is present the
    request stays anonymous (returns None). Otherwise the token is validated
    against a redis cache or the provider's introspection endpoint, and the
    resulting identity is stored in flask.g.

    :return: None for anonymous requests, otherwise a dict which contains
        'username' when the identity is mapped to a local user
    :raises LaymanError: code 32 with sub_codes 1-9 for the various
        authentication failures
    """
    user = None
    iss_url = request.headers.get(ISS_URL_HEADER, None)
    authz_header = request.headers.get(TOKEN_HEADER, None)
    if authz_header is not None and iss_url is None and len(
            _get_provider_modules()) == 1:
        # Single configured provider — its first auth URL is the default.
        iss_url = _get_provider_modules()[0].AUTH_URLS[0]
        current_app.logger.info(f"\nusing default iss_url={iss_url}")
    if iss_url is None and authz_header is None:
        return user
    if iss_url is None:
        raise LaymanError(
            32,
            f'HTTP header {TOKEN_HEADER} was set, but HTTP header {ISS_URL_HEADER} was not found',
            sub_code=1)
    if authz_header is None:
        raise LaymanError(
            32,
            f'HTTP header {ISS_URL_HEADER} was set, but HTTP header {TOKEN_HEADER} was not found.',
            sub_code=2)
    authz_header_parts = authz_header.split(' ')
    if len(authz_header_parts) != 2:
        raise LaymanError(
            32,
            f'HTTP header {TOKEN_HEADER} must have 2 parts: "Bearer <access_token>", but has {len(authz_header_parts)} parts.',
            sub_code=3)
    if authz_header_parts[0] != 'Bearer':
        raise LaymanError(
            32,
            f'First part of HTTP header {TOKEN_HEADER} must be "Bearer", but it\'s {authz_header_parts[0]}',
            sub_code=4)
    access_token = authz_header_parts[1]
    if len(access_token) == 0:
        raise LaymanError(
            32,
            f'HTTP header {TOKEN_HEADER} contains empty access token. The structure must be "Bearer <access_token>"',
            sub_code=5)
    provider_module = _get_provider_by_auth_url(iss_url)
    if provider_module is None:
        raise LaymanError(
            32,
            f'No OAuth2 provider was found for URL passed in HTTP header {ISS_URL_HEADER}.',
            sub_code=6)
    access_token_info = _get_redis_access_token_info(provider_module, access_token)
    if access_token_info is None:
        # Token not cached — introspect it at the provider, trying each
        # configured client in turn.
        # current_app.logger.info(f"Veryfying cretentials against OAuth2 provider")
        clients = settings.OAUTH2_LIFERAY_CLIENTS
        valid_resp = None
        all_connection_errors = True
        for client in clients:
            try:
                request_data = {
                    k: v
                    for k, v in {
                        'client_id': client['id'],
                        'client_secret': client['secret'],
                        'token': access_token,
                    }.items() if v is not None
                }
                # Timeout shrinks with the number of clients so the whole
                # loop stays bounded (at most ~25 s overall, 15 s per call).
                response = requests.post(provider_module.INTROSPECTION_URL,
                                         data=request_data,
                                         timeout=min(25 / len(clients), 15))
                if response.status_code != 200:
                    continue
                all_connection_errors = False
            except ConnectionError:
                continue
            try:
                r_json = response.json()
                # current_app.logger.info(f"r_json={r_json}")
                if r_json['active'] is True and r_json.get(
                        'token_type', 'Bearer') == 'Bearer':
                    valid_resp = r_json
                    break
            except ValueError:
                # Response body was not valid JSON — try the next client.
                continue
        if all_connection_errors:
            raise LaymanError(
                32,
                f'Introspection endpoint is not reachable or returned status code other than 200.',
                sub_code=8)
        if valid_resp is None:
            raise LaymanError(
                32,
                f'Introspection endpoint claims that access token is not active or it\'s not Bearer token.',
                sub_code=9)
        sub = valid_resp[provider_module.INTROSPECTION_SUB_KEY]
        exp = valid_resp['exp']
        exp_in = math.ceil(exp - time.time())
        # Cache at least 1 s, at most the configured maximum.
        key_exp = max(min(exp_in, settings.LAYMAN_AUTHN_CACHE_MAX_TIMEOUT), 1)
        authn_info = {'sub': sub}
        # current_app.logger.info(f'Cache authn info, info={authn_info}, exp_in={exp_in}')
        _set_redis_access_token_info(provider_module, access_token, authn_info, ex=key_exp)
    else:
        # current_app.logger.info(f"Cretentials verified against Layman cache")
        sub = access_token_info['sub']
    # Publish the authenticated identity into the request context.
    assert FLASK_PROVIDER_KEY not in g
    assert FLASK_ACCESS_TOKEN_KEY not in g
    assert FLASK_SUB_KEY not in g
    g.setdefault(FLASK_PROVIDER_KEY, provider_module)
    g.setdefault(FLASK_ACCESS_TOKEN_KEY, access_token)
    g.setdefault(FLASK_SUB_KEY, sub)
    iss_id = get_iss_id()
    username = get_username(iss_id, sub)
    user = {}
    if username is not None:
        user['username'] = username
    # pylint: disable=assigning-non-slot
    g.user = user
    return user
def check_workspace_name(workspace):
    """Reject workspace names that collide with reserved non-user DB schemas.

    :raises LaymanError: code 35 when *workspace* is a reserved schema name
    """
    reserved_schemas = settings.PG_NON_USER_SCHEMAS
    if workspace not in reserved_schemas:
        return
    raise LaymanError(35, {'reserved_by': __name__, 'schema': workspace})
def check_filenames(workspace, layername, input_files, check_crs, ignore_existing_files=False):
    """Validate the set of uploaded file names for a layer.

    Checks that exactly one main data file (possibly inside a single
    archive) was sent, that shapefiles come with their companion files,
    and that no target file already exists on disk.

    :param input_files: InputFiles-like object describing the upload
    :param check_crs: when True, a shapefile must also include .prj
    :param ignore_existing_files: skip the existing-file conflict check
    :raises LaymanError: code 2 (bad file set), 18 (missing shapefile
        parts) or 3 (conflicting existing paths)
    """
    main_files = input_files.raw_or_archived_main_file_paths
    if len(main_files) > 1:
        raise LaymanError(
            2, {
                'parameter': 'file',
                'expected': 'At most one file with any of extensions: ' +
                            ', '.join(util.get_all_allowed_main_extensions()),
                'files': [
                    os.path.relpath(fp, input_files.saved_paths_dir)
                    for fp in main_files
                ],
            })
    filenames = input_files.raw_or_archived_paths
    if not main_files:
        # No main data file found directly — it must come in an archive.
        if len(input_files.raw_paths_to_archives) > 1:
            raise LaymanError(
                2, {
                    'parameter': 'file',
                    'expected': 'At most one file with extensions: ' +
                                ', '.join(settings.COMPRESSED_FILE_EXTENSIONS.keys()),
                    'files': [
                        os.path.relpath(fp, input_files.saved_paths_dir)
                        for fp in input_files.raw_paths_to_archives
                    ],
                })
        if len(input_files.raw_paths_to_archives) == 0:
            raise LaymanError(
                2, {
                    'parameter': 'file',
                    'message': 'No data file in input.',
                    'expected': 'At least one file with any of extensions: ' +
                                ', '.join(util.get_all_allowed_main_extensions()) +
                                '; or one of them in single .zip file.',
                    'files': [
                        os.path.relpath(fp, input_files.saved_paths_dir)
                        for fp in filenames
                    ],
                })
        # An archive was sent and its content is listable, yet no main data
        # file was found inside it.
        if input_files.is_one_archive_with_available_content:
            raise LaymanError(
                2, {
                    'parameter': 'file',
                    'message': 'Zip file without data file inside.',
                    'expected': 'At least one file with any of extensions: ' +
                                ', '.join(util.get_all_allowed_main_extensions()) +
                                '; or one of them in single .zip file.',
                    'files': [
                        os.path.relpath(fp, input_files.saved_paths_dir)
                        for fp in filenames
                    ],
                })
        main_files = input_files.raw_paths_to_archives
    main_filename = main_files[0]
    basename, ext = map(lambda s: s.lower(), os.path.splitext(main_filename))
    if ext == '.shp':
        # Shapefiles are multi-file: require .dbf and .shx, plus .prj
        # unless the CRS is supplied explicitly.
        lower_filenames = list(map(lambda fn: fn.lower(), filenames))
        shp_exts = ['.dbf', '.shx']
        if check_crs:
            shp_exts.append('.prj')
        missing_exts = list(
            filter(lambda e: basename + e not in lower_filenames, shp_exts))
        if len(missing_exts) > 0:
            detail = {
                'missing_extensions': missing_exts,
                'path': os.path.relpath(main_filename, input_files.saved_paths_dir),
            }
            if '.prj' in missing_exts:
                detail['suggestion'] = 'Missing .prj file can be fixed also ' \
                                       'by setting "crs" parameter.'
            raise LaymanError(18, detail)
    input_file_dir = get_layer_input_file_dir(workspace, layername)
    filename_mapping, _ = get_file_name_mappings(input_files.raw_paths,
                                                 main_filename, layername,
                                                 input_file_dir)
    if not ignore_existing_files:
        conflict_paths = [
            filename_mapping[k]
            for k, v in filename_mapping.items()
            if v is not None and os.path.exists(os.path.join(input_file_dir, v))
        ]
        if len(conflict_paths) > 0:
            raise LaymanError(3, conflict_paths)
def check_reserved_workspace_names(workspace_name):
    """Reject names listed in settings.RESERVED_WORKSPACE_NAMES.

    :raises LaymanError: code 35 when *workspace_name* is reserved
    """
    if workspace_name not in settings.RESERVED_WORKSPACE_NAMES:
        return
    raise LaymanError(35, {
        'reserved_by': 'RESERVED_WORKSPACE_NAMES',
        'workspace': workspace_name
    })
def patch(workspace, layername):
    """REST PATCH endpoint updating an existing layer (InputFiles variant).

    Accepts optional data files (multipart streams or chunk-upload file
    names), CRS, title, description and a style file; validates that raster
    data is not combined with QML style, deletes the replaced source, saves
    new inputs and triggers the async patch chain. Returns the complete
    layer info as JSON with HTTP 200.
    """
    app.logger.info(f"PATCH Layer, actor={g.user}")
    info = util.get_complete_layer_info(cached=True)
    # Defaults come from the current publication so omitted form fields
    # keep their previous values.
    kwargs = {
        'title': info.get('title', info['name']) or '',
        'description': info.get('description', '') or '',
    }
    # FILE
    sent_file_streams = []
    sent_file_paths = []
    if 'file' in request.files:
        sent_file_streams = [
            f for f in request.files.getlist("file")
            if len(f.filename) > 0
        ]
    if len(sent_file_streams) == 0 and len(request.form.getlist('file')) > 0:
        # Only file *names* were sent — data arrives later via chunk upload.
        sent_file_paths = [
            filename for filename in request.form.getlist('file')
            if len(filename) > 0
        ]
    input_files = fs_util.InputFiles(sent_streams=sent_file_streams, sent_paths=sent_file_paths)
    # CRS
    crs_id = None
    if len(input_files.raw_paths) > 0 and len(request.form.get('crs', '')) > 0:
        crs_id = request.form['crs']
        if crs_id not in settings.INPUT_SRS_LIST:
            raise LaymanError(2, {
                'parameter': 'crs',
                'supported_values': settings.INPUT_SRS_LIST
            })
    check_crs = crs_id is None
    # TITLE
    if len(request.form.get('title', '')) > 0:
        kwargs['title'] = request.form['title']
    # DESCRIPTION
    if len(request.form.get('description', '')) > 0:
        kwargs['description'] = request.form['description']
    # SLD
    style_file = None
    if 'style' in request.files and not request.files['style'].filename == '':
        style_file = request.files['style']
    elif 'sld' in request.files and not request.files['sld'].filename == '':
        # 'sld' is the legacy parameter name for the style file.
        style_file = request.files['sld']
    delete_from = None
    style_type = None
    if style_file:
        style_type = input_style.get_style_type_from_file_storage(style_file)
        kwargs['style_type'] = style_type
        kwargs['store_in_geoserver'] = style_type.store_in_geoserver
        delete_from = 'layman.layer.qgis.wms'
    if len(input_files.raw_paths) > 0:
        delete_from = 'layman.layer.filesystem.input_file'
    # FILE NAMES
    use_chunk_upload = bool(input_files.sent_paths)
    if delete_from == 'layman.layer.filesystem.input_file':
        if not (use_chunk_upload and input_files.is_one_archive):
            input_file.check_filenames(workspace, layername, input_files,
                                       check_crs, ignore_existing_files=True)
        # file checks
        if not use_chunk_upload:
            # Save into a temp dir first; moved into place only after all
            # checks pass (see shutil.move below).
            temp_dir = tempfile.mkdtemp(prefix="layman_")
            input_file.save_layer_files(workspace, layername, input_files,
                                        check_crs, output_dir=temp_dir)
    if input_files.raw_paths:
        file_type = input_file.get_file_type(input_files.raw_or_archived_main_file_path)
    else:
        file_type = layman_util.get_publication_info(workspace, LAYER_TYPE, layername,
                                                     context={'keys': ['file']})['file']['file_type']
    if style_type:
        style_type_for_check = style_type.code
    else:
        style_type_for_check = layman_util.get_publication_info(
            workspace, LAYER_TYPE, layername, context={'keys': ['style_type']})['style_type']
    if file_type == settings.FILE_TYPE_RASTER and style_type_for_check == 'qml':
        raise LaymanError(48, f'Raster layers are not allowed to have QML style.')
    props_to_refresh = util.get_same_or_missing_prop_names(workspace, layername)
    kwargs['metadata_properties_to_refresh'] = props_to_refresh
    layer_result = {}
    if delete_from is not None:
        request_method = request.method.lower()
        deleted = util.delete_layer(workspace, layername, source=delete_from, http_method=request_method)
        if style_file is None:
            # Re-use the style recovered from the deleted source, if any.
            try:
                style_file = deleted['style']['file']
            except KeyError:
                pass
        style_type = input_style.get_style_type_from_file_storage(style_file)
        kwargs['style_type'] = style_type
        kwargs['store_in_geoserver'] = style_type.store_in_geoserver
        if style_file:
            input_style.save_layer_file(workspace, layername, style_file, style_type)
        kwargs.update({
            'crs_id': crs_id,
            'http_method': request_method,
            'metadata_properties_to_refresh': props_to_refresh,
        })
        if delete_from == 'layman.layer.filesystem.input_file':
            if use_chunk_upload:
                files_to_upload = input_chunk.save_layer_files_str(
                    workspace, layername, input_files, check_crs)
                layer_result.update({
                    'files_to_upload': files_to_upload,
                })
                kwargs.update({
                    'check_crs': check_crs,
                })
            else:
                # Checks passed — move the already-saved files into place.
                shutil.move(
                    temp_dir,
                    input_file.get_layer_input_file_dir(workspace, layername))
    kwargs.update({'actor_name': authn.get_authn_username()})
    rest_util.setup_patch_access_rights(request.form, kwargs)
    util.pre_publication_action_check(
        workspace,
        layername,
        kwargs,
    )
    util.patch_layer(
        workspace,
        layername,
        kwargs,
        delete_from,
        'layman.layer.filesystem.input_chunk' if use_chunk_upload else delete_from)
    app.logger.info('PATCH Layer changes done')
    info = util.get_complete_layer_info(workspace, layername)
    info.update(layer_result)
    return jsonify(info), 200
def post(workspace): app.logger.info(f"POST Maps, user={g.user}") # FILE if 'file' in request.files and not request.files['file'].filename == '': file = request.files["file"] else: raise LaymanError(1, {'parameter': 'file'}) file_json = util.check_file(file) # NAME unsafe_mapname = request.form.get('name', '') if len(unsafe_mapname) == 0: unsafe_mapname = input_file.get_unsafe_mapname(file_json) mapname = util.to_safe_map_name(unsafe_mapname) util.check_mapname(mapname) info = util.get_map_info(workspace, mapname) if info: raise LaymanError(24, {'mapname': mapname}) # TITLE if len(request.form.get('title', '')) > 0: title = request.form['title'] elif len(file_json.get('title', '')) > 0: title = file_json['title'] else: title = mapname # DESCRIPTION if len(request.form.get('description', '')) > 0: description = request.form['description'] else: description = file_json.get('abstract', '') mapurl = url_for('rest_workspace_map.get', mapname=mapname, workspace=workspace) redis_util.create_lock(workspace, MAP_TYPE, mapname, request.method) try: map_result = { 'name': mapname, 'url': mapurl, } actor_name = authn.get_authn_username() kwargs = { 'title': title, 'description': description, 'actor_name': actor_name } rest_common.setup_post_access_rights(request.form, kwargs, actor_name) util.pre_publication_action_check( workspace, mapname, kwargs, ) # register map uuid uuid_str = uuid.assign_map_uuid(workspace, mapname) kwargs['uuid'] = uuid_str map_result.update({ 'uuid': uuid_str, }) file = FileStorage(io.BytesIO(json.dumps(file_json).encode()), file.filename) input_file.save_map_files(workspace, mapname, [file]) util.post_map(workspace, mapname, kwargs, 'layman.map.filesystem.input_file') except Exception as exception: try: if util.is_map_chain_ready(workspace, mapname): redis_util.unlock_publication(workspace, MAP_TYPE, mapname) finally: redis_util.unlock_publication(workspace, MAP_TYPE, mapname) raise exception # app.logger.info('uploaded map '+mapname) return 
jsonify([map_result]), 200