def test_get_user_infos():
    """Query user infos for unknown names, then verify lookup of an ensured user.

    Lookup is verified both by username and by issuer/subject pair.
    """
    # queries for non-existent users must not fail
    with app.app_context():
        user_util.get_user_infos()
        user_util.get_user_infos('test2')
        user_util.get_user_infos('asůldghwíeghsdlkfj')

    username = '******'
    iss_sub = {'issuer_id': 'mock_test_users_test', 'sub': '5'}
    userinfo = {
        **iss_sub,
        "claims": {
            "email": "*****@*****.**",
            "name": "test ensure user",
            "preferred_username": '******',
            "given_name": "test",
            "family_name": "user",
            "middle_name": "ensure",
        }
    }
    with app.app_context():
        id_workspace = workspace_util.ensure_workspace(username)
        user_util.ensure_user(id_workspace, userinfo)

        # lookup by username
        infos = user_util.get_user_infos(username)
        assert {username} == infos.keys()

        # lookup by issuer/subject
        infos = user_util.get_user_infos(iss_sub=iss_sub)
        assert {username} == infos.keys()
def test_data_language_countries(country_table):
    """Country table must expose 63 text columns whose content spans many languages.

    Uses a set literal instead of set([...]) and drops commented-out debug code.
    """
    workspace, layername = country_table
    with layman.app_context():
        col_names = db.get_text_column_names(workspace, layername)
        assert len(col_names) == 63
    with layman.app_context():
        langs = db.get_text_languages(workspace, layername)
        assert set(langs) == {
            'ara', 'ben', 'chi', 'eng', 'fre', 'gre', 'hin', 'hun',
            'jpn', 'kor', 'pol', 'por', 'rus', 'tur', 'vie',
        }
def test_get_most_frequent_lower_distance(country110m_table, country50m_table, country10m_table,
                                          data200road_table, sm5building_table):
    """Guessed scale denominators must match the known scales of the source datasets."""
    _, layer_110m = country110m_table
    _, layer_50m = country50m_table
    _, layer_10m = country10m_table
    _, layer_200k = data200road_table
    workspace, layer_5k = sm5building_table

    # (layername, allowed low, allowed high, exact expected denominator)
    expectations = [
        (layer_110m, 25000000, 500000000, 100000000),
        (layer_50m, 10000000, 250000000, 10000000),
        (layer_10m, 2500000, 50000000, 2500000),
        (layer_200k, 50000, 1000000, 100000),
        (layer_5k, 1000, 25000, 5000),
    ]
    for layername, low, high, exact in expectations:
        with layman.app_context():
            guessed = db.guess_scale_denominator(workspace, layername)
        assert low <= guessed <= high
        assert guessed == exact
def test_data_language_roads(road_table):
    """Road table must expose the expected text columns, all detected as Czech.

    Uses set literals instead of set([...]) and drops commented-out debug code.
    """
    workspace, layername = road_table
    with layman.app_context():
        col_names = db.get_text_column_names(workspace, layername)
        assert set(col_names) == {
            'cislouseku', 'dpr_smer_p', 'etah1', 'etah2', 'etah3', 'etah4',
            'fid_zbg', 'jmeno', 'kruh_obj_k', 'kruh_obj_p', 'peazkom1',
            'peazkom2', 'peazkom3', 'peazkom4', 'r_indsil7', 'silnice',
            'silnice_bs', 'typsil_k', 'typsil_p', 'vym_tahy_k', 'vym_tahy_p',
        }
    with layman.app_context():
        langs = db.get_text_languages(workspace, layername)
        assert langs == ['cze']
def provide_map(client):
    """Fixture: POST a sample map, yield the POST response JSON, then DELETE the map.

    Fix: the cleanup loop reused the name `file_path` (a string in the outer loop)
    for (file object, filename) tuples; it now unpacks the tuple under a fresh name.
    """
    with app.app_context():
        workspace = TEST_WORKSPACE
        mapname = TEST_MAP
        rest_path = url_for('rest_workspace_maps.post', workspace=workspace)
        file_paths = [
            'sample/layman.map/full.json',
        ]
        for file_path in file_paths:
            assert os.path.isfile(file_path)
        files = []
        try:
            files = [(open(fp, 'rb'), os.path.basename(fp)) for fp in file_paths]
            response = client.post(rest_path, data={
                'file': files,
                'name': mapname,
            })
            assert response.status_code == 200
        finally:
            # always close opened file handles
            for opened_file, _ in files:
                opened_file.close()
    wait_till_ready(workspace, mapname)
    yield response.get_json()[0]
    with app.app_context():
        rest_path = url_for('rest_workspace_map.delete_map', workspace=workspace, mapname=mapname)
        response = client.delete(rest_path)
        assert response.status_code == 200
def test_abort_import_layer_vector_file():
    """Terminating the async vector import shortly after start must yield a non-zero return code."""
    workspace = 'testuser1'
    layername = 'ne_10m_admin_0_countries'
    src_dir = 'tmp/naturalearth/10m/cultural'
    with layman.app_context():
        input_file_dir = ensure_layer_input_file_dir(workspace, layername)
        filename = layername + '.geojson'
        main_filepath = os.path.join(input_file_dir, filename)
        crs_id = None
        shutil.copy(os.path.join(src_dir, filename), input_file_dir)

        def abort_layer_import():
            # start the import and terminate it once it runs longer than 0.1 s
            process = db.import_layer_vector_file_async(workspace, layername, main_filepath, crs_id)
            started_at = time.time()
            while process.poll() is None:
                if time.time() - started_at > 0.1:
                    process.terminate()
                time.sleep(0.1)
            return process.poll()

        return_code = abort_layer_import()
        assert return_code != 0
    with layman.app_context():
        layerdir = get_layer_dir(workspace, layername)
        shutil.rmtree(layerdir)
def test_bbox():
    """Map bounding box must follow the data: small bbox after publish, patched bbox after patch.

    Fix: the local variable `map` shadowed the builtin; renamed to `mapname`.
    """
    workspace = 'test_bbox_workspace'
    mapname = 'test_bbox_map'
    process_client.publish_workspace_map(
        workspace,
        mapname,
    )
    with app.app_context():
        info = util.get_map_info(workspace, mapname)
    assert_util.assert_same_bboxes(info['bounding_box'], test_data.SMALL_MAP_BBOX, 0.00001)
    process_client.patch_workspace_map(workspace, mapname, file_paths=[
        'test/data/bbox/map_3_3-5_5.json',
    ])
    with app.app_context():
        info = util.get_map_info(workspace, mapname)
    assert_util.assert_same_bboxes(info['bounding_box'], [3000, 3000, 5000, 5000], 0.1)
    process_client.delete_workspace_map(workspace, mapname)
def test_user_workspace(workspace):
    """A workspace name appears among usernames iff the workspace belongs to a user."""
    ensure_all_publications()
    is_private_workspace = workspace in data.USERS

    # gather every internal source of every publication type
    all_sources = [
        source
        for type_def in layman_util.get_publication_types(use_cache=False).values()
        for source in type_def['internal_sources']
    ]
    providers = layman_util.get_providers_from_source_names(all_sources)
    for provider in providers:
        with app.app_context():
            usernames = provider.get_usernames()
        if not is_private_workspace:
            assert workspace not in usernames, (workspace, provider)

    with app.app_context():
        usernames = layman_util.get_usernames(use_cache=False)
        workspaces = layman_util.get_workspaces(use_cache=False)
    if is_private_workspace:
        assert workspace in usernames
    else:
        assert workspace not in usernames
    assert workspace in workspaces
def test_delete_map(client):
    """DELETE must remove the map and its CSW record; a second DELETE returns 404 with code 26."""
    with app.app_context():
        username = '******'
        mapname = 'administrativni_cleneni_libereckeho_kraje'
        rest_path = url_for('rest_workspace_map.delete_map', workspace=username, mapname=mapname)
        response = client.delete(rest_path)
        assert response.status_code == 200
        resp_json = response.get_json()
        uuid_str = resp_json['uuid']

        # the metadata record must be gone from Micka
        md_record_url = f"http://micka:80/record/basic/m-{uuid_str}"
        micka_response = requests.get(md_record_url, auth=settings.CSW_BASIC_AUTHN)
        micka_response.raise_for_status()
        assert 'Záznam nenalezen' in micka_response.text
        assert mapname not in micka_response.text

        publication_counter.decrease()
        uuid.check_redis_consistency(expected_publ_num_by_type={
            f'{MAP_TYPE}': publication_counter.get()
        })
    with app.app_context():
        # deleting again must report a missing map
        rest_path = url_for('rest_workspace_map.delete_map', workspace=username, mapname=mapname)
        response = client.delete(rest_path)
        assert response.status_code == 404
        assert response.get_json()['code'] == 26
        uuid.check_redis_consistency(expected_publ_num_by_type={
            f'{MAP_TYPE}': publication_counter.get()
        })
def do_test(wfs_query, attribute_names):
    """An unauthorized WFS-T request must not create new attributes; an authorized one must."""
    # Test, that unauthorized user will not cause new attribute
    with app.app_context():
        old_db_attributes = db.get_all_column_names(username, layername1)
        for attr_name in attribute_names:
            assert attr_name not in old_db_attributes, f"old_db_attributes={old_db_attributes}, attr_name={attr_name}"
    with pytest.raises(GS_Error) as exc:
        process_client.post_wfst(wfs_query, headers=authn_headers2, workspace=username)
    assert exc.value.data['status_code'] == 400
    with app.app_context():
        new_db_attributes = db.get_all_column_names(username, layername1)
        for attr_name in attribute_names:
            assert attr_name not in new_db_attributes, f"new_db_attributes={new_db_attributes}, attr_name={attr_name}"

    # Test, that authorized user will cause new attribute
    process_client.post_wfst(wfs_query, headers=authn_headers1, workspace=username)
    with app.app_context():
        new_db_attributes = db.get_all_column_names(username, layername1)
        for attr_name in attribute_names:
            assert attr_name in new_db_attributes, f"new_db_attributes={new_db_attributes}, attr_name={attr_name}"
def test_bbox():
    """Layer bounding box must follow the data: small bbox after publish, patched bbox after patch."""
    workspace = 'test_bbox_workspace'
    layer = 'test_bbox_layer'
    process_client.publish_workspace_layer(workspace, layer)

    with app.app_context():
        info = util.get_layer_info(workspace, layer)
    assert_util.assert_same_bboxes(info['bounding_box'], test_data.SMALL_LAYER_BBOX, 0.00001)

    process_client.patch_workspace_layer(
        workspace,
        layer,
        file_paths=[
            'test/data/bbox/layer_3_3-5_5.geojson',
        ])
    with app.app_context():
        info = util.get_layer_info(workspace, layer)
    assert_util.assert_same_bboxes(info['bounding_box'], [3000, 3000, 5000, 5000], 0.1)

    process_client.delete_workspace_layer(workspace, layer)
def assert_all_sources_bbox(workspace, layer, expected_bbox_3857, *, expected_native_bbox=None,
                            expected_native_crs=None):
    """Assert the layer's bbox in publication info, WFS, WMS, and CSW metadata comparison."""
    with app.app_context():
        info = layman_util.get_publication_info(workspace, LAYER_TYPE, layer,
                                                context={'key': ['bounding_box', 'native_bounding_box', 'native_crs']})
    bbox_3857 = tuple(info['bounding_box'])
    native_bbox = tuple(info['native_bounding_box'])
    native_crs = info['native_crs']
    assert_same_bboxes(expected_bbox_3857, bbox_3857, 0.00001)
    if expected_native_bbox is not None:
        assert_same_bboxes(expected_native_bbox, native_bbox, 0)
        assert expected_native_crs == native_crs

    # OGC endpoints
    assert_wfs_bbox(workspace, layer, expected_bbox_3857)
    assert_wms_bbox(workspace, layer, expected_bbox_3857)
    if expected_native_bbox is not None:
        assert_wfs_bbox(workspace, layer, expected_native_bbox, expected_bbox_crs=expected_native_crs)
        assert_wms_bbox(workspace, layer, expected_native_bbox, expected_bbox_crs=expected_native_crs)

    # CSW metadata
    with app.app_context():
        expected_bbox_4326 = bbox_util.transform(expected_bbox_3857,
                                                 crs_from=crs_def.EPSG_3857,
                                                 crs_to=crs_def.EPSG_4326,
                                                 )
        md_comparison = get_workspace_layer_metadata_comparison(workspace, layer)
        csw_prefix = settings.CSW_PROXY_URL
        csw_src_key = get_source_key_from_metadata_comparison(md_comparison, csw_prefix)
        assert csw_src_key is not None
        prop_key = 'extent'
        md_props = md_comparison['metadata_properties']
        assert md_props[prop_key]['equal'] is True, md_props[prop_key]
        assert md_props[prop_key]['equal_or_null'] is True, md_props[prop_key]
        csw_bbox_4326 = tuple(md_props[prop_key]['values'][csw_src_key])
        assert_same_bboxes(expected_bbox_4326, csw_bbox_4326, 0.001)
def assert_wfs_bbox(workspace, layer, expected_bbox):
    """Compare the layer's WFS WGS84 bbox, reprojected to EPSG:3857, with the expected bbox."""
    wfs_layer = f"{workspace}:{layer}"
    with app.app_context():
        capabilities = wfs.get_wfs_proxy(workspace)
        bbox_4326 = capabilities.contents[wfs_layer].boundingBoxWGS84
    with app.app_context():
        bbox_3857 = bbox_util.transform(bbox_4326, 4326, 3857)
    assert_same_bboxes(expected_bbox, bbox_3857, 0.00001)
def assert_wfs_bbox(workspace, layer, expected_bbox, *, expected_bbox_crs='EPSG:3857'):
    """Compare the layer's WFS WGS84 bbox, reprojected to expected_bbox_crs, with expected_bbox."""
    wfs_layer = f"{workspace}:{layer}"
    with app.app_context():
        capabilities = wfs.get_wfs_proxy(workspace)
        bbox_4326 = capabilities.contents[wfs_layer].boundingBoxWGS84
    with app.app_context():
        transformed_bbox = bbox_util.transform(bbox_4326,
                                               crs_from=crs_def.EPSG_4326,
                                               crs_to=expected_bbox_crs)
    assert_same_bboxes(expected_bbox, transformed_bbox, 0.00001)
def test_populated_places_table(populated_places_table):
    """Populated-places table must expose 31 text columns in Chinese, English, and Russian.

    Uses a set literal instead of set([...]).
    """
    workspace, layername = populated_places_table
    print(f"workspace={workspace}, layername={layername}")
    with layman.app_context():
        col_names = db.get_text_column_names(workspace, layername)
        assert len(col_names) == 31
    with layman.app_context():
        langs = db.get_text_languages(workspace, layername)
        assert set(langs) == {'chi', 'eng', 'rus'}
def test_fill_project_template(workspace, publ_type, publication):
    """Fill the QGIS layer and project templates for a published layer and verify the resulting WMS."""
    ensure_publication(workspace, publ_type, publication)

    qgs_path = f'{settings.LAYMAN_QGIS_DATA_DIR}/{publication}.qgs'
    wms_url = f'{settings.LAYMAN_QGIS_URL}?MAP={qgs_path}'
    wms_version = '1.3.0'

    layer_info = process_client.get_workspace_publication(publ_type, workspace, publication)
    layer_uuid = layer_info['uuid']

    # no project file exists yet, so the WMS endpoint must fail
    with pytest.raises(requests.exceptions.HTTPError) as excinfo:
        WebMapService(wms_url, version=wms_version)
    assert excinfo.value.response.status_code == 500

    with app.app_context():
        layer_bbox = layer_db.get_bbox(workspace, publication)
        layer_crs = layer_db.get_crs(workspace, publication)
    # empty data falls back to the default bbox of the layer's CRS
    layer_bbox = layer_bbox if not bbox_util.is_empty(layer_bbox) else crs_def.CRSDefinitions[layer_crs].default_bbox

    with app.app_context():
        qml_path = qgis_util.get_original_style_path(workspace, publication)
    parser = ET.XMLParser(remove_blank_text=True)
    qml_xml = ET.parse(qml_path, parser=parser)
    exp_min_scale = data.PUBLICATIONS[(workspace, publ_type, publication)][data.TEST_DATA].get('min_scale')
    if exp_min_scale is not None:
        assert qml_xml.getroot().attrib['minScale'] == exp_min_scale

    with app.app_context():
        db_types = layer_db.get_geometry_types(workspace, publication)
        db_cols = [
            col for col in layer_db.get_all_column_infos(workspace, publication)
            if col.name not in ['wkb_geometry', 'ogc_fid']
        ]
    qml_geometry = qgis_util.get_qml_geometry_from_qml(qml_xml)
    source_type = qgis_util.get_source_type(db_types, qml_geometry)

    with app.app_context():
        layer_qml_str = qgis_util.fill_layer_template(workspace, publication, layer_uuid, layer_bbox,
                                                      layer_crs, qml_xml, source_type, db_cols)
    layer_qml = ET.fromstring(layer_qml_str.encode('utf-8'), parser=parser)
    if exp_min_scale is not None:
        assert layer_qml.attrib['minScale'] == exp_min_scale

    with app.app_context():
        qgs_str = qgis_util.fill_project_template(workspace, publication, layer_uuid, layer_qml_str,
                                                  layer_crs, settings.LAYMAN_OUTPUT_SRS_LIST, layer_bbox,
                                                  source_type)
    with open(qgs_path, "w") as qgs_file:
        print(qgs_str, file=qgs_file)

    # with the project in place the WMS must advertise the layer
    wmsi = WebMapService(wms_url, version=wms_version)
    assert publication in wmsi.contents
    wms_layer = wmsi.contents[publication]
    exp_output_srs = set(settings.LAYMAN_OUTPUT_SRS_LIST)
    assert exp_output_srs.issubset(set(wms_layer.crsOptions))
    wms_layer_bbox = next((tuple(bbox_crs[:4]) for bbox_crs in wms_layer.crs_list if bbox_crs[4] == layer_crs))
    assert_util.assert_same_bboxes(wms_layer_bbox, layer_bbox, 0.1)

    # removing the project must break the WMS again
    os.remove(qgs_path)
    with pytest.raises(requests.exceptions.HTTPError) as excinfo:
        WebMapService(wms_url, version=wms_version)
    assert excinfo.value.response.status_code == 500
def single_point_table(testuser1):
    """Fixture: import the single-point sample layer for testuser1, drop it afterwards."""
    file_path = 'sample/layman.layer/single_point.shp'
    username = testuser1
    layername = 'single_point'
    with layman.app_context():
        db.import_layer_vector_file(username, layername, file_path, None)
    yield username, layername
    with layman.app_context():
        delete_layer(username, layername)
def test_abortable_task_chain():
    """Aborting a running task chain marks the first task FAILURE and the remaining ones ABORTED."""
    task_names = [
        'layman.layer.filesystem.tasks.refresh_input_chunk',
        'layman.layer.db.tasks.refresh_table',
        'layman.layer.geoserver.tasks.refresh_wfs',
    ]
    tasks = [
        getattr(importlib.import_module(module_name), attr_name)
        for module_name, attr_name in (name.rsplit('.', 1) for name in task_names)
    ]
    check_crs = False
    task_options = {
        'crs_id': 'EPSG:4326',
        'description': 'bla',
        'title': 'bla',
        'ensure_user': True,
        'check_crs': check_crs,
    }
    filenames = ['abc.geojson']
    workspace = 'test_abort_user'
    layername = 'test_abort_layer2'
    with app.app_context():
        input_chunk.save_layer_files_str(workspace, layername, filenames, check_crs)

    task_chain = chain(*[
        tasks_util.get_task_signature(workspace, layername, task, task_options, 'layername')
        for task in tasks
    ])
    task_result = task_chain()

    # walk the parent links to collect results in first-to-last order
    results = [task_result]
    parent = task_result.parent
    while parent is not None:
        results.insert(0, parent)
        parent = parent.parent
    assert len(results) == 3

    results_copy = [
        AbortableAsyncResult(result.task_id, backend=celery_app.backend)
        for result in results
    ]
    time.sleep(1)
    assert results[0].state == results_copy[0].state == 'STARTED'
    assert results[1].state == results_copy[1].state == 'PENDING'
    assert results[2].state == results_copy[2].state == 'PENDING'

    with app.app_context():
        celery_util.abort_task_chain(results_copy)
    # first one is failure, because it throws AbortedException
    assert results[0].state == results_copy[0].state == 'FAILURE'
    # second one (and all others) was revoked, but it was not started at all because of previous failure, so it's pending for ever
    assert results[1].state == results_copy[1].state == 'ABORTED'
    assert results[2].state == results_copy[2].state == 'ABORTED'
    with app.app_context():
        input_chunk.delete_layer(workspace, layername)
def empty_table(testuser1):
    """Fixture: import the empty sample layer for testuser1, drop it afterwards."""
    file_path = 'sample/layman.layer/empty.shp'
    username = testuser1
    layername = 'empty'
    with layman.app_context():
        db.import_layer_vector_file(username, layername, file_path, None)
    yield username, layername
    with layman.app_context():
        delete_layer(username, layername)
def test_custom_srs_list(ensure_layer):
    """Restarting Layman with a custom LAYMAN_OUTPUT_SRS_LIST must change advertised SRS lists.

    Layers published before the restart must be updated too.
    """
    workspace = 'test_custom_srs_list_workspace'
    layer_sld1 = 'test_custom_srs_list_sld_layer1'
    layer_sld2 = 'test_custom_srs_list_sld_layer2'
    layer_qgis1 = 'test_custom_srs_list_qgis_layer1'
    layer_qgis2 = 'test_custom_srs_list_qgis_layer2'
    source_style_file_path = 'sample/style/small_layer.qml'
    output_crs_list = {f'EPSG:{srid}' for srid in OUTPUT_SRS_LIST}
    assert settings.LAYMAN_OUTPUT_SRS_LIST != output_crs_list

    # publish with the default SRS list
    process.ensure_layman_function(process.LAYMAN_DEFAULT_SETTINGS)
    ensure_layer(workspace, layer_sld1)
    ensure_layer(workspace, layer_qgis1, style_file=source_style_file_path)

    with app.app_context():
        init_output_epsg_codes_set = {crs.replace(':', '::') for crs in settings.LAYMAN_OUTPUT_SRS_LIST}
        assert_gs_wms_output_srs_list(workspace, layer_sld1, settings.LAYMAN_OUTPUT_SRS_LIST)
        assert_wfs_output_srs_list(workspace, layer_sld1, init_output_epsg_codes_set)
        assert not qgis_wms.get_layer_info(workspace, layer_sld1)

        assert_gs_wms_output_srs_list(workspace, layer_qgis1, settings.LAYMAN_OUTPUT_SRS_LIST)
        assert_wfs_output_srs_list(workspace, layer_qgis1, init_output_epsg_codes_set)
        assert_qgis_output_srs_list(workspace, layer_qgis1, settings.LAYMAN_OUTPUT_SRS_LIST)
        assert_qgis_wms_output_srs_list(workspace, layer_qgis1, settings.LAYMAN_OUTPUT_SRS_LIST)

    # restart Layman with the custom SRS list and publish more layers
    process.ensure_layman_function({
        'LAYMAN_OUTPUT_SRS_LIST': ','.join(str(code) for code in OUTPUT_SRS_LIST)
    })
    ensure_layer(workspace, layer_sld2)
    ensure_layer(workspace, layer_qgis2, style_file=source_style_file_path)
    output_epsg_codes_set = {crs.replace(':', '::') for crs in output_crs_list}
    with app.app_context():
        for layer in [layer_sld1, layer_sld2]:
            assert_gs_wms_output_srs_list(workspace, layer, output_crs_list)
            assert_wfs_output_srs_list(workspace, layer, output_epsg_codes_set)
            assert not qgis_wms.get_layer_info(workspace, layer)
        for layer in [layer_qgis1, layer_qgis2]:
            assert_gs_wms_output_srs_list(workspace, layer, output_crs_list)
            assert_wfs_output_srs_list(workspace, layer, output_epsg_codes_set)
            assert_qgis_output_srs_list(workspace, layer, output_crs_list)
            assert_qgis_wms_output_srs_list(workspace, layer, output_crs_list)
def single_point_table():
    """Fixture: import the single-point sample layer into the shared workspace, drop it afterwards."""
    file_path = 'sample/layman.layer/single_point.shp'
    workspace = WORKSPACE
    layername = 'single_point'
    with layman.app_context():
        ensure_layer_input_file_dir(workspace, layername)
        db.import_layer_vector_file(workspace, layername, file_path, None)
    yield workspace, layername
    with layman.app_context():
        delete_layer(workspace, layername)
def boundary_table():
    """Fixture: import the 110m boundary-lines shapefile as layer 'hranice', drop it afterwards."""
    file_path = 'tmp/naturalearth/110m/cultural/ne_110m_admin_0_boundary_lines_land.shp'
    workspace = WORKSPACE
    layername = 'hranice'
    with layman.app_context():
        db.ensure_workspace(workspace)
        ensure_layer_input_file_dir(workspace, layername)
        db.import_layer_vector_file(workspace, layername, file_path, None)
    yield workspace, layername
    with layman.app_context():
        delete_layer(workspace, layername)
def sm5building_table():
    """Fixture: import the SM5 building shapefile as layer 'sm5_building', drop it afterwards."""
    file_path = 'tmp/sm5/vektor/Budova.shp'
    workspace = WORKSPACE
    layername = 'sm5_building'
    with layman.app_context():
        db.ensure_workspace(workspace)
        ensure_layer_input_file_dir(workspace, layername)
        db.import_layer_vector_file(workspace, layername, file_path, None)
    yield workspace, layername
    with layman.app_context():
        delete_layer(workspace, layername)
def road_table():
    """Fixture: import the upper-attr GeoJSON sample as layer 'silnice', drop it afterwards."""
    file_path = 'sample/data/upper_attr.geojson'
    workspace = WORKSPACE
    layername = 'silnice'
    with layman.app_context():
        db.ensure_workspace(workspace)
        ensure_layer_input_file_dir(workspace, layername)
        db.import_layer_vector_file(workspace, layername, file_path, None)
    yield workspace, layername
    with layman.app_context():
        delete_layer(workspace, layername)
def data200road_table():
    """Fixture: import the Data200 roads shapefile as layer 'data200_road', drop it afterwards."""
    file_path = 'tmp/data200/trans/RoadL.shp'
    workspace = WORKSPACE
    layername = 'data200_road'
    with layman.app_context():
        db.ensure_workspace(workspace)
        ensure_layer_input_file_dir(workspace, layername)
        db.import_layer_vector_file(workspace, layername, file_path, None)
    yield workspace, layername
    with layman.app_context():
        delete_layer(workspace, layername)
def country10m_table():
    """Fixture: import the 10m countries GeoJSON as layer 'staty10m', drop it afterwards."""
    file_path = 'tmp/naturalearth/10m/cultural/ne_10m_admin_0_countries.geojson'
    workspace = WORKSPACE
    layername = 'staty10m'
    with layman.app_context():
        db.ensure_workspace(workspace)
        ensure_layer_input_file_dir(workspace, layername)
        db.import_layer_vector_file(workspace, layername, file_path, None)
    yield workspace, layername
    with layman.app_context():
        delete_layer(workspace, layername)
def populated_places_table():
    """Fixture: import the 110m populated-places GeoJSON layer, drop it afterwards."""
    file_path = 'tmp/naturalearth/110m/cultural/ne_110m_populated_places.geojson'
    workspace = WORKSPACE
    layername = 'ne_110m_populated_places'
    with layman.app_context():
        db.ensure_workspace(workspace)
        ensure_layer_input_file_dir(workspace, layername)
        db.import_layer_vector_file(workspace, layername, file_path, None)
    yield workspace, layername
    with layman.app_context():
        delete_layer(workspace, layername)
def _assert_updated_at_between(query, publication_type, workspace, publication, lower, upper):
    """Assert that both the DB and the REST `updated_at` of the publication fall between the bounds."""
    with app.app_context():
        results = db_util.run_query(query, (workspace, publication_type, publication))
    assert len(results) == 1 and len(results[0]) == 1, results
    updated_at_db = results[0][0]
    assert lower < updated_at_db < upper

    info = process_client.get_workspace_publication(publication_type, workspace, publication)
    updated_at_rest = parse(info['updated_at'])
    assert lower < updated_at_rest < upper


def test_updated_at(publication_type):
    """`updated_at` must be set on publish and refreshed on patch, in both DB and REST API.

    The duplicated DB+REST verification is extracted into `_assert_updated_at_between`.
    """
    workspace = 'test_update_at_workspace'
    publication = 'test_update_at_publication'
    query = f'''
    select p.updated_at
    from {db_schema}.publications p inner join
         {db_schema}.workspaces w on p.id_workspace = w.id
    where w.name = %s
      and p.type = %s
      and p.name = %s
    ;'''

    timestamp1 = datetime.datetime.now(datetime.timezone.utc)
    process_client.publish_workspace_publication(publication_type, workspace, publication)
    timestamp2 = datetime.datetime.now(datetime.timezone.utc)
    _assert_updated_at_between(query, publication_type, workspace, publication, timestamp1, timestamp2)

    timestamp3 = datetime.datetime.now(datetime.timezone.utc)
    process_client.patch_workspace_publication(publication_type, workspace, publication, title='Title')
    timestamp4 = datetime.datetime.now(datetime.timezone.utc)
    _assert_updated_at_between(query, publication_type, workspace, publication, timestamp3, timestamp4)

    process_client.delete_workspace_publication(publication_type, workspace, publication)
def empty_table():
    """Fixture: import the empty sample shapefile into the shared workspace, drop it afterwards."""
    file_path = 'sample/layman.layer/empty.shp'
    workspace = WORKSPACE
    layername = 'empty'
    with layman.app_context():
        db.ensure_workspace(workspace)
        ensure_layer_input_file_dir(workspace, layername)
    with layman.app_context():
        db.import_layer_vector_file(workspace, layername, file_path, None)
    yield workspace, layername
    with layman.app_context():
        delete_layer(workspace, layername)
def preserve_data_version_table():
    """Fixture: back up the data_version table, then restore it and drop the backup afterwards."""
    copy_table = f'''create table {DB_SCHEMA}.data_version_backup as table {DB_SCHEMA}.data_version;'''
    with app.app_context():
        db_util.run_statement(copy_table)
    yield
    copy_table_back = f'''
    DROP TABLE IF EXISTS {DB_SCHEMA}.data_version;
    create table {DB_SCHEMA}.data_version as table {DB_SCHEMA}.data_version_backup;
    DROP TABLE IF EXISTS {DB_SCHEMA}.data_version_backup;
    '''
    with app.app_context():
        db_util.run_statement(copy_table_back)