def test_get_publication_layman_status(publ_type, error_params):
    """Check `layman_metadata.publication_status` through a publication's life cycle.

    The status must read UPDATING right after an async POST, COMPLETE once
    processing finishes, and INCOMPLETE after a PATCH with failing params.
    """
    workspace = 'test_get_publication_layman_status_workspace'
    publication = 'test_get_publication_layman_status_publication'

    def assert_publication_status(expected_status):
        # Fetch fresh publication info and verify the aggregated status field.
        info = process_client.get_workspace_publication(publ_type, workspace, publication, )
        assert 'layman_metadata' in info, f'info={info}'
        assert 'publication_status' in info['layman_metadata'], f'info={info}'
        assert info['layman_metadata']['publication_status'] == expected_status, f'info={info}'

    # Async publish: do not wait for completion, so status is still UPDATING.
    process_client.publish_workspace_publication(publ_type, workspace, publication,
                                                 check_response_fn=common.empty_method_returns_true, )
    assert_publication_status('UPDATING')

    process_client.wait_for_publication_status(workspace, publ_type, publication)
    assert_publication_status('COMPLETE')

    if error_params:
        # A patch with failing parameters must leave the publication INCOMPLETE.
        process_client.patch_workspace_publication(publ_type, workspace, publication, **error_params, )
        assert_publication_status('INCOMPLETE')

    process_client.delete_workspace_publication(publ_type, workspace, publication)
def test_delete_publications(self, publ_type):
    """Bulk delete must remove only publications the requesting user may write to."""
    owner = self.owner
    authn_headers_owner = self.authn_headers_owner
    authn_headers_deleter = self.authn_headers_deleter
    publication_a = 'test_delete_publications_publication_a'
    publication_b = 'test_delete_publications_publication_b'
    # publication_a is writable only by the owner; publication_b by everyone.
    publications = [
        (publication_a, {'read': 'EVERYONE', 'write': owner}),
        (publication_b, {'read': 'EVERYONE', 'write': 'EVERYONE'}),
    ]

    def check_delete(headers, after_delete_publications, remaining_publications):
        # Bulk-delete with the given credentials, then verify both the set the
        # server reports as deleted and the set still visible to the owner.
        delete_json = process_client.delete_workspace_publications(publ_type, owner, headers=headers)
        deleted_names = {publication['name'] for publication in delete_json}
        assert after_delete_publications == deleted_names
        get_json = process_client.get_workspace_publications(publ_type, workspace=owner,
                                                             headers=authn_headers_owner)
        remaining_names = {publication['name'] for publication in get_json}
        assert remaining_publications == remaining_names

    for name, access_rights in publications:
        process_client.publish_workspace_publication(publ_type, owner, name,
                                                     access_rights=access_rights,
                                                     headers=authn_headers_owner)

    response = process_client.get_workspace_publications(publ_type, workspace=owner,
                                                         headers=authn_headers_owner)
    assert len(response) == len(publications)

    # Delete by other user with rights only for one layer
    check_delete(authn_headers_deleter, {publication_b, }, {publication_a, })
    # Delete by owner, everything is deleted
    check_delete(authn_headers_owner, {publication_a, }, set())
def post_publication(cls, publication, params=None, scope='function'):
    """Publish *publication* and register it for cleanup at the end of *scope*.

    :param publication: object carrying ``type``, ``workspace`` and ``name``
    :param params: extra keyword arguments forwarded to the publish call
    :param scope: 'function' or 'class' — which cleanup set the publication joins
    """
    assert scope in {'function', 'class'}
    # Pick the cleanup registry matching the requested pytest scope.
    cleanup_registry = (cls.publications_to_cleanup_on_class_end
                        if scope == 'class'
                        else cls.publications_to_cleanup_on_function_end)
    cleanup_registry.add(publication)
    process_client.publish_workspace_publication(publication.type, publication.workspace,
                                                 publication.name, **(params or {}))
def test_updated_at(publication_type):
    """POST and PATCH must both refresh `updated_at`, in the DB and in REST info.

    Each operation is bracketed by two UTC timestamps; the stored and reported
    `updated_at` must fall strictly between them.
    """
    workspace = 'test_update_at_workspace'
    publication = 'test_update_at_publication'
    query = f'''select p.updated_at from {db_schema}.publications p
                inner join {db_schema}.workspaces w on p.id_workspace = w.id
                where w.name = %s and p.type = %s and p.name = %s;'''

    def assert_updated_at_between(timestamp_before, timestamp_after):
        # The DB column must have been touched within the measured interval,
        # and the REST API must report the same instant (as an ISO string).
        with app.app_context():
            results = db_util.run_query(query, (workspace, publication_type, publication))
        assert len(results) == 1 and len(results[0]) == 1, results
        updated_at_db = results[0][0]
        assert timestamp_before < updated_at_db < timestamp_after
        info = process_client.get_workspace_publication(publication_type, workspace, publication)
        updated_at_rest = parse(info['updated_at'])
        assert timestamp_before < updated_at_rest < timestamp_after

    timestamp1 = datetime.datetime.now(datetime.timezone.utc)
    process_client.publish_workspace_publication(publication_type, workspace, publication)
    timestamp2 = datetime.datetime.now(datetime.timezone.utc)
    assert_updated_at_between(timestamp1, timestamp2)

    timestamp3 = datetime.datetime.now(datetime.timezone.utc)
    process_client.patch_workspace_publication(publication_type, workspace, publication, title='Title')
    timestamp4 = datetime.datetime.now(datetime.timezone.utc)
    assert_updated_at_between(timestamp3, timestamp4)

    process_client.delete_workspace_publication(publication_type, workspace, publication)
def test_wrong_post(publ_type):
    """POST and PATCH must reject unknown access-rights identifiers.

    Every malformed payload must raise LaymanError with HTTP 400 / code 43,
    while well-formed requests on the same publication still succeed.
    """
    workspace = 'test_wrong_post_workspace'
    publication = 'test_wrong_post_publication'
    # 'EVRBODY' is a typo for 'EVERYONE' and must never be accepted.
    wrong_access_rights_list = [
        {'read': 'EVRBODY'},
        {'write': 'EVRBODY'},
        {'read': 'EVRBODY', 'write': 'EVRBODY'},
    ]

    def check_response(exception):
        # Server is expected to answer HTTP 400 with Layman error code 43.
        assert exception.value.http_code == 400
        assert exception.value.code == 43
        assert exception.value.message == 'Wrong access rights.'

    for access_rights in wrong_access_rights_list:
        with pytest.raises(LaymanError) as exc_info:
            process_client.publish_workspace_publication(publ_type, workspace, publication,
                                                         access_rights=access_rights, )
        check_response(exc_info)

    # A valid POST must still work after the failed attempts.
    process_client.publish_workspace_publication(publ_type, workspace, publication)

    for access_rights in wrong_access_rights_list:
        with pytest.raises(LaymanError) as exc_info:
            process_client.patch_workspace_publication(publ_type, workspace, publication,
                                                       access_rights=access_rights, )
        check_response(exc_info)

    # A valid PATCH must still work after the failed attempts.
    process_client.patch_workspace_publication(publ_type, workspace, publication)
    process_client.delete_workspace_publication(publ_type, workspace, publication)
def provide_data(self):
    """Fixture-style generator: publish one publication of every type, yield
    to the test, then delete them all again."""
    for publ_type in process_client.PUBLICATION_TYPES:
        process_client.publish_workspace_publication(publ_type, self.workspace,
                                                     self.publication, **self.common_params, )
    yield
    for publ_type in process_client.PUBLICATION_TYPES:
        process_client.delete_workspace_publication(publ_type, self.workspace,
                                                    self.publication, )
def test_patch_after_feature_change_concurrency(publication_type):
    """Feature-change patches must queue behind a running chain and drain afterwards.

    Verifies the run-after-chain queue and the publication lock after each
    operation: the first feature-change patch holds the lock, later ones are
    queued at most once, and everything is released after completion.
    """
    workspace = 'test_wfst_concurrency_workspace'
    publication = 'test_wfst_concurrency_layer'

    def assert_queue_and_lock(expected_queue, expected_lock):
        # expected_queue: exact list of queued chain names ([] means empty or None);
        # expected_lock: expected lock value, or None for "no lock held".
        queue = celery.get_run_after_chain_queue(workspace, publication_type, publication)
        assert list(queue or []) == expected_queue, queue
        lock = redis.get_publication_lock(workspace, publication_type, publication)
        if expected_lock is None:
            assert not lock
        else:
            assert lock == expected_lock

    process_client.publish_workspace_publication(publication_type, workspace, publication, )
    assert_queue_and_lock([], None)

    # First feature-change patch acquires the lock; nothing is queued yet.
    process_client.patch_after_feature_change(workspace, publication_type, publication)
    assert_queue_and_lock([], common_const.PUBLICATION_LOCK_FEATURE_CHANGE)

    # A regular PATCH while the chain runs leaves queue and lock untouched.
    process_client.patch_workspace_publication(publication_type, workspace, publication,
                                               title='New title',
                                               check_response_fn=empty_method_returns_true)
    assert_queue_and_lock([], common_const.PUBLICATION_LOCK_FEATURE_CHANGE)

    # Second feature-change patch gets queued behind the running chain.
    process_client.patch_after_feature_change(workspace, publication_type, publication)
    assert_queue_and_lock(['layman.util::patch_after_feature_change', ],
                          common_const.PUBLICATION_LOCK_FEATURE_CHANGE)

    # A third request does not grow the queue — still exactly one entry.
    process_client.patch_after_feature_change(workspace, publication_type, publication)
    assert_queue_and_lock(['layman.util::patch_after_feature_change', ],
                          common_const.PUBLICATION_LOCK_FEATURE_CHANGE)

    # Once processing finishes, both queue and lock must be released.
    process_client.wait_for_publication_status(workspace, publication_type, publication)
    assert_queue_and_lock([], None)

    process_client.delete_workspace_publication(publication_type, workspace, publication, )
    assert_queue_and_lock([], None)
def test_bbox_crop():
    """upgrade_v1_14.crop_bbox() must clamp out-of-range bboxes to the CRS extent.

    Publishes layers and a map, forces their stored bboxes slightly beyond the
    maximum EPSG:3857-like extent directly in the DB, runs the migration, and
    checks every bbox was cropped back to the allowed extent.
    """
    # Maximum allowed extent (min_x, min_y, max_x, max_y); bboxes must stay inside.
    crs_max_extent = (-20026376.39, -20048966.10, 20026376.39, 20048966.10)

    def assert_out_of_the_box_publications(expected_count):
        # Count publications whose bbox exceeds the maximum extent in any direction.
        query = f'''select count(*) from {DB_SCHEMA}.publications p where
            st_xMin(p.bbox) < -20026376.39
            or st_yMin(p.bbox) < -20048966.10
            or st_xMax(p.bbox) > 20026376.39
            or st_yMax(p.bbox) > 20048966.10
        ;'''
        with app.app_context():
            cnt = db_util.run_query(query)
        assert cnt[0][0] == expected_count, cnt

    main_workspace = 'test_bbox_crop_workspace'
    # Shared shapefile parts for both layer variants (SLD-styled and QML-styled).
    small_layer_files = [
        'sample/layman.layer/small_layer.cpg',
        'sample/layman.layer/small_layer.dbf',
        'sample/layman.layer/small_layer.prj',
        'sample/layman.layer/small_layer.shp',
        'sample/layman.layer/small_layer.shx',
    ]
    publications = [
        (process_client.LAYER_TYPE, main_workspace, 'test_bbox_crop_layer', {
            'file_paths': small_layer_files,
        }),
        (process_client.LAYER_TYPE, main_workspace, 'test_bbox_crop_qml_layer', {
            'file_paths': small_layer_files,
            'style_file': 'sample/style/small_layer.qml',
        }),
        (process_client.MAP_TYPE, main_workspace, 'test_bbox_crop_map', dict()),
    ]
    for publication_type, workspace, publication, params in publications:
        process_client.publish_workspace_publication(publication_type, workspace, publication, **params)

    # Force every publication's bbox one unit beyond the allowed extent in the DB.
    big_bbox = (
        crs_max_extent[0] - 1,
        crs_max_extent[1] - 1,
        crs_max_extent[2] + 1,
        crs_max_extent[3] + 1,
    )
    query = f'''update {DB_SCHEMA}.publications set
        bbox = ST_MakeBox2D(ST_Point(%s, %s), ST_Point(%s ,%s))
        where type = %s and name = %s and id_workspace =
            (select w.id from {DB_SCHEMA}.workspaces w where w.name = %s);'''
    for publication_type, workspace, publication, _ in publications:
        params = big_bbox + (publication_type, publication, workspace, )
        with app.app_context():
            db_util.run_statement(query, params)

    assert_out_of_the_box_publications(len(publications))
    with app.app_context():
        upgrade_v1_14.crop_bbox()
    assert_out_of_the_box_publications(0)

    # Layers must also have the cropped bbox propagated to all their sources.
    for publication_type, workspace, publication, _ in publications:
        if publication_type == process_client.LAYER_TYPE:
            assert_util.assert_all_sources_bbox(workspace, publication, crs_max_extent)

    for publication_type, workspace, publication, _ in publications:
        process_client.delete_workspace_publication(publication_type, workspace, publication)