def test_get_publication_layman_status(publ_type, error_params):
    """Verify that 'layman_metadata.publication_status' follows the publication life cycle:
    UPDATING right after an async POST, COMPLETE when processing finishes, and
    INCOMPLETE after a PATCH with broken parameters (when error_params is given)."""
    workspace = 'test_get_publication_layman_status_workspace'
    publication = 'test_get_publication_layman_status_publication'

    def assert_publication_status(expected_status):
        # Re-read the publication info and check the reported status.
        info = process_client.get_workspace_publication(publ_type, workspace, publication,)
        assert 'layman_metadata' in info, f'info={info}'
        assert 'publication_status' in info['layman_metadata'], f'info={info}'
        assert info['layman_metadata']['publication_status'] == expected_status, f'info={info}'

    # Publish asynchronously (response check disabled), so the status starts as UPDATING.
    process_client.publish_workspace_publication(publ_type, workspace, publication,
                                                 check_response_fn=common.empty_method_returns_true,)
    assert_publication_status('UPDATING')

    process_client.wait_for_publication_status(workspace, publ_type, publication)
    assert_publication_status('COMPLETE')

    if error_params:
        # A patch with broken parameters must leave the publication INCOMPLETE.
        process_client.patch_workspace_publication(publ_type, workspace, publication,
                                                   **error_params,)
        assert_publication_status('INCOMPLETE')

    process_client.delete_workspace_publication(publ_type, workspace, publication)
def cleanup_publications(request, publications):
    """Delete the given publications, but only when no test failed in this session
    and cleanup was not disabled via the --nocleanup option."""
    if request.session.testsfailed != 0 or request.config.option.nocleanup:
        return
    for publication in publications:
        if not util.get_publication_exists(publication):
            continue
        # Delete with the headers the publication was created under.
        headers = util.get_publication_header(publication)
        process_client.delete_workspace_publication(publication.type,
                                                    publication.workspace,
                                                    publication.name,
                                                    headers=headers)
def test_updated_at(publication_type):
    """Check that publications.updated_at is set on POST and refreshed on PATCH,
    both in the database row and in the REST 'updated_at' attribute."""
    workspace = 'test_update_at_workspace'
    publication = 'test_update_at_publication'
    query = f'''
select p.updated_at
from {db_schema}.publications p inner join
     {db_schema}.workspaces w on p.id_workspace = w.id
where w.name = %s
  and p.type = %s
  and p.name = %s
;'''

    def assert_updated_at_between(time_before, time_after):
        # The DB timestamp must fall inside the measured interval.
        with app.app_context():
            rows = db_util.run_query(query, (workspace, publication_type, publication))
        assert len(rows) == 1 and len(rows[0]) == 1, rows
        assert time_before < rows[0][0] < time_after
        # The REST-reported timestamp must fall inside the same interval.
        info = process_client.get_workspace_publication(publication_type, workspace, publication)
        assert time_before < parse(info['updated_at']) < time_after

    timestamp1 = datetime.datetime.now(datetime.timezone.utc)
    process_client.publish_workspace_publication(publication_type, workspace, publication)
    timestamp2 = datetime.datetime.now(datetime.timezone.utc)
    assert_updated_at_between(timestamp1, timestamp2)

    timestamp3 = datetime.datetime.now(datetime.timezone.utc)
    process_client.patch_workspace_publication(publication_type, workspace, publication, title='Title')
    timestamp4 = datetime.datetime.now(datetime.timezone.utc)
    assert_updated_at_between(timestamp3, timestamp4)

    process_client.delete_workspace_publication(publication_type, workspace, publication)
def ensure_test_data(liferay_mock, request):
    # pylint: disable=unused-argument
    """Fixture: after the tests run, delete all test publications, but only when
    no test failed and cleanup was not disabled via the --nocleanup option.

    :param liferay_mock: fixture kept for its side effect (mocked auth service)
    :param request: pytest fixture request
    """
    yield
    # Use request.session.testsfailed (not request.node.testsfailed) for consistency
    # with cleanup_publications: 'testsfailed' is an attribute of the Session node,
    # so going through request.session works regardless of the fixture's scope.
    if request.session.testsfailed == 0 and not request.config.option.nocleanup:
        # Make sure Layman runs with default settings before cleaning up.
        process.ensure_layman_function(process.LAYMAN_DEFAULT_SETTINGS)
        for workspace, publ_type, publication in data.PUBLICATIONS:
            if test_util.get_publication_exists(
                    Publication(workspace, publ_type, publication)):
                # Delete as the first user who can write the publication (or anonymously).
                headers = data.HEADERS.get(
                    data.PUBLICATIONS[(workspace, publ_type, publication)][data.TEST_DATA].get(
                        'users_can_write', [None])[0])
                process_client.delete_workspace_publication(publ_type, workspace, publication,
                                                            headers=headers)
                assert_publication_after_delete(workspace, publ_type, publication)
def test_wrong_post(publ_type):
    """POST and PATCH with invalid access-rights values must fail with
    LaymanError code 43 ('Wrong access rights.', HTTP 400)."""
    def check_response(exception):
        assert exception.value.http_code == 400
        assert exception.value.code == 43
        assert exception.value.message == 'Wrong access rights.'

    workspace = 'test_wrong_post_workspace'
    publication = 'test_wrong_post_publication'

    # 'EVRBODY' is a deliberately misspelled role name.
    wrong_access_rights = [
        {'read': 'EVRBODY'},
        {'write': 'EVRBODY'},
        {'read': 'EVRBODY', 'write': 'EVRBODY'},
    ]

    # Every wrong POST must be rejected before the publication exists.
    for access_rights in wrong_access_rights:
        with pytest.raises(LaymanError) as exc_info:
            process_client.publish_workspace_publication(publ_type, workspace, publication,
                                                         access_rights=access_rights,)
        check_response(exc_info)

    process_client.publish_workspace_publication(publ_type, workspace, publication)

    # Every wrong PATCH must be rejected on the existing publication.
    for access_rights in wrong_access_rights:
        with pytest.raises(LaymanError) as exc_info:
            process_client.patch_workspace_publication(publ_type, workspace, publication,
                                                       access_rights=access_rights,)
        check_response(exc_info)

    # A PATCH without access rights is fine.
    process_client.patch_workspace_publication(publ_type, workspace, publication)

    process_client.delete_workspace_publication(publ_type, workspace, publication)
def provide_data(self):
    """Fixture body: publish one publication of every known type before the tests,
    then delete them all afterwards."""
    for publ_type in process_client.PUBLICATION_TYPES:
        process_client.publish_workspace_publication(publ_type,
                                                     self.workspace,
                                                     self.publication,
                                                     **self.common_params,)
    yield
    for publ_type in process_client.PUBLICATION_TYPES:
        process_client.delete_workspace_publication(publ_type,
                                                    self.workspace,
                                                    self.publication,)
def clear_data(self):
    """Fixture body: after the test, delete the publication with the authorized headers."""
    yield
    auth_headers = self.authz_headers
    process_client.delete_workspace_publication(self.publication_type,
                                                self.username,
                                                self.publication_name,
                                                headers=auth_headers)
def test_patch_after_feature_change_concurrency(publication_type):
    """WFS-T concurrency: the first feature-change patch takes the publication lock,
    at most one more is queued in the run-after chain, a concurrent title PATCH does
    not displace the lock, and both queue and lock are empty after completion."""
    workspace = 'test_wfst_concurrency_workspace'
    publication = 'test_wfst_concurrency_layer'

    def assert_queue_and_lock(expected_queue, expected_lock):
        # Single place to inspect the run-after chain queue and the publication lock.
        queue = celery.get_run_after_chain_queue(workspace, publication_type, publication)
        if expected_queue:
            assert queue == expected_queue, queue
        else:
            assert not queue, queue
        lock = redis.get_publication_lock(workspace, publication_type, publication)
        if expected_lock:
            assert lock == expected_lock
        else:
            assert not lock

    process_client.publish_workspace_publication(publication_type, workspace, publication,)
    assert_queue_and_lock([], None)

    # First feature-change patch runs immediately under the lock; nothing is queued.
    process_client.patch_after_feature_change(workspace, publication_type, publication)
    assert_queue_and_lock([], common_const.PUBLICATION_LOCK_FEATURE_CHANGE)

    # A concurrent title PATCH neither queues anything nor takes over the lock.
    process_client.patch_workspace_publication(publication_type, workspace, publication,
                                               title='New title',
                                               check_response_fn=empty_method_returns_true)
    assert_queue_and_lock([], common_const.PUBLICATION_LOCK_FEATURE_CHANGE)

    # A second feature-change patch gets queued behind the running one.
    process_client.patch_after_feature_change(workspace, publication_type, publication)
    assert_queue_and_lock(['layman.util::patch_after_feature_change'],
                          common_const.PUBLICATION_LOCK_FEATURE_CHANGE)

    # A third one is coalesced: the queue still holds a single entry.
    process_client.patch_after_feature_change(workspace, publication_type, publication)
    assert_queue_and_lock(['layman.util::patch_after_feature_change'],
                          common_const.PUBLICATION_LOCK_FEATURE_CHANGE)

    # After processing finishes, both queue and lock are drained.
    process_client.wait_for_publication_status(workspace, publication_type, publication)
    assert_queue_and_lock([], None)

    process_client.delete_workspace_publication(publication_type, workspace, publication,)
    assert_queue_and_lock([], None)
def test_bbox_crop():
    """After enlarging publication bboxes beyond the EPSG:3857 world extent directly
    in the database, upgrade_v1_14.crop_bbox() must crop them back into bounds,
    both in the DB and in every layer source."""
    def assert_out_of_the_box_publications(expected_count):
        # Count publications whose bbox exceeds the EPSG:3857 world extent.
        query = f'''select count(*)
from {DB_SCHEMA}.publications p
where st_xMin(p.bbox) < -20026376.39
   or st_yMin(p.bbox) < -20048966.10
   or st_xMax(p.bbox) > 20026376.39
   or st_yMax(p.bbox) > 20048966.10
;'''
        with app.app_context():
            cnt = db_util.run_query(query)
        assert cnt[0][0] == expected_count, cnt

    main_workspace = 'test_bbox_crop_workspace'
    small_layer_files = [
        'sample/layman.layer/small_layer.cpg',
        'sample/layman.layer/small_layer.dbf',
        'sample/layman.layer/small_layer.prj',
        'sample/layman.layer/small_layer.shp',
        'sample/layman.layer/small_layer.shx',
    ]
    publications = [
        (process_client.LAYER_TYPE, main_workspace, 'test_bbox_crop_layer',
         {'file_paths': list(small_layer_files)}),
        (process_client.LAYER_TYPE, main_workspace, 'test_bbox_crop_qml_layer',
         {'file_paths': list(small_layer_files),
          'style_file': 'sample/style/small_layer.qml'}),
        (process_client.MAP_TYPE, main_workspace, 'test_bbox_crop_map', {}),
    ]

    for publ_type, workspace, publication, params in publications:
        process_client.publish_workspace_publication(publ_type, workspace, publication, **params)

    # Push every bbox one metre beyond the allowed extent, directly in the DB.
    big_bbox = (
        -20026376.39 - 1,
        -20048966.10 - 1,
        20026376.39 + 1,
        20048966.10 + 1,
    )
    update_query = f'''update {DB_SCHEMA}.publications set
bbox = ST_MakeBox2D(ST_Point(%s, %s), ST_Point(%s ,%s))
where type = %s
  and name = %s
  and id_workspace = (select w.id from {DB_SCHEMA}.workspaces w where w.name = %s);'''
    for publ_type, workspace, publication, _ in publications:
        with app.app_context():
            db_util.run_statement(update_query, big_bbox + (publ_type, publication, workspace,))
    assert_out_of_the_box_publications(len(publications))

    with app.app_context():
        upgrade_v1_14.crop_bbox()
    assert_out_of_the_box_publications(0)

    # Layer bboxes must sit exactly at the extent in all sources after cropping.
    for publ_type, workspace, publication, _ in publications:
        if publ_type == process_client.LAYER_TYPE:
            assert_util.assert_all_sources_bbox(workspace, publication, (
                -20026376.39,
                -20048966.10,
                20026376.39,
                20048966.10,
            ))

    for publ_type, workspace, publication, _ in publications:
        process_client.delete_workspace_publication(publ_type, workspace, publication)