def test_wfs_proxy():
    username = '******'
    layername1 = 'ne_countries'
    username2 = 'testproxy2'

    authn_headers1 = get_authz_headers(username)
    process_client.reserve_username(username, headers=authn_headers1)
    process_client.publish_workspace_layer(username, layername1, headers=authn_headers1)

    data_xml = data_wfs.get_wfs20_insert_points(username, layername1)
    process_client.post_wfst(data_xml, headers=authn_headers1, workspace=username)

    # Test that user1 is able to write to his own layer through the general WFS endpoint
    process_client.post_wfst(data_xml, headers=authn_headers1)

    # Test that user2 is not able to write to user1's layer
    authn_headers2 = get_authz_headers(username2)
    process_client.reserve_username(username2, headers=authn_headers2)
    with pytest.raises(GS_Error) as exc:
        process_client.post_wfst(data_xml, headers=authn_headers2, workspace=username)
    assert exc.value.data['status_code'] == 400

    # Test that user2 is not able to write to user1's layer through the general WFS endpoint
    with pytest.raises(GS_Error) as exc:
        process_client.post_wfst(data_xml, headers=authn_headers2)
    assert exc.value.data['status_code'] == 400

    # Test anonymous user
    with pytest.raises(GS_Error) as exc:
        process_client.post_wfst(data_xml, workspace=username)
    assert exc.value.data['status_code'] == 400

    # Test fraudulent authentication header
    headers_fraud = {
        settings.LAYMAN_GS_AUTHN_HTTP_HEADER_ATTRIBUTE: username,
    }
    with pytest.raises(GS_Error) as exc:
        process_client.post_wfst(data_xml, headers=headers_fraud)
    assert exc.value.data['status_code'] == 400

    process_client.delete_workspace_layer(username, layername1, headers=authn_headers1)
def test_access_rights(access_rights_and_expected_list, use_file):
    owner_authn_headers = client_util.get_authz_headers(USERNAME)
    other_authn_headers = client_util.get_authz_headers(USERNAME2)

    post_method = client_util.publish_workspace_layer
    patch_method = client_util.patch_workspace_layer

    full_access_rights = {
        'read': access_rights_and_expected_list[0]['read'],
        'write': access_rights_and_expected_list[0]['write'],
    }
    roles_to_test = full_access_rights.copy()

    for idx, access_rights_and_expected in enumerate(access_rights_and_expected_list):
        write_method = patch_method if idx > 0 else post_method
        access_rights = {}
        for right_type in ['read', 'write']:
            if access_rights_and_expected.get(right_type):
                roles_to_test[right_type] = access_rights_and_expected[right_type]
                access_rights[right_type] = access_rights_and_expected[right_type]

        write_method(USERNAME,
                     LAYERNAME,
                     access_rights={key: ','.join(value) for key, value in access_rights.items()},
                     headers=owner_authn_headers,
                     file_paths=['tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.geojson'] if use_file else None)

        client_util.assert_workspace_layers(USERNAME, [LAYERNAME], owner_authn_headers)

        assert_gs_user_and_roles(USERNAME)
        assert_gs_layer_data_security(USERNAME, LAYERNAME, roles_to_test)
        assert_layman_layer_access_rights(USERNAME, LAYERNAME, roles_to_test)
        assert_wms_access(USERNAME, owner_authn_headers, [LAYERNAME])
        assert_wms_access(USERNAME, other_authn_headers,
                          access_rights_and_expected['expected_other_user_layers'])
        assert_wms_access(USERNAME, None,
                          access_rights_and_expected['expected_anonymous_layers'])

    client_util.delete_workspace_layer(USERNAME, LAYERNAME, headers=owner_authn_headers)
def test_wms_ows_proxy(service_endpoint):
    username = '******'
    layername = 'test_wms_ows_proxy_layer'

    authn_headers = get_authz_headers(username)
    process_client.ensure_reserved_username(username, headers=authn_headers)
    process_client.publish_workspace_layer(username, layername, headers=authn_headers)

    wms_url = geoserver_client.get_wms_url(username, service_endpoint)

    layer_info = process_client.get_workspace_layer(username, layername, headers=authn_headers)
    tn_bbox = get_square_bbox(layer_info['bounding_box'])

    from layman.layer.geoserver.wms import VERSION
    response = get_layer_thumbnail(wms_url, layername, tn_bbox, headers=authn_headers, wms_version=VERSION)
    response.raise_for_status()
    assert 'image' in response.headers['content-type']

    process_client.delete_workspace_layer(username, layername, headers=authn_headers)
def test_get_users_workspaces():
    public_workspace = 'test_get_users_workspaces_workspace'
    user = '******'
    publication = 'test_get_users_workspaces_publication'

    authz_headers = process_client.get_authz_headers(user)
    process_client.ensure_reserved_username(user, authz_headers)
    for publication_type in process_client.PUBLICATION_TYPES:
        process_client.publish_workspace_publication(publication_type, public_workspace, publication)

        all_sources = []
        for type_def in util.get_publication_types(use_cache=False).values():
            all_sources += type_def['internal_sources']
        providers = util.get_providers_from_source_names(all_sources)
        for provider in providers:
            with app.app_context():
                usernames = provider.get_usernames()
            assert public_workspace not in usernames, (publication_type, provider)

        with app.app_context():
            usernames = util.get_usernames(use_cache=False)
            workspaces = util.get_workspaces(use_cache=False)

        assert user in usernames
        assert public_workspace not in usernames
        assert user in workspaces
        assert public_workspace in workspaces

        process_client.delete_workspace_publication(publication_type, public_workspace, publication)
def prep_test_get_publication_info():
    auth_header_owner = process_client.get_authz_headers(USER_OWNER)
    auth_header_without = process_client.get_authz_headers(USER_WITHOUT_RIGHTS)
    process_client.ensure_reserved_username(USER_OWNER, headers=auth_header_owner)
    process_client.ensure_reserved_username(USER_WITHOUT_RIGHTS, headers=auth_header_without)

    access_rights = {'read': USER_OWNER, 'write': USER_OWNER}
    process_client.publish_workspace_layer(USER_OWNER, LAYER, headers=auth_header_owner, access_rights=access_rights)
    process_client.publish_workspace_map(USER_OWNER, MAP, headers=auth_header_owner, access_rights=access_rights)

    yield

    process_client.delete_workspace_map(USER_OWNER, MAP, headers=auth_header_owner)
    process_client.delete_workspace_layer(USER_OWNER, LAYER, headers=auth_header_owner)
def test_check_user_wms():
    user = '******' + settings.LAYMAN_GS_WMS_WORKSPACE_POSTFIX
    auth_headers = process_client.get_authz_headers(user)
    with pytest.raises(LaymanError) as exc_info:
        process_client.reserve_username(user, headers=auth_headers)
    assert exc_info.value.http_code == 400
    assert exc_info.value.code == 45
    assert exc_info.value.data['workspace_name'] == user
def ensure_user():
    # needs liferay_mock and ensure_layman fixtures
    for tmp_username in [USERNAME, USERNAME2]:
        authn_headers1 = client_util.get_authz_headers(tmp_username)
        client_util.ensure_reserved_username(tmp_username, headers=authn_headers1)
        assert_gs_user_and_roles(tmp_username)
def test_geoserver_remove_users_for_public_workspaces():
    workspace = 'test_geoserver_remove_users_for_public_workspaces_workspace'
    user = '******'
    auth_headers = process_client.get_authz_headers(user)
    layer = 'test_geoserver_remove_users_for_public_workspaces_layer'
    gs_rolename = gs_util.username_to_rolename(workspace)
    gs_rolename2 = gs_util.username_to_rolename(user)

    process_client.publish_workspace_layer(workspace, layer)
    process_client.ensure_reserved_username(user, auth_headers)

    with app.app_context():
        gs_provider.ensure_whole_user(workspace, auth)

        usernames = gs_util.get_usernames(auth)
        assert workspace in usernames
        assert user in usernames
        roles = gs_util.get_roles(auth)
        assert gs_rolename in roles
        assert gs_rolename2 in roles
        workspaces = gs_util.get_all_workspaces(auth)
        assert workspace in workspaces
        assert user in workspaces

        upgrade_v1_9.geoserver_remove_users_for_public_workspaces()

        usernames = gs_util.get_usernames(auth)
        assert workspace not in usernames, usernames
        assert user in usernames
        roles = gs_util.get_roles(auth)
        assert gs_rolename not in roles, roles
        assert gs_rolename2 in roles
        workspaces = gs_util.get_all_workspaces(auth)
        assert workspace in workspaces, workspaces
        assert user in workspaces

    process_client.delete_workspace_layer(workspace, layer)
    process_client.publish_workspace_layer(workspace, layer)
    process_client.delete_workspace_layer(workspace, layer)
    process_client.publish_workspace_layer(workspace, layer + '2')
    process_client.delete_workspace_layer(workspace, layer + '2')
def test_map_with_unauthorized_layer():
    username1 = 'test_map_with_unauthorized_layer_user1'
    layername1 = 'test_map_with_unauthorized_layer_layer1'
    mapname1 = 'test_map_with_unauthorized_layer_map1'
    username2 = 'test_map_with_unauthorized_layer_user2'
    layername2 = 'test_map_with_unauthorized_layer_layer2'

    user1_authz_headers = process_client.get_authz_headers(username1)
    user2_authz_headers = process_client.get_authz_headers(username2)
    process_client.reserve_username(username1, headers=user1_authz_headers)
    process_client.reserve_username(username2, headers=user2_authz_headers)
    process_client.publish_workspace_layer(username1, layername1, headers=user1_authz_headers)
    process_client.publish_workspace_layer(username2, layername2, headers=user2_authz_headers)

    # assert that each user has access only to his own layer
    process_client.assert_workspace_layers(username1, [layername1], headers=user1_authz_headers)
    process_client.assert_workspace_layers(username1, [], headers=user2_authz_headers)
    process_client.assert_workspace_layers(username2, [layername2], headers=user2_authz_headers)
    process_client.assert_workspace_layers(username2, [], headers=user1_authz_headers)

    # publish a map composed of layers of both users, readable by everyone
    process_client.publish_workspace_map(
        username1,
        mapname1,
        file_paths=['sample/layman.map/internal_url_unauthorized_layer.json'],
        access_rights={
            'read': 'EVERYONE',
            'write': f"{username1},{username2}",
        },
        headers=user1_authz_headers,
    )
    process_client.assert_workspace_maps(username1, [mapname1], headers=user1_authz_headers)
    process_client.assert_workspace_maps(username1, [mapname1], headers=user2_authz_headers)

    layer1_uuid = process_client.get_workspace_layer(username1, layername1, headers=user1_authz_headers)['uuid']
    layer2_uuid = process_client.get_workspace_layer(username2, layername2, headers=user2_authz_headers)['uuid']

    # assert that the metadata property operates_on contains only layers visible to the publisher,
    # regardless of who asks, as long as they have read access to the map
    assert_operates_on(username1, mapname1, [(layer1_uuid, layername1)], authz_headers=user1_authz_headers)
    assert_operates_on(username1, mapname1, [(layer1_uuid, layername1)], authz_headers=user2_authz_headers)

    process_client.patch_workspace_map(username1, mapname1, headers=user2_authz_headers)

    # assert that the metadata property operates_on contains only layers visible to the last publisher,
    # regardless of who asks, as long as they have read access to the map
    assert_operates_on(username1, mapname1, [(layer2_uuid, layername2)], authz_headers=user1_authz_headers)
    assert_operates_on(username1, mapname1, [(layer2_uuid, layername2)], authz_headers=user2_authz_headers)

    # clean up
    process_client.delete_workspace_map(username1, mapname1, headers=user1_authz_headers)
    process_client.delete_workspace_layer(username1, layername1, headers=user1_authz_headers)
    process_client.delete_workspace_layer(username2, layername2, headers=user2_authz_headers)
class TestGetPublicationInfosClass:
    layer_both = 'test_get_publication_infos_layer_both'
    layer_read = 'test_get_publication_infos_layer_read'
    layer_none = 'test_get_publication_infos_layer_none'
    owner = 'test_get_publication_infos_user_owner'
    actor = 'test_get_publication_infos_user_actor'
    authz_headers_owner = process_client.get_authz_headers(owner)
    authz_headers_actor = process_client.get_authz_headers(actor)

    @pytest.fixture(scope="class")
    def provide_publications(self):
        username = self.owner
        authz_headers = self.authz_headers_owner
        layer_both = self.layer_both
        layer_read = self.layer_read
        layer_none = self.layer_none
        process_client.ensure_reserved_username(username, headers=authz_headers)
        process_client.publish_workspace_layer(username, layer_both, headers=authz_headers,
                                               access_rights={'read': 'EVERYONE', 'write': 'EVERYONE'})
        process_client.publish_workspace_layer(username, layer_read, headers=authz_headers,
                                               access_rights={'read': 'EVERYONE', 'write': username})
        process_client.publish_workspace_layer(username, layer_none, headers=authz_headers,
                                               access_rights={'read': username, 'write': username})
        yield
        process_client.delete_workspace_layer(username, layer_both, headers=authz_headers)
        process_client.delete_workspace_layer(username, layer_read, headers=authz_headers)
        process_client.delete_workspace_layer(username, layer_none, headers=authz_headers)

    @pytest.mark.parametrize('publ_type, context, expected_publications', [
        (LAYER_TYPE, {'actor_name': actor, 'access_type': 'read'}, {layer_both, layer_read}),
        (LAYER_TYPE, {'actor_name': actor, 'access_type': 'write'}, {layer_both}),
    ])
    @pytest.mark.usefixtures('liferay_mock', 'ensure_layman', 'provide_publications')
    def test_get_publication_infos(self, publ_type, context, expected_publications):
        with app.app_context():
            infos = util.get_publication_infos(self.owner, publ_type, context)
        publ_set = set(publication_name for (workspace, publication_type, publication_name) in infos.keys())
        assert publ_set == expected_publications, publ_set
class TestGetPublications:
    workspace1 = 'test_get_publications_workspace1'
    workspace2 = 'test_get_publications_workspace2'
    authn_headers_user2 = process_client.get_authz_headers(workspace2)

    publication_1e_2_4x6_6 = 'test_get_publications_publication1e_2_4x6_6'
    publication_1e_3_3x3_3 = 'test_get_publications_publication1e_3_3x3_3'
    publication_1e_3_7x5_9 = 'test_get_publications_publication1e_3_7x5_9'
    publication_2e_3_3x5_5 = 'test_get_publications_publication2e_3_3x5_5'
    publication_2o_2_2x4_4 = 'test_get_publications_publication2o_2_2x4_4'

    publications = [
        (workspace1, publication_1e_2_4x6_6, {
            'title': 'Příliš jiný žluťoučký kůň úpěl ďábelské ódy (publication)',
            'bbox': (2000, 4000, 6000, 6000),
        }),
        (workspace1, publication_1e_3_3x3_3, {
            'title': 'Jednobodová publikace (publication)',
            'bbox': (3000, 3000, 3000, 3000),
        }),
        (workspace1, publication_1e_3_7x5_9, {
            'title': 'Public publication in public workspace (publication)',
            'bbox': (3000, 7000, 5000, 9000),
        }),
        (workspace2, publication_2e_3_3x5_5, {
            'title': '\'Too yellow horse\' means "Příliš žluťoučký kůň". (publication)',
            'bbox': (3000, 3000, 5000, 5000),
            'access_rights': {'read': {settings.RIGHTS_EVERYONE_ROLE}, 'write': {settings.RIGHTS_EVERYONE_ROLE}},
            'actor': workspace2,
        }),
        (workspace2, publication_2o_2_2x4_4, {
            'title': 'Příliš jiný žluťoučký kůň úpěl ďábelské ódy (publication)',
            'actor': workspace2,
            'access_rights': {'read': {workspace2}, 'write': {workspace2}},
            'bbox': (2000, 2000, 4000, 4000),
        }),
    ]

    @pytest.fixture(scope="class")
    def provide_data(self):
        process_client.ensure_reserved_username(self.workspace2, self.authn_headers_user2)
        prime_db_schema_client.ensure_workspace(self.workspace1)
        for publ_type in process_client.PUBLICATION_TYPES:
            for workspace, publ_name, publ_params in self.publications:
                prime_db_schema_client.post_workspace_publication(publ_type, workspace, publ_name, **publ_params)
        yield
        prime_db_schema_client.clear_workspaces([self.workspace1, self.workspace2])

    @staticmethod
    def assert_response(response, expected_publications, expected_headers):
        infos = response.json()
        info_publications = [(info['workspace'], info['name']) for info in infos]
        assert set(expected_publications) == set(info_publications)
        assert expected_publications == info_publications
        for header, value in expected_headers.items():
            assert header in response.headers, response.headers
            assert value == response.headers[header], response.headers

    @staticmethod
    @pytest.mark.parametrize('headers, query_params, expected_publications, expected_headers', [
        (authn_headers_user2, {}, [
            (workspace1, publication_1e_2_4x6_6),
            (workspace1, publication_1e_3_3x3_3),
            (workspace1, publication_1e_3_7x5_9),
            (workspace2, publication_2e_3_3x5_5),
            (workspace2, publication_2o_2_2x4_4),
        ], {'X-Total-Count': '5', 'Content-Range': 'items 1-5/5'}),
        (None, {}, [
            (workspace1, publication_1e_2_4x6_6),
            (workspace1, publication_1e_3_3x3_3),
            (workspace1, publication_1e_3_7x5_9),
            (workspace2, publication_2e_3_3x5_5),
        ], {'X-Total-Count': '4', 'Content-Range': 'items 1-4/4'}),
        (authn_headers_user2, {'full_text_filter': 'kůň'}, [
            (workspace1, publication_1e_2_4x6_6),
            (workspace2, publication_2e_3_3x5_5),
            (workspace2, publication_2o_2_2x4_4),
        ], {'X-Total-Count': '3', 'Content-Range': 'items 1-3/3'}),
        (None, {'full_text_filter': 'The Fačřš_tÚŮTŤsa " a34432[;] ;.\\Ra\'\'ts'},
         list(),
         {'X-Total-Count': '0', 'Content-Range': 'items 0-0/0'}),
        (authn_headers_user2, {'full_text_filter': '\'Too yellow horse\' means "Příliš žluťoučký kůň".'}, [
            (workspace2, publication_2e_3_3x5_5),
            (workspace1, publication_1e_2_4x6_6),
            (workspace2, publication_2o_2_2x4_4),
        ], {'X-Total-Count': '3', 'Content-Range': 'items 1-3/3'}),
        (authn_headers_user2, {'full_text_filter': 'mean'}, [
            (workspace2, publication_2e_3_3x5_5),
        ], {'X-Total-Count': '1', 'Content-Range': 'items 1-1/1'}),
        (authn_headers_user2, {'full_text_filter': 'jiný další kůň'}, [
            (workspace1, publication_1e_2_4x6_6),
            (workspace2, publication_2o_2_2x4_4),
            (workspace2, publication_2e_3_3x5_5),
        ], {'X-Total-Count': '3', 'Content-Range': 'items 1-3/3'}),
        (authn_headers_user2, {'full_text_filter': 'jiný další kůň', 'order_by': 'full_text'}, [
            (workspace1, publication_1e_2_4x6_6),
            (workspace2, publication_2o_2_2x4_4),
            (workspace2, publication_2e_3_3x5_5),
        ], {'X-Total-Count': '3', 'Content-Range': 'items 1-3/3'}),
        (authn_headers_user2, {'full_text_filter': 'workspace publication'}, [
            (workspace1, publication_1e_3_7x5_9),
            (workspace1, publication_1e_2_4x6_6),
            (workspace1, publication_1e_3_3x3_3),
            (workspace2, publication_2e_3_3x5_5),
            (workspace2, publication_2o_2_2x4_4),
        ], {'X-Total-Count': '5', 'Content-Range': 'items 1-5/5'}),
        (authn_headers_user2, {'full_text_filter': 'TOUCK'}, [
            (workspace1, publication_1e_2_4x6_6),
            (workspace2, publication_2e_3_3x5_5),
            (workspace2, publication_2o_2_2x4_4),
        ], {'X-Total-Count': '3', 'Content-Range': 'items 1-3/3'}),
        (authn_headers_user2, {'order_by': 'title'}, [
            (workspace1, publication_1e_3_3x3_3),
            (workspace1, publication_1e_2_4x6_6),
            (workspace2, publication_2o_2_2x4_4),
            (workspace1, publication_1e_3_7x5_9),
            (workspace2, publication_2e_3_3x5_5),
        ], {'X-Total-Count': '5', 'Content-Range': 'items 1-5/5'}),
        (authn_headers_user2, {'order_by': 'last_change'}, [
            (workspace2, publication_2o_2_2x4_4),
            (workspace2, publication_2e_3_3x5_5),
            (workspace1, publication_1e_3_7x5_9),
            (workspace1, publication_1e_3_3x3_3),
            (workspace1, publication_1e_2_4x6_6),
        ], {'X-Total-Count': '5', 'Content-Range': 'items 1-5/5'}),
        (authn_headers_user2, {'order_by_list': ['bbox'],
                               'ordering_bbox': ','.join(str(c) for c in (2999, 2999, 5001, 5001))}, [
            (workspace2, publication_2e_3_3x5_5),
            (workspace1, publication_1e_2_4x6_6),
            (workspace2, publication_2o_2_2x4_4),
            (workspace1, publication_1e_3_3x3_3),
            (workspace1, publication_1e_3_7x5_9),
        ], {'X-Total-Count': '5', 'Content-Range': 'items 1-5/5'}),
        (authn_headers_user2, {'order_by_list': ['bbox'],
                               'ordering_bbox': ','.join(str(c) for c in (3001, 3001, 3001, 3001))}, [
            (workspace2, publication_2e_3_3x5_5),
            (workspace2, publication_2o_2_2x4_4),
            (workspace1, publication_1e_2_4x6_6),
            (workspace1, publication_1e_3_3x3_3),
            (workspace1, publication_1e_3_7x5_9),
        ], {'X-Total-Count': '5', 'Content-Range': 'items 1-5/5'}),
        (authn_headers_user2, {'bbox_filter': ','.join(str(c) for c in (3001, 3001, 4999, 4999))}, [
            (workspace2, publication_2e_3_3x5_5),
            (workspace1, publication_1e_2_4x6_6),
            (workspace2, publication_2o_2_2x4_4),
        ], {'X-Total-Count': '3', 'Content-Range': 'items 1-3/3'}),
        (authn_headers_user2, {'bbox_filter': ','.join(str(c) for c in (4001, 4001, 4001, 4001))}, [
            (workspace2, publication_2e_3_3x5_5),
            (workspace1, publication_1e_2_4x6_6),
        ], {'X-Total-Count': '2', 'Content-Range': 'items 1-2/2'}),
        (authn_headers_user2, {'limit': 2}, [
            (workspace1, publication_1e_2_4x6_6),
            (workspace1, publication_1e_3_3x3_3),
            # (workspace1, publication_1e_3_7x5_9),
            # (workspace2, publication_2e_3_3x5_5),
            # (workspace2, publication_2o_2_2x4_4),
        ], {'X-Total-Count': '5', 'Content-Range': 'items 1-2/5'}),
        (authn_headers_user2, {'offset': 1}, [
            # (workspace1, publication_1e_2_4x6_6),
            (workspace1, publication_1e_3_3x3_3),
            (workspace1, publication_1e_3_7x5_9),
            (workspace2, publication_2e_3_3x5_5),
            (workspace2, publication_2o_2_2x4_4),
        ], {'X-Total-Count': '5', 'Content-Range': 'items 2-5/5'}),
        (authn_headers_user2, {'limit': 1, 'offset': 1}, [
            # (workspace1, publication_1e_2_4x6_6),
            (workspace1, publication_1e_3_3x3_3),
            # (workspace1, publication_1e_3_7x5_9),
            # (workspace2, publication_2e_3_3x5_5),
            # (workspace2, publication_2o_2_2x4_4),
        ], {'X-Total-Count': '5', 'Content-Range': 'items 2-2/5'}),
        (authn_headers_user2, {'limit': 0, 'offset': 0},
         [],
         {'X-Total-Count': '5', 'Content-Range': 'items 0-0/5'}),
        (authn_headers_user2, {'limit': 6, 'offset': 3}, [
            # (workspace1, publication_1e_2_4x6_6),
            # (workspace1, publication_1e_3_3x3_3),
            # (workspace1, publication_1e_3_7x5_9),
            (workspace2, publication_2e_3_3x5_5),
            (workspace2, publication_2o_2_2x4_4),
        ], {'X-Total-Count': '5', 'Content-Range': 'items 4-5/5'}),
        (authn_headers_user2, {'order_by': 'title', 'full_text_filter': 'ódy',
                               'bbox_filter': ','.join(str(c) for c in (3001, 3001, 4999, 4999)),
                               'limit': 1}, [
            (workspace1, publication_1e_2_4x6_6),
            # (workspace2, publication_2o_2_2x4_4), limit
        ], {'X-Total-Count': '2', 'Content-Range': 'items 1-1/2'}),
        (authn_headers_user2, {'order_by': 'title', 'full_text_filter': 'ódy',
                               'bbox_filter': ','.join(str(c) for c in (3001, 3001, 4999, 4999)),
                               'offset': 1}, [
            # (workspace1, publication_1e_2_4x6_6), offset
            (workspace2, publication_2o_2_2x4_4),
        ], {'X-Total-Count': '2', 'Content-Range': 'items 2-2/2'}),
        (authn_headers_user2, {'order_by': 'bbox', 'full_text_filter': 'prilis',
                               'bbox_filter': ','.join(str(c) for c in (2000, 2000, 6000, 6000)),
                               'offset': 1, 'limit': 1}, [
            # (workspace1, publication_1e_2_4x6_6),
            (workspace2, publication_2e_3_3x5_5),
            # (workspace2, publication_2o_2_2x4_4),
        ], {'X-Total-Count': '3', 'Content-Range': 'items 2-2/3'}),
        (authn_headers_user2, {'full_text_filter': 'prilis yellow',
                               'bbox_filter': ','.join(str(c) for c in (2000, 2000, 6000, 6000)),
                               'offset': 1, 'limit': 1}, [
            # (workspace2, publication_2e_3_3x5_5),
            (workspace1, publication_1e_2_4x6_6),
            # (workspace2, publication_2o_2_2x4_4),
        ], {'X-Total-Count': '3', 'Content-Range': 'items 2-2/3'}),
        (authn_headers_user2, {'order_by': 'title', 'full_text_filter': 'prilis',
                               'bbox_filter': ','.join(str(c) for c in (2000, 2000, 6000, 6000)),
                               'offset': 1, 'limit': 1}, [
            # (workspace1, publication_1e_2_4x6_6),
            (workspace2, publication_2o_2_2x4_4),
            # (workspace2, publication_2e_3_3x5_5),
        ], {'X-Total-Count': '3', 'Content-Range': 'items 2-2/3'}),
        (authn_headers_user2, {'order_by': 'last_change', 'full_text_filter': 'prilis',
                               'bbox_filter': ','.join(str(c) for c in (2000, 2000, 6000, 6000)),
                               'offset': 1, 'limit': 1}, [
            # (workspace2, publication_2o_2_2x4_4),
            (workspace2, publication_2e_3_3x5_5),
            # (workspace1, publication_1e_2_4x6_6),
        ], {'X-Total-Count': '3', 'Content-Range': 'items 2-2/3'}),
    ])
    @pytest.mark.parametrize('publication_type', process_client.PUBLICATION_TYPES)
    @pytest.mark.usefixtures('liferay_mock', 'ensure_layman', 'provide_data')
    def test_get_publications(publication_type, headers, query_params, expected_publications, expected_headers):
        response = process_client.get_publications_response(publication_type,
                                                            headers=headers,
                                                            query_params=query_params)
        TestGetPublications.assert_response(response, expected_publications, expected_headers)

    @staticmethod
    @pytest.mark.parametrize('workspace, headers, query_params, expected_publications, expected_headers', [
        (workspace1, authn_headers_user2, {}, [
            (workspace1, publication_1e_2_4x6_6),
            (workspace1, publication_1e_3_3x3_3),
            (workspace1, publication_1e_3_7x5_9),
        ], {'X-Total-Count': '3', 'Content-Range': 'items 1-3/3'}),
        (workspace1, None, {}, [
            (workspace1, publication_1e_2_4x6_6),
            (workspace1, publication_1e_3_3x3_3),
            (workspace1, publication_1e_3_7x5_9),
        ], {'X-Total-Count': '3', 'Content-Range': 'items 1-3/3'}),
        (workspace1, None, {'full_text_filter': 'kůň'}, [
            (workspace1, publication_1e_2_4x6_6),
        ], {'X-Total-Count': '1', 'Content-Range': 'items 1-1/1'}),
        (workspace1, None, {'full_text_filter': 'The Fačřš_tÚŮTŤsa " a34432[;] ;.\\Ra\'\'ts'},
         [],
         {'X-Total-Count': '0', 'Content-Range': 'items 0-0/0'}),
        (workspace1, None, {'full_text_filter': '\'Too yellow horse\' means "Příliš žluťoučký kůň".'}, [
            (workspace1, publication_1e_2_4x6_6),
        ], {'X-Total-Count': '1', 'Content-Range': 'items 1-1/1'}),
        (workspace1, None, {'full_text_filter': 'mean'},
         [],
         {'X-Total-Count': '0', 'Content-Range': 'items 0-0/0'}),
        (workspace1, None, {'full_text_filter': 'jiný další kůň'}, [
            (workspace1, publication_1e_2_4x6_6),
        ], {'X-Total-Count': '1', 'Content-Range': 'items 1-1/1'}),
        (workspace1, None, {'full_text_filter': 'jiný další kůň', 'order_by': 'full_text'}, [
            (workspace1, publication_1e_2_4x6_6),
        ], {'X-Total-Count': '1', 'Content-Range': 'items 1-1/1'}),
        (workspace1, None, {'full_text_filter': 'workspace publication'}, [
            (workspace1, publication_1e_3_7x5_9),
            (workspace1, publication_1e_2_4x6_6),
            (workspace1, publication_1e_3_3x3_3),
        ], {'X-Total-Count': '3', 'Content-Range': 'items 1-3/3'}),
        (workspace1, None, {'order_by': 'title'}, [
            (workspace1, publication_1e_3_3x3_3),
            (workspace1, publication_1e_2_4x6_6),
            (workspace1, publication_1e_3_7x5_9),
        ], {'X-Total-Count': '3', 'Content-Range': 'items 1-3/3'}),
        (workspace1, None, {'order_by': 'last_change'}, [
            (workspace1, publication_1e_3_7x5_9),
            (workspace1, publication_1e_3_3x3_3),
            (workspace1, publication_1e_2_4x6_6),
        ], {'X-Total-Count': '3', 'Content-Range': 'items 1-3/3'}),
        (workspace1, None, {'order_by_list': ['bbox'],
                            'ordering_bbox': ','.join(str(c) for c in (2999, 2999, 5001, 5001))}, [
            (workspace1, publication_1e_2_4x6_6),
            (workspace1, publication_1e_3_3x3_3),
            (workspace1, publication_1e_3_7x5_9),
        ], {'X-Total-Count': '3', 'Content-Range': 'items 1-3/3'}),
        (workspace1, None, {'order_by_list': ['bbox'],
                            'ordering_bbox': ','.join(str(c) for c in (3001, 3001, 3001, 3001))}, [
            (workspace1, publication_1e_2_4x6_6),
            (workspace1, publication_1e_3_3x3_3),
            (workspace1, publication_1e_3_7x5_9),
        ], {'X-Total-Count': '3', 'Content-Range': 'items 1-3/3'}),
        (workspace1, None, {'bbox_filter': ','.join(str(c) for c in (3001, 3001, 4999, 4999))}, [
            (workspace1, publication_1e_2_4x6_6),
        ], {'X-Total-Count': '1', 'Content-Range': 'items 1-1/1'}),
        (workspace1, None, {'bbox_filter': ','.join(str(c) for c in (4001, 4001, 4001, 4001))}, [
            (workspace1, publication_1e_2_4x6_6),
        ], {'X-Total-Count': '1', 'Content-Range': 'items 1-1/1'}),
        (workspace1, None, {'limit': 1}, [
            (workspace1, publication_1e_2_4x6_6),
            # (workspace1, publication_1e_3_3x3_3),
            # (workspace1, publication_1e_3_7x5_9),
        ], {'X-Total-Count': '3', 'Content-Range': 'items 1-1/3'}),
        (workspace1, None, {'offset': 1}, [
            # (workspace1, publication_1e_2_4x6_6),
            (workspace1, publication_1e_3_3x3_3),
            (workspace1, publication_1e_3_7x5_9),
        ], {'X-Total-Count': '3', 'Content-Range': 'items 2-3/3'}),
        (workspace1, None, {'limit': 1, 'offset': 1}, [
            # (workspace1, publication_1e_2_4x6_6),
            (workspace1, publication_1e_3_3x3_3),
            # (workspace1, publication_1e_3_7x5_9),
        ], {'X-Total-Count': '3', 'Content-Range': 'items 2-2/3'}),
        (workspace1, None, {'limit': 0, 'offset': 0}, [
            # (workspace1, publication_1e_2_4x6_6),
            # (workspace1, publication_1e_3_3x3_3),
            # (workspace1, publication_1e_3_7x5_9),
        ], {'X-Total-Count': '3', 'Content-Range': 'items 0-0/3'}),
        (workspace1, None, {'limit': 6, 'offset': 2}, [
            # (workspace1, publication_1e_2_4x6_6),
            # (workspace1, publication_1e_3_3x3_3),
            (workspace1, publication_1e_3_7x5_9),
        ], {'X-Total-Count': '3', 'Content-Range': 'items 3-3/3'}),
        (workspace1, None, {'order_by': 'title', 'full_text_filter': 'ódy',
                            'bbox_filter': ','.join(str(c) for c in (3001, 3001, 4999, 4999)),
                            'limit': 1}, [
            (workspace1, publication_1e_2_4x6_6),
        ], {'X-Total-Count': '1', 'Content-Range': 'items 1-1/1'}),
        (workspace1, None, {'order_by': 'title', 'full_text_filter': 'ódy',
                            'bbox_filter': ','.join(str(c) for c in (3001, 3001, 4999, 4999)),
                            'offset': 1}, [
            # (workspace1, publication_1e_2_4x6_6), offset
        ], {'X-Total-Count': '1', 'Content-Range': 'items 0-0/1'}),
        (workspace2, authn_headers_user2, {'order_by': 'bbox', 'full_text_filter': 'prilis',
                                           'bbox_filter': ','.join(str(c) for c in (2000, 2000, 6000, 6000)),
                                           'offset': 1, 'limit': 1}, [
            # (workspace2, publication_2e_3_3x5_5),
            (workspace2, publication_2o_2_2x4_4),
        ], {'X-Total-Count': '2', 'Content-Range': 'items 2-2/2'}),
        (workspace2, authn_headers_user2, {'full_text_filter': 'prilis yellow',
                                           'bbox_filter': ','.join(str(c) for c in (2000, 2000, 6000, 6000)),
                                           'offset': 1, 'limit': 1}, [
            # (workspace2, publication_2e_3_3x5_5),
            (workspace2, publication_2o_2_2x4_4),
        ], {'X-Total-Count': '2', 'Content-Range': 'items 2-2/2'}),
        (workspace2, authn_headers_user2, {'order_by': 'title', 'full_text_filter': 'prilis',
                                           'bbox_filter': ','.join(str(c) for c in (2000, 2000, 6000, 6000)),
                                           'offset': 1, 'limit': 1}, [
            # (workspace2, publication_2o_2_2x4_4),
            (workspace2, publication_2e_3_3x5_5),
        ], {'X-Total-Count': '2', 'Content-Range': 'items 2-2/2'}),
        (workspace2, authn_headers_user2, {'order_by': 'last_change', 'full_text_filter': 'prilis',
                                           'bbox_filter': ','.join(str(c) for c in (2000, 2000, 6000, 6000)),
                                           'offset': 1, 'limit': 1}, [
            # (workspace2, publication_2o_2_2x4_4),
            (workspace2, publication_2e_3_3x5_5),
        ], {'X-Total-Count': '2', 'Content-Range': 'items 2-2/2'}),
    ])
    @pytest.mark.parametrize('publication_type', process_client.PUBLICATION_TYPES)
    @pytest.mark.usefixtures('liferay_mock', 'ensure_layman', 'provide_data')
    def test_get_workspace_publications(publication_type, workspace, headers, query_params,
                                        expected_publications, expected_headers):
        response = process_client.get_workspace_publications_response(publication_type,
                                                                      workspace,
                                                                      headers=headers,
                                                                      query_params=query_params)
        TestGetPublications.assert_response(response, expected_publications, expected_headers)
class TestDeletePublicationsClass:
    owner = 'test_delete_publications_owner'
    deleter = 'test_delete_publications_deleter'
    authn_headers_owner = process_client.get_authz_headers(owner)
    authn_headers_deleter = process_client.get_authz_headers(deleter)

    @pytest.fixture(scope="class")
    def provide_data(self):
        process_client.ensure_reserved_username(self.owner, self.authn_headers_owner)
        process_client.ensure_reserved_username(self.deleter, self.authn_headers_deleter)
        yield

    @pytest.mark.parametrize('publ_type', process_client.PUBLICATION_TYPES)
    @pytest.mark.usefixtures('liferay_mock', 'ensure_layman', 'provide_data')
    def test_delete_publications(self, publ_type):
        def check_delete(headers, after_delete_publications, remaining_publications):
            delete_json = process_client.delete_workspace_publications(publ_type, owner, headers=headers)
            publication_set = {publication['name'] for publication in delete_json}
            assert after_delete_publications == publication_set

            get_json = process_client.get_workspace_publications(publ_type, workspace=owner,
                                                                 headers=authn_headers_owner)
            publication_set = {publication['name'] for publication in get_json}
            assert remaining_publications == publication_set

        owner = self.owner
        authn_headers_owner = self.authn_headers_owner
        authn_headers_deleter = self.authn_headers_deleter

        publication_a = 'test_delete_publications_publication_a'
        publication_b = 'test_delete_publications_publication_b'
        publications = [
            (publication_a, {'read': 'EVERYONE', 'write': owner}),
            (publication_b, {'read': 'EVERYONE', 'write': 'EVERYONE'}),
        ]
        for (name, access_rights) in publications:
            process_client.publish_workspace_publication(publ_type, owner, name,
                                                         access_rights=access_rights,
                                                         headers=authn_headers_owner)

        response = process_client.get_workspace_publications(publ_type, workspace=owner,
                                                             headers=authn_headers_owner)
        assert len(response) == len(publications)

        # Delete by the other user, who has write rights to only one publication
        check_delete(authn_headers_deleter,
                     {publication_b, },
                     {publication_a, })

        # Delete by the owner; everything is deleted
        check_delete(authn_headers_owner,
                     {publication_a, },
                     set())
class TestSoapClass:
    username = '******'
    publ_name_prefix = 'test_rest_soap_'
    authz_headers = process_client.get_authz_headers(username)
    access_rights_rowo = {'read': f"{username}", 'write': f"{username}"}
    access_rights_rewo = {'read': f"{username},EVERYONE", 'write': f"{username}"}
    access_rights_rewe = {'read': f"{username},EVERYONE", 'write': f"{username},EVERYONE"}
    publication_type = None
    publication_name = None

    @pytest.fixture(scope='class')
    def reserve_username(self):
        process_client.ensure_reserved_username(self.username, headers=self.authz_headers)
        yield

    @pytest.fixture()
    def clear_data(self):
        yield
        process_client.delete_workspace_publication(self.publication_type,
                                                    self.username,
                                                    self.publication_name,
                                                    headers=self.authz_headers)

    @pytest.mark.flaky(reruns=5, reruns_delay=2)
    @pytest.mark.usefixtures('liferay_mock', 'ensure_layman', 'reserve_username', 'clear_data')
    @pytest.mark.parametrize('params_and_expected_list', [
        # (input access rights, expected public visibility of metadata record)
        [(access_rights_rowo, False), (access_rights_rewe, True)],
        [(access_rights_rewo, True)],
        [(access_rights_rewe, True), (access_rights_rowo, False)],
    ])
    @pytest.mark.parametrize('publ_type', process_client.PUBLICATION_TYPES)
    def test_soap_authz(self, publ_type, params_and_expected_list):
        username = self.username
        publ_name_prefix = self.publ_name_prefix
        authz_headers = self.authz_headers
        post_method = process_client.publish_workspace_publication
        patch_method = process_client.patch_workspace_publication

        publ_name = f"{publ_name_prefix}{publ_type.split('.')[-1]}"
        self.publication_type = publ_type
        self.publication_name = publ_name

        for idx, (access_rights, anonymous_visibility) in enumerate(params_and_expected_list):
            write_method = patch_method if idx > 0 else post_method
            write_method(publ_type, username, publ_name,
                         headers=authz_headers,
                         access_rights=access_rights)

            publ_uuid = process_client.get_workspace_publication(publ_type, username, publ_name,
                                                                 headers=authz_headers)['uuid']
            publ_muuid = f"m-{publ_uuid}"
            assert micka_util.get_number_of_records(publ_muuid, use_authn=True) > 0
            anon_number_of_records = micka_util.get_number_of_records(publ_muuid, use_authn=False)
            assert bool(anon_number_of_records) == anonymous_visibility, \
                f"muuid={publ_muuid}, access_rights={access_rights}, number_of_records={anon_number_of_records}"
def test_patch_current_user_without_username():
    username1 = 'test_patch_current_user_user1'
    username2 = 'test_patch_current_user_user2'
    user1_authn_headers = process_client.get_authz_headers(username1)
    user2_authn_headers = process_client.get_authz_headers(username2)

    # reserve the username
    with app.app_context():
        rest_path = url_for('rest_current_user.patch', adjust_username='******')
    response = requests.patch(rest_path, headers=user1_authn_headers)
    assert response.status_code == 200, response.text

    # check that it was reserved
    with app.app_context():
        rest_path = url_for('rest_current_user.get')
    response = requests.get(rest_path, headers=user1_authn_headers)
    assert response.status_code == 200, response.text
    resp_json = response.json()
    assert resp_json['authenticated'] is True
    assert 'username' in resp_json
    exp_username = '******'
    exp_sub = '20142'
    assert resp_json['username'] == exp_username
    assert resp_json['claims']['sub'] == exp_sub

    iss_id = liferay.__name__
    from layman.authn.redis import _get_issid_sub_2_username_key
    rds_key = _get_issid_sub_2_username_key(iss_id, exp_sub)
    rds = settings.LAYMAN_REDIS
    assert rds.get(rds_key) == exp_username

    from layman.authn.filesystem import get_authn_info
    authn_info = get_authn_info(exp_username)
    assert authn_info['iss_id'] == iss_id
    assert authn_info['sub'] == exp_sub

    # try to re-reserve the username
    with app.app_context():
        rest_path = url_for('rest_current_user.patch', adjust_username='******')
    response = requests.patch(rest_path, headers=user1_authn_headers)
    assert response.status_code == 400, response.text
    r_json = response.json()
    assert r_json['code'] == 34
    assert r_json['detail']['username'] == exp_username

    # try to reserve the same username by the other user
    with app.app_context():
        rest_path = url_for('rest_current_user.patch')
    response = requests.patch(rest_path, data={
        'username': exp_username,
    }, headers=user2_authn_headers)
    assert response.status_code == 409, response.text
    r_json = response.json()
    assert r_json['code'] == 35
    assert 'detail' not in r_json

    # reserve another username by the other user
    with app.app_context():
        rest_path = url_for('rest_current_user.patch')
    exp_username2 = 'test_patch_current_user_user2'
    exp_sub2 = '20143'
    response = requests.patch(rest_path, data={
        'username': exp_username2,
    }, headers=user2_authn_headers)
    assert response.status_code == 200, response.text
    resp_json = response.json()
    assert 'username' in resp_json
    assert resp_json['username'] == exp_username2
    assert resp_json['claims']['sub'] == exp_sub2

    # test map metadata
    workspace = exp_username
    exp_email = 'test_patch_current_user_user1_email' + '@liferay.com'
    exp_name = 'FirstName MiddleName LastName'
    mapname = 'map1'
    process_client.publish_workspace_map(workspace, mapname, headers=user1_authn_headers)

    with app.app_context():
        rest_path = url_for('rest_workspace_map_file.get', workspace=workspace, mapname=mapname)
    response = requests.get(rest_path, headers=user1_authn_headers)
    assert response.status_code == 200, response.text
    resp_json = response.json()
    assert resp_json['name'] == mapname
    user_info = resp_json['user']
    assert {'email', 'name'} == set(user_info.keys())
    assert user_info['name'] == exp_name
    assert user_info['email'] == exp_email

    process_client.delete_workspace_map(workspace, mapname, headers=user1_authn_headers)
class TestRestApiClass:
    layername = 'test_authorize_decorator_layer'
    mapname = 'test_authorize_decorator_map'
    username = '******'
    authz_headers = process_client.get_authz_headers(username)

    @pytest.fixture(scope="class")
    def provide_publications(self):
        username = self.username
        authz_headers = self.authz_headers
        layername = self.layername
        mapname = self.mapname
        process_client.ensure_reserved_username(username, headers=authz_headers)
        process_client.publish_workspace_layer(username, layername, headers=authz_headers)
        process_client.publish_workspace_map(username, mapname, headers=authz_headers)
        yield
        process_client.delete_workspace_layer(username, layername, headers=authz_headers)
        process_client.delete_workspace_map(username, mapname, headers=authz_headers)

    @staticmethod
    def assert_response(response, exp_status_code, exp_data):
        assert response.status_code == exp_status_code, response.text
        if exp_status_code == 200 and exp_data is not None:
            resp_json = response.json()
            if callable(exp_data):
                assert exp_data(resp_json), f"resp_json={resp_json}, exp_data={exp_data}"
            else:
                assert resp_json == exp_data
        elif exp_status_code != 200 and exp_data is not None:
            resp_json = response.json()
            assert resp_json['code'] == exp_data, f"resp_json={resp_json}, exp_data={exp_data}"

    @staticmethod
    def has_single_layer(r_json):
        return {li['name'] for li in r_json} == {TestRestApiClass.layername}

    @staticmethod
    def has_single_map(r_json):
        return {li['name'] for li in r_json} == {TestRestApiClass.mapname}

    @staticmethod
    def has_no_publication(r_json):
        return {li['name'] for li in r_json} == set()

    @pytest.mark.parametrize(
        "rest_action, url_for_params, authz_status_code, authz_response, unauthz_status_code, unauthz_response",
        [
            ('rest_workspace_layers.get', {}, 200, has_single_layer.__func__, 200, has_no_publication.__func__),
            ('rest_workspace_layer.get', {'layername': layername}, 200, None, 404, 15),
            ('rest_workspace_layer_metadata_comparison.get', {'layername': layername}, 200, None, 404, 15),
            ('rest_workspace_layer_style.get', {'layername': layername}, 200, None, 404, 15),
            ('rest_workspace_layer_thumbnail.get', {'layername': layername}, 200, None, 404, 15),
            ('rest_workspace_layer_chunk.get', {'layername': layername}, 400, 20, 404, 15),
            ('rest_workspace_maps.get', {}, 200, has_single_map.__func__, 200, has_no_publication.__func__),
            ('rest_workspace_map.get', {'mapname': mapname}, 200, None, 404, 26),
            ('rest_workspace_map_file.get', {'mapname': mapname}, 200, None, 404, 26),
            ('rest_workspace_map_metadata_comparison.get', {'mapname': mapname}, 200, None, 404, 26),
            ('rest_workspace_map_thumbnail.get', {'mapname': mapname}, 200, None, 404, 26),
        ],
    )
    @pytest.mark.usefixtures('liferay_mock', 'ensure_layman', 'provide_publications')
    def test_authorize_publications_decorator_on_rest_api(
            self,
            rest_action,
            url_for_params,
            authz_status_code,
            authz_response,
            unauthz_status_code,
            unauthz_response,
    ):
        username = self.username
        authz_headers = self.authz_headers

        patch_method = None
        publ_name = None
        if '_layer' in rest_action:
            patch_method = process_client.patch_workspace_layer
            publ_name = self.layername
        elif '_map' in rest_action:
            patch_method = process_client.patch_workspace_map
            publ_name = self.mapname
        assert publ_name

        url_for_params['workspace'] = username
        with app.app_context():
            rest_url = url_for(rest_action, **url_for_params)

        # publication readable and writable only by its owner
        patch_method(username, publ_name, headers=authz_headers, access_rights={
            'read': username,
            'write': username,
        })
        response = requests.get(rest_url, headers=authz_headers)
        self.assert_response(response, authz_status_code, authz_response)
        response = requests.get(rest_url)
        self.assert_response(response, unauthz_status_code, unauthz_response)

        # publication readable and writable by everyone
        patch_method(username, publ_name, headers=authz_headers, access_rights={
            'read': settings.RIGHTS_EVERYONE_ROLE,
            'write': settings.RIGHTS_EVERYONE_ROLE,
        })
        response = requests.get(rest_url, headers=authz_headers)
        self.assert_response(response, authz_status_code, authz_response)
        response = requests.get(rest_url)
        self.assert_response(response, authz_status_code, authz_response)
def test_missing_attribute_authz():
    username = '******'
    layername1 = 'testmissingattr_authz_layer'
    username2 = 'testmissingattr_authz2'

    authn_headers1 = get_authz_headers(username)
    authn_headers2 = get_authz_headers(username2)

    def do_test(wfs_query, attribute_names):
        # Test that an unauthorized user does not cause creation of a new attribute
        with app.app_context():
            old_db_attributes = db.get_all_column_names(username, layername1)
        for attr_name in attribute_names:
            assert attr_name not in old_db_attributes, f"old_db_attributes={old_db_attributes}, attr_name={attr_name}"
        with pytest.raises(GS_Error) as exc:
            process_client.post_wfst(wfs_query, headers=authn_headers2, workspace=username)
        assert exc.value.data['status_code'] == 400

        with app.app_context():
            new_db_attributes = db.get_all_column_names(username, layername1)
        for attr_name in attribute_names:
            assert attr_name not in new_db_attributes, f"new_db_attributes={new_db_attributes}, attr_name={attr_name}"

        # Test that an authorized user does cause creation of the new attribute
        process_client.post_wfst(wfs_query, headers=authn_headers1, workspace=username)
        with app.app_context():
            new_db_attributes = db.get_all_column_names(username, layername1)
        for attr_name in attribute_names:
            assert attr_name in new_db_attributes, f"new_db_attributes={new_db_attributes}, attr_name={attr_name}"

    process_client.reserve_username(username, headers=authn_headers1)
    process_client.publish_workspace_layer(username,
                                           layername1,
                                           file_paths=['tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.geojson'],
                                           headers=authn_headers1)

    # Test that user2 is not able to write to user1's layer
    process_client.reserve_username(username2, headers=authn_headers2)

    # INSERT
    attr_names = ['inexisting_attribute_auth1', 'inexisting_attribute_auth2']
    data_xml = data_wfs.get_wfs20_insert_points_new_attr(username, layername1, attr_names)
    do_test(data_xml, attr_names)

    # UPDATE
    attr_names = ['inexisting_attribute_auth3', 'inexisting_attribute_auth4']
    data_xml = data_wfs.get_wfs20_update_points_new_attr(username, layername1, attr_names)
    do_test(data_xml, attr_names)

    process_client.delete_workspace_layer(username, layername1, headers=authn_headers1)
def test_missing_attribute(style_file):
    username = '******'
    layername = 'inexisting_attribute_layer'
    layername2 = 'inexisting_attribute_layer2'

    authn_headers = get_authz_headers(username)
    process_client.ensure_reserved_username(username, headers=authn_headers)
    process_client.publish_workspace_layer(
        username,
        layername,
        file_paths=['tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.geojson'],
        style_file=style_file,
        headers=authn_headers,
    )
    process_client.publish_workspace_layer(
        username,
        layername2,
        file_paths=['tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.geojson'],
        style_file=style_file,
        headers=authn_headers,
    )

    with app.app_context():
        style_type = layer_util.get_layer_info(username, layername,
                                               context={'keys': ['style_type'], })['style_type']

    def wfs_post(workspace, attr_names_list, data_xml):
        with app.app_context():
            wfs_url = f"http://{settings.LAYMAN_SERVER_NAME}/geoserver/{workspace}/wfs"
            old_db_attributes = {}
            old_wfs_properties = {}
            for layer, attr_names in attr_names_list:
                # test that none of attr_names is present in the DB table yet
                old_db_attributes[layer] = db.get_all_column_names(workspace, layer)
                for attr_name in attr_names:
                    assert attr_name not in old_db_attributes[layer], \
                        f"old_db_attributes={old_db_attributes[layer]}, attr_name={attr_name}"
                layer_schema = get_wfs_schema(wfs_url,
                                              typename=f"{workspace}:{layer}",
                                              version=geoserver_wfs.VERSION,
                                              headers=authn_headers)
                old_wfs_properties[layer] = sorted(layer_schema['properties'].keys())
                if style_type == 'qml':
                    assert qgis_wms.get_layer_info(workspace, layer)
                    old_qgis_attributes = qgis_util.get_layer_attribute_names(workspace, layer)
                    assert all(attr_name not in old_qgis_attributes for attr_name in attr_names), \
                        (attr_names, old_qgis_attributes)

            process_client.post_wfst(data_xml, headers=authn_headers, workspace=username)

            new_db_attributes = {}
            new_wfs_properties = {}
            for layer, attr_names in attr_names_list:
                # test that exactly all attr_names were created in the DB table
                new_db_attributes[layer] = db.get_all_column_names(workspace, layer)
                for attr_name in attr_names:
                    assert attr_name in new_db_attributes[layer], \
                        f"new_db_attributes={new_db_attributes[layer]}, attr_name={attr_name}"
                assert set(attr_names).union(set(old_db_attributes[layer])) == set(new_db_attributes[layer])

                # test that exactly all attr_names appeared also in the WFS feature type
                layer_schema = get_wfs_schema(wfs_url,
                                              typename=f"{workspace}:{layer}",
                                              version=geoserver_wfs.VERSION,
                                              headers=authn_headers)
                new_wfs_properties[layer] = sorted(layer_schema['properties'].keys())
                for attr_name in attr_names:
                    assert attr_name in new_wfs_properties[layer], \
                        f"new_wfs_properties={new_wfs_properties[layer]}, attr_name={attr_name}"
                assert set(attr_names).union(set(old_wfs_properties[layer])) == set(new_wfs_properties[layer]), \
                    set(new_wfs_properties[layer]).difference(set(attr_names).union(set(old_wfs_properties[layer])))

                if style_type == 'qml':
                    assert qgis_wms.get_layer_info(workspace, layer)
                    new_qgis_attributes = qgis_util.get_layer_attribute_names(workspace, layer)
                    assert all(attr_name in new_qgis_attributes for attr_name in attr_names), \
                        (attr_names, new_qgis_attributes)
                else:
                    assert not qgis_wms.get_layer_info(workspace, layer)

    attr_names = ['inexisting_attribute_attr', 'inexisting_attribute_attr1a']
    data_xml = data_wfs.get_wfs20_insert_points_new_attr(username, layername, attr_names)
    wfs_post(username, [(layername, attr_names)], data_xml)

    attr_names2 = ['inexisting_attribute_attr2']
    data_xml = data_wfs.get_wfs20_update_points_new_attr(username, layername, attr_names2)
    wfs_post(username, [(layername, attr_names2)], data_xml)

    attr_names3 = ['inexisting_attribute_attr3']
    data_xml = data_wfs.get_wfs20_update_points_new_attr(username, layername, attr_names3, with_attr_namespace=True)
    wfs_post(username, [(layername, attr_names3)], data_xml)

    attr_names4 = ['inexisting_attribute_attr4']
    data_xml = data_wfs.get_wfs20_update_points_new_attr(username, layername, attr_names4, with_filter=True)
    wfs_post(username, [(layername, attr_names4)], data_xml)

    attr_names5 = ['inexisting_attribute_attr5']
    data_xml = data_wfs.get_wfs20_replace_points_new_attr(username, layername, attr_names5)
    wfs_post(username, [(layername, attr_names5)], data_xml)

    attr_names_i1 = ['inexisting_attribute_attr_complex_i1']
    attr_names_i2 = ['inexisting_attribute_attr_complex_i2']
    attr_names_u = ['inexisting_attribute_attr_complex_u']
    attr_names_r = ['inexisting_attribute_attr_complex_r']
    attr_names_complex = [(layername, attr_names_i1 + attr_names_r),
                          (layername2, attr_names_i2 + attr_names_u)]
    data_xml = data_wfs.get_wfs20_complex_new_attr(workspace=username,
                                                   layername1=layername,
                                                   layername2=layername2,
                                                   attr_names_insert1=attr_names_i1,
                                                   attr_names_insert2=attr_names_i2,
                                                   attr_names_update=attr_names_u,
                                                   attr_names_replace=attr_names_r)
    wfs_post(username, attr_names_complex, data_xml)

    attr_names6 = ['inexisting_attribute_attr6']
    data_xml = data_wfs.get_wfs10_insert_points_new_attr(username, layername, attr_names6)
    wfs_post(username, [(layername, attr_names6)], data_xml)

    attr_names7 = ['inexisting_attribute_attr7']
    data_xml = data_wfs.get_wfs11_insert_points_new_attr(username, layername, attr_names7)
    wfs_post(username, [(layername, attr_names7)], data_xml)

    attr_names8 = ['inexisting_attribute_attr8']
    data_xml = data_wfs.get_wfs10_update_points_new(username, layername, attr_names8, with_attr_namespace=True)
    wfs_post(username, [(layername, attr_names8)], data_xml)

    attr_names9 = ['inexisting_attribute_attr9']
    data_xml = data_wfs.get_wfs10_update_points_new(username, layername, attr_names9, with_filter=True)
    wfs_post(username, [(layername, attr_names9)], data_xml)

    attr_names10 = ['inexisting_attribute_attr10']
    data_xml = data_wfs.get_wfs11_insert_polygon_new_attr(username, layername, attr_names10)
    wfs_post(username, [(layername, attr_names10)], data_xml)

    process_client.delete_workspace_layer(username, layername, headers=authn_headers)
    process_client.delete_workspace_layer(username, layername2, headers=authn_headers)
class TestPublicWorkspaceClass:
    publication_name = 'test_public_workspace_variable_publication'
    username = '******'
    workspace_name = 'test_public_workspace_variable_workspace'
    user_authz_headers = process_client.get_authz_headers(username)

    @pytest.fixture(scope="class")
    def setup_test_public_workspace_variable(self):
        username = self.username
        user_authz_headers = self.user_authz_headers
        env_vars = dict(process.AUTHN_SETTINGS)
        process.ensure_layman_function(env_vars)
        process_client.reserve_username(username, headers=user_authz_headers)
        yield

    @staticmethod
    @pytest.mark.usefixtures('liferay_mock', 'setup_test_public_workspace_variable')
    @pytest.mark.parametrize("publish_method, delete_method, workspace_suffix", [
        (process_client.publish_workspace_layer, process_client.delete_workspace_layer, '_layer'),
        (process_client.publish_workspace_map, process_client.delete_workspace_map, '_map'),
    ])
    @pytest.mark.parametrize(
        "create_public_workspace, publish_in_public_workspace, workspace_prefix, publication_name, authz_headers,"
        "user_can_create, anonymous_can_publish, anonymous_can_create,",
        [
            ('EVERYONE', 'EVERYONE', workspace_name + 'ee', publication_name, user_authz_headers, True, True, True),
            (username, username, workspace_name + 'uu', publication_name, user_authz_headers, True, False, False),
            ('', '', workspace_name + 'nn', publication_name, user_authz_headers, False, False, False),
            (username, 'EVERYONE', workspace_name + 'ue', publication_name, user_authz_headers, True, True, False),
        ],
    )
    def test_public_workspace_variable(
            create_public_workspace,
            publish_in_public_workspace,
            workspace_prefix,
            publication_name,
            authz_headers,
            user_can_create,
            anonymous_can_publish,
            anonymous_can_create,
            publish_method,
            delete_method,
            workspace_suffix,
    ):
        def can_not_publish(workspace_name,
                            publication_name,
                            publish_method,
                            authz_headers=None,
                            ):
            with pytest.raises(LaymanError) as exc_info:
                publish_method(workspace_name,
                               publication_name,
                               headers=authz_headers,
                               )
            assert exc_info.value.http_code == 403
            assert exc_info.value.code == 30
            assert exc_info.value.message == 'Unauthorized access'

        workspace_name = workspace_prefix + workspace_suffix
        workspace_name2 = workspace_name + '2'
        layername2 = publication_name + '2'
        env_vars = dict(process.AUTHN_SETTINGS)
        env_vars['GRANT_CREATE_PUBLIC_WORKSPACE'] = create_public_workspace
        env_vars['GRANT_PUBLISH_IN_PUBLIC_WORKSPACE'] = publish_in_public_workspace
        process.ensure_layman_function(env_vars)

        if user_can_create:
            publish_method(workspace_name, publication_name, headers=authz_headers)
            if anonymous_can_publish:
                publish_method(workspace_name, layername2)
                delete_method(workspace_name, layername2)
            delete_method(workspace_name, publication_name, headers=authz_headers)
        else:
            can_not_publish(workspace_name, publication_name, publish_method, authz_headers)

        if anonymous_can_create:
            publish_method(workspace_name2, publication_name)
            delete_method(workspace_name2, publication_name)
        else:
            can_not_publish(workspace_name2, publication_name, publish_method)