class TestSoapClass:
    username = '******'
    publ_name_prefix = 'test_rest_soap_'
    authz_headers = process_client.get_authz_headers(username)
    # Access-rights shortcuts: read owner / write owner
    access_rights_rowo = {'read': f"{username}", 'write': f"{username}"}
    # read everyone / write owner
    access_rights_rewo = {'read': f"{username},EVERYONE", 'write': f"{username}"}
    # read everyone / write everyone
    access_rights_rewe = {'read': f"{username},EVERYONE", 'write': f"{username},EVERYONE"}
    publication_type = None
    publication_name = None

    @pytest.fixture(scope='class')
    def reserve_username(self):
        process_client.ensure_reserved_username(self.username, headers=self.authz_headers)
        yield

    @pytest.fixture()
    def clear_data(self):
        yield
        process_client.delete_workspace_publication(self.publication_type, self.username, self.publication_name,
                                                    headers=self.authz_headers)

    @pytest.mark.flaky(reruns=5, reruns_delay=2)
    @pytest.mark.usefixtures('liferay_mock', 'ensure_layman', 'reserve_username', 'clear_data')
    @pytest.mark.parametrize('params_and_expected_list', [
        # (input access rights, expected public visibility of metadata record)
        [(access_rights_rowo, False), (access_rights_rewe, True)],
        [(access_rights_rewo, True)],
        [(access_rights_rewe, True), (access_rights_rowo, False)],
    ])
    @pytest.mark.parametrize('publ_type', process_client.PUBLICATION_TYPES)
    @pytest.mark.irritating
    def test_soap_authz(self, publ_type, params_and_expected_list):
        username = self.username
        publ_name_prefix = self.publ_name_prefix
        authz_headers = self.authz_headers
        post_method = process_client.publish_workspace_publication
        patch_method = process_client.patch_workspace_publication
        publ_name = f"{publ_name_prefix}{publ_type.split('.')[-1]}"
        self.publication_type = publ_type
        self.publication_name = publ_name

        for idx, (access_rights, anonymous_visibility) in enumerate(params_and_expected_list):
            # First iteration publishes the publication, later iterations only patch it.
            write_method = patch_method if idx > 0 else post_method
            write_method(publ_type, username, publ_name,
                         headers=authz_headers,
                         access_rights=access_rights)

            publ_uuid = process_client.get_workspace_publication(publ_type, username, publ_name,
                                                                 headers=authz_headers)['uuid']
            # Micka metadata record identifier derived from the publication UUID
            publ_muuid = f"m-{publ_uuid}"
            assert micka_util.get_number_of_records(publ_muuid, use_authn=True) > 0
            anon_number_of_records = micka_util.get_number_of_records(publ_muuid, use_authn=False)
            assert bool(anon_number_of_records) == anonymous_visibility, \
                f"muuid={publ_muuid}, access_rights={access_rights}, number_of_records={anon_number_of_records}"
def test_check_user_wms():
    user = '******' + settings.LAYMAN_GS_WMS_WORKSPACE_POSTFIX
    auth_headers = process_client.get_authz_headers(user)
    with pytest.raises(LaymanError) as exc_info:
        process_client.reserve_username(user, headers=auth_headers)
    assert exc_info.value.http_code == 400
    assert exc_info.value.code == 45
    assert exc_info.value.data['workspace_name'] == user
class TestPublicWorkspaceClass:
    publication_name = 'test_public_workspace_variable_publication'
    username = '******'
    workspace_name = 'test_public_workspace_variable_workspace'
    user_authz_headers = process_client.get_authz_headers(username)

    @pytest.fixture(scope="class")
    def setup_test_public_workspace_variable(self):
        username = self.username
        user_authz_headers = self.user_authz_headers
        env_vars = dict(process.AUTHN_SETTINGS)
        process.ensure_layman_function(env_vars)
        process_client.reserve_username(username, headers=user_authz_headers)
        yield

    @staticmethod
    @pytest.mark.usefixtures('liferay_mock', 'setup_test_public_workspace_variable')
    @pytest.mark.parametrize("publish_method, delete_method, workspace_suffix", [
        (process_client.publish_workspace_layer, process_client.delete_workspace_layer, '_layer'),
        (process_client.publish_workspace_map, process_client.delete_workspace_map, '_map'),
    ])
    @pytest.mark.parametrize(
        "create_public_workspace, publish_in_public_workspace, workspace_prefix, publication_name, authz_headers,"
        "user_can_create, anonymous_can_publish, anonymous_can_create,",
        [
            ('EVERYONE', 'EVERYONE', workspace_name + 'ee', publication_name, user_authz_headers, True, True, True),
            (username, username, workspace_name + 'uu', publication_name, user_authz_headers, True, False, False),
            ('', '', workspace_name + 'nn', publication_name, user_authz_headers, False, False, False),
            (username, 'EVERYONE', workspace_name + 'ue', publication_name, user_authz_headers, True, True, False),
        ],
    )
    def test_public_workspace_variable(
            create_public_workspace,
            publish_in_public_workspace,
            workspace_prefix,
            publication_name,
            authz_headers,
            user_can_create,
            anonymous_can_publish,
            anonymous_can_create,
            publish_method,
            delete_method,
            workspace_suffix,
    ):
        def can_not_publish(workspace_name, publication_name, publish_method, authz_headers=None):
            with pytest.raises(LaymanError) as exc_info:
                publish_method(workspace_name, publication_name, headers=authz_headers)
            assert exc_info.value.http_code == 403
            assert exc_info.value.code == 30
            assert exc_info.value.message == 'Unauthorized access'

        workspace_name = workspace_prefix + workspace_suffix
        workspace_name2 = workspace_name + '2'
        layername2 = publication_name + '2'
        env_vars = dict(process.AUTHN_SETTINGS)
        env_vars['GRANT_CREATE_PUBLIC_WORKSPACE'] = create_public_workspace
        env_vars['GRANT_PUBLISH_IN_PUBLIC_WORKSPACE'] = publish_in_public_workspace
        process.ensure_layman_function(env_vars)

        if user_can_create:
            publish_method(workspace_name, publication_name, headers=authz_headers)
            if anonymous_can_publish:
                publish_method(workspace_name, layername2)
                delete_method(workspace_name, layername2)
            delete_method(workspace_name, publication_name, headers=authz_headers)
        else:
            can_not_publish(workspace_name, publication_name, publish_method, authz_headers)

        if anonymous_can_create:
            publish_method(workspace_name2, publication_name)
            delete_method(workspace_name2, publication_name)
        else:
            can_not_publish(workspace_name2, publication_name, publish_method)
def get_publication_header(publication):
    writer = get_publication_writer(publication)
    headers = None if writer == settings.RIGHTS_EVERYONE_ROLE else process_client.get_authz_headers(writer)
    return headers
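# Illustrative usage sketch (not part of the original tests): get_publication_header returns
# None when the publication's writer is the EVERYONE role, so requests replayed with these
# headers are made anonymously; otherwise it returns the writer's authz headers.
# `some_publication` below is a hypothetical Publication tuple (workspace, type, name); the
# snippet is kept commented out so nothing runs at import time.
#
#     headers = get_publication_header(some_publication)
#     process_client.get_workspace_publication(some_publication.type,
#                                              some_publication.workspace,
#                                              some_publication.name,
#                                              headers=headers)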
def test_missing_attribute(style_file, ): username = '******' layername = 'inexisting_attribute_layer' layername2 = 'inexisting_attribute_layer2' authn_headers = get_authz_headers(username) process_client.ensure_reserved_username(username, headers=authn_headers) process_client.publish_workspace_layer( username, layername, file_paths=[ 'tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.geojson', ], style_file=style_file, headers=authn_headers, ) process_client.publish_workspace_layer( username, layername2, file_paths=[ 'tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.geojson', ], style_file=style_file, headers=authn_headers, ) with app.app_context(): style_type = layman_util.get_publication_info( username, process_client.LAYER_TYPE, layername, context={ 'keys': ['style_type'], })['style_type'] def wfs_post(workspace, attr_names_list, data_xml): with app.app_context(): wfs_url = f"http://{settings.LAYMAN_SERVER_NAME}/geoserver/{workspace}/wfs" old_db_attributes = {} old_wfs_properties = {} for layer, attr_names in attr_names_list: # test that all attr_names are not yet presented in DB table old_db_attributes[layer] = db.get_all_column_names( workspace, layer) for attr_name in attr_names: assert attr_name not in old_db_attributes[ layer], f"old_db_attributes={old_db_attributes[layer]}, attr_name={attr_name}" layer_schema = get_wfs_schema(wfs_url, typename=f"{workspace}:{layer}", version=geoserver_wfs.VERSION, headers=authn_headers) old_wfs_properties[layer] = sorted( layer_schema['properties'].keys()) if style_type == 'qml': assert qgis_wms.get_layer_info(workspace, layer) old_qgis_attributes = qgis_util.get_layer_attribute_names( workspace, layer) assert all( attr_name not in old_qgis_attributes for attr_name in attr_names), (attr_names, old_qgis_attributes) process_client.post_wfst(data_xml, headers=authn_headers, workspace=username) new_db_attributes = {} new_wfs_properties = {} for layer, attr_names in attr_names_list: # test that exactly all attr_names were created in DB table new_db_attributes[layer] = db.get_all_column_names( workspace, layer) for attr_name in attr_names: assert attr_name in new_db_attributes[ layer], f"new_db_attributes={new_db_attributes[layer]}, attr_name={attr_name}" assert set(attr_names).union(set( old_db_attributes[layer])) == set(new_db_attributes[layer]) # test that exactly all attr_names were distinguished also in WFS feature type layer_schema = get_wfs_schema(wfs_url, typename=f"{workspace}:{layer}", version=geoserver_wfs.VERSION, headers=authn_headers) new_wfs_properties[layer] = sorted( layer_schema['properties'].keys()) for attr_name in attr_names: assert attr_name in new_wfs_properties[ layer], f"new_wfs_properties={new_wfs_properties[layer]}, attr_name={attr_name}" assert set(attr_names).union(set(old_wfs_properties[layer])) == set(new_wfs_properties[layer]),\ set(new_wfs_properties[layer]).difference(set(attr_names).union(set(old_wfs_properties[layer]))) if style_type == 'qml': assert qgis_wms.get_layer_info(workspace, layer) new_qgis_attributes = qgis_util.get_layer_attribute_names( workspace, layer) assert all( attr_name in new_qgis_attributes for attr_name in attr_names), (attr_names, new_qgis_attributes) else: assert not qgis_wms.get_layer_info(workspace, layer) attr_names = ['inexisting_attribute_attr', 'inexisting_attribute_attr1a'] data_xml = data_wfs.get_wfs20_insert_points_new_attr( username, layername, attr_names) wfs_post(username, [(layername, attr_names)], data_xml) attr_names2 = ['inexisting_attribute_attr2'] data_xml = 
data_wfs.get_wfs20_update_points_new_attr( username, layername, attr_names2) wfs_post(username, [(layername, attr_names2)], data_xml) attr_names3 = ['inexisting_attribute_attr3'] data_xml = data_wfs.get_wfs20_update_points_new_attr( username, layername, attr_names3, with_attr_namespace=True) wfs_post(username, [(layername, attr_names3)], data_xml) attr_names4 = ['inexisting_attribute_attr4'] data_xml = data_wfs.get_wfs20_update_points_new_attr(username, layername, attr_names4, with_filter=True) wfs_post(username, [(layername, attr_names4)], data_xml) attr_names5 = ['inexisting_attribute_attr5'] data_xml = data_wfs.get_wfs20_replace_points_new_attr( username, layername, attr_names5) wfs_post(username, [(layername, attr_names5)], data_xml) attr_names_i1 = ['inexisting_attribute_attr_complex_i1'] attr_names_i2 = ['inexisting_attribute_attr_complex_i2'] attr_names_u = ['inexisting_attribute_attr_complex_u'] attr_names_r = ['inexisting_attribute_attr_complex_r'] attr_names_complex = [(layername, attr_names_i1 + attr_names_r), (layername2, attr_names_i2 + attr_names_u)] data_xml = data_wfs.get_wfs20_complex_new_attr( workspace=username, layername1=layername, layername2=layername2, attr_names_insert1=attr_names_i1, attr_names_insert2=attr_names_i2, attr_names_update=attr_names_u, attr_names_replace=attr_names_r) wfs_post(username, attr_names_complex, data_xml) attr_names6 = ['inexisting_attribute_attr6'] data_xml = data_wfs.get_wfs10_insert_points_new_attr( username, layername, attr_names6) wfs_post(username, [(layername, attr_names6)], data_xml) attr_names7 = ['inexisting_attribute_attr7'] data_xml = data_wfs.get_wfs11_insert_points_new_attr( username, layername, attr_names7) wfs_post(username, [(layername, attr_names7)], data_xml) attr_names8 = ['inexisting_attribute_attr8'] data_xml = data_wfs.get_wfs10_update_points_new(username, layername, attr_names8, with_attr_namespace=True) wfs_post(username, [(layername, attr_names8)], data_xml) attr_names9 = ['inexisting_attribute_attr9'] data_xml = data_wfs.get_wfs10_update_points_new(username, layername, attr_names9, with_filter=True) wfs_post(username, [(layername, attr_names9)], data_xml) attr_names10 = ['inexisting_attribute_attr10'] data_xml = data_wfs.get_wfs11_insert_polygon_new_attr( username, layername, attr_names10) wfs_post(username, [(layername, attr_names10)], data_xml) process_client.delete_workspace_layer(username, layername, headers=authn_headers) process_client.delete_workspace_layer(username, layername2, headers=authn_headers)
def test_missing_attribute_authz():
    username = '******'
    layername1 = 'testmissingattr_authz_layer'
    username2 = 'testmissingattr_authz2'
    authn_headers1 = get_authz_headers(username)
    authn_headers2 = get_authz_headers(username2)

    def do_test(wfs_query, attribute_names):
        # Test that an unauthorized user does not create the new attributes
        with app.app_context():
            old_db_attributes = db.get_all_column_names(username, layername1)
        for attr_name in attribute_names:
            assert attr_name not in old_db_attributes, f"old_db_attributes={old_db_attributes}, attr_name={attr_name}"
        with pytest.raises(GS_Error) as exc:
            process_client.post_wfst(wfs_query, headers=authn_headers2, workspace=username)
        assert exc.value.data['status_code'] == 400
        with app.app_context():
            new_db_attributes = db.get_all_column_names(username, layername1)
        for attr_name in attribute_names:
            assert attr_name not in new_db_attributes, f"new_db_attributes={new_db_attributes}, attr_name={attr_name}"

        # Test that an authorized user does create the new attributes
        process_client.post_wfst(wfs_query, headers=authn_headers1, workspace=username)
        with app.app_context():
            new_db_attributes = db.get_all_column_names(username, layername1)
        for attr_name in attribute_names:
            assert attr_name in new_db_attributes, f"new_db_attributes={new_db_attributes}, attr_name={attr_name}"

    process_client.reserve_username(username, headers=authn_headers1)
    process_client.publish_workspace_layer(
        username,
        layername1,
        file_paths=['tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.geojson'],
        headers=authn_headers1,
    )

    # Test that user2 is not able to write to user1's layer
    process_client.reserve_username(username2, headers=authn_headers2)

    # INSERT
    attr_names = ['inexisting_attribute_auth1', 'inexisting_attribute_auth2']
    data_xml = data_wfs.get_wfs20_insert_points_new_attr(username, layername1, attr_names)
    do_test(data_xml, attr_names)

    # UPDATE
    attr_names = ['inexisting_attribute_auth3', 'inexisting_attribute_auth4']
    data_xml = data_wfs.get_wfs20_update_points_new_attr(username, layername1, attr_names)
    do_test(data_xml, attr_names)

    process_client.delete_workspace_layer(username, layername1, headers=authn_headers1)
def test_patch_current_user_without_username():
    username1 = 'test_patch_current_user_user1'
    username2 = 'test_patch_current_user_user2'
    user1_authn_headers = process_client.get_authz_headers(username1)
    user2_authn_headers = process_client.get_authz_headers(username2)

    # reserve username
    with app.app_context():
        rest_path = url_for('rest_current_user.patch', adjust_username='******')
        response = requests.patch(rest_path, headers=user1_authn_headers)
        assert response.status_code == 200, response.text

    # check if it was reserved
    with app.app_context():
        rest_path = url_for('rest_current_user.get')
        response = requests.get(rest_path, headers=user1_authn_headers)
        assert response.status_code == 200, response.text
        resp_json = response.json()
        assert resp_json['authenticated'] is True
        assert 'username' in resp_json
        exp_username = '******'
        exp_sub = '20142'
        assert resp_json['username'] == exp_username
        assert resp_json['claims']['sub'] == exp_sub
        iss_id = liferay.__name__
        from layman.authn.redis import _get_issid_sub_2_username_key
        rds_key = _get_issid_sub_2_username_key(iss_id, exp_sub)
        rds = settings.LAYMAN_REDIS
        assert rds.get(rds_key) == exp_username
        from layman.authn.filesystem import get_authn_info
        authn_info = get_authn_info(exp_username)
        assert authn_info['iss_id'] == iss_id
        assert authn_info['sub'] == exp_sub

    # re-reserve username
    with app.app_context():
        rest_path = url_for('rest_current_user.patch', adjust_username='******')
        response = requests.patch(rest_path, headers=user1_authn_headers)
        assert response.status_code == 400, response.text
        r_json = response.json()
        assert r_json['code'] == 34
        assert r_json['detail']['username'] == exp_username

    # reserve same username by other user
    with app.app_context():
        rest_path = url_for('rest_current_user.patch')
        response = requests.patch(rest_path, data={
            'username': exp_username,
        }, headers=user2_authn_headers)
        assert response.status_code == 409, response.text
        r_json = response.json()
        assert r_json['code'] == 35
        assert 'detail' not in r_json

    # reserve other username by other user
    with app.app_context():
        rest_path = url_for('rest_current_user.patch')
        exp_username2 = 'test_patch_current_user_user2'
        exp_sub2 = '20143'
        response = requests.patch(rest_path, data={
            'username': exp_username2,
        }, headers=user2_authn_headers)
        assert response.status_code == 200, response.text
        resp_json = response.json()
        assert 'username' in resp_json
        assert resp_json['username'] == exp_username2
        assert resp_json['claims']['sub'] == exp_sub2

    # test map metadata
    workspace = exp_username
    exp_email = 'test_patch_current_user_user1_email' + '@liferay.com'
    exp_name = 'FirstName MiddleName LastName'
    mapname = 'map1'
    process_client.publish_workspace_map(workspace, mapname, headers=user1_authn_headers)
    with app.app_context():
        rest_path = url_for('rest_workspace_map_file.get', workspace=workspace, mapname=mapname)
        response = requests.get(rest_path, headers=user1_authn_headers)
        assert response.status_code == 200, response.text
        resp_json = response.json()
        assert resp_json['name'] == mapname
        user_info = resp_json['user']
        assert {'email', 'name'} == set(user_info.keys())
        assert user_info['name'] == exp_name
        assert user_info['email'] == exp_email

    process_client.delete_workspace_map(workspace, mapname, headers=user1_authn_headers)
def generate(workspace): username = workspace + '_user' username_2 = workspace + '_user_2' return { Publication(workspace, consts.LAYER_TYPE, 'layer_wfs_proxy'): [ { consts.KEY_ACTION: { consts.KEY_CALL: Action(process_client.publish_workspace_publication, layers.SMALL_LAYER.definition), consts.KEY_RESPONSE_ASSERTS: [ Action(processing.response.valid_post, dict()), ], }, consts.KEY_FINAL_ASSERTS: [ *publication.IS_LAYER_COMPLETE_AND_CONSISTENT, Action(publication.internal.correct_values_in_detail, layers.SMALL_LAYER.info_values), Action(publication.internal.thumbnail_equals, { 'exp_thumbnail': layers.SMALL_LAYER.thumbnail, }), ], }, wfst_insert_action(workspace=workspace), wfst_insert_action(), ], Publication(workspace, consts.LAYER_TYPE, 'layer_wfs_proxy_authz'): [ { consts.KEY_ACTION: { consts.KEY_CALL: Action( process_client.ensure_reserved_username, { 'username': username, 'headers': process_client.get_authz_headers( username=username), }), }, }, { consts.KEY_ACTION: { consts.KEY_CALL: Action( process_client.ensure_reserved_username, { 'username': username_2, 'headers': process_client.get_authz_headers( username=username_2), }), }, }, { consts.KEY_ACTION: { consts.KEY_CALL: Action( process_client.publish_workspace_publication, { **layers.SMALL_LAYER.definition, 'headers': process_client.get_authz_headers( username=username), }), consts.KEY_RESPONSE_ASSERTS: [ Action(processing.response.valid_post, dict()), ], }, consts.KEY_FINAL_ASSERTS: [ *publication.IS_LAYER_COMPLETE_AND_CONSISTENT, Action( publication.internal.correct_values_in_detail, { **layers.SMALL_LAYER.info_values, 'exp_publication_detail': { **layers.SMALL_LAYER.info_values.get( 'exp_publication_detail', dict()), 'access_rights': { 'read': [username], 'write': [username] }, } }), Action(publication.internal.thumbnail_equals, { 'exp_thumbnail': layers.SMALL_LAYER.thumbnail, }), ], }, wfst_insert_action( workspace=workspace, headers=process_client.get_authz_headers(username=username)), wfst_insert_action(headers=process_client.get_authz_headers( username=username)), wfst_insert_action( workspace=workspace, headers=process_client.get_authz_headers(username=username_2), wrong_input=True, ), wfst_insert_action( headers=process_client.get_authz_headers(username=username_2), wrong_input=True, ), wfst_insert_action( workspace=workspace, wrong_input=True, ), # Test fraud header, that it is deleted by Layman Proxy wfst_insert_action( headers={ settings.LAYMAN_GS_AUTHN_HTTP_HEADER_ATTRIBUTE: username, }, wrong_input=True, ), ], }
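# Note on structure (descriptive comment, not part of the original code): generate() appears to
# return the scenario mapping consumed by the dynamic test machinery. Each Publication key maps
# to a list of steps; a step's KEY_ACTION/KEY_CALL is the request to perform, KEY_RESPONSE_ASSERTS
# are checked against the immediate response, and KEY_FINAL_ASSERTS once the publication settles.
# The wfst_insert_action(..., wrong_input=True) steps exercise WFS-T requests that are expected to
# fail: a different user, an anonymous request, or a forged LAYMAN_GS_AUTHN_HTTP_HEADER_ATTRIBUTE
# header that the Layman proxy is expected to strip.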
class TestGetPublications: workspace1 = 'test_get_publications_workspace1' workspace2 = 'test_get_publications_workspace2' authn_headers_user2 = process_client.get_authz_headers(workspace2) publication_1e_2_4x6_6 = 'test_get_publications_publication1e_2_4x6_6' publication_1e_3_3x3_3 = 'test_get_publications_publication1e_3_3x3_3' publication_1e_3_7x5_9 = 'test_get_publications_publication1e_3_7x5_9' publication_2e_3_3x5_5 = 'test_get_publications_publication2e_3_3x5_5' publication_2o_2_2x4_4 = 'test_get_publications_publication2o_2_2x4_4' publications = [ (workspace1, publication_1e_2_4x6_6, { 'title': 'Příliš jiný žluťoučký kůň úpěl ďábelské ódy (publication)', 'bbox': (2000, 4000, 6000, 6000), 'crs': crs_def.EPSG_3857, }), (workspace1, publication_1e_3_3x3_3, { 'title': 'Jednobodová publikace (publication)', 'bbox': (3000, 3000, 3000, 3000), 'crs': crs_def.EPSG_3857, }), (workspace1, publication_1e_3_7x5_9, { 'title': 'Public publication in public workspace (publication)', 'bbox': (3000, 7000, 5000, 9000), 'crs': crs_def.EPSG_3857, }), ( workspace2, publication_2e_3_3x5_5, { 'title': '\'Too yellow horse\' means "Příliš žluťoučký kůň". (publication)', 'bbox': (3000, 3000, 5000, 5000), 'crs': crs_def.EPSG_3857, 'access_rights': { 'read': {settings.RIGHTS_EVERYONE_ROLE}, 'write': {settings.RIGHTS_EVERYONE_ROLE} }, 'actor': workspace2, }, ), ( workspace2, publication_2o_2_2x4_4, { 'title': 'Příliš jiný žluťoučký kůň úpěl ďábelské ódy (publication)', 'actor': workspace2, 'access_rights': { 'read': {workspace2}, 'write': {workspace2} }, 'bbox': (2000, 2000, 4000, 4000), 'crs': crs_def.EPSG_3857, }, ), ] @pytest.fixture(scope="class") def provide_data(self): process_client.ensure_reserved_username(self.workspace2, self.authn_headers_user2) prime_db_schema_client.ensure_workspace(self.workspace1) for publ_type in process_client.PUBLICATION_TYPES: for workspace, publ_name, publ_params in self.publications: prime_db_schema_client.post_workspace_publication( publ_type, workspace, publ_name, **publ_params) yield prime_db_schema_client.clear_workspaces( [self.workspace1, self.workspace2]) @staticmethod def assert_response(response, expected_publications, expected_headers): infos = response.json() info_publications = [(info['workspace'], info['name']) for info in infos] assert set(expected_publications) == set(info_publications) assert expected_publications == info_publications for header, value in expected_headers.items(): assert header in response.headers, response.headers assert value == response.headers[header], response.headers @staticmethod @pytest.mark.parametrize( 'headers, query_params, expected_publications, expected_headers', [ ( authn_headers_user2, {}, [ (workspace1, publication_1e_2_4x6_6), (workspace1, publication_1e_3_3x3_3), (workspace1, publication_1e_3_7x5_9), (workspace2, publication_2e_3_3x5_5), (workspace2, publication_2o_2_2x4_4), ], { 'X-Total-Count': '5', 'Content-Range': 'items 1-5/5' }, ), ( None, {}, [ (workspace1, publication_1e_2_4x6_6), (workspace1, publication_1e_3_3x3_3), (workspace1, publication_1e_3_7x5_9), (workspace2, publication_2e_3_3x5_5), ], { 'X-Total-Count': '4', 'Content-Range': 'items 1-4/4' }, ), ( authn_headers_user2, { 'full_text_filter': 'kůň' }, [ (workspace1, publication_1e_2_4x6_6), (workspace2, publication_2e_3_3x5_5), (workspace2, publication_2o_2_2x4_4), ], { 'X-Total-Count': '3', 'Content-Range': 'items 1-3/3' }, ), ( None, { 'full_text_filter': 'The Fačřš_tÚŮTŤsa " a34432[;] ;.\\Ra\'\'ts' }, list(), { 'X-Total-Count': '0', 'Content-Range': 
'items 0-0/0' }, ), ( authn_headers_user2, { 'full_text_filter': '\'Too yellow horse\' means "Příliš žluťoučký kůň".' }, [ (workspace2, publication_2e_3_3x5_5), (workspace1, publication_1e_2_4x6_6), (workspace2, publication_2o_2_2x4_4), ], { 'X-Total-Count': '3', 'Content-Range': 'items 1-3/3' }, ), ( authn_headers_user2, { 'full_text_filter': 'mean' }, [ (workspace2, publication_2e_3_3x5_5), ], { 'X-Total-Count': '1', 'Content-Range': 'items 1-1/1' }, ), ( authn_headers_user2, { 'full_text_filter': 'jiný další kůň' }, [ (workspace1, publication_1e_2_4x6_6), (workspace2, publication_2o_2_2x4_4), (workspace2, publication_2e_3_3x5_5), ], { 'X-Total-Count': '3', 'Content-Range': 'items 1-3/3' }, ), ( authn_headers_user2, { 'full_text_filter': 'jiný další kůň', 'order_by': 'full_text' }, [ (workspace1, publication_1e_2_4x6_6), (workspace2, publication_2o_2_2x4_4), (workspace2, publication_2e_3_3x5_5), ], { 'X-Total-Count': '3', 'Content-Range': 'items 1-3/3' }, ), ( authn_headers_user2, { 'full_text_filter': 'workspace publication' }, [ (workspace1, publication_1e_3_7x5_9), (workspace1, publication_1e_2_4x6_6), (workspace1, publication_1e_3_3x3_3), (workspace2, publication_2e_3_3x5_5), (workspace2, publication_2o_2_2x4_4), ], { 'X-Total-Count': '5', 'Content-Range': 'items 1-5/5' }, ), ( authn_headers_user2, { 'full_text_filter': 'TOUCK' }, [ (workspace1, publication_1e_2_4x6_6), (workspace2, publication_2e_3_3x5_5), (workspace2, publication_2o_2_2x4_4), ], { 'X-Total-Count': '3', 'Content-Range': 'items 1-3/3' }, ), ( authn_headers_user2, { 'order_by': 'title' }, [ (workspace1, publication_1e_3_3x3_3), (workspace1, publication_1e_2_4x6_6), (workspace2, publication_2o_2_2x4_4), (workspace1, publication_1e_3_7x5_9), (workspace2, publication_2e_3_3x5_5), ], { 'X-Total-Count': '5', 'Content-Range': 'items 1-5/5' }, ), ( authn_headers_user2, { 'order_by': 'last_change' }, [ (workspace2, publication_2o_2_2x4_4), (workspace2, publication_2e_3_3x5_5), (workspace1, publication_1e_3_7x5_9), (workspace1, publication_1e_3_3x3_3), (workspace1, publication_1e_2_4x6_6), ], { 'X-Total-Count': '5', 'Content-Range': 'items 1-5/5' }, ), ( authn_headers_user2, { 'order_by_list': ['bbox'], 'ordering_bbox': ','.join(str(c) for c in (2999, 2999, 5001, 5001)) }, [ (workspace2, publication_2e_3_3x5_5), (workspace1, publication_1e_2_4x6_6), (workspace2, publication_2o_2_2x4_4), (workspace1, publication_1e_3_3x3_3), (workspace1, publication_1e_3_7x5_9), ], { 'X-Total-Count': '5', 'Content-Range': 'items 1-5/5' }, ), ( authn_headers_user2, { 'order_by_list': ['bbox'], 'ordering_bbox': ','.join( str(c) for c in ( 0.0269405, 0.0269405, 0.0449247, 0.0449247)), # EPSG:3857 (2999, 2999, 5001, 5001) 'ordering_bbox_crs': 'EPSG:4326', }, [ (workspace2, publication_2e_3_3x5_5), (workspace1, publication_1e_2_4x6_6), (workspace2, publication_2o_2_2x4_4), (workspace1, publication_1e_3_3x3_3), (workspace1, publication_1e_3_7x5_9), ], { 'X-Total-Count': '5', 'Content-Range': 'items 1-5/5' }, ), ( authn_headers_user2, { 'order_by_list': ['bbox'], 'ordering_bbox': ','.join(str(c) for c in (3001, 3001, 3001, 3001)) }, [ (workspace2, publication_2e_3_3x5_5), (workspace2, publication_2o_2_2x4_4), (workspace1, publication_1e_2_4x6_6), (workspace1, publication_1e_3_3x3_3), (workspace1, publication_1e_3_7x5_9), ], { 'X-Total-Count': '5', 'Content-Range': 'items 1-5/5' }, ), ( authn_headers_user2, { 'order_by_list': ['bbox'], 'ordering_bbox': ','.join(str(c) for c in (3001, 3001, 3001, 3001)), 'ordering_bbox_crs': 'EPSG:3857', }, [ 
(workspace2, publication_2e_3_3x5_5), (workspace2, publication_2o_2_2x4_4), (workspace1, publication_1e_2_4x6_6), (workspace1, publication_1e_3_3x3_3), (workspace1, publication_1e_3_7x5_9), ], { 'X-Total-Count': '5', 'Content-Range': 'items 1-5/5' }, ), ( authn_headers_user2, { 'bbox_filter': ','.join(str(c) for c in (3001, 3001, 4999, 4999)) }, [ (workspace2, publication_2e_3_3x5_5), (workspace1, publication_1e_2_4x6_6), (workspace2, publication_2o_2_2x4_4), ], { 'X-Total-Count': '3', 'Content-Range': 'items 1-3/3' }, ), ( authn_headers_user2, { 'bbox_filter': ','.join(str(c) for c in (4001, 4001, 4001, 4001)) }, [ (workspace2, publication_2e_3_3x5_5), (workspace1, publication_1e_2_4x6_6), ], { 'X-Total-Count': '2', 'Content-Range': 'items 1-2/2' }, ), ( authn_headers_user2, { 'limit': 2 }, [ (workspace1, publication_1e_2_4x6_6), (workspace1, publication_1e_3_3x3_3), # (workspace1, publication_1e_3_7x5_9), # (workspace2, publication_2e_3_3x5_5), # (workspace2, publication_2o_2_2x4_4), ], { 'X-Total-Count': '5', 'Content-Range': 'items 1-2/5' }, ), ( authn_headers_user2, { 'offset': 1 }, [ # (workspace1, publication_1e_2_4x6_6), (workspace1, publication_1e_3_3x3_3), (workspace1, publication_1e_3_7x5_9), (workspace2, publication_2e_3_3x5_5), (workspace2, publication_2o_2_2x4_4), ], { 'X-Total-Count': '5', 'Content-Range': 'items 2-5/5' }, ), ( authn_headers_user2, { 'limit': 1, 'offset': 1 }, [ # (workspace1, publication_1e_2_4x6_6), (workspace1, publication_1e_3_3x3_3), # (workspace1, publication_1e_3_7x5_9), # (workspace2, publication_2e_3_3x5_5), # (workspace2, publication_2o_2_2x4_4), ], { 'X-Total-Count': '5', 'Content-Range': 'items 2-2/5' }, ), ( authn_headers_user2, { 'limit': 0, 'offset': 0 }, [], { 'X-Total-Count': '5', 'Content-Range': 'items 0-0/5' }, ), ( authn_headers_user2, { 'limit': 6, 'offset': 3 }, [ # (workspace1, publication_1e_2_4x6_6), # (workspace1, publication_1e_3_3x3_3), # (workspace1, publication_1e_3_7x5_9), (workspace2, publication_2e_3_3x5_5), (workspace2, publication_2o_2_2x4_4), ], { 'X-Total-Count': '5', 'Content-Range': 'items 4-5/5' }, ), ( authn_headers_user2, { 'order_by': 'title', 'full_text_filter': 'ódy', 'bbox_filter': ','.join(str(c) for c in (3001, 3001, 4999, 4999)), 'limit': 1, }, [ (workspace1, publication_1e_2_4x6_6), # (workspace2, publication_2o_2_2x4_4), limit ], { 'X-Total-Count': '2', 'Content-Range': 'items 1-1/2' }, ), ( authn_headers_user2, { 'order_by': 'title', 'full_text_filter': 'ódy', 'bbox_filter': ','.join(str(c) for c in (3001, 3001, 4999, 4999)), 'offset': 1, }, [ # (workspace1, publication_1e_2_4x6_6), offset (workspace2, publication_2o_2_2x4_4), ], { 'X-Total-Count': '2', 'Content-Range': 'items 2-2/2' }, ), ( authn_headers_user2, { 'order_by': 'bbox', 'full_text_filter': 'prilis', 'bbox_filter': ','.join(str(c) for c in (2000, 2000, 6000, 6000)), 'offset': 1, 'limit': 1, }, [ # (workspace1, publication_1e_2_4x6_6), (workspace2, publication_2e_3_3x5_5), # (workspace2, publication_2o_2_2x4_4), ], { 'X-Total-Count': '3', 'Content-Range': 'items 2-2/3' }, ), ( authn_headers_user2, { 'full_text_filter': 'prilis yellow', 'bbox_filter': ','.join(str(c) for c in (2000, 2000, 6000, 6000)), 'bbox_filter_crs': crs_def.EPSG_3857, 'offset': 1, 'limit': 1, }, [ # (workspace2, publication_2e_3_3x5_5), (workspace1, publication_1e_2_4x6_6), # (workspace2, publication_2o_2_2x4_4), ], { 'X-Total-Count': '3', 'Content-Range': 'items 2-2/3' }, ), ( authn_headers_user2, { 'order_by': 'title', 'full_text_filter': 'prilis', 'bbox_filter': 
','.join(str(c) for c in (2000, 2000, 6000, 6000)), 'offset': 1, 'limit': 1, }, [ # (workspace1, publication_1e_2_4x6_6), (workspace2, publication_2o_2_2x4_4), # (workspace2, publication_2e_3_3x5_5), ], { 'X-Total-Count': '3', 'Content-Range': 'items 2-2/3' }, ), ( authn_headers_user2, { 'order_by': 'title', 'full_text_filter': 'prilis', 'bbox_filter': ','.join( str(c) for c in ( 0.0179663, 0.0179663, 0.0538989, 0.0538989)), # EPSG:3857 (2000, 2000, 6000, 6000) 'bbox_filter_crs': crs_def.EPSG_4326, 'offset': 1, 'limit': 1, }, [ # (workspace1, publication_1e_2_4x6_6), (workspace2, publication_2o_2_2x4_4), # (workspace2, publication_2e_3_3x5_5), ], { 'X-Total-Count': '3', 'Content-Range': 'items 2-2/3' }, ), ( authn_headers_user2, { 'order_by': 'bbox', 'bbox_filter': ','.join( str(c) for c in ( 0.0179663, 0.0179663, 0.0538989, 0.0538989)), # EPSG:3857 (2000, 2000, 6000, 6000) 'bbox_filter_crs': crs_def.EPSG_4326, 'ordering_bbox': ','.join(str(c) for c in (2999, 2999, 5001, 5001)), 'ordering_bbox_crs': crs_def.EPSG_3857, }, [ (workspace2, publication_2e_3_3x5_5), (workspace1, publication_1e_2_4x6_6), (workspace2, publication_2o_2_2x4_4), (workspace1, publication_1e_3_3x3_3), ], { 'X-Total-Count': '4', 'Content-Range': 'items 1-4/4' }, ), ( authn_headers_user2, { 'order_by': 'bbox', 'bbox_filter': ','.join( str(c) for c in ( 0.0179663, 0.0179663, 0.0538989, 0.0538989)), # EPSG:3857 (2000, 2000, 6000, 6000) 'bbox_filter_crs': crs_def.EPSG_4326, 'ordering_bbox': ','.join( str(c) for c in ( 0.0269405, 0.0269405, 0.0449247, 0.0449247)), # EPSG:3857 (2999, 2999, 5001, 5001) }, [ (workspace2, publication_2e_3_3x5_5), (workspace1, publication_1e_2_4x6_6), (workspace2, publication_2o_2_2x4_4), (workspace1, publication_1e_3_3x3_3), ], { 'X-Total-Count': '4', 'Content-Range': 'items 1-4/4' }, ), ( authn_headers_user2, { 'order_by': 'last_change', 'full_text_filter': 'prilis', 'bbox_filter': ','.join(str(c) for c in (2000, 2000, 6000, 6000)), 'offset': 1, 'limit': 1, }, [ # (workspace2, publication_2o_2_2x4_4), (workspace2, publication_2e_3_3x5_5), # (workspace1, publication_1e_2_4x6_6), ], { 'X-Total-Count': '3', 'Content-Range': 'items 2-2/3' }, ), ]) @pytest.mark.parametrize('publication_type', process_client.PUBLICATION_TYPES) @pytest.mark.usefixtures('liferay_mock', 'ensure_layman', 'provide_data') def test_get_publications( publication_type, headers, query_params, expected_publications, expected_headers, ): response = process_client.get_publications_response( publication_type, headers=headers, query_params=query_params) TestGetPublications.assert_response(response, expected_publications, expected_headers) @staticmethod @pytest.mark.parametrize( 'workspace, headers, query_params, expected_publications, expected_headers', [ ( workspace1, authn_headers_user2, {}, [ (workspace1, publication_1e_2_4x6_6), (workspace1, publication_1e_3_3x3_3), (workspace1, publication_1e_3_7x5_9), ], { 'X-Total-Count': '3', 'Content-Range': 'items 1-3/3', }, ), ( workspace1, None, {}, [ (workspace1, publication_1e_2_4x6_6), (workspace1, publication_1e_3_3x3_3), (workspace1, publication_1e_3_7x5_9), ], { 'X-Total-Count': '3', 'Content-Range': 'items 1-3/3', }, ), ( workspace1, None, { 'full_text_filter': 'kůň' }, [ (workspace1, publication_1e_2_4x6_6), ], { 'X-Total-Count': '1', 'Content-Range': 'items 1-1/1', }, ), ( workspace1, None, { 'full_text_filter': 'The Fačřš_tÚŮTŤsa " a34432[;] ;.\\Ra\'\'ts' }, [], { 'X-Total-Count': '0', 'Content-Range': 'items 0-0/0', }, ), ( workspace1, None, { 'full_text_filter': '\'Too 
yellow horse\' means "Příliš žluťoučký kůň".' }, [ (workspace1, publication_1e_2_4x6_6), ], { 'X-Total-Count': '1', 'Content-Range': 'items 1-1/1', }, ), ( workspace1, None, { 'full_text_filter': 'mean' }, [], { 'X-Total-Count': '0', 'Content-Range': 'items 0-0/0', }, ), ( workspace1, None, { 'full_text_filter': 'jiný další kůň' }, [ (workspace1, publication_1e_2_4x6_6), ], { 'X-Total-Count': '1', 'Content-Range': 'items 1-1/1', }, ), ( workspace1, None, { 'full_text_filter': 'jiný další kůň', 'order_by': 'full_text' }, [ (workspace1, publication_1e_2_4x6_6), ], { 'X-Total-Count': '1', 'Content-Range': 'items 1-1/1', }, ), ( workspace1, None, { 'full_text_filter': 'workspace publication' }, [ (workspace1, publication_1e_3_7x5_9), (workspace1, publication_1e_2_4x6_6), (workspace1, publication_1e_3_3x3_3), ], { 'X-Total-Count': '3', 'Content-Range': 'items 1-3/3', }, ), ( workspace1, None, { 'order_by': 'title' }, [ (workspace1, publication_1e_3_3x3_3), (workspace1, publication_1e_2_4x6_6), (workspace1, publication_1e_3_7x5_9), ], { 'X-Total-Count': '3', 'Content-Range': 'items 1-3/3', }, ), ( workspace1, None, { 'order_by': 'last_change' }, [ (workspace1, publication_1e_3_7x5_9), (workspace1, publication_1e_3_3x3_3), (workspace1, publication_1e_2_4x6_6), ], { 'X-Total-Count': '3', 'Content-Range': 'items 1-3/3', }, ), ( workspace1, None, { 'order_by_list': ['bbox'], 'ordering_bbox': ','.join( str(c) for c in ( 0.0269405, 0.0269405, 0.0449247, 0.0449247)), # EPSG:3857 (2999, 2999, 5001, 5001) 'ordering_bbox_crs': 'EPSG:4326' }, [ (workspace1, publication_1e_2_4x6_6), (workspace1, publication_1e_3_3x3_3), (workspace1, publication_1e_3_7x5_9), ], { 'X-Total-Count': '3', 'Content-Range': 'items 1-3/3', }, ), ( workspace1, None, { 'order_by_list': ['bbox'], 'ordering_bbox': ','.join(str(c) for c in (3001, 3001, 3001, 3001)), 'ordering_bbox_crs': 'EPSG:3857' }, [ (workspace1, publication_1e_2_4x6_6), (workspace1, publication_1e_3_3x3_3), (workspace1, publication_1e_3_7x5_9), ], { 'X-Total-Count': '3', 'Content-Range': 'items 1-3/3', }, ), ( workspace1, None, { 'bbox_filter': ','.join(str(c) for c in (3001, 3001, 4999, 4999)) }, [ (workspace1, publication_1e_2_4x6_6), ], { 'X-Total-Count': '1', 'Content-Range': 'items 1-1/1', }, ), ( workspace1, None, { 'bbox_filter': ','.join(str(c) for c in (4001, 4001, 4001, 4001)) }, [ (workspace1, publication_1e_2_4x6_6), ], { 'X-Total-Count': '1', 'Content-Range': 'items 1-1/1', }, ), ( workspace1, None, { 'limit': 1 }, [ (workspace1, publication_1e_2_4x6_6), # (workspace1, publication_1e_3_3x3_3), # (workspace1, publication_1e_3_7x5_9), ], { 'X-Total-Count': '3', 'Content-Range': 'items 1-1/3', }, ), ( workspace1, None, { 'offset': 1 }, [ # (workspace1, publication_1e_2_4x6_6), (workspace1, publication_1e_3_3x3_3), (workspace1, publication_1e_3_7x5_9), ], { 'X-Total-Count': '3', 'Content-Range': 'items 2-3/3', }, ), ( workspace1, None, { 'limit': 1, 'offset': 1 }, [ # (workspace1, publication_1e_2_4x6_6), (workspace1, publication_1e_3_3x3_3), # (workspace1, publication_1e_3_7x5_9), ], { 'X-Total-Count': '3', 'Content-Range': 'items 2-2/3', }, ), ( workspace1, None, { 'limit': 0, 'offset': 0 }, [ # (workspace1, publication_1e_2_4x6_6), # (workspace1, publication_1e_3_3x3_3), # (workspace1, publication_1e_3_7x5_9), ], { 'X-Total-Count': '3', 'Content-Range': 'items 0-0/3', }, ), ( workspace1, None, { 'limit': 6, 'offset': 2 }, [ # (workspace1, publication_1e_2_4x6_6), # (workspace1, publication_1e_3_3x3_3), (workspace1, publication_1e_3_7x5_9), ], { 
'X-Total-Count': '3', 'Content-Range': 'items 3-3/3', }, ), ( workspace1, None, { 'order_by': 'title', 'full_text_filter': 'ódy', 'bbox_filter': ','.join(str(c) for c in (3001, 3001, 4999, 4999)), 'limit': 1, }, [ (workspace1, publication_1e_2_4x6_6), ], { 'X-Total-Count': '1', 'Content-Range': 'items 1-1/1', }, ), ( workspace1, None, { 'order_by': 'title', 'full_text_filter': 'ódy', 'bbox_filter': ','.join(str(c) for c in (3001, 3001, 4999, 4999)), 'offset': 1, }, [ # (workspace1, publication_1e_2_4x6_6), offset ], { 'X-Total-Count': '1', 'Content-Range': 'items 0-0/1', }, ), ( workspace2, authn_headers_user2, { 'order_by': 'bbox', 'full_text_filter': 'prilis', 'bbox_filter': ','.join(str(c) for c in (2000, 2000, 6000, 6000)), 'offset': 1, 'limit': 1, }, [ # (workspace2, publication_2e_3_3x5_5), (workspace2, publication_2o_2_2x4_4), ], { 'X-Total-Count': '2', 'Content-Range': 'items 2-2/2', }, ), ( workspace2, authn_headers_user2, { 'full_text_filter': 'prilis yellow', 'bbox_filter': ','.join(str(c) for c in (2000, 2000, 6000, 6000)), 'offset': 1, 'limit': 1, }, [ # (workspace2, publication_2e_3_3x5_5), (workspace2, publication_2o_2_2x4_4), ], { 'X-Total-Count': '2', 'Content-Range': 'items 2-2/2', }, ), ( workspace2, authn_headers_user2, { 'order_by': 'title', 'full_text_filter': 'prilis', 'bbox_filter': ','.join(str(c) for c in (2000, 2000, 6000, 6000)), 'bbox_filter_crs': crs_def.EPSG_3857, 'offset': 1, 'limit': 1, }, [ # (workspace2, publication_2o_2_2x4_4), (workspace2, publication_2e_3_3x5_5), ], { 'X-Total-Count': '2', 'Content-Range': 'items 2-2/2', }, ), ( workspace2, authn_headers_user2, { 'order_by': 'title', 'full_text_filter': 'prilis', 'bbox_filter': ','.join( str(c) for c in (0.0179663, 0.0179663, 0.0538989, 0.0538989)), # EPSG:3857 (2000, 2000, 6000, 6000) 'bbox_filter_crs': crs_def.EPSG_4326, 'offset': 1, 'limit': 1, }, [ # (workspace2, publication_2o_2_2x4_4), (workspace2, publication_2e_3_3x5_5), ], { 'X-Total-Count': '2', 'Content-Range': 'items 2-2/2', }, ), ( workspace2, authn_headers_user2, { 'order_by': 'last_change', 'full_text_filter': 'prilis', 'bbox_filter': ','.join(str(c) for c in (2000, 2000, 6000, 6000)), 'offset': 1, 'limit': 1, }, [ # (workspace2, publication_2o_2_2x4_4), (workspace2, publication_2e_3_3x5_5), ], { 'X-Total-Count': '2', 'Content-Range': 'items 2-2/2', }, ), ]) @pytest.mark.parametrize('publication_type', process_client.PUBLICATION_TYPES) @pytest.mark.usefixtures('liferay_mock', 'ensure_layman', 'provide_data') def test_get_workspace_publications( publication_type, workspace, headers, query_params, expected_publications, expected_headers, ): response = process_client.get_workspace_publications_response( publication_type, workspace, headers=headers, query_params=query_params) TestGetPublications.assert_response(response, expected_publications, expected_headers)
class TestDeletePublicationsClass:
    owner = 'test_delete_publications_owner'
    deleter = 'test_delete_publications_deleter'
    authn_headers_owner = process_client.get_authz_headers(owner)
    authn_headers_deleter = process_client.get_authz_headers(deleter)

    @pytest.fixture(scope="class")
    def provide_data(self):
        process_client.ensure_reserved_username(self.owner, self.authn_headers_owner)
        process_client.ensure_reserved_username(self.deleter, self.authn_headers_deleter)
        yield

    @pytest.mark.parametrize('publ_type', process_client.PUBLICATION_TYPES)
    @pytest.mark.usefixtures('liferay_mock', 'ensure_layman', 'provide_data')
    def test_delete_publications(self, publ_type):
        def check_delete(headers, after_delete_publications, remaining_publications):
            delete_json = process_client.delete_workspace_publications(publ_type, owner, headers=headers)
            publication_set = {publication['name'] for publication in delete_json}
            assert after_delete_publications == publication_set

            get_json = process_client.get_workspace_publications(publ_type, workspace=owner,
                                                                 headers=authn_headers_owner)
            publication_set = {publication['name'] for publication in get_json}
            assert remaining_publications == publication_set

        owner = self.owner
        authn_headers_owner = self.authn_headers_owner
        authn_headers_deleter = self.authn_headers_deleter

        publication_a = 'test_delete_publications_publication_a'
        publication_b = 'test_delete_publications_publication_b'
        publications = [
            (publication_a, {'read': 'EVERYONE', 'write': owner}),
            (publication_b, {'read': 'EVERYONE', 'write': 'EVERYONE'}),
        ]
        for (name, access_rights) in publications:
            process_client.publish_workspace_publication(publ_type, owner, name,
                                                         access_rights=access_rights,
                                                         headers=authn_headers_owner)

        response = process_client.get_workspace_publications(publ_type, workspace=owner,
                                                             headers=authn_headers_owner)
        assert len(response) == len(publications)

        # Delete by other user with rights only for one publication
        check_delete(authn_headers_deleter,
                     {publication_b, },
                     {publication_a, })

        # Delete by owner, everything is deleted
        check_delete(authn_headers_owner,
                     {publication_a, },
                     set())
class TestRestApiClass:
    layername = 'test_authorize_decorator_layer'
    mapname = 'test_authorize_decorator_map'
    username = '******'
    authz_headers = process_client.get_authz_headers(username)

    @pytest.fixture(scope="class")
    def provide_publications(self):
        username = self.username
        authz_headers = self.authz_headers
        layername = self.layername
        mapname = self.mapname
        process_client.ensure_reserved_username(username, headers=authz_headers)
        process_client.publish_workspace_layer(username, layername, headers=authz_headers)
        process_client.publish_workspace_map(username, mapname, headers=authz_headers)
        yield
        process_client.delete_workspace_layer(username, layername, headers=authz_headers)
        process_client.delete_workspace_map(username, mapname, headers=authz_headers)

    @staticmethod
    def assert_response(response, exp_status_code, exp_data):
        assert response.status_code == exp_status_code, response.text
        if exp_status_code == 200 and exp_data is not None:
            resp_json = response.json()
            if callable(exp_data):
                assert exp_data(resp_json), f"resp_json={resp_json}, exp_data={exp_data}"
            else:
                assert resp_json == exp_data
        elif exp_status_code != 200 and exp_data is not None:
            resp_json = response.json()
            assert resp_json['code'] == exp_data, f"resp_json={resp_json}, exp_data={exp_data}"

    @staticmethod
    def has_single_layer(r_json):
        return {li['name'] for li in r_json} == {TestRestApiClass.layername}

    @staticmethod
    def has_single_map(r_json):
        return {li['name'] for li in r_json} == {TestRestApiClass.mapname}

    @staticmethod
    def has_no_publication(r_json):
        return {li['name'] for li in r_json} == set()

    @pytest.mark.parametrize(
        "rest_action, url_for_params, authz_status_code, authz_response, unauthz_status_code, unauthz_response",
        [
            ('rest_workspace_layers.get', {}, 200, has_single_layer.__func__, 200, has_no_publication.__func__),
            ('rest_workspace_layer.get', {'layername': layername}, 200, None, 404, 15),
            ('rest_workspace_layer_metadata_comparison.get', {'layername': layername}, 200, None, 404, 15),
            ('rest_workspace_layer_style.get', {'layername': layername}, 200, None, 404, 15),
            ('rest_workspace_layer_thumbnail.get', {'layername': layername}, 200, None, 404, 15),
            ('rest_workspace_layer_chunk.get', {'layername': layername}, 400, 20, 404, 15),
            ('rest_workspace_maps.get', {}, 200, has_single_map.__func__, 200, has_no_publication.__func__),
            ('rest_workspace_map.get', {'mapname': mapname}, 200, None, 404, 26),
            ('rest_workspace_map_file.get', {'mapname': mapname}, 200, None, 404, 26),
            ('rest_workspace_map_metadata_comparison.get', {'mapname': mapname}, 200, None, 404, 26),
            ('rest_workspace_map_thumbnail.get', {'mapname': mapname}, 200, None, 404, 26),
        ],
    )
    @pytest.mark.usefixtures('liferay_mock', 'ensure_layman', 'provide_publications')
    def test_authorize_publications_decorator_on_rest_api(
            self,
            rest_action,
            url_for_params,
            authz_status_code,
            authz_response,
            unauthz_status_code,
            unauthz_response,
    ):
        username = self.username
        authz_headers = self.authz_headers

        patch_method = None
        publ_name = None
        if '_layer' in rest_action:
            patch_method = process_client.patch_workspace_layer
            publ_name = self.layername
        elif '_map' in rest_action:
            patch_method = process_client.patch_workspace_map
            publ_name = self.mapname
        assert publ_name

        url_for_params['workspace'] = username
        with app.app_context():
            rest_url = url_for(rest_action, **url_for_params)

        patch_method(username, publ_name, headers=authz_headers, access_rights={
            'read': username,
            'write': username,
        })
        response = requests.get(rest_url, headers=authz_headers)
        self.assert_response(response, authz_status_code, authz_response)
        response = requests.get(rest_url)
        self.assert_response(response, unauthz_status_code, unauthz_response)

        patch_method(username, publ_name, headers=authz_headers, access_rights={
            'read': settings.RIGHTS_EVERYONE_ROLE,
            'write': settings.RIGHTS_EVERYONE_ROLE,
        })
        response = requests.get(rest_url, headers=authz_headers)
        self.assert_response(response, authz_status_code, authz_response)
        response = requests.get(rest_url)
        self.assert_response(response, authz_status_code, authz_response)