def setUpClass(cls):
    """Create two datasets, compose them into a Map with
    create_from_dataset_list, and flag the matching MapLayers as local.

    NOTE(review): no @classmethod decorator is visible in this chunk —
    presumably applied/inherited elsewhere; confirm in the full file.
    """
    super().setUpClass()
    cls.user_admin = get_user_model().objects.get(username="******")
    # ensure the admin user exists; the bound names are intentionally unused
    admin, _ = get_user_model().objects.get_or_create(username="******")
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        cls.dataset_coast_line = create_single_dataset('san_andres_y_providencia_coastline')
        cls.dataset_highway = create_single_dataset('san_andres_y_providencia_highway')
        # create a map from loaded layers
        cls.map_composition = Map()
        admin_user = get_user_model().objects.get(username="******")
        cls.map_composition.create_from_dataset_list(
            admin_user,
            [cls.dataset_coast_line, cls.dataset_highway],
            "composition",
            "abstract")
        # update MapLayers to correctly show layers' location
        with DisableDjangoSignals():
            for maplayer in cls.map_composition.datasets:
                if maplayer.name in [cls.dataset_coast_line.alternate, cls.dataset_highway.alternate]:
                    maplayer.local = True
                    # force_update: issue an UPDATE (row already exists)
                    maplayer.save(force_update=True)
                maplayer.refresh_from_db()
        cls.map_composition.refresh_from_db()
def setUpClass(cls):
    """Create two datasets plus a hand-built Map with two local MapLayers
    pointing at them, then force the layers' ``local`` flag without signals.

    NOTE(review): no @classmethod decorator is visible in this chunk —
    presumably applied/inherited elsewhere; confirm in the full file.
    """
    super().setUpClass()
    cls.user_admin = get_user_model().objects.get(username="******")
    # ensure the admin user exists; the bound names are intentionally unused
    admin, _ = get_user_model().objects.get_or_create(username="******")
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        cls.dataset_coast_line = create_single_dataset('san_andres_y_providencia_coastline')
        cls.dataset_highway = create_single_dataset('san_andres_y_providencia_highway')
        # create a map from loaded layers
        admin_user = get_user_model().objects.get(username="******")
        cls.map_composition = Map.objects.create(
            title="composition",
            abstract="abstract",
            owner=admin_user,
        )
        # (a bare no-op `cls.map_composition.id` expression was removed here)
        MapLayer.objects.create(
            map=cls.map_composition,
            extra_params={},
            name="geonode:san_andres_y_providencia_coastline",
            store=None,
            current_style=None,
            ows_url=None,
            local=True)
        MapLayer.objects.create(
            map=cls.map_composition,
            extra_params={},
            name="geonode:san_andres_y_providencia_highway",
            store=None,
            current_style=None,
            ows_url=None,
            local=True)
        # update MapLayers to correctly show layers' location
        with DisableDjangoSignals():
            for maplayer in cls.map_composition.maplayers.iterator():
                if maplayer.name in [
                        cls.dataset_coast_line.alternate,
                        cls.dataset_highway.alternate]:
                    maplayer.local = True
                    # force_update: issue an UPDATE (row already exists)
                    maplayer.save(force_update=True)
                maplayer.refresh_from_db()
        cls.map_composition.refresh_from_db()
def test_remove_permissions(self):
    """remove_permissions fails on an invalid uuid and succeeds for both
    a map and a dataset instance."""
    with self.settings(DEFAULT_ANONYMOUS_VIEW_PERMISSION=True):
        dt = create_single_dataset("test_dataset")
        # renamed from `map` so the builtin is not shadowed
        test_map = create_single_map("test_exec_map")
        self.assertFalse(self.rm.remove_permissions("invalid", instance=None))
        self.assertTrue(self.rm.remove_permissions(test_map.uuid, instance=test_map))
        self.assertTrue(self.rm.remove_permissions(dt.uuid, instance=dt))
def test_set_permissions(self):
    """Verify an explicit permission spec is applied to users/groups, and
    that removing permissions (with anonymous defaults off) strips them."""
    user_norman = get_user_model().objects.get(username="******")
    user_anonymous = get_user_model().objects.get(username="******")
    dataset = create_single_dataset("test_perms_dataset")
    public_group, _public_created = GroupProfile.objects.get_or_create(
        slug='public_group', title='public_group', access='public')
    private_group, _private_created = GroupProfile.objects.get_or_create(
        slug='private_group', title='private_group', access='private')
    spec = {
        "users": {
            "AnonymousUser": ['change_dataset_style', 'view_resourcebase'],
            "norman": ['view_resourcebase', 'change_dataset_style'],
        },
        "groups": {
            "public_group": ['view_resourcebase'],
            "private_group": ['view_resourcebase', 'change_resourcebase'],
        },
    }
    self.assertTrue(self.rm.set_permissions(dataset.uuid, instance=dataset, permissions=spec))
    self.assertFalse(self.rm.set_permissions("invalid_uuid", instance=None, permissions=spec))
    # the spec above must now be reflected on the dataset
    self.assertTrue(user_norman.has_perm('change_dataset_style', dataset))
    self.assertFalse(user_norman.has_perm('change_resourcebase', dataset))
    # with no explicit permissions and anonymous defaults disabled,
    # anonymous loses both view and download
    with patch('geonode.security.utils.skip_registered_members_common_group') as mock_v:
        mock_v.return_value = True
        with self.settings(DEFAULT_ANONYMOUS_DOWNLOAD_PERMISSION=False,
                           DEFAULT_ANONYMOUS_VIEW_PERMISSION=False):
            self.assertTrue(self.rm.remove_permissions(dataset.uuid, instance=dataset))
            self.assertFalse(user_anonymous.has_perm('view_resourcebase', dataset))
            self.assertFalse(user_anonymous.has_perm('download_resourcebase', dataset))
def setUp(self):
    """Build a KeywordHandler over a fresh dataset with a mixed payload:
    free keywords, thesaurus-backed keywords, and a 'place' keyword."""
    self.keyword = [
        # free keywords, no thesaurus
        {
            "keywords": ["features", "test_dataset"],
            "thesaurus": {"date": None, "datetype": None, "title": None},
            "type": "theme",
        },
        # thesaurus-backed keyword (ordering test thesaurus)
        {
            "keywords": ["no conditions to access and use"],
            "thesaurus": {
                "date": "2020-10-30T16:58:34",
                "datetype": "publication",
                "title": "Test for ordering",
            },
            "type": None,
        },
        # thesaurus-backed keywords (GEMET/INSPIRE)
        {
            "keywords": ["ad", "af"],
            "thesaurus": {
                "date": "2008-06-01",
                "datetype": "publication",
                "title": "GEMET - INSPIRE themes, version 1.0",
            },
            "type": None,
        },
        # plain place keyword
        {
            "keywords": ["Global"],
            "thesaurus": {"date": None, "datetype": None, "title": None},
            "type": "place",
        },
    ]
    self.dataset = create_single_dataset('keyword-handler')
    self.sut = KeywordHandler(instance=self.dataset, keywords=self.keyword)
def test_delete(self):
    """Deleting a dataset also removes its MapLayer references and ratings;
    deleting a document succeeds independently."""
    doc = create_single_doc("test_delete_doc")
    dt = create_single_dataset("test_delete_dataset")
    # renamed from `map` so the builtin is not shadowed
    test_map = create_single_map("test_delete_dataset")
    Service.objects.create(
        base_url="http://fake_test",
        owner=self.user)
    # Add dataset to a map. objects.create() already persists the row,
    # so the original redundant extra .save() call was dropped.
    MapLayer.objects.create(map=test_map, name=dt.alternate, stack_order=1)
    # Create the rating for dataset
    OverallRating.objects.create(
        category=2,
        object_id=dt.id,
        content_type=ContentType.objects.get(model='dataset'),
        rating=3)
    create_dataset_data(dt.resourcebase_ptr_id)
    res = self.rm.delete(doc.uuid, instance=doc)
    self.assertTrue(res)
    # Before dataset delete: one maplayer and one rating reference it
    self.assertEqual(MapLayer.objects.filter(name='geonode:test_delete_dataset').count(), 1)
    self.assertEqual(OverallRating.objects.filter(object_id=dt.id).count(), 1)
    # TODO: also try deleting with no default style, a harvest_job with
    # alternate as resource_id, and with uploads
    res = self.rm.delete(dt.uuid, instance=dt)
    self.assertTrue(res)
    # After dataset delete: references are gone
    self.assertEqual(MapLayer.objects.filter(name='geonode:test_delete_dataset').count(), 0)
    self.assertEqual(OverallRating.objects.filter(object_id=dt.id).count(), 0)
def setUpClass(cls):
    """Populate baseline models, make everything public and, on the
    GeoServer backend, create the coastline test dataset."""
    super().setUpClass()
    create_models(type=cls.get_type, integration=cls.get_integration)
    all_public()
    cls.user_admin = get_user_model().objects.get(username="******")
    backend_is_geoserver = check_ogc_backend(geoserver.BACKEND_PACKAGE)
    if backend_is_geoserver:
        cls.dataset_coast_line = create_single_dataset('san_andres_y_providencia_coastline')
def setUp(self):
    """Prepare a render context whose 'request' targets the download URL
    of a freshly created dataset, with the admin user attached."""
    self.resource = create_single_dataset('foo_dataset')
    factory = RequestFactory()
    # NOTE(review): `args` is passed as a set literal here; it works because
    # reverse() only iterates it — a tuple would be the conventional choice.
    self.url = urljoin(settings.SITEURL, reverse("download", args={self.resource.id}))
    factory.get(self.url)
    # the factory object itself is used as the request carrier
    factory.user = get_user_model().objects.get(username='******')
    self.context = {'request': factory}
def test_replace(self, mock_validator):
    """replace() applies vals when the validator passes and leaves the
    resource untouched when it fails."""
    dataset = create_single_dataset("test_replace_dataset")
    # validator approves -> the name is replaced
    mock_validator.return_value = True
    self.rm.replace(dataset, vals={"name": "new_name_test_replace_dataset"})
    self.assertEqual(dataset.name, "new_name_test_replace_dataset")
    # validator rejects -> the previous name is kept
    mock_validator.return_value = False
    self.rm.replace(dataset, vals={"name": "new_name2"})
    self.assertEqual(dataset.name, "new_name_test_replace_dataset")
def test_set_workflow_permissions(self):
    """set_workflow_permissions rejects an unknown uuid and accepts a
    real dataset marked approved and published."""
    dataset = create_single_dataset("test_workflow_dataset")
    rejected = self.rm.set_workflow_permissions('invalid_uuid', instance=None)
    self.assertFalse(rejected)
    accepted = self.rm.set_workflow_permissions(
        dataset.uuid, instance=dataset, approved=True, published=True)
    self.assertTrue(accepted)
def test_update(self):
    """update() applies vals, keywords and regions to the resource."""
    dataset = create_single_dataset("test_update_dataset")
    new_vals = {"name": "new_name_test_update_dataset"}
    res = self.rm.update(dataset.uuid, vals=new_vals, keywords=["testing"], regions=["not_known", "Africa"])
    region_names = res.regions.values_list("name", flat=True)
    self.assertIn("Africa", region_names)
    # per the original expectation, "not_known" ends up among the keywords
    keyword_names = res.keywords.values_list("name", flat=True)
    self.assertTrue(all(k in keyword_names for k in ["testing", "not_known"]))
    self.assertEqual(res.name, new_vals["name"])
def test_given_resource_base_object_will_assign_subtype_as_content_type(self):
    """Favorites built from a ResourceBase record the concrete subtype
    (Doc, Dataset, Map or GeoApp) as their content type; favorites built
    from a subtype keep that subtype's content type."""
    test_user = get_user_model().objects.first()
    # ResourceBase input -> subtype ('dataset') content type is stored
    create_single_dataset('foo_dataset')
    resource = ResourceBase.objects.get(title='foo_dataset')
    created_fav = Favorite.objects.create_favorite(resource, test_user)
    self.assertEqual('dataset', created_fav.content_type.model)
    # subtype input -> its own content type is stored
    test_document_1 = Document.objects.first()
    Favorite.objects.create_favorite(test_document_1, test_user)
    fav = Favorite.objects.last()
    ct = ContentType.objects.get_for_model(test_document_1)
    self.assertEqual(fav.content_type, ct)
def test_revise_resource_value_in_replace_should_return_none_for_not_existing_dataset(self):
    """A 'replace' import for a dataset unknown to GeoServer stays PENDING."""
    dataset = create_single_dataset('fake_dataset')
    session_info = self.geoserver_manager._execute_resource_import(
        dataset,
        list(self.files_as_dict.values()),
        self.user,
        action_type="replace",
    )
    self.assertEqual(session_info.import_session.state, enumerations.STATE_PENDING)
def setUp(self):
    """Stage the san_andres_y_providencia_water shapefile plus a matching
    dataset, and bind a fresh GeoServerResourceManager under test."""
    shapefile = os.path.join(gisdata.GOOD_DATA, "vector/san_andres_y_providencia_water.shp")
    self.files = shapefile
    self.files_as_dict, self.tmpdir = get_files(shapefile)
    self.cat = gs_catalog
    self.user = get_user_model().objects.get(username="******")
    self.sut = create_single_dataset("san_andres_y_providencia_water.shp")
    # the dataset name must not carry the .shp extension
    self.sut.name = 'san_andres_y_providencia_water'
    self.sut.save()
    self.geoserver_url = settings.GEOSERVER_LOCATION
    self.geoserver_manager = GeoServerResourceManager()
def test_append(self, mock_validator):
    """append() applies vals when the validator passes and leaves the
    resource untouched when it fails."""
    mock_validator.return_value = True
    dataset = create_single_dataset("test_append_dataset")
    # sanity check before the append
    self.assertEqual(dataset.name, "test_append_dataset")
    # validator approves -> the name is appended/updated
    self.rm.append(dataset, vals={"name": "new_name_test_append_dataset"})
    self.assertEqual(dataset.name, "new_name_test_append_dataset")
    # validator rejects -> the previous name is kept
    mock_validator.return_value = False
    self.rm.append(dataset, vals={"name": "new_name2"})
    self.assertEqual(dataset.name, "new_name_test_append_dataset")
def test_validate_resource(self):
    """_validate_resource allows 'append' only for datasets, allows
    'replace' for docs/datasets/maps, and raises on unknown actions."""
    doc = create_single_doc("test_delete_doc")
    dt = create_single_dataset("test_delete_dataset")
    # renamed from `map` so the builtin is not shadowed
    test_map = create_single_map("test_delete_dataset")
    with self.assertRaises(Exception):
        # append is for only datasets
        self.rm._validate_resource(doc, action_type="append")
    self.assertTrue(self.rm._validate_resource(doc, action_type="replace"))
    self.assertTrue(self.rm._validate_resource(dt, action_type="replace"))
    self.assertTrue(self.rm._validate_resource(test_map, action_type="replace"))
    with self.assertRaises(ObjectDoesNotExist):
        # TODO: the implementation should raise this only when the object
        # is not found, not for an invalid action_type
        self.rm._validate_resource(dt, action_type="invalid")
def setUp(self):
    """Snapshot a fresh dataset's uuid/abstract and define the custom
    metadata payload consumed by the storer tests."""
    self.dataset = create_single_dataset('metadata-storer')
    self.uuid = self.dataset.uuid
    self.abstract = self.dataset.abstract
    self.custom = {
        "processes": {
            "uuid": "abc123cfde",
            "abstract": "updated abstract",
        },
        "second-stage": {
            "title": "Updated Title",
            "abstract": "another update",
        },
    }
def test_storage_manager_replace_single_file(self, path, strg):
    """Replacing a dataset's single file returns the (mocked) stored path
    as the dataset's new file list."""
    expected = '/opt/full/path/to/file'
    # both patched helpers resolve to the same path
    path.return_value = expected
    strg.return_value = expected
    dataset = create_single_dataset('storage_manager')
    dataset.files = ['/opt/full/path/to/file2']
    dataset.save()
    with open('geonode/base/fixtures/test_sld.sld') as new_file:
        output = self.sut().replace(dataset, new_file)
    self.assertListEqual([expected], output['files'])
def test_storage_manager_replace_files_list(self):
    """Replacing a dataset's files with a two-file list yields two stored
    files renamed after the dataset's base file name."""
    old_files = ['/opt/full/path/to/file', '/opt/full/path/to/file']
    new_files = [
        os.path.join(f"{self.project_root}", "tests/data/test_sld.sld"),
        os.path.join(f"{self.project_root}", "tests/data/test_data.json"),
    ]
    dataset = create_single_dataset('storage_manager')
    dataset.files = old_files
    dataset.save()
    output = self.sut().replace(dataset, new_files)
    replaced = output['files']
    self.assertEqual(2, len(replaced))
    # extensions are preserved, base name is normalized to 'file'
    self.assertTrue('file.sld' in replaced[0])
    self.assertTrue('file.json' in replaced[1])
def test_set_thumbnail(self):
    """set_thumbnail fails for an unknown uuid and succeeds for both a
    dataset and a document."""
    document = create_single_doc("test_thumb_doc")
    dataset = create_single_dataset("test_thumb_dataset")
    self.assertFalse(self.rm.set_thumbnail("invalid_uuid"))
    self.assertTrue(self.rm.set_thumbnail(dataset.uuid, instance=dataset))
    self.assertTrue(self.rm.set_thumbnail(document.uuid, instance=document))
def setUp(self):
    """Create one resource of each kind and a request fixture for the tests."""
    self.layer = create_single_dataset("dataset_name")
    self.map = create_single_map("map_name")
    self.doc = create_single_doc("doc_name")
    # helper on this test class builds the request object
    self.request = self.__request_factory()
def test_copy(self):
    """copy() returns None without a source object; otherwise the clone
    carries the same permissions as the original."""
    dataset = create_single_dataset("test_copy_dataset")
    self.assertIsNone(self.rm.copy(None))
    clone = self.rm.copy(dataset)
    self.assertEqual(clone.perms, dataset.perms)
def test_ogc_server_defaults(self):
    """
    Tests that OGC_SERVER_SETTINGS are built if they do not exist in the settings.
    """
    from django.urls import reverse, resolve
    from ..ows import _wcs_get_capabilities, _wfs_get_capabilities, _wms_get_capabilities
    OGC_SERVER = {'default': dict()}
    defaults = self.OGC_DEFAULT_SETTINGS.get('default')
    ogc_settings = OGC_Servers_Handler(OGC_SERVER)['default']
    # an empty 'default' entry is filled in from the defaults
    self.assertEqual(ogc_settings.server, defaults)
    self.assertEqual(ogc_settings.rest, f"{defaults['LOCATION']}rest")
    self.assertEqual(ogc_settings.ows, f"{defaults['LOCATION']}ows")
    # Make sure we get None vs a KeyError when the key does not exist
    self.assertIsNone(ogc_settings.SFDSDFDSF)
    # Testing REST endpoints
    route = resolve('/gs/rest/layers').route
    self.assertEqual(route, '^gs/rest/layers')
    route = resolve('/gs/rest/imports').route
    self.assertEqual(route, '^gs/rest/imports')
    route = resolve('/gs/rest/sldservice').route
    self.assertEqual(route, '^gs/rest/sldservice')
    store_resolver = resolve('/gs/rest/stores/geonode_data/')
    self.assertEqual(store_resolver.url_name, 'stores')
    self.assertEqual(store_resolver.kwargs['store_type'], 'geonode_data')
    self.assertEqual(store_resolver.route, '^gs/rest/stores/(?P<store_type>\\w+)/$')
    sld_resolver = resolve('/gs/rest/styles')
    self.assertIsNone(sld_resolver.url_name)
    self.assertTrue('workspace' not in sld_resolver.kwargs)
    self.assertEqual(sld_resolver.kwargs['proxy_path'], '/gs/rest/styles')
    self.assertEqual(sld_resolver.kwargs['downstream_path'], 'rest/styles')
    self.assertEqual(sld_resolver.route, '^gs/rest/styles')
    sld_resolver = resolve('/gs/rest/workspaces/geonode/styles')
    self.assertIsNone(sld_resolver.url_name)
    self.assertEqual(sld_resolver.kwargs['workspace'], 'geonode')
    self.assertEqual(sld_resolver.kwargs['proxy_path'], '/gs/rest/workspaces')
    self.assertEqual(sld_resolver.kwargs['downstream_path'], 'rest/workspaces')
    self.assertEqual(sld_resolver.route, '^gs/rest/workspaces/(?P<workspace>\\w+)')
    # Testing OWS endpoints
    wcs = _wcs_get_capabilities()
    logger.debug(wcs)
    self.assertIsNotNone(wcs)
    # fall back to the public location when the named route is missing
    try:
        wcs_url = urljoin(settings.SITEURL, reverse('ows_endpoint'))
    except Exception:
        wcs_url = urljoin(ogc_settings.PUBLIC_LOCATION, 'ows')
    self.assertTrue(wcs.startswith(wcs_url))
    self.assertIn("service=WCS", wcs)
    self.assertIn("request=GetCapabilities", wcs)
    self.assertIn("version=2.0.1", wcs)
    wfs = _wfs_get_capabilities()
    logger.debug(wfs)
    self.assertIsNotNone(wfs)
    try:
        wfs_url = urljoin(settings.SITEURL, reverse('ows_endpoint'))
    except Exception:
        wfs_url = urljoin(ogc_settings.PUBLIC_LOCATION, 'ows')
    self.assertTrue(wfs.startswith(wfs_url))
    self.assertIn("service=WFS", wfs)
    self.assertIn("request=GetCapabilities", wfs)
    self.assertIn("version=1.1.0", wfs)
    wms = _wms_get_capabilities()
    logger.debug(wms)
    self.assertIsNotNone(wms)
    try:
        wms_url = urljoin(settings.SITEURL, reverse('ows_endpoint'))
    except Exception:
        wms_url = urljoin(ogc_settings.PUBLIC_LOCATION, 'ows')
    self.assertTrue(wms.startswith(wms_url))
    self.assertIn("service=WMS", wms)
    self.assertIn("request=GetCapabilities", wms)
    self.assertIn("version=1.3.0", wms)
    # Test OWS Download Links
    from geonode.geoserver.ows import wcs_links, wfs_links, wms_links
    instance = create_single_dataset("san_andres_y_providencia_water")
    instance.name = 'san_andres_y_providencia_water'
    instance.save()
    bbox = instance.bbox
    srid = instance.srid
    height = 512
    width = 512
    # Default Style (expect exception since we are offline)
    style = get_sld_for(gs_catalog, instance)
    logger.error(
        f" style -------------------------------------------> {style}")
    if isinstance(style, str):
        style = gs_catalog.get_style(instance.name, workspace=instance.workspace)
    self.assertIsNotNone(style)
    self.assertFalse(isinstance(style, str))
    instance.default_style, _ = Style.objects.get_or_create(
        name=style.name,
        defaults=dict(sld_title=style.sld_title, sld_body=style.sld_body))
    self.assertIsNotNone(instance.default_style)
    self.assertIsNotNone(instance.default_style.name)
    # WMS Links
    # NOTE(review): the locals below deliberately rebind the imported
    # wms_links/wfs_links/wcs_links names after calling them — each
    # function is only called once, so this works, but it is fragile.
    wms_links = wms_links(f"{ogc_settings.public_url}wms?",
                          instance.alternate, bbox, srid, height, width)
    self.assertIsNotNone(wms_links)
    self.assertEqual(len(wms_links), 3)
    wms_url = urljoin(ogc_settings.PUBLIC_LOCATION, 'wms')
    identifier = urlencode({'layers': instance.alternate})
    for _link in wms_links:
        logger.debug(f'{wms_url} --> {_link[3]}')
        self.assertTrue(wms_url in _link[3])
        logger.debug(f'{identifier} --> {_link[3]}')
        self.assertTrue(identifier in _link[3])
    # WFS Links
    wfs_links = wfs_links(f"{ogc_settings.public_url}wfs?",
                          instance.alternate, bbox, srid)
    self.assertIsNotNone(wfs_links)
    self.assertEqual(len(wfs_links), 6)
    wfs_url = urljoin(ogc_settings.PUBLIC_LOCATION, 'wfs')
    identifier = urlencode({'typename': instance.alternate})
    for _link in wfs_links:
        logger.debug(f'{wfs_url} --> {_link[3]}')
        self.assertTrue(wfs_url in _link[3])
        logger.debug(f'{identifier} --> {_link[3]}')
        self.assertTrue(identifier in _link[3])
    # WCS Links
    wcs_links = wcs_links(f"{ogc_settings.public_url}wcs?",
                          instance.alternate, bbox, srid)
    self.assertIsNotNone(wcs_links)
    self.assertEqual(len(wcs_links), 2)
    wcs_url = urljoin(ogc_settings.PUBLIC_LOCATION, 'wcs')
    identifier = urlencode(
        {'coverageid': instance.alternate.replace(':', '__', 1)})
    for _link in wcs_links:
        logger.debug(f'{wcs_url} --> {_link[3]}')
        self.assertTrue(wcs_url in _link[3])
        logger.debug(f'{identifier} --> {_link[3]}')
        self.assertTrue(identifier in _link[3])
def test_group_activity_pages_render(self):
    """
    Verify Activity List pages
    """
    self.assertTrue(self.client.login(username="******", password="******"))
    response = self.client.get("/groups/")
    self.assertEqual(200, response.status_code)
    response = self.client.get("/groups/group/bar/activity/")
    self.assertEqual(200, response.status_code)
    # the activity page renders the three resource section headings
    self.assertContains(response, 'Datasets', count=3, status_code=200, msg_prefix='', html=False)
    self.assertContains(response, 'Maps', count=3, status_code=200, msg_prefix='', html=False)
    self.assertContains(response, 'Documents', count=3, status_code=200, msg_prefix='', html=False)
    # nothing uploaded yet, so neither the dataset link nor 'uploaded' appear
    self.assertContains(response, '<a href="/datasets/:geonode:CA">CA</a>', count=0, status_code=200, msg_prefix='', html=False)
    self.assertContains(response, 'uploaded', count=0, status_code=200, msg_prefix='', html=False)
    dataset = create_single_dataset('single_point.shp')
    try:
        # Add test to test perms being sent to the front end.
        dataset.set_default_permissions()
        perms_info = dataset.get_all_level_info()
        # Ensure there is only one group 'anonymous' by default
        self.assertEqual(len(perms_info['groups'].keys()), 1)
        # Add the foo group to the dataset object groups
        dataset.set_permissions({'groups': {'bar': ['view_resourcebase']}})
        perms_info = _perms_info_json(dataset)
        # Ensure foo is in the perms_info output
        self.assertCountEqual(
            json.loads(perms_info)['groups'], {'bar': ['view_resourcebase']})
        dataset.group = self.bar.group
        dataset.save()
        response = self.client.get("/groups/group/bar/activity/")
        self.assertEqual(200, response.status_code)
        _log(response)
        # now the dataset link and its 'uploaded' entries show up twice
        self.assertContains(
            response,
            f'<a href="{dataset.detail_url}">geonode:single_point.shp</a>',
            count=2,
            status_code=200,
            msg_prefix='',
            html=False)
        self.assertContains(response, 'uploaded', count=2, status_code=200, msg_prefix='', html=False)
    finally:
        # restore default perms and detach the group so other tests are unaffected
        dataset.set_default_permissions()
        dataset.group = None
        dataset.save()