def filter_by_resource_ids(self, object_list, permitted_ids):
    """Filter Style queryset by permitted resource ids."""
    # Each backend hangs styles off resources differently, so the ORM
    # lookup path depends on which OGC backend is active.
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        lookup = {'layer_styles__id__in': permitted_ids}
        return object_list.filter(**lookup)
    if check_ogc_backend(qgis_server.BACKEND_PACKAGE):
        lookup = {'layer_styles__layer__id__in': permitted_ids}
        return object_list.filter(**lookup)
    # NOTE(review): falls through to an implicit None when neither
    # backend matches, exactly like the original if/elif chain.
def test_csw_outputschema_dc(self):
    """Verify that GeoNode CSW can handle ISO metadata with Dublin Core outputSchema"""
    csw = get_catalogue()
    # search for 'san_andres_y_providencia_location', output as Dublin Core
    csw.catalogue.getrecords(
        typenames='gmd:MD_Metadata',
        keywords=['%san_andres_y_providencia_location%'],
        outputschema='http://www.opengis.net/cat/csw/2.0.2',
        esn='full')
    # BUG FIX: dict.values() is a non-indexable view on Python 3;
    # materialize it first (consistent with test_csw_outputschema_iso).
    record = list(csw.catalogue.records.values())[0]
    # test that the ISO title maps correctly in Dublin Core
    self.assertEqual(record.title, 'San Andres Y Providencia Location',
                     'Expected a specific title in Dublin Core model')
    # test that the ISO abstract maps correctly in Dublin Core
    self.assertEqual(record.abstract, 'No abstract provided',
                     'Expected a specific abstract in Dublin Core model')
    # test for correct service link articulation
    for link in record.references:
        if check_ogc_backend(geoserver.BACKEND_PACKAGE):
            if link['scheme'] == 'OGC:WMS':
                self.assertEqual(
                    link['url'],
                    'http://localhost:8000/gs/ows',
                    'Expected a specific OGC:WMS URL')
            elif link['scheme'] == 'OGC:WFS':
                self.assertEqual(
                    link['url'],
                    'http://localhost:8000/gs/wfs',
                    'Expected a specific OGC:WFS URL')
        elif check_ogc_backend(qgis_server.BACKEND_PACKAGE):
            if link['scheme'] == 'OGC:WMS':
                self.assertEqual(
                    link['url'],
                    'http://localhost:8000/qgis-server/ogc/'
                    'san_andres_y_providencia_location',
                    'Expected a specific OGC:WMS URL')
            elif link['scheme'] == 'OGC:WFS':
                self.assertEqual(
                    link['url'],
                    'http://localhost:8000/qgis-server/ogc/'
                    'san_andres_y_providencia_location',
                    'Expected a specific OGC:WFS URL')
def handle(self, *args, **options):
    """Create or update the GeoServer OAuth2 Application and return its keys."""
    from django.conf import settings
    client_id = None
    client_secret = None
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        from geonode.geoserver.helpers import ogc_server_settings
        # Accept redirects from both the internal and the public locations.
        redirect_uris = '%s\n%s' % (ogc_server_settings.LOCATION,
                                    ogc_server_settings.public_url)
        geoserver_apps = Application.objects.filter(name='GeoServer')
        if geoserver_apps.exists():
            # Refresh the redirect URIs on the existing app and reuse its keys.
            geoserver_apps.update(redirect_uris=redirect_uris)
            app = geoserver_apps[0]
            client_id = app.client_id
            client_secret = app.client_secret
        else:
            client_id = generate_client_id()
            client_secret = generate_client_secret()
            Application.objects.create(
                skip_authorization=True,
                redirect_uris=redirect_uris,
                name='GeoServer',
                authorization_grant_type='authorization-code',
                client_type='confidential',
                client_id=client_id,
                client_secret=client_secret,
                # The first superuser owns the application.
                user=Profile.objects.filter(is_superuser=True)[0]
            )
    return '%s,%s' % (client_id, client_secret)
def register_qgis_server_signals():
    """Helper function to register model signals."""
    if not check_ogc_backend(qgis_server.BACKEND_PACKAGE):
        return
    # Do it only if qgis_server is being used
    logger.debug('Register signals QGIS Server')
    # (signal, receiver, dispatch_uid, sender) registration table
    registrations = [
        (signals.pre_save, qgis_server_pre_save,
         'Layer-qgis_server_pre_save', Layer),
        (signals.pre_delete, qgis_server_pre_delete,
         'Layer-qgis_server_pre_delete', Layer),
        (signals.post_save, qgis_server_post_save,
         'Layer-qgis_server_post_save', Layer),
        (signals.pre_save, qgis_server_pre_save_maplayer,
         'MapLayer-qgis_server_pre_save_maplayer', MapLayer),
        (signals.post_save, qgis_server_post_save_map,
         'Map-qgis_server_post_save_map', Map),
        (signals.post_delete, qgis_server_layer_post_delete,
         'QGISServerLayer-qgis_server_layer_post_delete', QGISServerLayer),
    ]
    for signal, receiver, uid, sender in registrations:
        signal.connect(receiver, dispatch_uid=uid, sender=sender)
def test_layer_get_detail_unauth_layer_not_public(self):
    """
    Test that layer detail gives 401 when not public and not logged in
    """
    layer = Layer.objects.all()[0]
    layer.set_permissions(self.perm_spec)
    self.assertHttpUnauthorized(self.api_client.get(
        self.list_url + str(layer.id) + '/'))

    self.api_client.client.login(username=self.user, password=self.passwd)
    resp = self.api_client.get(self.list_url + str(layer.id) + '/')
    self.assertValidJSONResponse(resp)

    # with delayed security
    with self.settings(DELAYED_SECURITY_SIGNALS=True):
        if check_ogc_backend(geoserver.BACKEND_PACKAGE):
            from geonode.security.utils import sync_geofence_with_guardian
            sync_geofence_with_guardian(layer, self.perm_spec)
            self.assertTrue(layer.dirty_state)

            self.client.login(username=self.user, password=self.passwd)
            resp = self.client.get(self.list_url)
            # assertEquals is a deprecated alias of assertEqual
            self.assertEqual(len(self.deserialize(resp)['objects']), 8)

            self.client.logout()
            resp = self.client.get(self.list_url)
            self.assertEqual(len(self.deserialize(resp)['objects']), 7)

            from geonode.people.models import Profile
            # NOTE(review): objects.create() stores the password unhashed,
            # so the login below presumably fails and exercises the
            # anonymous path — confirm this is the intent, otherwise use
            # create_user()/set_password().
            Profile.objects.create(username='******',
                                   password='******')
            self.client.login(username='******', password='******')
            resp = self.client.get(self.list_url)
            self.assertEqual(len(self.deserialize(resp)['objects']), 7)
def test_layer_links(self):
    """Check that vector and raster layers both expose 7 metadata links."""
    lyr = Layer.objects.filter(storeType="dataStore").first()
    # assertEquals is a deprecated alias of assertEqual
    self.assertEqual(lyr.storeType, "dataStore")
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        links = Link.objects.filter(resource=lyr.resourcebase_ptr,
                                    link_type="metadata")
        self.assertIsNotNone(links)
        self.assertEqual(len(links), 7)
        for ll in links:
            self.assertEqual(ll.link_type, "metadata")

    lyr = Layer.objects.filter(storeType="coverageStore").first()
    self.assertEqual(lyr.storeType, "coverageStore")
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        links = Link.objects.filter(resource=lyr.resourcebase_ptr,
                                    link_type="metadata")
        self.assertIsNotNone(links)
        self.assertEqual(len(links), 7)
        for ll in links:
            self.assertEqual(ll.link_type, "metadata")
def setUp(self):
    """Prepare the permission-test fixtures."""
    super(PermissionsTest, self).setUp()

    # GeoFence checks are only meaningful on the GeoServer backend.
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        settings.OGC_SERVER['default']['GEOFENCE_SECURITY_ENABLED'] = True

    # Credentials used by the tests to authenticate as admin.
    self.user = '******'
    self.passwd = 'admin'
    create_layer_data()
    self.anonymous_user = get_anonymous_user()
def test_fix_baselayers(self):
    """Test fix_baselayers function, used by the fix_baselayers command
    """
    map_id = 1
    map_obj = Map.objects.get(id=map_id)

    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        # number of base layers (we remove the local geoserver entry from the total)
        n_baselayers = len(settings.MAP_BASELAYERS) - 1
    elif check_ogc_backend(qgis_server.BACKEND_PACKAGE):
        # QGIS Server backend already excluded local geoserver entry
        n_baselayers = len(settings.MAP_BASELAYERS)
    # NOTE(review): n_baselayers is unbound if neither backend matches;
    # presumably one of the two is always configured — confirm.

    # number of local layers
    n_locallayers = map_obj.layer_set.filter(local=True).count()

    fix_baselayers(map_id)

    # assertEquals is a deprecated alias of assertEqual
    self.assertEqual(map_obj.layer_set.all().count(),
                     n_baselayers + n_locallayers)
def test_importlayer_mgmt_command(self):
    """Test layer import management command
    """
    vector_file = os.path.join(
        gisdata.VECTOR_DATA,
        'san_andres_y_providencia_administrative.shp')
    call_command(
        'importlayers',
        vector_file,
        overwrite=True,
        keywords="test, import, san andreas",
        title="Test San Andres y Providencia Administrative",
        verbosity=1)

    lyr = Layer.objects.get(
        title='Test San Andres y Providencia Administrative')
    try:
        self.assertIsNotNone(lyr)
        self.assertEqual(
            lyr.name, "test_san_andres_y_providencia_administrative")
        self.assertEqual(
            lyr.title, "Test San Andres y Providencia Administrative")

        # Keywords passed on the command line above.
        expected_keywords = {u'import', u'san andreas', u'test'}
        if check_ogc_backend(geoserver.BACKEND_PACKAGE):
            # GeoServer contributes two extra keywords of its own.
            expected_keywords |= {
                u'features',
                u'test_san_andres_y_providencia_administrative',
            }
            self.assertEqual(set(lyr.keyword_list()), expected_keywords)
        elif check_ogc_backend(qgis_server.BACKEND_PACKAGE):
            # QGIS Server backend doesn't add keywords of its own.
            self.assertEqual(set(lyr.keyword_list()), expected_keywords)
    finally:
        # Clean up and completely delete the layer
        lyr.delete()
def test_anonymus_permissions(self):
    """Verify which layer pages an anonymous user can and cannot reach."""
    # grab a layer
    layer = Layer.objects.all()[0]
    layer.set_default_permissions()
    # 1. view_resourcebase
    # 1.1 has view_resourcebase: verify that anonymous user can access
    # the layer detail page
    self.assertTrue(
        self.anonymous_user.has_perm(
            'view_resourcebase',
            layer.get_self_resource()))
    response = self.client.get(reverse('layer_detail', args=(layer.alternate,)))
    # assertEquals is a deprecated alias of assertEqual
    self.assertEqual(response.status_code, 200)
    # 1.2 has not view_resourcebase: verify that anonymous user can not
    # access the layer detail page
    remove_perm('view_resourcebase', self.anonymous_user,
                layer.get_self_resource())
    anonymous_group = Group.objects.get(name='anonymous')
    remove_perm('view_resourcebase', anonymous_group,
                layer.get_self_resource())
    response = self.client.get(reverse('layer_detail', args=(layer.alternate,)))
    self.assertEqual(response.status_code, 302)
    # 2. change_resourcebase
    # 2.1 has not change_resourcebase: verify that anonymous user cannot
    # access the layer replace page but redirected to login
    response = self.client.get(reverse('layer_replace', args=(layer.alternate,)))
    self.assertEqual(response.status_code, 302)
    # 3. delete_resourcebase
    # 3.1 has not delete_resourcebase: verify that anonymous user cannot
    # access the layer delete page but redirected to login
    response = self.client.get(reverse('layer_remove', args=(layer.alternate,)))
    self.assertEqual(response.status_code, 302)
    # 4. change_resourcebase_metadata
    # 4.1 has not change_resourcebase_metadata: verify that anonymous user
    # cannot access the layer metadata page but redirected to login
    response = self.client.get(reverse('layer_metadata', args=(layer.alternate,)))
    self.assertEqual(response.status_code, 302)
    # 5 N\A? 6 is an integration test...
    # 7. change_layer_style
    # 7.1 has not change_layer_style: verify that anonymous user cannot access
    # the layer style page but redirected to login
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        # Only for geoserver backend
        response = self.client.get(
            reverse('layer_style_manage', args=(layer.alternate,)))
        self.assertEqual(response.status_code, 302)
def setUp(self):
    """Prepare bulk-permission fixtures and API endpoint URLs."""
    super(BulkPermissionsTests, self).setUp()

    # Exercise the GeoFence-backed security paths on GeoServer.
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        settings.OGC_SERVER['default']['GEOFENCE_SECURITY_ENABLED'] = True

    self.user = '******'
    self.passwd = 'admin'
    self.list_url = reverse(
        'api_dispatch_list',
        kwargs={'api_name': 'api', 'resource_name': 'layers'})
    self.bulk_perms_url = reverse('bulk_permissions')
    all_public()
    # Permission spec granting the admin user read access only.
    self.perm_spec = {
        "users": {"admin": ["view_resourcebase"]},
        "groups": {}}
def publish_layer_group(self):
    """
    Publishes local map layers as WMS layer group on local OWS.
    """
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        from geonode.geoserver.helpers import gs_catalog
        from geoserver.layergroup import UnsavedLayerGroup as GsUnsavedLayerGroup
    else:
        raise Exception(
            'Cannot publish layer group if geonode.geoserver is not in INSTALLED_APPS')

    # temporary permission workaround:
    # only allow public maps to be published
    if not self.is_public:
        return 'Only public maps can be saved as layer group.'

    map_layers = MapLayer.objects.filter(map=self.id)

    # Collect the local layers and the style each should use in the group.
    layers = []
    lg_styles = []
    for map_layer in map_layers:
        if not map_layer.local:
            continue
        layer = Layer.objects.get(alternate=map_layer.name)
        layers.append(layer)
        lg_styles.append(
            map_layer.styles or getattr(layer.default_style, 'name', ''))
    lg_layers = [layer.name for layer in layers]

    # Group layer bounds and name
    lg_bounds = [str(coord) for coord in self.bbox]
    lg_name = '%s_%d' % (slugify(self.title), self.id)

    # Update existing or add new group layer
    lg = self.layer_group
    if lg is None:
        lg = GsUnsavedLayerGroup(
            gs_catalog, lg_name, lg_layers, lg_styles, lg_bounds)
    else:
        lg.layers, lg.styles, lg.bounds = lg_layers, lg_styles, lg_bounds
    gs_catalog.save(lg)
    return lg_name
def layer_group(self):
    """
    Returns layer group name from local OWS for this map instance.
    """
    if not check_ogc_backend(geoserver.BACKEND_PACKAGE):
        return None

    from geonode.geoserver.helpers import gs_catalog, ogc_server_settings
    lg_name = '%s_%d' % (slugify(self.title), self.id)
    try:
        catalog_entry = gs_catalog.get_layergroup(lg_name)
    except BaseException:
        # Lookup failures degrade to an empty catalog entry.
        catalog_entry = None
    return {
        'catalog': catalog_entry,
        'ows': ogc_server_settings.ows,
    }
def populate_object(self, obj):
    """Populate results with necessary fields

    :param obj: Layer obj
    :type obj: Layer

    :return:
    """
    if check_ogc_backend(qgis_server.BACKEND_PACKAGE):
        # Provides custom links for QGIS Server styles info
        # Default style
        try:
            default_style = obj.qgis_layer.default_style
        except BaseException:
            default_style = None
        obj.qgis_default_style = default_style

        # Styles
        try:
            styles = obj.qgis_layer.styles
        except BaseException:
            styles = []
        obj.qgis_styles = styles
    return obj
def test_csw_outputschema_iso(self):
    """Verify that GeoNode CSW can handle ISO metadata with ISO outputSchema"""
    csw = get_catalogue()

    # search for 'san_andres_y_providencia_location', output as ISO 19139
    csw.catalogue.getrecords(
        typenames='gmd:MD_Metadata',
        keywords=['%san_andres_y_providencia_location%'],
        outputschema='http://www.isotc211.org/2005/gmd',
        esn='full')

    record = list(csw.catalogue.records.values())[0]

    # title / abstract mapping from the ISO record
    self.assertEqual(record.identification.title,
                     "san_andres_y_providencia_location.shp")
    self.assertEqual(record.identification.abstract, 'No abstract provided')

    # test BBOX properties
    from decimal import Decimal
    bbox = record.identification.bbox
    corners = (
        (bbox.minx, '-81.8593555'),
        (bbox.miny, '12.1665322'),
        (bbox.maxx, '-81.356409'),
        (bbox.maxy, '13.396306'),
    )
    for actual, expected in corners:
        self.assertAlmostEqual(Decimal(actual), Decimal(expected), places=3)

    # test for correct link articulation
    for link in record.distribution.online:
        if check_ogc_backend(geoserver.BACKEND_PACKAGE):
            if link.protocol == 'OGC:WMS':
                self.assertEqual(
                    link.url,
                    f'{settings.GEOSERVER_PUBLIC_LOCATION}ows',
                    'Expected a specific OGC:WMS URL')
            elif link.protocol == 'OGC:WFS':
                self.assertEqual(
                    link.url,
                    f'{settings.GEOSERVER_PUBLIC_LOCATION}ows',
                    'Expected a specific OGC:WFS URL')
class GeoNodeBaseTestSupport(TestCase):
    """Common test scaffolding: loads fixtures, then creates the configured
    model objects in setUp and removes them (plus GeoFence rules) in tearDown.
    """

    # Resource type created in setUp (e.g. 'layer'); subclasses override.
    type = None
    # Ids of the objects created by create_models, used for cleanup.
    obj_ids = []

    # The GeoServer backend additionally needs the OAuth2 app fixture.
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        fixtures = [
            'initial_data.json',
            'group_test_data.json',
            'default_oauth_apps.json'
        ]
    else:
        fixtures = [
            'initial_data.json',
            'group_test_data.json'
        ]

    @classproperty
    def get_type(cls):
        # Read accessor for the class-level resource type.
        return cls.type

    @classproperty
    def get_obj_ids(cls):
        # Read accessor for the ids of created objects.
        return cls.obj_ids

    def setUp(self):
        """Create the model objects for the configured resource type."""
        super(GeoNodeBaseTestSupport, self).setUp()
        logging.info(" Test setUp. Creating models.")
        # NOTE(review): assigning to the classproperty name here creates an
        # instance attribute shadowing the property — confirm intended.
        self.get_obj_ids = create_models(type=self.get_type)

    def tearDown(self):
        """Remove created models and purge GeoFence rules when enabled."""
        super(GeoNodeBaseTestSupport, self).tearDown()
        logging.info(" Test tearDown. Destroying models / Cleaning up Server.")
        remove_models(self.get_obj_ids, type=self.get_type)
        from django.conf import settings
        if settings.OGC_SERVER['default'].get(
                "GEOFENCE_SECURITY_ENABLED", False):
            from geonode.security.utils import purge_geofence_all
            purge_geofence_all()
def test_new_user_has_access_to_old_layers(self):
    """Test that a new user can access the public available layers"""
    from django.contrib.auth import get_user_model
    # NOTE(review): create() stores the password unhashed, so the API login
    # below presumably fails and the request runs anonymously — confirm.
    get_user_model().objects.create(
        username='******',
        password='******')
    self.api_client.client.login(username='******', password='******')
    resp = self.api_client.get(self.list_url)
    self.assertValidJSONResponse(resp)
    self.assertEqual(len(self.deserialize(resp)['objects']), 0)

    # with delayed security
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        # Mutate the live OGC_SERVER dict so GeoFence checks run; the
        # finally block below restores the flag.
        _ogc_geofence_enabled = settings.OGC_SERVER
        try:
            _ogc_geofence_enabled['default'][
                'GEOFENCE_SECURITY_ENABLED'] = True
            with self.settings(DELAYED_SECURITY_SIGNALS=True,
                               OGC_SERVER=_ogc_geofence_enabled,
                               DEFAULT_ANONYMOUS_VIEW_PERMISSION=True):
                layer = Layer.objects.all()[0]
                layer.set_default_permissions()
                layer.refresh_from_db()
                # Delayed signals leave the layer flagged until synced.
                self.assertTrue(layer.dirty_state)

                self.client.login(username=self.user, password=self.passwd)
                resp = self.client.get(self.list_url)
                self.assertEqual(len(self.deserialize(resp)['objects']), 8)

                self.client.logout()
                resp = self.client.get(self.list_url)
                self.assertEqual(len(self.deserialize(resp)['objects']), 0)

                self.client.login(username='******', password='******')
                resp = self.client.get(self.list_url)
                self.assertEqual(len(self.deserialize(resp)['objects']), 0)
        finally:
            _ogc_geofence_enabled['default'][
                'GEOFENCE_SECURITY_ENABLED'] = False
def handle(self, *args, **options):
    """Create (or refresh) the GeoServer OAuth2 Application.

    Returns 'client_id,client_secret'. When force_exec is set, the
    credentials of an existing application are regenerated.
    """
    force_exec = options.get('force_exec')
    target_address = options.get('target_address')
    from django.conf import settings
    client_id = None
    client_secret = None
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        from geonode.geoserver.helpers import ogc_server_settings
        # Accept redirects from the internal, public and target addresses.
        redirect_uris = '%s\n%s\n%s' % (
            ogc_server_settings.LOCATION,
            ogc_server_settings.public_url,
            "{0}/geoserver/".format(target_address))
        if Application.objects.filter(name='GeoServer').exists():
            Application.objects.filter(name='GeoServer').update(redirect_uris=redirect_uris)
            if force_exec:
                # Rotate the credentials on request.
                Application.objects.filter(name='GeoServer').update(
                    client_id=generate_client_id(),
                    client_secret=generate_client_secret()
                )
            app = Application.objects.filter(name='GeoServer')[0]
            client_id = app.client_id
            client_secret = app.client_secret
        else:
            client_id = generate_client_id()
            client_secret = generate_client_secret()
            Application.objects.create(
                skip_authorization=True,
                redirect_uris=redirect_uris,
                name='GeoServer',
                authorization_grant_type='authorization-code',
                client_type='confidential',
                client_id=client_id,
                client_secret=client_secret,
                # The first superuser owns the application.
                user=get_user_model().objects.filter(is_superuser=True)[0]
            )
    return '%s,%s' % (client_id, client_secret)
def test_layer_get_detail_unauth_layer_not_public(self):
    """
    Test that layer detail gives 404 when not public and not logged in
    """
    layer = Layer.objects.all()[0]
    layer.set_permissions(self.perm_spec)
    layer.clear_dirty_state()
    # Anonymous requests must not see the restricted layer.
    self.assertHttpNotFound(
        self.api_client.get(f"{self.list_url + str(layer.id)}/"))

    self.api_client.client.login(username=self.user, password=self.passwd)
    resp = self.api_client.get(f"{self.list_url + str(layer.id)}/")
    self.assertValidJSONResponse(resp)

    # with delayed security
    with self.settings(DELAYED_SECURITY_SIGNALS=True):
        if check_ogc_backend(geoserver.BACKEND_PACKAGE):
            from geonode.security.utils import sync_geofence_with_guardian
            sync_geofence_with_guardian(layer, self.perm_spec)
            # Delayed signals leave the layer flagged until synced.
            self.assertTrue(layer.dirty_state)

            self.client.login(username=self.user, password=self.passwd)
            resp = self.client.get(self.list_url)
            self.assertEqual(len(self.deserialize(resp)['objects']), 7)

            self.client.logout()
            resp = self.client.get(self.list_url)
            self.assertEqual(len(self.deserialize(resp)['objects']), 7)

            from django.contrib.auth import get_user_model
            # NOTE(review): create() stores the password unhashed, so the
            # login below presumably fails and the request runs
            # anonymously — confirm this is intended.
            get_user_model().objects.create(
                username='******',
                password='******')
            self.client.login(username='******', password='******')
            resp = self.client.get(self.list_url)
            self.assertEqual(len(self.deserialize(resp)['objects']), 7)
def create_maplayers():
    """Create MapLayer fixtures, muting GeoServer map-layer signals first."""
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        from django.db.models import signals
        from geonode.geoserver.signals import geoserver_pre_save_maplayer
        from geonode.geoserver.signals import geoserver_post_save_map
        # Avoid hitting GeoServer while the fixture objects are created.
        signals.pre_save.disconnect(
            geoserver_pre_save_maplayer, sender=MapLayer)
        signals.post_save.disconnect(geoserver_post_save_map, sender=Map)

    for entry in maplayers:
        MapLayer.objects.create(
            fixed=entry['fixed'],
            group=entry['group'],
            name=entry['name'],
            layer_params=entry['layer_params'],
            map=Map.objects.get(title=entry['map']),
            source_params=entry['source_params'],
            stack_order=entry['stack_order'],
            opacity=entry['opacity'],
            transparent=True,
            visibility=True,
        )
def setUpClass(cls):
    """Upload two test layers and build a map composition from them."""
    super().setUpClass()
    cls.user_admin = get_user_model().objects.get(username="******")
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        # upload shape files
        shp_file = os.path.join(gisdata.VECTOR_DATA,
                                "san_andres_y_providencia_coastline.shp")
        cls.layer_coast_line = file_upload(shp_file, overwrite=True,
                                           user=cls.user_admin)
        shp_file = os.path.join(gisdata.VECTOR_DATA,
                                "san_andres_y_providencia_highway.shp")
        cls.layer_highway = file_upload(shp_file, overwrite=True,
                                        user=cls.user_admin)
        # create a map from loaded layers
        cls.map_composition = Map()
        # NOTE(review): re-fetches the same user already held in
        # cls.user_admin — presumably could be reused; confirm before
        # simplifying.
        admin_user = get_user_model().objects.get(username="******")
        cls.map_composition.create_from_layer_list(
            admin_user,
            [cls.layer_coast_line, cls.layer_highway],
            "composition",
            "abstract")
        # update MapLayers to correctly show layers' location
        with DisableDjangoSignals():
            # Mark the uploaded layers as local without re-firing signals.
            for maplayer in cls.map_composition.layers:
                if maplayer.name in [
                    cls.layer_coast_line.alternate,
                    cls.layer_highway.alternate
                ]:
                    maplayer.local = True
                    maplayer.save(force_update=True)
                    maplayer.refresh_from_db()
        cls.map_composition.refresh_from_db()
def test_new_user_has_access_to_old_layers(self):
    """Test that a new user can access the public available layers"""
    from geonode.people.models import Profile
    # NOTE(review): create() stores the password unhashed — confirm the
    # login below behaves as intended, otherwise use create_user().
    Profile.objects.create(username='******',
                           password='******')
    self.api_client.client.login(username='******', password='******')
    resp = self.api_client.get(self.list_url)
    self.assertValidJSONResponse(resp)
    # assertEquals is a deprecated alias of assertEqual
    self.assertEqual(len(self.deserialize(resp)['objects']), 8)

    # with delayed security
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        _ogc_geofence_enabled = settings.OGC_SERVER
        try:
            _ogc_geofence_enabled['default']['GEOFENCE_SECURITY_ENABLED'] = True
            with self.settings(DELAYED_SECURITY_SIGNALS=True,
                               OGC_SERVER=_ogc_geofence_enabled,
                               DEFAULT_ANONYMOUS_VIEW_PERMISSION=True):
                layer = Layer.objects.all()[0]
                layer.set_default_permissions()
                layer.refresh_from_db()
                self.assertTrue(layer.dirty_state)

                self.client.login(username=self.user, password=self.passwd)
                resp = self.client.get(self.list_url)
                self.assertEqual(len(self.deserialize(resp)['objects']), 8)

                self.client.logout()
                resp = self.client.get(self.list_url)
                self.assertEqual(len(self.deserialize(resp)['objects']), 7)

                self.client.login(username='******', password='******')
                resp = self.client.get(self.list_url)
                self.assertEqual(len(self.deserialize(resp)['objects']), 7)
        finally:
            _ogc_geofence_enabled['default']['GEOFENCE_SECURITY_ENABLED'] = False
def create_thumbnail(instance, thumbnail_remote_url, thumbnail_create_url=None,
                     check_bbox=False, ogc_client=None,
                     overwrite=False, width=240, height=200):
    """Create and store a thumbnail for a Layer or Map.

    Registers a "Remote Thumbnail" Link pointing at
    ``thumbnail_remote_url``, downloads the image from
    ``thumbnail_create_url`` (defaulting to the remote URL) and saves it
    locally via ``instance.save_thumbnail``. On the GeoServer backend, a
    server-side rendered thumbnail is used as fallback.

    :param instance: the resource being thumbnailed (Layer or Map)
    :param thumbnail_remote_url: URL recorded as the remote thumbnail link
    :param thumbnail_create_url: URL used to fetch the image;
        defaults to thumbnail_remote_url
    :param check_bbox: when True, skip generation for a degenerate bbox
    :param ogc_client: HTTP client; defaults to http_client
    :param overwrite: regenerate even if a thumbnail file already exists
    :param width: requested thumbnail width in pixels
    :param height: requested thumbnail height in pixels
    """
    thumbnail_dir = os.path.join(settings.MEDIA_ROOT, 'thumbs')
    if not os.path.exists(thumbnail_dir):
        os.makedirs(thumbnail_dir)
    thumbnail_name = None
    if isinstance(instance, Layer):
        thumbnail_name = 'layer-%s-thumb.png' % instance.uuid
    elif isinstance(instance, Map):
        thumbnail_name = 'map-%s-thumb.png' % instance.uuid
    # NOTE(review): thumbnail_name stays None for any other type and
    # os.path.join below would raise — presumably only Layer/Map reach here.
    thumbnail_path = os.path.join(thumbnail_dir, thumbnail_name)
    if overwrite or not storage.exists(thumbnail_path):
        BBOX_DIFFERENCE_THRESHOLD = 1e-5
        if not thumbnail_create_url:
            thumbnail_create_url = thumbnail_remote_url
        if check_bbox:
            # Check if the bbox is invalid
            valid_x = (
                float(
                    instance.bbox_x0) - float(
                    instance.bbox_x1)) ** 2 > BBOX_DIFFERENCE_THRESHOLD
            valid_y = (
                float(
                    instance.bbox_y1) - float(
                    instance.bbox_y0)) ** 2 > BBOX_DIFFERENCE_THRESHOLD
        else:
            valid_x = True
            valid_y = True
        image = None
        if valid_x and valid_y:
            # Record the remote URL as a Link and on the ResourceBase.
            Link.objects.get_or_create(resource=instance.get_self_resource(),
                                       url=thumbnail_remote_url,
                                       defaults=dict(
                                           extension='png',
                                           name="Remote Thumbnail",
                                           mime='image/png',
                                           link_type='image',
                                       )
                                       )
            ResourceBase.objects.filter(id=instance.id) \
                .update(thumbnail_url=thumbnail_remote_url)
            # Download thumbnail and save it locally.
            if not ogc_client:
                ogc_client = http_client
            if ogc_client:
                try:
                    params = {
                        'width': width,
                        'height': height
                    }
                    # Add the bbox param only if the bbox is different to [None, None,
                    # None, None]
                    if None not in instance.bbox:
                        params['bbox'] = instance.bbox_string
                        params['crs'] = instance.srid
                    # Append each param unless the URL already mentions it.
                    # NOTE(review): always joins with '&' — assumes the URL
                    # already carries a query string; confirm for bare URLs.
                    for _p in params.keys():
                        if _p.lower() not in thumbnail_create_url.lower():
                            thumbnail_create_url = thumbnail_create_url + '&%s=%s' % (_p, params[_p])
                    resp, image = ogc_client.request(thumbnail_create_url)
                    # NOTE(review): the 'in image' test assumes a text
                    # response; it would raise TypeError on bytes —
                    # confirm the client's return type.
                    if 'ServiceException' in image or \
                            resp.status_code < 200 or resp.status_code > 299:
                        msg = 'Unable to obtain thumbnail: %s' % image
                        logger.error(msg)
                        # Replace error message with None.
                        image = None
                except BaseException as e:
                    logger.exception(e)
                    # Replace error message with None.
                    image = None
        if check_ogc_backend(geoserver.BACKEND_PACKAGE):
            # Fallback: ask GeoServer to render the thumbnail server-side
            # when the direct download produced nothing.
            if image is None and instance.bbox:
                instance_bbox = instance.bbox[0:4]
                request_body = {
                    'bbox': [str(coord) for coord in instance_bbox],
                    'srid': instance.srid,
                    'width': width,
                    'height': height
                }
                if thumbnail_create_url:
                    request_body['thumbnail_create_url'] = thumbnail_create_url
                elif instance.alternate:
                    request_body['layers'] = instance.alternate
                image = _prepare_thumbnail_body_from_opts(request_body)
        if image is not None:
            instance.save_thumbnail(thumbnail_name, image=image)
        else:
            msg = 'Unable to obtain thumbnail for: %s' % instance
            logger.error(msg)
def test_layer_upload_metadata(self):
    """Test uploading a layer with XML metadata"""
    thelayer = os.path.join(
        gisdata.PROJECT_ROOT,
        'both', 'good', 'sangis.org', 'Airport', 'Air_Runways.shp')

    # BUG FIX: the original asserted on the (always truthy) path string;
    # actually verify that the sidecar XML metadata file exists.
    self.assertTrue(os.path.exists('%s.xml' % thelayer),
                    'Expected layer XML metadata to exist')
    uploaded = file_upload(thelayer, overwrite=True)

    self.assertEqual(
        uploaded.title,
        'Air Runways',
        'Expected specific title from uploaded layer XML metadata')

    self.assertEqual(
        uploaded.abstract,
        'Airport Runways',
        'Expected specific abstract from uploaded layer XML metadata')

    self.assertEqual(
        uploaded.purpose,
        'To show the location of Public Airports '
        'and Runways within San Diego County',
        'Expected specific purpose from uploaded layer XML metadata')

    self.assertEqual(uploaded.supplemental_information,
                     'No information provided',
                     'Expected specific supplemental information '
                     'from uploaded layer XML metadata')

    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        self.assertEqual(
            len(uploaded.keyword_list()), 7,
            'Expected specific number of keywords from uploaded layer XML metadata')
    elif check_ogc_backend(qgis_server.BACKEND_PACKAGE):
        # QGIS Server backend doesn't have GeoServer assigned keywords.
        self.assertEqual(
            len(uploaded.keyword_list()), 5,
            'Expected specific number of keywords from uploaded layer XML metadata')

    self.assertTrue(
        u'Airport,Airports,Landing Strips,Runway,Runways' in uploaded.keyword_csv,
        'Expected CSV of keywords from uploaded layer XML metadata')

    self.assertTrue(
        'Landing Strips' in uploaded.keyword_list(),
        'Expected specific keyword from uploaded layer XML metadata')

    self.assertEqual(
        uploaded.constraints_other,
        'None',
        'Expected specific constraint from uploaded layer XML metadata')

    self.assertEqual(
        uploaded.date,
        datetime.datetime(2010, 8, 3, 0, 0),
        'Expected specific date from uploaded layer XML metadata')

    # Clean up and completely delete the layer
    uploaded.delete()
class LayersApiTests(APITestCase, URLPatternsTestCase):
    """REST API tests for the layers endpoints."""

    fixtures = [
        'initial_data.json',
        'group_test_data.json',
        'default_oauth_apps.json'
    ]

    from geonode.urls import urlpatterns

    # On the GeoServer backend, extend the URLConf with the ACL and
    # user-resolution endpoints the API tests exercise.
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        from geonode.geoserver.views import layer_acls, resolve_user
        urlpatterns += [
            url(r'^acls/?$', layer_acls, name='layer_acls'),
            url(r'^acls_dep/?$', layer_acls, name='layer_acls_dep'),
            url(r'^resolve_user/?$', resolve_user,
                name='layer_resolve_user'),
            url(r'^resolve_user_dep/?$', resolve_user,
                name='layer_resolve_user_dep'),
        ]

    def setUp(self):
        """Create the document, map and layer fixtures."""
        create_models(b'document')
        create_models(b'map')
        create_models(b'layer')

    def test_layers(self):
        """
        Ensure we can access the Layers list.
        """
        url = reverse('layers-list')
        # Anonymous
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        self.assertEqual(response.data['total'], 8)
        # Pagination
        self.assertEqual(len(response.data['layers']), 8)
        logger.debug(response.data)
        for _l in response.data['layers']:
            # NOTE(review): assertTrue with a second positional arg treats
            # it as the failure message — presumably assertEqual was
            # intended here; confirm before changing.
            self.assertTrue(_l['resource_type'], 'layer')

    def test_raw_HTML_stripped_properties(self):
        """
        Ensure "raw_*" properties returns no HTML or carriage-return tag
        """
        layer = Layer.objects.first()
        layer.abstract = "<p><em>No abstract provided</em>.</p>\r\n<p><img src=\"data:image/jpeg;base64,/9j/4AAQSkZJR/>"
        layer.constraints_other = "<p><span style=\"text-decoration: underline;\">None</span></p>"
        layer.supplemental_information = "<p>No information provided í</p> <p>£682m</p>"
        layer.data_quality_statement = "<p><strong>OK</strong></p>\r\n<table style=\"border-collapse: collapse; width:\
 85.2071%;\" border=\"1\">\r\n<tbody>\r\n<tr>\r\n<td style=\"width: 49.6528%;\">1</td>\r\n<td style=\"width:\
 50%;\">2</td>\r\n</tr>\r\n<tr>\r\n<td style=\"width: 49.6528%;\">a</td>\r\n<td style=\"width: 50%;\">b</td>\
\r\n</tr>\r\n</tbody>\r\n</table>"
        layer.save()

        # Admin
        self.assertTrue(self.client.login(username='******', password='******'))
        url = reverse('layers-detail', kwargs={'pk': layer.pk})
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(int(response.data['layer']['pk']), int(layer.pk))
        self.assertEqual(response.data['layer']['raw_abstract'], "No abstract provided.")
        self.assertEqual(response.data['layer']['raw_constraints_other'], "None")
        self.assertEqual(
            response.data['layer']['raw_supplemental_information'],
            "No information provided í £682m")
        self.assertEqual(response.data['layer']['raw_data_quality_statement'], "OK 1 2 a b")
def layer_replace(request, layername, template='layers/layer_replace.html'):
    """Replace the uploaded data of an existing layer.

    GET renders the replace form; POST validates the uploaded files and,
    when the geometry kind (vector/raster) matches the existing layer,
    re-uploads the data over it. Returns a JSON payload describing the
    outcome with HTTP 200 on success and 400 on failure.
    """
    layer = _resolve_layer(
        request,
        layername,
        'base.change_resourcebase',
        _PERMISSION_MSG_MODIFY)
    if request.method == 'GET':
        ctx = {
            'charsets': CHARSETS,
            'layer': layer,
            'is_featuretype': layer.is_vector(),
            'is_layer': True,
        }
        return render_to_response(template, RequestContext(request, ctx))
    elif request.method == 'POST':
        form = LayerUploadForm(request.POST, request.FILES)
        tempdir = None
        out = {}
        if form.is_valid():
            try:
                tempdir, base_file = form.write_files()
                if layer.is_vector() and is_raster(base_file):
                    out['success'] = False
                    out['errors'] = _(
                        "You are attempting to replace a vector layer with a raster.")
                elif (not layer.is_vector()) and is_vector(base_file):
                    out['success'] = False
                    out['errors'] = _(
                        "You are attempting to replace a raster layer with a vector.")
                else:
                    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
                        # delete geoserver's store before upload
                        cat = gs_catalog
                        cascading_delete(cat, layer.typename)
                        out['ogc_backend'] = geoserver.BACKEND_PACKAGE
                    saved_layer = file_upload(
                        base_file,
                        name=layer.name,
                        user=request.user,
                        overwrite=True,
                        charset=form.cleaned_data["charset"],
                    )
                    out['success'] = True
                    out['url'] = reverse(
                        'layer_detail', args=[
                            saved_layer.service_typename])
            except Exception as e:
                logger.exception(e)
                tb = traceback.format_exc()
                out['success'] = False
                out['errors'] = str(tb)
            finally:
                if tempdir is not None:
                    shutil.rmtree(tempdir)
        else:
            errormsgs = []
            for e in form.errors.values():
                errormsgs.append([escape(v) for v in e])
            # BUG FIX: 'success' was never set on the invalid-form path,
            # so the status-code check below raised KeyError.
            out['success'] = False
            out['errors'] = form.errors
            out['errormsgs'] = errormsgs
        if out['success']:
            status_code = 200
        else:
            status_code = 400
        return HttpResponse(
            json.dumps(out),
            content_type='application/json',
            status=status_code)
def test_layer_links(self):
    """Check the ``Link`` objects GeoNode registers for a layer.

    For both a vector (dataStore) and a raster (coverageStore) layer:
    delete the default link types, then confirm that
    ``set_resource_default_links`` recreates the expected number of
    metadata, data and image links.
    """
    # --- vector layer (dataStore) ---
    lyr = Layer.objects.filter(storeType="dataStore").first()
    self.assertEquals(lyr.storeType, "dataStore")
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        links = Link.objects.filter(resource=lyr.resourcebase_ptr,
                                    link_type="metadata")
        self.assertIsNotNone(links)
        self.assertEquals(len(links), 7)
        for ll in links:
            self.assertEquals(ll.link_type, "metadata")

        # Wipe the default links so they can be regenerated from scratch.
        _def_link_types = (
            'data', 'image', 'original', 'html',
            'OGC:WMS', 'OGC:WFS', 'OGC:WCS')
        Link.objects.filter(resource=lyr.resourcebase_ptr,
                            link_type__in=_def_link_types).delete()
        links = Link.objects.filter(resource=lyr.resourcebase_ptr,
                                    link_type="data")
        self.assertIsNotNone(links)
        self.assertEquals(len(links), 0)

        set_resource_default_links(lyr, lyr)

        links = Link.objects.filter(resource=lyr.resourcebase_ptr,
                                    link_type="metadata")
        self.assertIsNotNone(links)
        self.assertEquals(len(links), 7)
        for ll in links:
            self.assertEquals(ll.link_type, "metadata")
        links = Link.objects.filter(resource=lyr.resourcebase_ptr,
                                    link_type="data")
        self.assertIsNotNone(links)
        # Vector layers are expected to regenerate 6 data links.
        self.assertEquals(len(links), 6)
        links = Link.objects.filter(resource=lyr.resourcebase_ptr,
                                    link_type="image")
        self.assertIsNotNone(links)
        self.assertEquals(len(links), 3)

    # --- raster layer (coverageStore) ---
    lyr = Layer.objects.filter(storeType="coverageStore").first()
    self.assertEquals(lyr.storeType, "coverageStore")
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        links = Link.objects.filter(resource=lyr.resourcebase_ptr,
                                    link_type="metadata")
        self.assertIsNotNone(links)
        self.assertEquals(len(links), 7)
        for ll in links:
            self.assertEquals(ll.link_type, "metadata")

        _def_link_types = (
            'data', 'image', 'original', 'html',
            'OGC:WMS', 'OGC:WFS', 'OGC:WCS')
        Link.objects.filter(resource=lyr.resourcebase_ptr,
                            link_type__in=_def_link_types).delete()
        links = Link.objects.filter(resource=lyr.resourcebase_ptr,
                                    link_type="data")
        self.assertIsNotNone(links)
        self.assertEquals(len(links), 0)

        set_resource_default_links(lyr, lyr)

        links = Link.objects.filter(resource=lyr.resourcebase_ptr,
                                    link_type="metadata")
        self.assertIsNotNone(links)
        self.assertEquals(len(links), 7)
        for ll in links:
            self.assertEquals(ll.link_type, "metadata")
        links = Link.objects.filter(resource=lyr.resourcebase_ptr,
                                    link_type="data")
        self.assertIsNotNone(links)
        # Raster layers regenerate fewer data links (no WFS downloads)...
        self.assertEquals(len(links), 2)
        links = Link.objects.filter(resource=lyr.resourcebase_ptr,
                                    link_type="image")
        self.assertIsNotNone(links)
        # ...but more image links than vector layers.
        self.assertEquals(len(links), 7)
from geonode import qgis_server
from geonode.base.models import Link
from geonode.layers.models import Layer, LayerFile
from geonode.maps.models import Map, MapLayer
from geonode.qgis_server.gis_tools import set_attributes
from geonode.qgis_server.helpers import tile_url_format, create_qgis_project, \
    style_list
from geonode.qgis_server.models import QGISServerLayer, QGISServerMap
from geonode.qgis_server.tasks.update import create_qgis_server_thumbnail
from geonode.qgis_server.xml_utilities import update_xml
from geonode.utils import check_ogc_backend

logger = logging.getLogger("geonode.qgis_server.signals")

# Only resolve the on-disk layer directory when the QGIS backend is active;
# the setting does not exist under other backends.
if check_ogc_backend(qgis_server.BACKEND_PACKAGE):
    QGIS_layer_directory = settings.QGIS_SERVER_CONFIG['layer_directory']

# Custom signal; presumably fired once a map and its layers are saved —
# TODO confirm against the senders elsewhere in this app.
qgis_map_with_layers = Signal(providing_args=[])


def qgis_server_layer_post_delete(instance, sender, **kwargs):
    """Removes the layer from Local Storage."""
    logger.debug('QGIS Server Layer Post Delete')
    instance.delete_qgis_layer()


def qgis_server_pre_delete(instance, sender, **kwargs):
    """Removes the layer from Local Storage."""
    # logger.debug('QGIS Server Pre Delete')
    # Deleting QGISServerLayer object happened on geonode level since it's
def clean(self):
    """Cross-field validation for an uploaded spatial dataset.

    Handles two input shapes: a zip archive (components discovered from
    the archive's namelist) or loose files (components read from the
    individual form fields).  Validates the extension against the upload
    mode, enforces shapefile sidecar consistency, and renames ``.shp.xml``
    / ``.shp.sld`` sidecars so they cannot clobber the ``.shp`` itself.
    """
    cleaned = super().clean()
    dbf_file = shx_file = prj_file = xml_file = sld_file = None
    base_name = base_ext = None
    if zipfile.is_zipfile(cleaned["base_file"]):
        # Zip upload: scan the archive for the shapefile and its sidecars.
        filenames = zipfile.ZipFile(
            cleaned["base_file"], allowZip64=True).namelist()
        for filename in filenames:
            name, ext = os.path.splitext(filename)
            if ext.lower() == '.shp':
                if base_name is not None:
                    raise forms.ValidationError(
                        "Only one shapefile per zip is allowed")
                base_name = name
                base_ext = ext
            elif ext.lower() == '.dbf':
                dbf_file = filename
            elif ext.lower() == '.shx':
                shx_file = filename
            elif ext.lower() == '.prj':
                prj_file = filename
            elif ext.lower() == '.xml':
                xml_file = filename
            elif ext.lower() == '.sld':
                sld_file = filename
        if base_name is None:
            raise forms.ValidationError(
                "Zip files can only contain shapefile.")
    else:
        # Loose-files upload: components come from the separate fields.
        base_name, base_ext = os.path.splitext(cleaned["base_file"].name)
        if cleaned["dbf_file"] is not None:
            dbf_file = cleaned["dbf_file"].name
        if cleaned["shx_file"] is not None:
            shx_file = cleaned["shx_file"].name
        if cleaned["prj_file"] is not None:
            prj_file = cleaned["prj_file"].name
        if cleaned["xml_file"] is not None:
            xml_file = cleaned["xml_file"].name
        # SLD style only available in GeoServer backend
        if check_ogc_backend(geoserver.BACKEND_PACKAGE):
            if cleaned["sld_file"] is not None:
                sld_file = cleaned["sld_file"].name

    if not cleaned["metadata_upload_form"] and not cleaned[
            "style_upload_form"] and base_ext.lower() not in (
            ".shp", ".tif", ".tiff", ".geotif", ".geotiff",
            ".asc", ".sld", ".kml", ".kmz", ".csv"):
        raise forms.ValidationError(
            f"Only Shapefiles, GeoTiffs, and ASCIIs are supported. You uploaded a {base_ext} file"
        )
    elif cleaned["metadata_upload_form"] and base_ext.lower() not in (
            ".xml"):
        raise forms.ValidationError(
            f"Only XML files are supported. You uploaded a {base_ext} file"
        )
    elif cleaned["style_upload_form"] and base_ext.lower() not in (".sld"):
        raise forms.ValidationError(
            f"Only SLD files are supported. You uploaded a {base_ext} file"
        )

    if base_ext.lower() == ".shp":
        # .dbf and .shx are mandatory shapefile companions.
        if dbf_file is None or shx_file is None:
            raise forms.ValidationError(
                "When uploading Shapefiles, .shx and .dbf files are also required."
            )
        dbf_name, __ = os.path.splitext(dbf_file)
        shx_name, __ = os.path.splitext(shx_file)
        if dbf_name != base_name or shx_name != base_name:
            raise forms.ValidationError(
                "It looks like you're uploading "
                "components from different Shapefiles. Please "
                "double-check your file selections.")
        if prj_file is not None:
            if os.path.splitext(prj_file)[0] != base_name:
                raise forms.ValidationError(
                    "It looks like you're "
                    "uploading components from different Shapefiles. "
                    "Please double-check your file selections.")
        if xml_file is not None:
            if os.path.splitext(xml_file)[0] != base_name:
                if xml_file.find('.shp') != -1:
                    # force rename of file so that file.shp.xml doesn't
                    # overwrite as file.shp
                    if cleaned.get("xml_file"):
                        cleaned["xml_file"].name = f'{base_name}.xml'
        if sld_file is not None:
            if os.path.splitext(sld_file)[0] != base_name:
                if sld_file.find('.shp') != -1:
                    # force rename of file so that file.shp.sld doesn't
                    # overwrite as file.shp
                    if cleaned.get("sld_file"):
                        cleaned["sld_file"].name = f'{base_name}.sld'
    return cleaned
class LayerResource(CommonModelApi):
    """Dataset API"""

    # Flattened OGC links, populated by dehydrate_ogc_links().
    links = fields.ListField(
        attribute='links',
        null=True,
        use_in='all',
        default=[])
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        # Style relations only exist on the GeoServer backend.
        default_style = fields.ForeignKey(
            'geonode.api.api.StyleResource',
            attribute='default_style',
            null=True)
        styles = fields.ManyToManyField(
            'geonode.api.api.StyleResource',
            attribute='styles',
            null=True,
            use_in='detail')

    def build_filters(self, filters=None, ignore_bad_filters=False, **kwargs):
        """Extract the non-ORM ``metadata_only`` flag before delegating."""
        _filters = filters.copy()
        metadata_only = _filters.pop('metadata_only', False)
        orm_filters = super().build_filters(_filters)
        # metadata_only arrives as a list of query-string values; use the
        # first one, defaulting to False when the param is absent.
        orm_filters[
            'metadata_only'] = False if not metadata_only else metadata_only[0]
        return orm_filters

    def format_objects(self, objects):
        """
        Formats the object.
        """
        formatted_objects = []
        for obj in objects:
            # convert the object to a dict using the standard values.
            # includes other values
            values = self.VALUES + ['alternate', 'name']
            formatted_obj = model_to_dict(obj, fields=values)
            username = obj.owner.get_username()
            full_name = (obj.owner.get_full_name() or username)
            formatted_obj['owner__username'] = username
            formatted_obj['owner_name'] = full_name
            if obj.category:
                formatted_obj['category__gn_description'] = _(
                    obj.category.gn_description)
            if obj.group:
                formatted_obj['group'] = obj.group
                try:
                    formatted_obj['group_name'] = GroupProfile.objects.get(
                        slug=obj.group.name)
                except GroupProfile.DoesNotExist:
                    formatted_obj['group_name'] = obj.group

            formatted_obj['keywords'] = [
                k.name for k in obj.keywords.all()] if obj.keywords else []
            formatted_obj['regions'] = [
                r.name for r in obj.regions.all()] if obj.regions else []

            # provide style information
            bundle = self.build_bundle(obj=obj)
            formatted_obj['default_style'] = self.default_style.dehydrate(
                bundle, for_list=True)

            # Add resource uri
            formatted_obj['resource_uri'] = self.get_resource_uri(bundle)

            formatted_obj['links'] = self.dehydrate_ogc_links(bundle)

            if 'site_url' not in formatted_obj or len(
                    formatted_obj['site_url']) == 0:
                formatted_obj['site_url'] = settings.SITEURL

            # Probe Remote Services
            formatted_obj['store_type'] = 'dataset'
            formatted_obj['online'] = True
            if hasattr(obj, 'subtype'):
                formatted_obj['store_type'] = obj.subtype
                if obj.subtype in ['tileStore', 'remote'] and hasattr(
                        obj, 'remote_service'):
                    if obj.remote_service:
                        formatted_obj['online'] = (
                            obj.remote_service.probe == 200)
                    else:
                        formatted_obj['online'] = False

            formatted_obj['gtype'] = self.dehydrate_gtype(bundle)

            # replace thumbnail_url with curated_thumbs
            if hasattr(obj, 'curatedthumbnail'):
                try:
                    if hasattr(obj.curatedthumbnail.img_thumbnail, 'url'):
                        formatted_obj[
                            'thumbnail_url'] = obj.curatedthumbnail.thumbnail_url
                except Exception as e:
                    # Best-effort: a broken curated thumbnail must not break
                    # the whole listing.
                    logger.exception(e)

            formatted_obj['processed'] = obj.instance_is_processed
            # put the object on the response stack
            formatted_objects.append(formatted_obj)
        return formatted_objects

    def _dehydrate_links(self, bundle, link_types=None):
        """Dehydrate links field."""
        dehydrated = []
        obj = bundle.obj
        link_fields = [
            'extension',
            'link_type',
            'name',
            'mime',
            'url'
        ]

        links = obj.link_set.all()
        if link_types:
            links = links.filter(link_type__in=link_types)
        for lnk in links:
            formatted_link = model_to_dict(lnk, fields=link_fields)
            dehydrated.append(formatted_link)
        return dehydrated

    def dehydrate_links(self, bundle):
        """All links for the object, regardless of type."""
        return self._dehydrate_links(bundle)

    def dehydrate_ogc_links(self, bundle):
        """Only the OGC service links (WMS/WFS/WCS)."""
        return self._dehydrate_links(bundle, ['OGC:WMS', 'OGC:WFS', 'OGC:WCS'])

    def dehydrate_gtype(self, bundle):
        """Geometry type of the underlying object."""
        return bundle.obj.gtype

    def build_bundle(self, obj=None, data=None, request=None, **kwargs):
        """Override build_bundle method to add additional info."""
        if obj is None and self._meta.object_class:
            obj = self._meta.object_class()
        elif obj:
            obj = self.populate_object(obj)
        return Bundle(obj=obj, data=data, request=request, **kwargs)

    def populate_object(self, obj):
        """Populate results with necessary fields

        :param obj: Dataset obj
        :type obj: Dataset
        :return:
        """
        return obj

    # copy parent attribute before modifying
    VALUES = CommonModelApi.VALUES[:]
    VALUES.append('typename')

    class Meta(CommonMetaApi):
        paginator_class = CrossSiteXHRPaginator
        queryset = Dataset.objects.distinct().order_by('-date')
        resource_name = 'datasets'
        detail_uri_name = 'id'
        include_resource_uri = True
        allowed_methods = ['get', 'patch']
        excludes = ['csw_anytext', 'metadata_xml']
        authentication = MultiAuthentication(SessionAuthentication(),
                                             OAuthAuthentication(),
                                             GeonodeApiKeyAuthentication())
        filtering = CommonMetaApi.filtering
        # Allow filtering using ID
        filtering.update({
            'id': ALL,
            'name': ALL,
            'alternate': ALL,
            'metadata_only': ALL
        })
def wrapper(*args, **kwargs):
    """Call the wrapped function only when the required OGC backend is active.

    Returns the wrapped function's result, or None when the configured
    backend does not match ``backend_package``.
    """
    if check_ogc_backend(backend_package):
        return func(*args, **kwargs)
def test_get_files(self):
    """Exercise get_files() shapefile-component discovery.

    Covers: well-formed sets, missing companions, SLD/QML pickup per
    backend, capitalized and mixed-case extensions, and the ambiguous
    duplicate-case scenarios that must raise GeoNodeException.

    Fix: ``dict.iteritems()`` is Python-2-only; use ``items()``, which
    behaves the same here on both Python 2 and 3.
    """
    # Check that a well-formed Shapefile has its components all picked up
    d = None
    try:
        d = tempfile.mkdtemp()
        for f in ("foo.shp", "foo.shx", "foo.prj", "foo.dbf"):
            path = os.path.join(d, f)
            # open and immediately close to create empty file
            open(path, 'w').close()

        gotten_files = get_files(os.path.join(d, "foo.shp"))
        # strip the tempdir prefix so only relative names are compared
        gotten_files = dict((k, v[len(d) + 1:])
                            for k, v in gotten_files.items())
        self.assertEquals(gotten_files, dict(shp="foo.shp", shx="foo.shx",
                                             prj="foo.prj", dbf="foo.dbf"))
    finally:
        if d is not None:
            shutil.rmtree(d)

    # Check that a Shapefile missing required components raises an
    # exception
    d = None
    try:
        d = tempfile.mkdtemp()
        for f in ("foo.shp", "foo.shx", "foo.prj"):
            path = os.path.join(d, f)
            # open and immediately close to create empty file
            open(path, 'w').close()

        self.assertRaises(
            GeoNodeException,
            lambda: get_files(os.path.join(d, "foo.shp")))
    finally:
        if d is not None:
            shutil.rmtree(d)

    # Check that including an SLD with a valid shapefile results in the SLD
    # getting picked up
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        d = None
        try:
            d = tempfile.mkdtemp()
            for f in ("foo.shp", "foo.shx", "foo.prj", "foo.dbf",
                      "foo.sld"):
                path = os.path.join(d, f)
                # open and immediately close to create empty file
                open(path, 'w').close()

            gotten_files = get_files(os.path.join(d, "foo.shp"))
            gotten_files = dict((k, v[len(d) + 1:])
                                for k, v in gotten_files.items())
            self.assertEquals(
                gotten_files,
                dict(shp="foo.shp", shx="foo.shx", prj="foo.prj",
                     dbf="foo.dbf", sld="foo.sld"))
        finally:
            if d is not None:
                shutil.rmtree(d)

    # Check that including a QML with a valid shapefile
    # results in the QML getting picked up
    if check_ogc_backend(qgis_server.BACKEND_PACKAGE):
        d = None
        try:
            d = tempfile.mkdtemp()
            for f in ("foo.shp", "foo.shx", "foo.prj", "foo.dbf",
                      "foo.qml", "foo.json"):
                path = os.path.join(d, f)
                # open and immediately close to create empty file
                open(path, 'w').close()

            gotten_files = get_files(os.path.join(d, "foo.shp"))
            gotten_files = dict((k, v[len(d) + 1:])
                                for k, v in gotten_files.items())
            self.assertEquals(
                gotten_files,
                dict(shp="foo.shp", shx="foo.shx", prj="foo.prj",
                     dbf="foo.dbf", qml="foo.qml", json="foo.json"))
        finally:
            if d is not None:
                shutil.rmtree(d)

    # Check that capitalized extensions are ok
    d = None
    try:
        d = tempfile.mkdtemp()
        for f in ("foo.SHP", "foo.SHX", "foo.PRJ", "foo.DBF"):
            path = os.path.join(d, f)
            # open and immediately close to create empty file
            open(path, 'w').close()

        gotten_files = get_files(os.path.join(d, "foo.SHP"))
        gotten_files = dict((k, v[len(d) + 1:])
                            for k, v in gotten_files.items())
        self.assertEquals(gotten_files, dict(shp="foo.SHP", shx="foo.SHX",
                                             prj="foo.PRJ", dbf="foo.DBF"))
    finally:
        if d is not None:
            shutil.rmtree(d)

    # Check that mixed capital and lowercase extensions are ok
    d = None
    try:
        d = tempfile.mkdtemp()
        for f in ("foo.SHP", "foo.shx", "foo.pRJ", "foo.DBF"):
            path = os.path.join(d, f)
            # open and immediately close to create empty file
            open(path, 'w').close()

        gotten_files = get_files(os.path.join(d, "foo.SHP"))
        gotten_files = dict((k, v[len(d) + 1:])
                            for k, v in gotten_files.items())
        self.assertEquals(gotten_files, dict(shp="foo.SHP", shx="foo.shx",
                                             prj="foo.pRJ", dbf="foo.DBF"))
    finally:
        if d is not None:
            shutil.rmtree(d)

    # Check that including both capital and lowercase extensions raises an
    # exception
    d = None
    try:
        d = tempfile.mkdtemp()
        files = ("foo.SHP", "foo.SHX", "foo.PRJ", "foo.DBF",
                 "foo.shp", "foo.shx", "foo.prj", "foo.dbf")
        for f in files:
            path = os.path.join(d, f)
            # open and immediately close to create empty file
            open(path, 'w').close()

        # Only run the tests if this is a case sensitive OS
        if len(os.listdir(d)) == len(files):
            self.assertRaises(
                GeoNodeException,
                lambda: get_files(os.path.join(d, "foo.SHP")))
            self.assertRaises(
                GeoNodeException,
                lambda: get_files(os.path.join(d, "foo.shp")))
    finally:
        if d is not None:
            shutil.rmtree(d)

    # Check that including both capital and lowercase PRJ (this is
    # special-cased in the implementation)
    d = None
    try:
        d = tempfile.mkdtemp()
        files = ("foo.SHP", "foo.SHX", "foo.PRJ", "foo.DBF", "foo.prj")
        for f in files:
            path = os.path.join(d, f)
            # open and immediately close to create empty file
            open(path, 'w').close()

        # Only run the tests if this is a case sensitive OS
        if len(os.listdir(d)) == len(files):
            self.assertRaises(
                GeoNodeException,
                lambda: get_files(os.path.join(d, "foo.SHP")))
            self.assertRaises(
                GeoNodeException,
                lambda: get_files(os.path.join(d, "foo.shp")))
    finally:
        if d is not None:
            shutil.rmtree(d)

    # Check that including both capital and lowercase SLD (this is
    # special-cased in the implementation)
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        d = None
        try:
            d = tempfile.mkdtemp()
            files = ("foo.SHP", "foo.SHX", "foo.PRJ", "foo.DBF",
                     "foo.SLD", "foo.sld")
            for f in files:
                path = os.path.join(d, f)
                # open and immediately close to create empty file
                open(path, 'w').close()

            # Only run the tests if this is a case sensitive OS
            if len(os.listdir(d)) == len(files):
                self.assertRaises(
                    GeoNodeException,
                    lambda: get_files(os.path.join(d, "foo.SHP")))
                self.assertRaises(
                    GeoNodeException,
                    lambda: get_files(os.path.join(d, "foo.shp")))
        finally:
            if d is not None:
                shutil.rmtree(d)
def map_download(request, mapid, template='maps/map_download.html'):
    """Download all the layers of a map as a batch.

    GET renders the download page with the map's layers classified as
    locked / remote / downloadable.  POST launches the batch download:
    GeoServer via its batchDownload REST process, QGIS Server via a
    redirect to its own download view.

    XXX To do: remove layer status once progress id done.
    """
    map_obj = _resolve_map(
        request,
        mapid,
        'base.download_resourcebase',
        _PERMISSION_MSG_VIEW)

    map_status = dict()
    if request.method == 'POST':

        def perm_filter(layer):
            # Keep only layers the requesting user may view.
            return request.user.has_perm(
                'base.view_resourcebase',
                obj=layer.get_self_resource())

        mapJson = map_obj.json(perm_filter)

        # we need to remove duplicate layers
        # NOTE(review): this mutates j_layers while iterating it — the
        # legacy dedup relies on that behavior; do not "fix" casually.
        j_map = json.loads(mapJson)
        j_layers = j_map["layers"]
        for j_layer in j_layers:
            if j_layer["service"] is None:
                j_layers.remove(j_layer)
                continue
            if (len([_l for _l in j_layers if _l == j_layer])) > 1:
                j_layers.remove(j_layer)
        mapJson = json.dumps(j_map)

        if check_ogc_backend(geoserver.BACKEND_PACKAGE):
            # TODO the url needs to be verified on geoserver
            url = "%srest/process/batchDownload/launch/" % ogc_server_settings.LOCATION
        elif check_ogc_backend(qgis_server.BACKEND_PACKAGE):
            url = urljoin(settings.SITEURL,
                          reverse("qgis_server:download-map",
                                  kwargs={'mapid': mapid}))
            # qgis-server backend stop here, continue on qgis_server/views.py
            return redirect(url)

        # the path to geoserver backend continue here
        resp, content = http_client.request(url, 'POST', body=mapJson)

        status = int(resp.status)

        if status == 200:
            map_status = json.loads(content)
            request.session["map_status"] = map_status
        else:
            raise Exception(
                'Could not start the download of %s. Error was: %s' %
                (map_obj.title, content))

    locked_layers = []
    remote_layers = []
    downloadable_layers = []

    for lyr in map_obj.layer_set.all():
        if lyr.group != "background":
            if not lyr.local:
                remote_layers.append(lyr)
            else:
                ownable_layer = Layer.objects.get(alternate=lyr.name)
                if not request.user.has_perm(
                        'download_resourcebase',
                        obj=ownable_layer.get_self_resource()):
                    locked_layers.append(lyr)
                else:
                    # we need to add the layer only once
                    if len(
                            [_l for _l in downloadable_layers if _l.name == lyr.name]) == 0:
                        downloadable_layers.append(lyr)
    # Normalize trailing slash only for absolute http(s) SITEURLs.
    site_url = settings.SITEURL.rstrip('/') if settings.SITEURL.startswith('http') else settings.SITEURL

    return render(request, template, context={
        "geoserver": ogc_server_settings.PUBLIC_LOCATION,
        "map_status": map_status,
        "map": map_obj,
        "locked_layers": locked_layers,
        "remote_layers": remote_layers,
        "downloadable_layers": downloadable_layers,
        "site": site_url
    })
def test_not_superuser_permissions(self):
    """Walk a regular user (bobby) through each layer permission.

    For every permission: confirm access is denied without it (HTTP 401)
    and granted after ``assign_perm`` (HTTP 200).
    """
    # grab bobby
    bob = get_user_model().objects.get(username='******')

    # grab a layer
    layer = Layer.objects.all()[0]
    layer.set_default_permissions()

    # verify bobby has view/change permissions on it but not manage
    self.assertFalse(
        bob.has_perm(
            'change_resourcebase_permissions',
            layer.get_self_resource()))

    self.assertTrue(self.client.login(username='******', password='******'))

    # 1. view_resourcebase
    # 1.1 has view_resourcebase: verify that bobby can access the layer
    # detail page
    self.assertTrue(
        bob.has_perm(
            'view_resourcebase',
            layer.get_self_resource()))
    response = self.client.get(
        reverse('layer_detail', args=(layer.alternate,)))
    self.assertEquals(response.status_code, 200)
    # 1.2 has not view_resourcebase: verify that bobby can not access the
    # layer detail page
    remove_perm('view_resourcebase', bob, layer.get_self_resource())
    anonymous_group = Group.objects.get(name='anonymous')
    remove_perm('view_resourcebase', anonymous_group,
                layer.get_self_resource())
    response = self.client.get(
        reverse('layer_detail', args=(layer.alternate,)))
    self.assertEquals(response.status_code, 401)

    # 2. change_resourcebase
    # 2.1 has not change_resourcebase: verify that bobby cannot access the
    # layer replace page
    response = self.client.get(
        reverse('layer_replace', args=(layer.alternate,)))
    self.assertEquals(response.status_code, 401)
    # 2.2 has change_resourcebase: verify that bobby can access the layer
    # replace page
    assign_perm('change_resourcebase', bob, layer.get_self_resource())
    self.assertTrue(
        bob.has_perm(
            'change_resourcebase',
            layer.get_self_resource()))
    response = self.client.get(
        reverse('layer_replace', args=(layer.alternate,)))
    self.assertEquals(response.status_code, 200)

    # 3. delete_resourcebase
    # 3.1 has not delete_resourcebase: verify that bobby cannot access the
    # layer delete page
    response = self.client.get(
        reverse('layer_remove', args=(layer.alternate,)))
    self.assertEquals(response.status_code, 401)
    # 3.2 has delete_resourcebase: verify that bobby can access the layer
    # delete page
    assign_perm('delete_resourcebase', bob, layer.get_self_resource())
    self.assertTrue(
        bob.has_perm(
            'delete_resourcebase',
            layer.get_self_resource()))
    response = self.client.get(
        reverse('layer_remove', args=(layer.alternate,)))
    self.assertEquals(response.status_code, 200)

    # 4. change_resourcebase_metadata
    # 4.1 has not change_resourcebase_metadata: verify that bobby cannot
    # access the layer metadata page
    response = self.client.get(
        reverse('layer_metadata', args=(layer.alternate,)))
    self.assertEquals(response.status_code, 401)
    # 4.2 has delete_resourcebase: verify that bobby can access the layer
    # delete page
    assign_perm('change_resourcebase_metadata', bob,
                layer.get_self_resource())
    self.assertTrue(
        bob.has_perm(
            'change_resourcebase_metadata',
            layer.get_self_resource()))
    response = self.client.get(
        reverse('layer_metadata', args=(layer.alternate,)))
    self.assertEquals(response.status_code, 200)

    # 5. change_resourcebase_permissions
    # should be impossible for the user without change_resourcebase_permissions
    # to change permissions as the permission form is not available in the
    # layer detail page?

    # 6. change_layer_data
    # must be done in integration test sending a WFS-T request with CURL

    # 7. change_layer_style
    # 7.1 has not change_layer_style: verify that bobby cannot access
    # the layer style page
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        # Only for geoserver backend
        response = self.client.get(
            reverse('layer_style_manage', args=(layer.alternate,)))
        self.assertEquals(response.status_code, 401)
    # 7.2 has change_layer_style: verify that bobby can access the
    # change layer style page
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        # Only for geoserver backend
        assign_perm('change_layer_style', bob, layer)
        self.assertTrue(
            bob.has_perm(
                'change_layer_style',
                layer))
        response = self.client.get(
            reverse('layer_style_manage', args=(layer.alternate,)))
        self.assertEquals(response.status_code, 200)
def clean(self):
    """Cross-field validation for an uploaded spatial dataset (legacy variant).

    Same two input shapes as the newer form: a zip archive (components
    discovered from the namelist) or loose files (components read from
    the form fields).  Validates the extension against the upload mode,
    enforces shapefile sidecar consistency, and renames ``.shp.xml`` /
    ``.shp.sld`` sidecars so they cannot clobber the ``.shp`` itself.
    """
    cleaned = super(LayerUploadForm, self).clean()
    dbf_file = shx_file = prj_file = xml_file = sld_file = None
    base_name = base_ext = None
    if zipfile.is_zipfile(cleaned["base_file"]):
        # Zip upload: scan the archive for the shapefile and its sidecars.
        filenames = zipfile.ZipFile(cleaned["base_file"]).namelist()
        for filename in filenames:
            name, ext = os.path.splitext(filename)
            if ext.lower() == '.shp':
                if base_name is not None:
                    raise forms.ValidationError(
                        "Only one shapefile per zip is allowed")
                base_name = name
                base_ext = ext
            elif ext.lower() == '.dbf':
                dbf_file = filename
            elif ext.lower() == '.shx':
                shx_file = filename
            elif ext.lower() == '.prj':
                prj_file = filename
            elif ext.lower() == '.xml':
                xml_file = filename
            elif ext.lower() == '.sld':
                sld_file = filename
        if base_name is None:
            raise forms.ValidationError(
                "Zip files can only contain shapefile.")
    else:
        # Loose-files upload: components come from the separate fields.
        base_name, base_ext = os.path.splitext(cleaned["base_file"].name)
        if cleaned["dbf_file"] is not None:
            dbf_file = cleaned["dbf_file"].name
        if cleaned["shx_file"] is not None:
            shx_file = cleaned["shx_file"].name
        if cleaned["prj_file"] is not None:
            prj_file = cleaned["prj_file"].name
        if cleaned["xml_file"] is not None:
            xml_file = cleaned["xml_file"].name
        # SLD style only available in GeoServer backend
        if check_ogc_backend(geoserver.BACKEND_PACKAGE):
            if cleaned["sld_file"] is not None:
                sld_file = cleaned["sld_file"].name

    if not cleaned["metadata_upload_form"] and not cleaned["style_upload_form"] and base_ext.lower() not in (
            ".shp", ".tif", ".tiff", ".geotif", ".geotiff", ".asc"):
        raise forms.ValidationError(
            "Only Shapefiles, GeoTiffs, and ASCIIs are supported. You "
            "uploaded a %s file" % base_ext)
    elif cleaned["metadata_upload_form"] and base_ext.lower() not in (".xml"):
        raise forms.ValidationError(
            "Only XML files are supported. You uploaded a %s file" %
            base_ext)
    elif cleaned["style_upload_form"] and base_ext.lower() not in (".sld"):
        raise forms.ValidationError(
            "Only SLD files are supported. You uploaded a %s file" %
            base_ext)

    if base_ext.lower() == ".shp":
        # .dbf and .shx are mandatory shapefile companions.
        if dbf_file is None or shx_file is None:
            raise forms.ValidationError(
                "When uploading Shapefiles, .shx and .dbf files are also required.")
        dbf_name, __ = os.path.splitext(dbf_file)
        shx_name, __ = os.path.splitext(shx_file)
        if dbf_name != base_name or shx_name != base_name:
            raise forms.ValidationError(
                "It looks like you're uploading "
                "components from different Shapefiles. Please "
                "double-check your file selections.")
        if prj_file is not None:
            if os.path.splitext(prj_file)[0] != base_name:
                raise forms.ValidationError(
                    "It looks like you're "
                    "uploading components from different Shapefiles. "
                    "Please double-check your file selections.")
        if xml_file is not None:
            if os.path.splitext(xml_file)[0] != base_name:
                if xml_file.find('.shp') != -1:
                    # force rename of file so that file.shp.xml doesn't
                    # overwrite as file.shp
                    if cleaned.get("xml_file"):
                        cleaned["xml_file"].name = '%s.xml' % base_name
        if sld_file is not None:
            if os.path.splitext(sld_file)[0] != base_name:
                if sld_file.find('.shp') != -1:
                    # force rename of file so that file.shp.sld doesn't
                    # overwrite as file.shp
                    if cleaned.get("sld_file"):
                        cleaned["sld_file"].name = '%s.sld' % base_name
    return cleaned
class LayerUploadForm(forms.Form):
    """Upload form for a spatial dataset (GeoServer-backend variant).

    Accepts either a zip archive in ``base_file`` or the individual
    shapefile components as separate fields.
    """
    base_file = forms.FileField()
    dbf_file = forms.FileField(required=False)
    shx_file = forms.FileField(required=False)
    prj_file = forms.FileField(required=False)
    xml_file = forms.FileField(required=False)
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        # SLD styles only make sense on the GeoServer backend.
        sld_file = forms.FileField(required=False)
    charset = forms.CharField(required=False)

    metadata_uploaded_preserve = forms.BooleanField(required=False)
    metadata_upload_form = forms.BooleanField(required=False)
    style_upload_form = forms.BooleanField(required=False)

    spatial_files = [
        "base_file",
        "dbf_file",
        "shx_file",
        "prj_file"
    ]
    # Adding style file based on the backend
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        spatial_files.append('sld_file')

    spatial_files = tuple(spatial_files)

    def clean(self):
        """Validate the uploaded component set; see inline comments."""
        cleaned = super().clean()
        dbf_file = shx_file = prj_file = xml_file = sld_file = None
        base_name = base_ext = None
        if zipfile.is_zipfile(cleaned["base_file"]):
            # Zip upload: discover components from the archive namelist.
            filenames = zipfile.ZipFile(
                cleaned["base_file"], allowZip64=True).namelist()
            for filename in filenames:
                name, ext = os.path.splitext(filename)
                if ext.lower() == '.shp':
                    if base_name is not None:
                        raise forms.ValidationError(
                            "Only one shapefile per zip is allowed")
                    base_name = name
                    base_ext = ext
                elif ext.lower() == '.dbf':
                    dbf_file = filename
                elif ext.lower() == '.shx':
                    shx_file = filename
                elif ext.lower() == '.prj':
                    prj_file = filename
                elif ext.lower() == '.xml':
                    xml_file = filename
                elif ext.lower() == '.sld':
                    sld_file = filename
            if base_name is None:
                raise forms.ValidationError(
                    "Zip files can only contain shapefile.")
        else:
            # Loose-files upload: components come from the form fields.
            base_name, base_ext = os.path.splitext(cleaned["base_file"].name)
            if cleaned["dbf_file"] is not None:
                dbf_file = cleaned["dbf_file"].name
            if cleaned["shx_file"] is not None:
                shx_file = cleaned["shx_file"].name
            if cleaned["prj_file"] is not None:
                prj_file = cleaned["prj_file"].name
            if cleaned["xml_file"] is not None:
                xml_file = cleaned["xml_file"].name
            # SLD style only available in GeoServer backend
            if check_ogc_backend(geoserver.BACKEND_PACKAGE):
                if cleaned["sld_file"] is not None:
                    sld_file = cleaned["sld_file"].name

        if not cleaned["metadata_upload_form"] and not cleaned[
                "style_upload_form"] and base_ext.lower() not in (
                ".shp", ".tif", ".tiff", ".geotif", ".geotiff",
                ".asc", ".sld", ".kml", ".kmz", ".csv"):
            raise forms.ValidationError(
                f"Only Shapefiles, GeoTiffs, and ASCIIs are supported. You uploaded a {base_ext} file"
            )
        elif cleaned["metadata_upload_form"] and base_ext.lower() not in (
                ".xml"):
            raise forms.ValidationError(
                f"Only XML files are supported. You uploaded a {base_ext} file"
            )
        elif cleaned["style_upload_form"] and base_ext.lower() not in (".sld"):
            raise forms.ValidationError(
                f"Only SLD files are supported. You uploaded a {base_ext} file"
            )

        if base_ext.lower() == ".shp":
            # .dbf and .shx are mandatory shapefile companions.
            if dbf_file is None or shx_file is None:
                raise forms.ValidationError(
                    "When uploading Shapefiles, .shx and .dbf files are also required."
                )
            dbf_name, __ = os.path.splitext(dbf_file)
            shx_name, __ = os.path.splitext(shx_file)
            if dbf_name != base_name or shx_name != base_name:
                raise forms.ValidationError(
                    "It looks like you're uploading "
                    "components from different Shapefiles. Please "
                    "double-check your file selections.")
            if prj_file is not None:
                if os.path.splitext(prj_file)[0] != base_name:
                    raise forms.ValidationError(
                        "It looks like you're "
                        "uploading components from different Shapefiles. "
                        "Please double-check your file selections.")
            if xml_file is not None:
                if os.path.splitext(xml_file)[0] != base_name:
                    if xml_file.find('.shp') != -1:
                        # force rename of file so that file.shp.xml doesn't
                        # overwrite as file.shp
                        if cleaned.get("xml_file"):
                            cleaned["xml_file"].name = f'{base_name}.xml'
            if sld_file is not None:
                if os.path.splitext(sld_file)[0] != base_name:
                    if sld_file.find('.shp') != -1:
                        # force rename of file so that file.shp.sld doesn't
                        # overwrite as file.shp
                        if cleaned.get("sld_file"):
                            cleaned["sld_file"].name = f'{base_name}.sld'
        return cleaned

    def write_files(self):
        """Write the validated upload to a temp dir under STATIC_ROOT.

        Returns ``(tempdir, absolute_base_file)``; the caller owns the
        cleanup of ``tempdir``.
        """
        absolute_base_file = None
        tempdir = tempfile.mkdtemp(dir=settings.STATIC_ROOT)
        if zipfile.is_zipfile(self.cleaned_data['base_file']):
            absolute_base_file = unzip_file(self.cleaned_data['base_file'],
                                            '.shp', tempdir=tempdir)
        else:
            for field in self.spatial_files:
                f = self.cleaned_data[field]
                if f is not None:
                    path = os.path.join(tempdir, f.name)
                    with open(path, 'wb') as writable:
                        for c in f.chunks():
                            writable.write(c)
            absolute_base_file = os.path.join(
                tempdir, self.cleaned_data["base_file"].name)
        return tempdir, absolute_base_file
class LayerUploadForm(forms.Form):
    # Form that validates and stages the file set making up one layer
    # upload: Shapefile components, rasters, metadata XML and style files.
    base_file = forms.FileField()
    dbf_file = forms.FileField(required=False)
    shx_file = forms.FileField(required=False)
    prj_file = forms.FileField(required=False)
    xml_file = forms.FileField(required=False)
    # The style-file field depends on the active OGC backend:
    # GeoServer consumes SLD, QGIS Server consumes QML.
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        sld_file = forms.FileField(required=False)
    if check_ogc_backend(qgis_server.BACKEND_PACKAGE):
        qml_file = forms.FileField(required=False)

    geogig = forms.BooleanField(required=False)
    geogig_store = forms.CharField(required=False)
    time = forms.BooleanField(required=False)
    mosaic = forms.BooleanField(required=False)
    append_to_mosaic_opts = forms.BooleanField(required=False)
    append_to_mosaic_name = forms.CharField(required=False)
    mosaic_time_regex = forms.CharField(required=False)
    mosaic_time_value = forms.CharField(required=False)
    time_presentation = forms.CharField(required=False)
    time_presentation_res = forms.IntegerField(required=False)
    time_presentation_default_value = forms.CharField(required=False)
    time_presentation_reference_value = forms.CharField(required=False)
    is_private = forms.BooleanField(required=False, initial=False)
    abstract = forms.CharField(required=False)
    layer_title = forms.CharField(required=False)
    permissions = JSONField()
    metadata_uploaded_preserve = forms.BooleanField(required=False)
    metadata_upload_form = forms.BooleanField(required=False)
    style_upload_form = forms.BooleanField(required=False)

    # Names of the form fields whose uploaded files are written to disk
    # by write_files().
    spatial_files = [
        "base_file",
        "dbf_file",
        "shx_file",
        "prj_file",
        "xml_file",
    ]
    # Adding style file based on the backend
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        spatial_files.append('sld_file')
    if check_ogc_backend(qgis_server.BACKEND_PACKAGE):
        spatial_files.append('qml_file')
    spatial_files = tuple(spatial_files)

    def clean(self):
        """Cross-field validation of the uploaded file set.

        Checks that the base file's extension is supported, and — for
        Shapefiles — that the mandatory .shx/.dbf companions are present
        and that every component shares the base name.
        """
        # .csv and .kml uploads require a database datastore; exclude
        # those types when no DATASTORE is configured.
        requires_datastore = () if ogc_server_settings.DATASTORE else ('.csv', '.kml')
        types = [t for t in files.types if t.code not in requires_datastore]

        def supported_type(ext):
            # True when any registered file type matches this extension.
            return any([t.matches(ext) for t in types])

        cleaned = super(LayerUploadForm, self).clean()
        # NOTE(review): assumes "base_file" survived field validation;
        # a missing base_file would raise KeyError here — confirm callers.
        base_name, base_ext = os.path.splitext(cleaned["base_file"].name)
        if base_ext.lower() == '.zip':
            # for now, no verification, but this could be unified
            pass
        elif not supported_type(base_ext.lower()[1:]):
            supported = " , ".join([t.name for t in types])
            raise forms.ValidationError(
                "%s files are supported. You uploaded a %s file" %
                (supported, base_ext))
        if base_ext.lower() == ".shp":
            # A Shapefile is a multi-file format: .shx and .dbf are
            # mandatory companions with the same base name.
            dbf_file = cleaned["dbf_file"]
            shx_file = cleaned["shx_file"]
            if dbf_file is None or shx_file is None:
                raise forms.ValidationError(
                    "When uploading Shapefiles, .SHX and .DBF files are also required."
                )
            dbf_name, __ = os.path.splitext(dbf_file.name)
            shx_name, __ = os.path.splitext(shx_file.name)
            if dbf_name != base_name or shx_name != base_name:
                raise forms.ValidationError(
                    "It looks like you're uploading "
                    "components from different Shapefiles. Please "
                    "double-check your file selections.")
            if cleaned["prj_file"] is not None:
                # .prj is optional but, when present, must match too.
                prj_file = cleaned["prj_file"].name
                if os.path.splitext(prj_file)[0] != base_name:
                    raise forms.ValidationError(
                        "It looks like you're "
                        "uploading components from different Shapefiles. "
                        "Please double-check your file selections.")
        return cleaned

    def write_files(self):
        """Write every uploaded spatial file into a fresh temp directory.

        Returns a ``(tempdir, absolute_base_file)`` tuple; the caller is
        responsible for cleaning up ``tempdir``.
        """
        tempdir = tempfile.mkdtemp(dir=settings.FILE_UPLOAD_TEMP_DIR)
        for field in self.spatial_files:
            f = self.cleaned_data[field]
            if f is not None:
                path = os.path.join(tempdir, f.name)
                # Stream in chunks to avoid loading large files in memory.
                with open(path, 'wb') as writable:
                    for c in f.chunks():
                        writable.write(c)
        absolute_base_file = os.path.join(
            tempdir, self.cleaned_data["base_file"].name)
        return tempdir, absolute_base_file
def map_download(request, mapid, template='maps/map_download.html'):
    """
    Download all the layers of a map as a batch.

    GET renders the download page; POST asks the backend (GeoServer batch
    download, or a redirect for QGIS Server) to start packaging the map's
    layers and stores the job status in the session.

    XXX To do, remove layer status once progress id done
    """
    map_obj = _resolve_map(
        request,
        mapid,
        'base.download_resourcebase',
        _PERMISSION_MSG_VIEW)

    map_status = dict()
    if request.method == 'POST':

        def perm_filter(layer):
            # Only include layers the requesting user may view.
            return request.user.has_perm(
                'base.view_resourcebase',
                obj=layer.get_self_resource())

        mapJson = map_obj.json(perm_filter)

        # Drop layers without a service and collapse duplicates.
        # FIX: the previous implementation removed items from the list
        # while iterating over it, which silently skips elements; build a
        # new list instead.
        j_map = json.loads(mapJson)
        deduped_layers = []
        for j_layer in j_map["layers"]:
            if j_layer["service"] is None:
                continue
            if j_layer in deduped_layers:
                continue
            deduped_layers.append(j_layer)
        j_map["layers"] = deduped_layers

        mapJson = json.dumps(j_map)

        if check_ogc_backend(geoserver.BACKEND_PACKAGE):
            # TODO the url needs to be verified on geoserver
            url = "%srest/process/batchDownload/launch/" % ogc_server_settings.LOCATION
        elif check_ogc_backend(qgis_server.BACKEND_PACKAGE):
            url = urljoin(
                settings.SITEURL,
                reverse("qgis_server:download-map", kwargs={'mapid': mapid}))
            # qgis-server backend stop here, continue on qgis_server/views.py
            return redirect(url)

        # the path to geoserver backend continue here
        resp, content = http_client.request(url, 'POST', body=mapJson)

        status = int(resp.status)

        if status == 200:
            map_status = json.loads(content)
            request.session["map_status"] = map_status
        else:
            raise Exception(
                'Could not start the download of %s. Error was: %s' %
                (map_obj.title, content))

    # Classify the map's layers for the template: remote layers, layers the
    # user may not download, and the downloadable (local, permitted) ones.
    locked_layers = []
    remote_layers = []
    downloadable_layers = []

    for lyr in map_obj.layer_set.all():
        if lyr.group != "background":
            if not lyr.local:
                remote_layers.append(lyr)
            else:
                ownable_layer = Layer.objects.get(alternate=lyr.name)
                if not request.user.has_perm(
                        'download_resourcebase',
                        obj=ownable_layer.get_self_resource()):
                    locked_layers.append(lyr)
                else:
                    # we need to add the layer only once
                    if len([l for l in downloadable_layers
                            if l.name == lyr.name]) == 0:
                        downloadable_layers.append(lyr)

    return render(request, template, context={
        "geoserver": ogc_server_settings.PUBLIC_LOCATION,
        "map_status": map_status,
        "map": map_obj,
        "locked_layers": locked_layers,
        "remote_layers": remote_layers,
        "downloadable_layers": downloadable_layers,
        "site": settings.SITEURL
    })
class LayerResource(CommonModelApi):
    """Layer API"""

    # Serialized list of layer links; populated per-object in
    # format_objects via dehydrate_ogc_links.
    links = fields.ListField(
        attribute='links',
        null=True,
        use_in='all',
        default=[])
    # Style fields point at different model attributes depending on the
    # configured OGC backend (QGIS Server keeps its own style models).
    if check_ogc_backend(qgis_server.BACKEND_PACKAGE):
        default_style = fields.ForeignKey(
            'geonode.api.api.StyleResource',
            attribute='qgis_default_style',
            null=True)
        styles = fields.ManyToManyField(
            'geonode.api.api.StyleResource',
            attribute='qgis_styles',
            null=True,
            use_in='detail')
    elif check_ogc_backend(geoserver.BACKEND_PACKAGE):
        default_style = fields.ForeignKey(
            'geonode.api.api.StyleResource',
            attribute='default_style',
            null=True)
        styles = fields.ManyToManyField(
            'geonode.api.api.StyleResource',
            attribute='styles',
            null=True,
            use_in='detail')

    def format_objects(self, objects):
        """
        Formats the object.

        Builds one plain dict per layer (owner, group, keywords, regions,
        style, links, online status) suitable for JSON serialization.
        """
        formatted_objects = []
        for obj in objects:
            # convert the object to a dict using the standard values.
            # includes other values
            values = self.VALUES + [
                'alternate',
                'name'
            ]
            formatted_obj = model_to_dict(obj, fields=values)
            username = obj.owner.get_username()
            full_name = (obj.owner.get_full_name() or username)
            formatted_obj['owner__username'] = username
            formatted_obj['owner_name'] = full_name
            if obj.category:
                formatted_obj['category__gn_description'] = obj.category.gn_description
            if obj.group:
                formatted_obj['group'] = obj.group
                try:
                    formatted_obj['group_name'] = GroupProfile.objects.get(
                        slug=obj.group.name)
                except GroupProfile.DoesNotExist:
                    # fall back to the raw auth group when no profile exists
                    formatted_obj['group_name'] = obj.group

            formatted_obj['keywords'] = [k.name for k in obj.keywords.all()] if obj.keywords else []
            formatted_obj['regions'] = [r.name for r in obj.regions.all()] if obj.regions else []

            # provide style information
            bundle = self.build_bundle(obj=obj)
            formatted_obj['default_style'] = self.default_style.dehydrate(
                bundle, for_list=True)

            # Add resource uri
            formatted_obj['resource_uri'] = self.get_resource_uri(bundle)

            formatted_obj['links'] = self.dehydrate_ogc_links(bundle)

            if 'site_url' not in formatted_obj or len(formatted_obj['site_url']) == 0:
                formatted_obj['site_url'] = settings.SITEURL

            # Probe Remote Services
            formatted_obj['store_type'] = 'dataset'
            formatted_obj['online'] = True
            if hasattr(obj, 'storeType'):
                formatted_obj['store_type'] = obj.storeType
                if obj.storeType == 'remoteStore' and hasattr(obj, 'remote_service'):
                    if obj.remote_service:
                        # online iff the last probe of the remote service
                        # returned HTTP 200
                        formatted_obj['online'] = (obj.remote_service.probe == 200)
                    else:
                        formatted_obj['online'] = False

            formatted_obj['gtype'] = self.dehydrate_gtype(bundle)

            # replace thumbnail_url with curated_thumbs
            if hasattr(obj, 'curatedthumbnail'):
                formatted_obj['thumbnail_url'] = obj.curatedthumbnail.thumbnail_url

            # put the object on the response stack
            formatted_objects.append(formatted_obj)
        return formatted_objects

    def _dehydrate_links(self, bundle, link_types=None):
        """Dehydrate links field.

        Returns the object's links as a list of dicts, optionally
        restricted to the given ``link_type`` values.
        """
        dehydrated = []
        obj = bundle.obj
        link_fields = [
            'extension',
            'link_type',
            'name',
            'mime',
            'url'
        ]

        links = obj.link_set.all()
        if link_types:
            links = links.filter(link_type__in=link_types)
        for l in links:
            formatted_link = model_to_dict(l, fields=link_fields)
            dehydrated.append(formatted_link)
        return dehydrated

    def dehydrate_links(self, bundle):
        # All links, regardless of type.
        return self._dehydrate_links(bundle)

    def dehydrate_ogc_links(self, bundle):
        # Only the OGC service links (WMS/WFS/WCS).
        return self._dehydrate_links(bundle, ['OGC:WMS', 'OGC:WFS', 'OGC:WCS'])

    def dehydrate_gtype(self, bundle):
        return bundle.obj.gtype

    def populate_object(self, obj):
        """Populate results with necessary fields

        :param obj: Layer obj
        :type obj: Layer

        :return:
        """
        if check_ogc_backend(qgis_server.BACKEND_PACKAGE):
            # Provides custom links for QGIS Server styles info
            # Default style
            try:
                obj.qgis_default_style = obj.qgis_layer.default_style
            except BaseException:
                obj.qgis_default_style = None

            # Styles
            try:
                obj.qgis_styles = obj.qgis_layer.styles
            except BaseException:
                obj.qgis_styles = []
        return obj

    def build_bundle(self, obj=None, data=None, request=None, **kwargs):
        """Override build_bundle method to add additional info."""
        if obj is None and self._meta.object_class:
            obj = self._meta.object_class()
        elif obj:
            obj = self.populate_object(obj)

        return Bundle(
            obj=obj,
            data=data,
            request=request,
            **kwargs)

    def patch_detail(self, request, **kwargs):
        """Allow patch request to update default_style.

        Request body must match this:

        {
            'default_style': <resource_uri_to_style>
        }

        """
        reason = 'Can only patch "default_style" field.'
        try:
            body = json.loads(request.body)
            if 'default_style' not in body:
                return http.HttpBadRequest(reason=reason)
            match = resolve(body['default_style'])
            style_id = match.kwargs['id']
            api_name = match.kwargs['api_name']
            resource_name = match.kwargs['resource_name']
            if not (resource_name == 'styles' and api_name == 'api'):
                raise Exception()

            from geonode.qgis_server.models import QGISServerStyle
            style = QGISServerStyle.objects.get(id=style_id)

            layer_id = kwargs['id']
            layer = Layer.objects.get(id=layer_id)
        except BaseException:
            # Any parsing/lookup failure is reported as a bad request.
            return http.HttpBadRequest(reason=reason)

        from geonode.qgis_server.views import default_qml_style
        # Delegate the actual style switch to the QGIS Server view; it
        # expects a POST.
        request.method = 'POST'
        response = default_qml_style(
            request,
            layername=layer.name,
            style_name=style.name)

        if isinstance(response, TemplateResponse):
            if response.status_code == 200:
                return HttpResponse(status=200)

        return self.error_response(request, response.content)

    # copy parent attribute before modifying
    VALUES = CommonModelApi.VALUES[:]
    VALUES.append('typename')

    class Meta(CommonMetaApi):
        paginator_class = CrossSiteXHRPaginator
        queryset = Layer.objects.distinct().order_by('-date')
        resource_name = 'layers'
        detail_uri_name = 'id'
        include_resource_uri = True
        allowed_methods = ['get', 'patch']
        excludes = ['csw_anytext', 'metadata_xml']
        authentication = MultiAuthentication(SessionAuthentication(),
                                             OAuthAuthentication(),
                                             GeonodeApiKeyAuthentication())
        filtering = CommonMetaApi.filtering
        # Allow filtering using ID
        filtering.update({
            'id': ALL,
            'name': ALL,
            'alternate': ALL,
        })
from geonode.utils import GXPLayer from geonode.utils import GXPMap from geonode.layers.utils import file_upload, is_raster, is_vector from geonode.utils import resolve_object, llbbox_to_mercator from geonode.people.forms import ProfileForm, PocForm from geonode.security.views import _perms_info_json from geonode.documents.models import get_related_documents from geonode.utils import build_social_links from geonode.base.views import batch_modify from geonode.base.models import Thesaurus from geonode.maps.models import Map from geonode.geoserver.helpers import (cascading_delete, gs_catalog, ogc_server_settings, save_style, extract_name_from_sld, _invalidate_geowebcache_layer) if check_ogc_backend(geoserver.BACKEND_PACKAGE): from geonode.geoserver.helpers import _render_thumbnail CONTEXT_LOG_FILE = ogc_server_settings.LOG_FILE logger = logging.getLogger("geonode.layers.views") DEFAULT_SEARCH_BATCH_SIZE = 10 MAX_SEARCH_BATCH_SIZE = 25 GENERIC_UPLOAD_ERROR = _("There was an error while attempting to upload your data. \ Please try again, or contact and administrator if the problem continues.") METADATA_UPLOADED_PRESERVE_ERROR = _("Note: this layer's orginal metadata was \ populated and preserved by importing a metadata XML file. This metadata cannot be edited.") _PERMISSION_MSG_DELETE = _("You are not permitted to delete this layer") _PERMISSION_MSG_GENERIC = _('You do not have permissions for this layer.')
def test_save_and_delete_signals(self):
    """Test that GeoServer Signals methods work as espected"""
    layers = Layer.objects.all()[:2].values_list('id', flat=True)
    test_perm_layer = Layer.objects.get(id=layers[0])
    self.client.login(username='******', password='******')
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        from geonode.geoserver.signals import (geoserver_pre_delete,
                                               geoserver_post_save,
                                               geoserver_post_save_local)
        # Handle Layer Save and Upload Signals
        geoserver_post_save(test_perm_layer, sender=Layer)
        geoserver_post_save_local(test_perm_layer)
        # Check instance bbox and links
        self.assertIsNotNone(test_perm_layer.bbox)
        self.assertIsNotNone(test_perm_layer.srid)
        self.assertIsNotNone(test_perm_layer.link_set)
        # NOTE(review): 7 appears to be the expected number of links the
        # post-save signal creates for this fixture — confirm if it drifts.
        self.assertEquals(len(test_perm_layer.link_set.all()), 7)
        # Layer Manipulation
        from geonode.geoserver.upload import geoserver_upload
        from geonode.geoserver.signals import gs_catalog
        from geonode.geoserver.helpers import (check_geoserver_is_up,
                                               get_sld_for,
                                               fixup_style,
                                               set_layer_style,
                                               get_store,
                                               set_attributes_from_geoserver,
                                               set_styles,
                                               create_gs_thumbnail,
                                               cleanup)
        # This test requires a reachable GeoServer instance.
        check_geoserver_is_up()
        admin_user = get_user_model().objects.get(username="******")
        saved_layer = geoserver_upload(
            test_perm_layer,
            os.path.join(
                gisdata.VECTOR_DATA,
                "san_andres_y_providencia_poi.shp"),
            admin_user,
            test_perm_layer.name,
            overwrite=True)
        self.assertIsNotNone(saved_layer)
        _log(saved_layer)
        workspace, name = test_perm_layer.alternate.split(':')
        self.assertIsNotNone(workspace)
        self.assertIsNotNone(name)
        ws = gs_catalog.get_workspace(workspace)
        self.assertIsNotNone(ws)
        store = get_store(gs_catalog, name, workspace=ws)
        _log("1. ------------ %s " % store)
        self.assertIsNotNone(store)
        # Save layer attributes
        set_attributes_from_geoserver(test_perm_layer)
        # Save layer styles
        set_styles(test_perm_layer, gs_catalog)
        # set SLD
        sld = test_perm_layer.default_style.sld_body if test_perm_layer.default_style else None
        if sld:
            _log("2. ------------ %s " % sld)
            set_layer_style(test_perm_layer, test_perm_layer.alternate, sld)
        # After fixup the catalog should report no SLD for the layer.
        fixup_style(gs_catalog, test_perm_layer.alternate, None)
        self.assertIsNone(get_sld_for(gs_catalog, test_perm_layer))
        _log("3. ------------ %s " % get_sld_for(gs_catalog, test_perm_layer))
        create_gs_thumbnail(test_perm_layer, overwrite=True)
        self.assertIsNotNone(test_perm_layer.get_thumbnail_url())
        self.assertTrue(test_perm_layer.has_thumbnail())
        # Handle Layer Delete Signals
        geoserver_pre_delete(test_perm_layer, sender=Layer)
        # Check instance has been removed from GeoServer also
        from geonode.geoserver.views import get_layer_capabilities
        self.assertIsNone(get_layer_capabilities(test_perm_layer))
        # Cleaning Up
        test_perm_layer.delete()
        cleanup(test_perm_layer.name, test_perm_layer.uuid)
def get_files(filename):
    """Converts the data to Shapefiles or Geotiffs and returns
       a dictionary with all the required files

    NOTE(review): this is the Python-2 variant of get_files
    (``decode``/``iteritems``); a Python-3 variant exists elsewhere in
    this file. Keys of the returned dict are lowercase extensions
    ('shp', 'dbf', 'sld', 'xml', ...) mapped to absolute paths.
    """
    files = {}

    # Verify if the filename is in ascii format.
    try:
        filename.decode('ascii')
    except UnicodeEncodeError:
        msg = "Please use only characters from the english alphabet for the filename. '%s' is not yet supported." \
            % os.path.basename(filename).encode('UTF-8')
        raise GeoNodeException(msg)

    # Let's unzip the filname in case it is a ZIP file
    import tempfile
    import zipfile
    from geonode.utils import unzip_file
    if zipfile.is_zipfile(filename):
        tempdir = tempfile.mkdtemp()
        filename = unzip_file(filename, '.shp', tempdir=tempdir)
        if not filename:
            # We need to iterate files as filename could be the zipfile
            import ntpath
            from geonode.upload.utils import _SUPPORTED_EXT
            file_basename, file_ext = ntpath.splitext(filename)
            for item in os.listdir(tempdir):
                item_basename, item_ext = ntpath.splitext(item)
                if ntpath.basename(item_basename) == ntpath.basename(file_basename) and (
                        item_ext.lower() in _SUPPORTED_EXT):
                    filename = os.path.join(tempdir, item)
                    break

    # Make sure the file exists.
    if not os.path.exists(filename):
        msg = ('Could not open %s. Make sure you are using a '
               'valid file' % filename)
        logger.warn(msg)
        raise GeoNodeException(msg)

    base_name, extension = os.path.splitext(filename)
    # Replace special characters in filenames - []{}()
    # (they are glob metacharacters, so escape them for the matches below)
    glob_name = re.sub(r'([\[\]\(\)\{\}])', r'[\g<1>]', base_name)

    if extension.lower() == '.shp':
        # Case-insensitive lookup of the mandatory Shapefile companions.
        required_extensions = dict(
            shp='.[sS][hH][pP]', dbf='.[dD][bB][fF]', shx='.[sS][hH][xX]')
        for ext, pattern in required_extensions.iteritems():
            matches = glob.glob(glob_name + pattern)
            if len(matches) == 0:
                msg = ('Expected helper file %s does not exist; a Shapefile '
                       'requires helper files with the following extensions: '
                       '%s') % (base_name + "." + ext,
                                required_extensions.keys())
                raise GeoNodeException(msg)
            elif len(matches) > 1:
                msg = ('Multiple helper files for %s exist; they need to be '
                       'distinct by spelling and not just case.') % filename
                raise GeoNodeException(msg)
            else:
                files[ext] = matches[0]

        # .prj is optional; more than one (case-variant) match is an error.
        matches = glob.glob(glob_name + ".[pP][rR][jJ]")
        if len(matches) == 1:
            files['prj'] = matches[0]
        elif len(matches) > 1:
            msg = ('Multiple helper files for %s exist; they need to be '
                   'distinct by spelling and not just case.') % filename
            raise GeoNodeException(msg)

    elif extension.lower() in cov_exts:
        files[extension.lower().replace('.', '')] = filename

    # Only for GeoServer
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        matches = glob.glob(glob_name + ".[sS][lL][dD]")
        if len(matches) == 1:
            files['sld'] = matches[0]
        elif len(matches) > 1:
            msg = ('Multiple style files (sld) for %s exist; they need to be '
                   'distinct by spelling and not just case.') % filename
            raise GeoNodeException(msg)

    matches = glob.glob(glob_name + ".[xX][mM][lL]")

    # shapefile XML metadata is sometimes named base_name.shp.xml
    # try looking for filename.xml if base_name.xml does not exist
    if len(matches) == 0:
        matches = glob.glob(filename + ".[xX][mM][lL]")

    if len(matches) == 1:
        files['xml'] = matches[0]
    elif len(matches) > 1:
        msg = ('Multiple XML files for %s exist; they need to be '
               'distinct by spelling and not just case.') % filename
        raise GeoNodeException(msg)

    # Only for QGIS Server
    if check_ogc_backend(qgis_server.BACKEND_PACKAGE):
        matches = glob.glob(glob_name + ".[qQ][mM][lL]")
        logger.debug('Checking QML file')
        logger.debug('Number of matches QML file : %s' % len(matches))
        logger.debug('glob name: %s' % glob_name)
        if len(matches) == 1:
            files['qml'] = matches[0]
        elif len(matches) > 1:
            msg = ('Multiple style files (qml) for %s exist; they need to be '
                   'distinct by spelling and not just case.') % filename
            raise GeoNodeException(msg)

        # Provides json files for additional extra data
        matches = glob.glob(glob_name + ".[jJ][sS][oO][nN]")
        logger.debug('Checking JSON File')
        logger.debug(
            'Number of matches JSON file : %s' % len(matches))
        logger.debug('glob name: %s' % glob)
        if len(matches) == 1:
            files['json'] = matches[0]
        elif len(matches) > 1:
            msg = ('Multiple json files (json) for %s exist; they need to be '
                   'distinct by spelling and not just case.') % filename
            raise GeoNodeException(msg)

    return files
def get_files(filename):
    """Converts the data to Shapefiles or Geotiffs and returns
       a dictionary with all the required files.

    Keys of the returned dict are lowercase extensions ('shp', 'dbf',
    'sld', 'xml', ...) mapped to the matching absolute file paths.
    Raises GeoNodeException on missing companions, ambiguous
    (case-variant) matches, or a non-ASCII filename.
    """
    files = {}

    # Verify if the filename is in ascii format.
    try:
        filename.encode('ascii')
    except UnicodeEncodeError:
        msg = f"Please use only characters from the english alphabet for the filename. '{os.path.basename(filename).encode('UTF-8', 'strict')}' is not yet supported."
        raise GeoNodeException(msg)

    # Let's unzip the filname in case it is a ZIP file
    import tempfile
    from geonode.utils import unzip_file
    if is_zipfile(filename):
        tempdir = tempfile.mkdtemp(dir=settings.STATIC_ROOT)
        _filename = unzip_file(filename, '.shp', tempdir=tempdir)
        if not _filename:
            # We need to iterate files as filename could be the zipfile
            import ntpath
            from geonode.upload.utils import _SUPPORTED_EXT
            file_basename, file_ext = ntpath.splitext(filename)
            for item in os.listdir(tempdir):
                item_basename, item_ext = ntpath.splitext(item)
                if ntpath.basename(item_basename) == ntpath.basename(
                        file_basename) and (item_ext.lower() in _SUPPORTED_EXT):
                    filename = os.path.join(tempdir, item)
                    break
        else:
            filename = _filename

    # Make sure the file exists.
    if not os.path.exists(filename):
        # FIX: interpolate the actual path (the f-string previously
        # contained a literal placeholder and reported no filename).
        msg = f'Could not open {filename}. Make sure you are using a valid file'
        logger.debug(msg)
        raise GeoNodeException(msg)

    base_name, extension = os.path.splitext(filename)
    # Replace special characters in filenames - []{}()
    # (they are glob metacharacters, so escape them for the matches below)
    glob_name = re.sub(r'([\[\]\(\)\{\}])', r'[\g<1>]', base_name)

    if extension.lower() == '.shp':
        # Case-insensitive lookup of the mandatory Shapefile companions.
        required_extensions = dict(shp='.[sS][hH][pP]',
                                   dbf='.[dD][bB][fF]',
                                   shx='.[sS][hH][xX]')
        for ext, pattern in required_extensions.items():
            matches = glob.glob(glob_name + pattern)
            if len(matches) == 0:
                msg = (
                    f'Expected helper file {base_name}.{ext} does not exist; a Shapefile '
                    'requires helper files with the following extensions: '
                    f'{list(required_extensions.keys())}')
                raise GeoNodeException(msg)
            elif len(matches) > 1:
                msg = ('Multiple helper files for %s exist; they need to be '
                       'distinct by spelling and not just case.') % filename
                raise GeoNodeException(msg)
            else:
                files[ext] = matches[0]

        # .prj is optional; more than one (case-variant) match is an error.
        matches = glob.glob(f"{glob_name}.[pP][rR][jJ]")
        if len(matches) == 1:
            files['prj'] = matches[0]
        elif len(matches) > 1:
            msg = ('Multiple helper files for %s exist; they need to be '
                   'distinct by spelling and not just case.') % filename
            raise GeoNodeException(msg)

    elif extension.lower() in cov_exts:
        files[extension.lower().replace('.', '')] = filename

    # Only for GeoServer
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        matches = glob.glob(f"{os.path.dirname(glob_name)}.[sS][lL][dD]")
        if len(matches) == 1:
            files['sld'] = matches[0]
        else:
            matches = glob.glob(f"{glob_name}.[sS][lL][dD]")
            if len(matches) == 1:
                files['sld'] = matches[0]
            elif len(matches) > 1:
                msg = ('Multiple style files (sld) for %s exist; they need to be '
                       'distinct by spelling and not just case.') % filename
                raise GeoNodeException(msg)

    matches = glob.glob(f"{glob_name}.[xX][mM][lL]")

    # shapefile XML metadata is sometimes named base_name.shp.xml
    # try looking for filename.xml if base_name.xml does not exist
    if len(matches) == 0:
        # FIX: glob on the actual filename (the pattern previously
        # contained a literal placeholder and could never match).
        matches = glob.glob(f"{filename}.[xX][mM][lL]")

    if len(matches) == 1:
        files['xml'] = matches[0]
    elif len(matches) > 1:
        msg = ('Multiple XML files for %s exist; they need to be '
               'distinct by spelling and not just case.') % filename
        raise GeoNodeException(msg)

    return files
def tearDownClass(cls):
    """Delete the coast-line test layer (GeoServer backend only) before
    delegating to the parent class teardown."""
    # Short-circuit keeps the attribute access under the backend guard.
    if check_ogc_backend(geoserver.BACKEND_PACKAGE) and cls.layer_coast_line:
        cls.layer_coast_line.delete()
    super().tearDownClass()
def test_set_bulk_permissions(self):
    """Test that after restrict view permissions on two layers
    bobby is unable to see them"""
    geofence_rules_count = 0
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        purge_geofence_all()
        # Reset GeoFence Rules
        geofence_rules_count = get_geofence_rules_count()
        self.assertEquals(geofence_rules_count, 0)

    layers = Layer.objects.all()[:2].values_list('id', flat=True)
    # NOTE(review): on Python 3 `map` returns an iterator; posting it as
    # form data may not serialize as intended — confirm target runtime.
    layers_id = map(lambda x: str(x), layers)
    test_perm_layer = Layer.objects.get(id=layers[0])

    self.client.login(username='******', password='******')
    resp = self.client.get(self.list_url)
    self.assertEquals(len(self.deserialize(resp)['objects']), 8)
    data = {
        'permissions': json.dumps(self.perm_spec),
        'resources': layers_id
    }
    resp = self.client.post(self.bulk_perms_url, data)
    self.assertHttpOK(resp)

    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        # Check GeoFence Rules have been correctly created
        geofence_rules_count = get_geofence_rules_count()
        _log("1. geofence_rules_count: %s " % geofence_rules_count)
        self.assertEquals(geofence_rules_count, 8)
        set_geofence_all(test_perm_layer)
        geofence_rules_count = get_geofence_rules_count()
        _log("2. geofence_rules_count: %s " % geofence_rules_count)
        self.assertEquals(geofence_rules_count, 9)

    # After restricting, the second user sees one layer fewer.
    self.client.logout()
    self.client.login(username='******', password='******')
    resp = self.client.get(self.list_url)
    self.assertEquals(len(self.deserialize(resp)['objects']), 7)

    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        perms = get_users_with_perms(test_perm_layer)
        _log("3. perms: %s " % perms)
        sync_geofence_with_guardian(test_perm_layer, perms, user='******')

        # Check GeoFence Rules have been correctly created
        geofence_rules_count = get_geofence_rules_count()
        _log("4. geofence_rules_count: %s " % geofence_rules_count)
        self.assertEquals(geofence_rules_count, 10)

        # Validate maximum priority
        geofence_rules_highest_priority = get_highest_priority()
        _log("5. geofence_rules_highest_priority: %s " % geofence_rules_highest_priority)
        self.assertTrue(geofence_rules_highest_priority > 0)

        # Try GWC Invalidation
        # - it should not work here since the layer has not been uploaded to GeoServer
        set_geowebcache_invalidate_cache(test_perm_layer.alternate)

        url = settings.OGC_SERVER['default']['LOCATION']
        user = settings.OGC_SERVER['default']['USER']
        passwd = settings.OGC_SERVER['default']['PASSWORD']

        import requests
        from requests.auth import HTTPBasicAuth
        r = requests.get(
            url + 'gwc/rest/seed/%s.json' % test_perm_layer.alternate,
            auth=HTTPBasicAuth(user, passwd))
        self.assertEquals(r.status_code, 400)

    geofence_rules_count = 0
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        purge_geofence_all()
        # Reset GeoFence Rules
        geofence_rules_count = get_geofence_rules_count()
        self.assertEquals(geofence_rules_count, 0)
def test_csw_outputschema_iso(self):
    """Verify that GeoNode CSW can handle ISO metadata with ISO outputSchema"""
    csw = get_catalogue()

    # search for 'san_andres_y_providencia_location', output as Dublin Core
    csw.catalogue.getrecords(
        typenames='gmd:MD_Metadata',
        keywords=['%san_andres_y_providencia_location%'],
        outputschema='http://www.isotc211.org/2005/gmd',
        esn='full')

    # NOTE(review): subscripting dict.values() is Python-2-only; on
    # Python 3 this needs list(...)[0] — confirm target runtime.
    record = csw.catalogue.records.values()[0]

    # test that the ISO title maps correctly in Dublin Core
    self.assertEqual(
        record.identification.title,
        'San Andres Y Providencia Location',
        'Expected a specific title in ISO model')

    # test that the ISO abstract maps correctly in Dublin Core
    self.assertEqual(
        record.identification.abstract,
        'No abstract provided',
        'Expected a specific abstract in ISO model')

    # test BBOX properties in Dublin Core
    from decimal import Decimal
    self.assertEqual(
        Decimal(record.identification.bbox.minx),
        Decimal('-81.8593555'),
        'Expected a specific minx coordinate value in ISO model')
    self.assertEqual(
        Decimal(record.identification.bbox.miny),
        Decimal('12.1665322'),
        'Expected a specific minx coordinate value in ISO model')
    self.assertEqual(
        Decimal(record.identification.bbox.maxx),
        Decimal('-81.356409'),
        'Expected a specific maxx coordinate value in ISO model')
    self.assertEqual(
        Decimal(record.identification.bbox.maxy),
        Decimal('13.396306'),
        'Expected a specific maxy coordinate value in ISO model')

    # test for correct link articulation
    for link in record.distribution.online:
        if check_ogc_backend(geoserver.BACKEND_PACKAGE):
            if link.protocol == 'OGC:WMS':
                self.assertEqual(
                    link.url,
                    'http://localhost:8000/gs/ows',
                    'Expected a specific OGC:WMS URL')
            elif link.protocol == 'OGC:WFS':
                self.assertEqual(
                    link.url,
                    'http://localhost:8000/gs/wfs',
                    'Expected a specific OGC:WFS URL')
        if check_ogc_backend(qgis_server.BACKEND_PACKAGE):
            if link.protocol == 'OGC:WMS':
                self.assertEqual(
                    link.url,
                    'http://localhost:8000/qgis-server/ogc/'
                    'san_andres_y_providencia_location',
                    'Expected a specific OGC:WMS URL')
            elif link.protocol == 'OGC:WFS':
                self.assertEqual(
                    link.url,
                    'http://localhost:8000/qgis-server/ogc/'
                    'san_andres_y_providencia_location',
                    'Expected a specific OGC:WFS URL')
def test_get_files(self):
    # Exercises get_files() against temp-dir fixtures: complete and
    # incomplete Shapefile sets, style sidecars, and case-sensitivity
    # edge cases. (Python-2 era: uses dict.iteritems.)

    # Check that a well-formed Shapefile has its components all picked up
    d = None
    try:
        d = tempfile.mkdtemp()
        for f in ("foo.shp", "foo.shx", "foo.prj", "foo.dbf"):
            path = os.path.join(d, f)
            # open and immediately close to create empty file
            open(path, 'w').close()

        gotten_files = get_files(os.path.join(d, "foo.shp"))
        gotten_files = dict((k, v[len(d) + 1:])
                            for k, v in gotten_files.iteritems())
        self.assertEquals(gotten_files, dict(shp="foo.shp", shx="foo.shx",
                                             prj="foo.prj", dbf="foo.dbf"))
    finally:
        if d is not None:
            shutil.rmtree(d)

    # Check that a Shapefile missing required components raises an
    # exception
    d = None
    try:
        d = tempfile.mkdtemp()
        for f in ("foo.shp", "foo.shx", "foo.prj"):
            path = os.path.join(d, f)
            # open and immediately close to create empty file
            open(path, 'w').close()

        self.assertRaises(GeoNodeException,
                          lambda: get_files(os.path.join(d, "foo.shp")))
    finally:
        if d is not None:
            shutil.rmtree(d)

    # Check that including an SLD with a valid shapefile results in the SLD
    # getting picked up
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        d = None
        try:
            d = tempfile.mkdtemp()
            for f in ("foo.shp", "foo.shx", "foo.prj", "foo.dbf", "foo.sld"):
                path = os.path.join(d, f)
                # open and immediately close to create empty file
                open(path, 'w').close()

            gotten_files = get_files(os.path.join(d, "foo.shp"))
            gotten_files = dict((k, v[len(d) + 1:])
                                for k, v in gotten_files.iteritems())
            self.assertEquals(gotten_files, dict(shp="foo.shp", shx="foo.shx",
                                                 prj="foo.prj", dbf="foo.dbf",
                                                 sld="foo.sld"))
        finally:
            if d is not None:
                shutil.rmtree(d)

    # Check that including a QML with a valid shapefile
    # results in the QML
    # getting picked up
    if check_ogc_backend(qgis_server.BACKEND_PACKAGE):
        d = None
        try:
            d = tempfile.mkdtemp()
            for f in ("foo.shp", "foo.shx", "foo.prj",
                      "foo.dbf", "foo.qml", "foo.json"):
                path = os.path.join(d, f)
                # open and immediately close to create empty file
                open(path, 'w').close()

            gotten_files = get_files(os.path.join(d, "foo.shp"))
            gotten_files = dict((k, v[len(d) + 1:])
                                for k, v in gotten_files.iteritems())
            self.assertEquals(gotten_files, dict(shp="foo.shp", shx="foo.shx",
                                                 prj="foo.prj", dbf="foo.dbf",
                                                 qml="foo.qml",
                                                 json="foo.json"))
        finally:
            if d is not None:
                shutil.rmtree(d)

    # Check that capitalized extensions are ok
    d = None
    try:
        d = tempfile.mkdtemp()
        for f in ("foo.SHP", "foo.SHX", "foo.PRJ", "foo.DBF"):
            path = os.path.join(d, f)
            # open and immediately close to create empty file
            open(path, 'w').close()

        gotten_files = get_files(os.path.join(d, "foo.SHP"))
        gotten_files = dict((k, v[len(d) + 1:])
                            for k, v in gotten_files.iteritems())
        self.assertEquals(gotten_files, dict(shp="foo.SHP", shx="foo.SHX",
                                             prj="foo.PRJ", dbf="foo.DBF"))
    finally:
        if d is not None:
            shutil.rmtree(d)

    # Check that mixed capital and lowercase extensions are ok
    d = None
    try:
        d = tempfile.mkdtemp()
        for f in ("foo.SHP", "foo.shx", "foo.pRJ", "foo.DBF"):
            path = os.path.join(d, f)
            # open and immediately close to create empty file
            open(path, 'w').close()

        gotten_files = get_files(os.path.join(d, "foo.SHP"))
        gotten_files = dict((k, v[len(d) + 1:])
                            for k, v in gotten_files.iteritems())
        self.assertEquals(gotten_files, dict(shp="foo.SHP", shx="foo.shx",
                                             prj="foo.pRJ", dbf="foo.DBF"))
    finally:
        if d is not None:
            shutil.rmtree(d)

    # Check that including both capital and lowercase extensions raises an
    # exception
    d = None
    try:
        d = tempfile.mkdtemp()
        files = ("foo.SHP", "foo.SHX", "foo.PRJ", "foo.DBF",
                 "foo.shp", "foo.shx", "foo.prj", "foo.dbf")
        for f in files:
            path = os.path.join(d, f)
            # open and immediately close to create empty file
            open(path, 'w').close()

        # Only run the tests if this is a case sensitive OS
        if len(os.listdir(d)) == len(files):
            self.assertRaises(
                GeoNodeException,
                lambda: get_files(os.path.join(d, "foo.SHP")))
            self.assertRaises(
                GeoNodeException,
                lambda: get_files(os.path.join(d, "foo.shp")))
    finally:
        if d is not None:
            shutil.rmtree(d)

    # Check that including both capital and lowercase PRJ (this is
    # special-cased in the implementation)
    d = None
    try:
        d = tempfile.mkdtemp()
        files = ("foo.SHP", "foo.SHX", "foo.PRJ", "foo.DBF", "foo.prj")
        for f in files:
            path = os.path.join(d, f)
            # open and immediately close to create empty file
            open(path, 'w').close()

        # Only run the tests if this is a case sensitive OS
        if len(os.listdir(d)) == len(files):
            self.assertRaises(GeoNodeException,
                              lambda: get_files(os.path.join(d, "foo.SHP")))
            self.assertRaises(GeoNodeException,
                              lambda: get_files(os.path.join(d, "foo.shp")))
    finally:
        if d is not None:
            shutil.rmtree(d)

    # Check that including both capital and lowercase SLD (this is
    # special-cased in the implementation)
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        d = None
        try:
            d = tempfile.mkdtemp()
            files = (
                "foo.SHP",
                "foo.SHX",
                "foo.PRJ",
                "foo.DBF",
                "foo.SLD",
                "foo.sld")
            for f in files:
                path = os.path.join(d, f)
                # open and immediately close to create empty file
                open(path, 'w').close()

            # Only run the tests if this is a case sensitive OS
            if len(os.listdir(d)) == len(files):
                self.assertRaises(
                    GeoNodeException,
                    lambda: get_files(os.path.join(d, "foo.SHP")))
                self.assertRaises(
                    GeoNodeException,
                    lambda: get_files(os.path.join(d, "foo.shp")))
        finally:
            if d is not None:
                shutil.rmtree(d)
def test_not_superuser_permissions(self):
    """Walk a non-superuser ('bobby') through every resource permission,
    asserting page access before/after each perm is granted/removed and,
    on the GeoServer backend, that GeoFence rule counts track the changes.

    NOTE(review): several sections assert status_code 200 where the comment
    says the user "cannot access" the page (2.1, 3.1, 4.1) — presumably the
    views render a denied/limited page rather than 403; confirm intent.
    """
    geofence_rules_count = 0
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        purge_geofence_all()
        # Reset GeoFence Rules
        geofence_rules_count = get_geofence_rules_count()
        self.assertTrue(geofence_rules_count == 0)
    # grab bobby
    bob = get_user_model().objects.get(username='******')

    # grab a layer
    layer = Layer.objects.all()[0]
    layer.set_default_permissions()
    # verify bobby has view/change permissions on it but not manage
    self.assertTrue(
        bob.has_perm(
            'change_resourcebase_permissions',
            layer.get_self_resource()))

    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        # Check GeoFence Rules have been correctly created
        geofence_rules_count = get_geofence_rules_count()
        _log("1. geofence_rules_count: %s " % geofence_rules_count)
        self.assertTrue(geofence_rules_count == 1)

    self.assertTrue(self.client.login(username='******', password='******'))

    # 1. view_resourcebase
    # 1.1 has view_resourcebase: verify that bobby can access the layer
    # detail page
    self.assertTrue(
        bob.has_perm(
            'view_resourcebase',
            layer.get_self_resource()))
    response = self.client.get(
        reverse('layer_detail', args=(layer.alternate, )))
    self.assertEquals(response.status_code, 200)

    # 1.2 has not view_resourcebase: verify that bobby can not access the
    # layer detail page
    remove_perm('view_resourcebase', bob, layer.get_self_resource())
    anonymous_group = Group.objects.get(name='anonymous')
    remove_perm('view_resourcebase', anonymous_group, layer.get_self_resource())
    response = self.client.get(
        reverse('layer_detail', args=(layer.alternate, )))
    self.assertEquals(response.status_code, 401)

    # 2. change_resourcebase
    # 2.1 has not change_resourcebase: verify that bobby cannot access the
    # layer replace page
    response = self.client.get(
        reverse('layer_replace', args=(layer.alternate, )))
    # NOTE(review): expects 200 despite "cannot access" — TODO confirm
    self.assertEquals(response.status_code, 200)

    # 2.2 has change_resourcebase: verify that bobby can access the layer
    # replace page
    assign_perm('change_resourcebase', bob, layer.get_self_resource())
    self.assertTrue(
        bob.has_perm(
            'change_resourcebase',
            layer.get_self_resource()))
    response = self.client.get(
        reverse('layer_replace', args=(layer.alternate, )))
    self.assertEquals(response.status_code, 200)

    # 3. delete_resourcebase
    # 3.1 has not delete_resourcebase: verify that bobby cannot access the
    # layer delete page
    response = self.client.get(
        reverse('layer_remove', args=(layer.alternate, )))
    # NOTE(review): expects 200 despite "cannot access" — TODO confirm
    self.assertEquals(response.status_code, 200)

    # 3.2 has delete_resourcebase: verify that bobby can access the layer
    # delete page
    assign_perm('delete_resourcebase', bob, layer.get_self_resource())
    self.assertTrue(
        bob.has_perm(
            'delete_resourcebase',
            layer.get_self_resource()))
    response = self.client.get(
        reverse('layer_remove', args=(layer.alternate, )))
    self.assertEquals(response.status_code, 200)

    # 4. change_resourcebase_metadata
    # 4.1 has not change_resourcebase_metadata: verify that bobby cannot
    # access the layer metadata page
    response = self.client.get(
        reverse('layer_metadata', args=(layer.alternate, )))
    # NOTE(review): expects 200 despite "cannot access" — TODO confirm
    self.assertEquals(response.status_code, 200)

    # 4.2 has delete_resourcebase: verify that bobby can access the layer
    # delete page
    assign_perm('change_resourcebase_metadata', bob, layer.get_self_resource())
    self.assertTrue(
        bob.has_perm(
            'change_resourcebase_metadata',
            layer.get_self_resource()))
    response = self.client.get(
        reverse('layer_metadata', args=(layer.alternate, )))
    self.assertEquals(response.status_code, 200)

    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        perms = get_users_with_perms(layer)
        _log("2. perms: %s " % perms)
        sync_geofence_with_guardian(layer, perms, user=bob, group=anonymous_group)

        # Check GeoFence Rules have been correctly created
        geofence_rules_count = get_geofence_rules_count()
        _log("3. geofence_rules_count: %s " % geofence_rules_count)
        self.assertEquals(geofence_rules_count, 3)

    # 5. change_resourcebase_permissions
    # should be impossible for the user without change_resourcebase_permissions
    # to change permissions as the permission form is not available in the
    # layer detail page?

    # 6. change_layer_data
    # must be done in integration test sending a WFS-T request with CURL

    # 7. change_layer_style
    # 7.1 has not change_layer_style: verify that bobby cannot access
    # the layer style page
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        # Only for geoserver backend
        response = self.client.get(
            reverse('layer_style_manage', args=(layer.alternate, )))
        self.assertEquals(response.status_code, 200)

    # 7.2 has change_layer_style: verify that bobby can access the
    # change layer style page
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        # Only for geoserver backend
        assign_perm('change_layer_style', bob, layer)
        self.assertTrue(bob.has_perm('change_layer_style', layer))
        response = self.client.get(
            reverse('layer_style_manage', args=(layer.alternate, )))
        self.assertEquals(response.status_code, 200)

    # Final teardown: reset GeoFence so later tests start clean
    geofence_rules_count = 0
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        purge_geofence_all()
        # Reset GeoFence Rules
        geofence_rules_count = get_geofence_rules_count()
        self.assertEquals(geofence_rules_count, 0)
def proxy(request, url=None, response_callback=None,
          sec_chk_hosts=True, sec_chk_rules=True, timeout=None,
          allowed_hosts=None, **kwargs):
    """Forward a client request to a remote (or local OGC) URL and relay
    the response.

    :param request: incoming Django request; its method, body and headers
        are replayed against the target URL.
    :param url: explicit target URL; falls back to ``request.GET['url']``.
    :param response_callback: optional callable given the upstream
        ``response``/``content``/``status``/``content_type`` instead of
        building an ``HttpResponse`` here.
    :param sec_chk_hosts: when True (and ``DEBUG`` is off) the target host
        must appear in ``PROXY_ALLOWED_HOSTS`` (extended with the site,
        the GeoServer host, OWS GetCapabilities hosts and registered
        remote-service hosts).
    :param sec_chk_rules: reserved; rule-based checks not yet implemented.
    :param timeout: upstream request timeout; defaults to module TIMEOUT.
    :param allowed_hosts: hosts for which credentials may be forwarded
        (passed through to ``get_headers``). Default changed from a shared
        mutable ``[]`` to ``None`` to avoid the mutable-default pitfall.
    :returns: an ``HttpResponse`` (or the callback's return value).
    """
    # BUGFIX: never use a mutable default argument.
    if allowed_hosts is None:
        allowed_hosts = []
    # Request default timeout
    if not timeout:
        timeout = TIMEOUT

    # Security rules and settings
    PROXY_ALLOWED_HOSTS = getattr(settings, 'PROXY_ALLOWED_HOSTS', ())

    # Sanity url checks
    if 'url' not in request.GET and not url:
        return HttpResponse(
            "The proxy service requires a URL-encoded URL as a parameter.",
            status=400,
            content_type="text/plain")

    raw_url = url or request.GET['url']
    # Site-relative URLs are resolved against SITEURL before proxying.
    raw_url = urljoin(settings.SITEURL, raw_url) if raw_url.startswith("/") else raw_url
    url = urlsplit(raw_url)
    scheme = str(url.scheme)
    locator = str(url.path)
    if url.query != "":
        locator += '?' + url.query
    if url.fragment != "":
        locator += '#' + url.fragment

    # White-Black Listing Hosts
    site_url = urlsplit(settings.SITEURL)
    if sec_chk_hosts and not settings.DEBUG:
        # Attach current SITEURL
        if site_url.hostname not in PROXY_ALLOWED_HOSTS:
            PROXY_ALLOWED_HOSTS += (site_url.hostname, )

        # Attach current hostname
        if check_ogc_backend(geoserver.BACKEND_PACKAGE):
            from geonode.geoserver.helpers import ogc_server_settings
            hostname = (
                ogc_server_settings.hostname, ) if ogc_server_settings else ()
            # BUGFIX: compare the hostname *string* against the whitelist.
            # The original tested the 1-tuple itself ("(h,) not in hosts"),
            # which is never a member, so the host was re-appended on every
            # request.
            if hostname and hostname[0] not in PROXY_ALLOWED_HOSTS:
                PROXY_ALLOWED_HOSTS += hostname

        # Check OWS regexp: public GetCapabilities requests are allowed
        # from any host for the supported OWS services/versions.
        if url.query and ows_regexp.match(url.query):
            ows_tokens = ows_regexp.match(url.query).groups()
            if len(
                ows_tokens
            ) == 4 and 'version' == ows_tokens[0] and StrictVersion(
                ows_tokens[1]) >= StrictVersion("1.0.0") and StrictVersion(
                    ows_tokens[1]
            ) <= StrictVersion("3.0.0") and ows_tokens[2].lower() in (
                # BUGFIX: ('getcapabilities') is a plain string, so the
                # original performed a *substring* test; use a real tuple.
                'getcapabilities', ) and ows_tokens[3].upper() in (
                    'OWS', 'WCS', 'WFS', 'WMS', 'WPS', 'CSW'):
                if url.hostname not in PROXY_ALLOWED_HOSTS:
                    PROXY_ALLOWED_HOSTS += (url.hostname, )

        # Check Remote Services base_urls
        from geonode.services.models import Service
        for _s in Service.objects.all():
            _remote_host = urlsplit(_s.base_url).hostname
            PROXY_ALLOWED_HOSTS += (_remote_host, )

        if not validate_host(url.hostname, PROXY_ALLOWED_HOSTS):
            return HttpResponse(
                "DEBUG is set to False but the host of the path provided to the proxy service"
                " is not in the PROXY_ALLOWED_HOSTS setting.",
                status=403,
                content_type="text/plain")

    # Security checks based on rules; allow only specific requests
    if sec_chk_rules:
        # TODO: Not yet implemented
        pass

    # Collecting headers and cookies
    headers, access_token = get_headers(request, url, raw_url,
                                        allowed_hosts=allowed_hosts)

    # Inject access_token if necessary
    parsed = urlparse(raw_url)
    # NOTE: the original called ``parsed._replace(path=locator.encode('utf8'))``
    # and discarded the result (``_replace`` returns a new namedtuple); the
    # statement was a no-op and has been removed.
    if parsed.netloc == site_url.netloc and scheme != site_url.scheme:
        parsed = parsed._replace(scheme=site_url.scheme)

    _url = parsed.geturl()

    # Some clients / JS libraries generate URLs with relative URL paths, e.g.
    # "http://host/path/path/../file.css", which the requests library cannot
    # currently handle (https://github.com/kennethreitz/requests/issues/2982).
    # We parse and normalise such URLs into absolute paths before attempting
    # to proxy the request.
    _url = URL.from_text(_url).normalize().to_text()

    if request.method == "GET" and access_token and 'access_token' not in _url:
        query_separator = '&' if '?' in _url else '?'
        _url = ('%s%saccess_token=%s' %
                (_url, query_separator, access_token))

    _data = request.body.decode('utf-8')

    # Avoid translating local geoserver calls into external ones
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        from geonode.geoserver.helpers import ogc_server_settings
        _url = _url.replace('%s%s' % (settings.SITEURL, 'geoserver'),
                            ogc_server_settings.LOCATION.rstrip('/'))
        _data = _data.replace('%s%s' % (settings.SITEURL, 'geoserver'),
                              ogc_server_settings.LOCATION.rstrip('/'))

    response, content = http_client.request(_url,
                                            method=request.method,
                                            data=_data,
                                            headers=headers,
                                            timeout=timeout,
                                            user=request.user)
    content = response.content or response.reason
    status = response.status_code
    content_type = response.headers.get('Content-Type')

    if status >= 400:
        return HttpResponse(
            content=content,
            reason=content,
            status=status,
            content_type=content_type)

    # decompress GZipped responses if not enabled
    # if content and response and response.getheader('Content-Encoding') == 'gzip':
    if content and content_type and content_type == 'gzip':
        buf = io.BytesIO(content)
        f = gzip.GzipFile(fileobj=buf)
        content = f.read()

    # Decode bodies of textual content types so callers get str, not bytes.
    PLAIN_CONTENT_TYPES = ['text', 'plain', 'html', 'json', 'xml', 'gml']
    for _ct in PLAIN_CONTENT_TYPES:
        if content_type and _ct in content_type and not isinstance(
                content, six.string_types):
            try:
                content = content.decode()
                break
            except Exception:
                pass

    if response and response_callback:
        kwargs = {} if not kwargs else kwargs
        kwargs.update({
            'response': response,
            'content': content,
            'status': status,
            'content_type': content_type
        })
        return response_callback(**kwargs)
    else:
        # If we get a redirect, let's add a useful message.
        if status and status in (301, 302, 303, 307):
            _response = HttpResponse(
                ('This proxy does not support redirects. The server in "%s" '
                 'asked for a redirect to "%s"' %
                 (url, response.getheader('Location'))),
                status=status,
                content_type=content_type)
            _response['Location'] = response.getheader('Location')
            return _response
        else:
            def _get_message(text):
                # Extract GeoServer's "<b>Message</b>..." error text when
                # present; otherwise return the (decoded) body unchanged.
                _s = text
                if isinstance(text, bytes):
                    _s = text.decode("utf-8", "replace")
                try:
                    found = re.search('<b>Message</b>(.+?)</p>',
                                      _s).group(1).strip()
                except Exception:
                    found = _s
                return found

            return HttpResponse(
                content=content,
                reason=_get_message(content) if status not in (200, 201) else None,
                status=status,
                content_type=content_type)
def layer_detail(request, layername, template='layers/layer_detail.html'):
    """Render the layer detail page.

    Builds the GXP viewer config (bbox reprojections, capability dict,
    optional WMS time dimension), handles MapStory-specific keyword/metadata
    POST forms, collects download/view links (with access_token injection),
    and returns a TemplateResponse with the assembled context.

    Raises whatever ``_resolve_layer`` raises when the user lacks
    'base.view_resourcebase' on the layer.
    """
    layer = _resolve_layer(
        request,
        layername,
        'base.view_resourcebase',
        _PERMISSION_MSG_VIEW)

    # assert False, str(layer_bbox)
    config = layer.attribute_config()

    # Add required parameters for GXP lazy-loading
    layer_bbox = layer.bbox[0:4]
    bbox = layer_bbox[:]
    # NOTE: indices 1 and 2 are deliberately swapped — layer.bbox is
    # [x0, x1, y0, y1] while the viewer wants [x0, y0, x1, y1] order here.
    bbox[0] = float(layer_bbox[0])
    bbox[1] = float(layer_bbox[2])
    bbox[2] = float(layer_bbox[1])
    bbox[3] = float(layer_bbox[3])

    def decimal_encode(bbox):
        # Coerce bbox coords to float and reorder to [x0, y0, x1, y1].
        # NOTE(review): the Decimal branch is unreachable — every element is
        # already a float after the list comprehension.
        import decimal
        _bbox = []
        for o in [float(coord) for coord in bbox]:
            if isinstance(o, decimal.Decimal):
                o = (str(o) for o in [o])
            _bbox.append(o)
        # Must be in the form : [x0, x1, y0, y1
        return [_bbox[0], _bbox[2], _bbox[1], _bbox[3]]

    def sld_definition(style):
        # Build the style descriptor (with a GetLegendGraphic legend URL)
        # consumed by the viewer config "styles" list.
        from urllib import quote
        _sld = {
            "title": style.sld_title or style.name,
            "legend": {
                "height": "40",
                "width": "22",
                "href": layer.ows_url +
                "?service=wms&request=GetLegendGraphic&format=image%2Fpng&width=20&height=20&layer=" +
                quote(layer.service_typename, safe=''),
                "format": "image/png"
            },
            "name": style.name
        }
        return _sld

    if hasattr(layer, 'srid'):
        config['crs'] = {
            'type': 'name',
            'properties': layer.srid
        }
    # Add required parameters for GXP lazy-loading
    attribution = "%s %s" % (
        layer.owner.first_name,
        layer.owner.last_name
    ) if layer.owner.first_name or layer.owner.last_name else str(layer.owner)
    srs = getattr(settings, 'DEFAULT_MAP_CRS', 'EPSG:3857')
    srs_srid = int(srs.split(":")[1]) if srs != "EPSG:900913" else 3857
    config[
        "attribution"] = "<span class='gx-attribution-title'>%s</span>" % attribution
    config["format"] = getattr(settings, 'DEFAULT_LAYER_FORMAT', 'image/png')
    config["title"] = layer.title
    config["wrapDateLine"] = True
    config["visibility"] = True
    config["srs"] = srs
    config["bbox"] = decimal_encode(
        bbox_to_projection([float(coord) for coord in layer_bbox] + [
            layer.srid,
        ], target_srid=int(srs.split(":")[1]))[:4])

    # Capability dict advertised to the client: bboxes in the native SRS,
    # the default map SRS, EPSG:4326 and EPSG:900913.
    config["capability"] = {
        "abstract": layer.abstract,
        "name": layer.alternate,
        "title": layer.title,
        "queryable": True,
        "storeType": layer.storeType,
        "bbox": {
            layer.srid: {
                "srs": layer.srid,
                "bbox": decimal_encode(bbox)
            },
            srs: {
                "srs": srs,
                "bbox": decimal_encode(
                    bbox_to_projection([float(coord) for coord in layer_bbox] + [
                        layer.srid,
                    ], target_srid=srs_srid)[:4])
            },
            "EPSG:4326": {
                "srs": "EPSG:4326",
                "bbox": decimal_encode(bbox) if layer.srid == 'EPSG:4326' else
                decimal_encode(
                    bbox_to_projection([float(coord) for coord in layer_bbox] + [
                        layer.srid,
                    ], target_srid=4326)[:4])
            },
            "EPSG:900913": {
                "srs": "EPSG:900913",
                "bbox": decimal_encode(bbox) if layer.srid == 'EPSG:900913' else
                decimal_encode(
                    bbox_to_projection([float(coord) for coord in layer_bbox] + [
                        layer.srid,
                    ], target_srid=3857)[:4])
            }
        },
        "srs": {
            srs: True
        },
        "formats": [
            "image/png", "application/atom xml", "application/atom+xml",
            "application/json;type=utfgrid", "application/openlayers",
            "application/pdf", "application/rss xml", "application/rss+xml",
            "application/vnd.google-earth.kml",
            "application/vnd.google-earth.kml xml",
            "application/vnd.google-earth.kml+xml",
            "application/vnd.google-earth.kml+xml;mode=networklink",
            "application/vnd.google-earth.kmz",
            "application/vnd.google-earth.kmz xml",
            "application/vnd.google-earth.kmz+xml",
            "application/vnd.google-earth.kmz;mode=networklink", "atom",
            "image/geotiff", "image/geotiff8", "image/gif",
            "image/gif;subtype=animated", "image/jpeg", "image/png8",
            "image/png; mode=8bit", "image/svg", "image/svg xml",
            "image/svg+xml", "image/tiff", "image/tiff8", "image/vnd.jpeg-png",
            "kml", "kmz", "openlayers", "rss", "text/html; subtype=openlayers",
            "utfgrid"
        ],
        "attribution": {
            "title": attribution
        },
        "infoFormats": [
            "text/plain", "application/vnd.ogc.gml", "text/xml",
            "application/vnd.ogc.gml/3.1.1", "text/xml; subtype=gml/3.1.1",
            "text/html", "application/json"
        ],
        "styles": [sld_definition(s) for s in layer.styles.all()],
        "prefix": layer.alternate.split(":")[0] if ":" in layer.alternate else "",
        "keywords": [k.name for k in layer.keywords.all()] if layer.keywords else [],
        "llbbox": decimal_encode(bbox) if layer.srid == 'EPSG:4326' else
        decimal_encode(
            bbox_to_projection([float(coord) for coord in layer_bbox] + [
                layer.srid,
            ], target_srid=4326)[:4])
    }

    # Scan the cached WMS capabilities for a time dimension on this layer.
    all_times = None
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        from geonode.geoserver.views import get_capabilities
        workspace, layername = layer.alternate.split(
            ":") if ":" in layer.alternate else (None, layer.alternate)
        # WARNING Please make sure to have enabled DJANGO CACHE as per
        # https://docs.djangoproject.com/en/2.0/topics/cache/#filesystem-caching
        wms_capabilities_resp = get_capabilities(request, layer.id, tolerant=True)
        if wms_capabilities_resp.status_code >= 200 and wms_capabilities_resp.status_code < 400:
            wms_capabilities = wms_capabilities_resp.getvalue()
            if wms_capabilities:
                import xml.etree.ElementTree as ET
                e = ET.fromstring(wms_capabilities)
                for atype in e.findall("./[Name='%s']/Extent[@name='time']" % (layername)):
                    dim_name = atype.get('name')
                    if dim_name:
                        dim_name = str(dim_name).lower()
                        if dim_name == 'time':
                            dim_values = atype.text
                            if dim_values:
                                all_times = dim_values.split(",")
                                break
    if all_times:
        config["capability"]["dimensions"] = {
            "time": {
                "name": "time",
                "units": "ISO8601",
                "unitsymbol": None,
                "nearestVal": False,
                "multipleVal": False,
                "current": False,
                "default": "current",
                "values": all_times
            }
        }

    # Remote-service layers carry extra source params for the viewer.
    if layer.storeType == "remoteStore":
        service = layer.remote_service
        source_params = {}
        if service.type in ('REST_MAP', 'REST_IMG'):
            source_params = {
                "ptype": service.ptype,
                "remote": True,
                "url": service.service_url,
                "name": service.name,
                "title": "[R] %s" % service.title
            }
        maplayer = GXPLayer(name=layer.alternate,
                            ows_url=layer.ows_url,
                            layer_params=json.dumps(config),
                            source_params=json.dumps(source_params))
    else:
        maplayer = GXPLayer(name=layer.alternate,
                            ows_url=layer.ows_url,
                            layer_params=json.dumps(config))

    # Update count for popularity ranking,
    # but do not includes admins or resource owners
    layer.view_count_up(request.user)

    # center/zoom don't matter; the viewer will center on the layer bounds
    map_obj = GXPMap(sender=Layer,
                     projection=getattr(settings, 'DEFAULT_MAP_CRS', 'EPSG:3857'))

    NON_WMS_BASE_LAYERS = [
        la for la in default_map_config(request)[1] if la.ows_url is None
    ]

    metadata = layer.link_set.metadata().filter(
        name__in=settings.DOWNLOAD_FORMATS_METADATA)

    # Mosaic layers expose per-granule listings (best-effort; failures
    # degrade to empty feature lists).
    granules = None
    all_granules = None
    all_times = None
    filter = None
    if layer.is_mosaic:
        try:
            cat = gs_catalog
            cat._cache.clear()
            store = cat.get_store(layer.name)
            coverages = cat.mosaic_coverages(store)
            filter = None
            try:
                if request.GET["filter"]:
                    filter = request.GET["filter"]
            except BaseException:
                pass
            offset = 10 * (request.page - 1)
            granules = cat.mosaic_granules(
                coverages['coverages']['coverage'][0]['name'],
                store,
                limit=10,
                offset=offset,
                filter=filter)
            all_granules = cat.mosaic_granules(
                coverages['coverages']['coverage'][0]['name'], store, filter=filter)
        except BaseException:
            granules = {"features": []}
            all_granules = {"features": []}

    # NOTE(review): this capabilities/time scan duplicates the one above
    # (all_times was reset to None after building config) — looks redundant;
    # confirm before consolidating.
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        from geonode.geoserver.views import get_capabilities
        workspace, layername = layer.alternate.split(
            ":") if ":" in layer.alternate else (None, layer.alternate)
        # WARNING Please make sure to have enabled DJANGO CACHE as per
        # https://docs.djangoproject.com/en/2.0/topics/cache/#filesystem-caching
        wms_capabilities_resp = get_capabilities(request, layer.id, tolerant=True)
        if wms_capabilities_resp.status_code >= 200 and wms_capabilities_resp.status_code < 400:
            wms_capabilities = wms_capabilities_resp.getvalue()
            if wms_capabilities:
                import xml.etree.ElementTree as ET
                e = ET.fromstring(wms_capabilities)
                for atype in e.findall("./[Name='%s']/Extent[@name='time']" % (layername)):
                    dim_name = atype.get('name')
                    if dim_name:
                        dim_name = str(dim_name).lower()
                        if dim_name == 'time':
                            dim_values = atype.text
                            if dim_values:
                                all_times = dim_values.split(",")
                                break

    group = None
    if layer.group:
        try:
            group = GroupProfile.objects.get(slug=layer.group.name)
        except GroupProfile.DoesNotExist:
            group = None

    # a flag to be used for qgis server
    show_popup = False
    if 'show_popup' in request.GET and request.GET["show_popup"]:
        show_popup = True

    ###
    # MapStory Specific Changes
    ###
    keywords = json.dumps([tag.name for tag in layer.keywords.all()])

    if request.method == "POST":
        keywords_form = KeywordsForm(request.POST, instance=layer)
        metadata_form = MetadataForm(instance=layer)
        distributionurl_form = DistributionUrlForm(request.POST, instance=layer)
        if 'keywords' in request.POST:
            if keywords_form.is_valid():
                keywords_form.save()
                new_keywords = keywords_form.cleaned_data['keywords']
                layer.keywords.set(*new_keywords)
                layer.save()
            metadata_form = MetadataForm(instance=layer)
        elif 'title' in request.POST:
            metadata_form = MetadataForm(request.POST, instance=layer)
            if metadata_form.is_valid():
                metadata_form.save()
                # update all the metadata
                if metadata_form.cleaned_data['category'] is not None:
                    new_category = TopicCategory.objects.get(
                        id=metadata_form.cleaned_data['category'].id)
                    Layer.objects.filter(id=layer.id).update(
                        category=new_category)
                layer.title = metadata_form.cleaned_data['title']
                layer.language = metadata_form.cleaned_data['language']
                layer.data_quality_statement = metadata_form.cleaned_data[
                    'data_quality_statement']
                layer.purpose = metadata_form.cleaned_data['purpose']
                layer.is_published = metadata_form.cleaned_data['is_published']
                layer.save()
            if distributionurl_form.is_valid():
                layer.distribution_url = distributionurl_form.cleaned_data[
                    'distribution_url']
            keywords_form = KeywordsForm(instance=layer)
        if 'add_keyword' in request.POST:
            layer.keywords.add(request.POST['add_keyword'])
            layer.save()
        if 'remove_keyword' in request.POST:
            layer.keywords.remove(request.POST['remove_keyword'])
            layer.save()
    else:
        keywords_form = KeywordsForm(instance=layer)
        metadata_form = MetadataForm(instance=layer)
        distributionurl_form = DistributionUrlForm(instance=layer)

    content_moderators = Group.objects.filter(name='content_moderator').first()

    thumbnail = layer.get_thumbnail_url

    # This will get URL encoded later and is used for the social media share URL
    share_url = "https://%s/layers/%s" % (request.get_host(), layer.typename)
    share_title = "%s by %s." % (layer.title, layer.owner)
    share_description = layer.abstract

    # Get membership buttons ready:
    admin_memberships = []
    # Check if user is admin in one of those organizations
    users_org_memberships = OrganizationMembership.objects.filter(
        user_id=request.user.pk)
    for membership in users_org_memberships.all():
        # We have permission if we own the layer, or if we are an organization's admin.
        if (layer.owner == request.user) or membership.is_admin:
            admin_memberships.append(membership)
    if len(admin_memberships) < 1:
        admin_memberships = None

    ini_memberships = []
    # Checks if user is admin for Inititives
    user_ini_memberships = InitiativeMembership.objects.filter(
        user_id=request.user.pk)
    for membership in user_ini_memberships.all():
        if (layer.owner == request.user) or membership.is_admin:
            ini_memberships.append(membership)
    if len(ini_memberships) < 1:
        ini_memberships = None

    # Stash single-feature download links in the session (used elsewhere
    # by the MapStory UI — TODO confirm consumer).
    shapefile_link = layer.link_set.download().filter(mime='SHAPE-ZIP').first()
    if shapefile_link is not None:
        shapefile_link = shapefile_link.url + '&featureID=fakeID' + '&maxFeatures=1'
        request.session['shp_name'] = layer.typename
        request.session['shp_link'] = shapefile_link

    csv_link = layer.link_set.download().filter(mime='csv').first()
    if csv_link is not None:
        csv_link = csv_link.url + '&featureID=fakeID' + '&maxFeatures=1'
        request.session['csv_name'] = layer.typename
        request.session['csv_link'] = csv_link
    ###
    # End MapStory Specific Changes
    ###

    context_dict = {
        'resource': layer,
        'group': group,
        'perms_list': get_perms(request.user, layer.get_self_resource()),
        "permissions_json": _perms_info_json(layer),
        "documents": get_related_documents(layer),
        "metadata": metadata,
        "is_layer": True,
        "wps_enabled": settings.OGC_SERVER['default']['WPS_ENABLED'],
        "granules": granules,
        "all_granules": all_granules,
        "all_times": all_times,
        "show_popup": show_popup,
        "filter": filter,
        "storeType": layer.storeType,
        # MapStory Specific Additions
        "keywords": keywords,
        "keywords_form": keywords_form,
        "metadata_form": metadata_form,
        "distributionurl_form": distributionurl_form,
        "content_moderators": content_moderators,
        "thumbnail": thumbnail,
        "share_url": share_url,
        "share_title": share_title,
        "share_description": share_description,
        "organizations": admin_memberships,
        "initiatives": ini_memberships
        # "online": (layer.remote_service.probe == 200) if layer.storeType == "remoteStore" else True
    }

    # Anonymous visitors get a throwaway token for link decoration.
    if request and 'access_token' in request.session:
        access_token = request.session['access_token']
    else:
        u = uuid.uuid1()
        access_token = u.hex

    context_dict["viewer"] = json.dumps(
        map_obj.viewer_json(request, *(NON_WMS_BASE_LAYERS + [maplayer])))
    context_dict["preview"] = getattr(
        settings,
        'GEONODE_CLIENT_LAYER_PREVIEW_LIBRARY',
        'geoext')
    context_dict["crs"] = getattr(settings, 'DEFAULT_MAP_CRS', 'EPSG:3857')

    # provide bbox in EPSG:4326 for leaflet
    if context_dict["preview"] == 'leaflet':
        srid, wkt = layer.geographic_bounding_box.split(';')
        srid = re.findall(r'\d+', srid)
        geom = GEOSGeometry(wkt, srid=int(srid[0]))
        geom.transform(4326)
        context_dict["layer_bbox"] = ','.join([str(c) for c in geom.extent])

    if layer.storeType == 'dataStore':
        links = layer.link_set.download().filter(
            Q(name__in=settings.DOWNLOAD_FORMATS_VECTOR) |
            Q(link_type='original'))
    else:
        links = layer.link_set.download().filter(
            Q(name__in=settings.DOWNLOAD_FORMATS_RASTER) |
            Q(link_type='original'))
    # Split into view links (WMS/GWC) and download links; append the
    # access_token to each so the client can fetch protected resources.
    links_view = [
        item for idx, item in enumerate(links) if
        item.url and 'wms' in item.url or 'gwc' in item.url
    ]
    links_download = [
        item for idx, item in enumerate(links) if
        item.url and 'wms' not in item.url and 'gwc' not in item.url
    ]
    for item in links_view:
        if item.url and access_token and 'access_token' not in item.url:
            params = {'access_token': access_token}
            item.url = Request('GET', item.url, params=params).prepare().url
    for item in links_download:
        if item.url and access_token and 'access_token' not in item.url:
            params = {'access_token': access_token}
            item.url = Request('GET', item.url, params=params).prepare().url

    if request.user.has_perm('view_resourcebase', layer.get_self_resource()):
        context_dict["links"] = links_view
    if request.user.has_perm('download_resourcebase', layer.get_self_resource()):
        if layer.storeType == 'dataStore':
            links = layer.link_set.download().filter(
                name__in=settings.DOWNLOAD_FORMATS_VECTOR)
        else:
            links = layer.link_set.download().filter(
                name__in=settings.DOWNLOAD_FORMATS_RASTER)
        context_dict["links_download"] = links_download

    if settings.SOCIAL_ORIGINS:
        context_dict["social_links"] = build_social_links(request, layer)

    layers_names = layer.alternate
    try:
        if settings.DEFAULT_WORKSPACE and settings.DEFAULT_WORKSPACE in layers_names:
            workspace, name = layers_names.split(':', 1)
        else:
            name = layers_names
    except BaseException:
        logger.error("Can not identify workspace type and layername")

    context_dict["layer_name"] = json.dumps(layers_names)

    try:
        # get type of layer (raster or vector)
        if layer.storeType == 'coverageStore':
            context_dict["layer_type"] = "raster"
        elif layer.storeType == 'dataStore':
            if layer.has_time:
                context_dict["layer_type"] = "vector_time"
            else:
                context_dict["layer_type"] = "vector"

            location = "{location}{service}".format(** {
                'location': settings.OGC_SERVER['default']['LOCATION'],
                'service': 'wms',
            })
            # get schema for specific layer
            username = settings.OGC_SERVER['default']['USER']
            password = settings.OGC_SERVER['default']['PASSWORD']
            schema = get_schema(
                location,
                name,
                username=username,
                password=password)

            # get the name of the column which holds the geometry
            if 'the_geom' in schema['properties']:
                schema['properties'].pop('the_geom', None)
            elif 'geom' in schema['properties']:
                schema['properties'].pop("geom", None)

            # filter the schema dict based on the values of layers_attributes
            layer_attributes_schema = []
            for key in schema['properties'].keys():
                layer_attributes_schema.append(key)

            filtered_attributes = layer_attributes_schema
            context_dict["schema"] = schema
            context_dict["filtered_attributes"] = filtered_attributes
    except BaseException:
        logger.error(
            "Possible error with OWSLib. Turning all available properties to string")

    if settings.GEOTIFF_IO_ENABLED:
        from geonode.contrib.geotiffio import create_geotiff_io_url
        context_dict["link_geotiff_io"] = create_geotiff_io_url(
            layer, access_token)

    # maps owned by user needed to fill the "add to existing map section" in template
    if request.user.is_authenticated():
        context_dict["maps"] = Map.objects.filter(owner=request.user)
    return TemplateResponse(request, template, context=context_dict)
def add_layers_to_map_config(
        request, map_obj, layer_names, add_base_layers=True):
    """Resolve ``layer_names``, build a GXP layer config for each visible
    layer, append the resulting MapLayers to ``map_obj``, recentre/zoom the
    map to the last layer's bbox, and return the viewer JSON config.

    Layers that don't resolve (bad name, 404) or that the user cannot view
    are silently skipped.
    """
    DEFAULT_MAP_CONFIG, DEFAULT_BASE_LAYERS = default_map_config(request)

    bbox = []
    layers = []
    for layer_name in layer_names:
        try:
            layer = _resolve_layer(request, layer_name)
        except ObjectDoesNotExist:
            # bad layer, skip
            continue
        except Http404:
            # can't find the layer, skip it.
            continue

        if not request.user.has_perm(
                'view_resourcebase', obj=layer.get_self_resource()):
            # invisible layer, skip inclusion
            continue

        layer_bbox = layer.bbox[0:4]
        bbox = layer_bbox[:]
        # NOTE: indices 1 and 2 are swapped on purpose — layer.bbox is
        # [x0, x1, y0, y1] and the map wants [x0, y0, x1, y1] order here.
        bbox[0] = layer_bbox[0]
        bbox[1] = layer_bbox[2]
        bbox[2] = layer_bbox[1]
        bbox[3] = layer_bbox[3]
        # assert False, str(layer_bbox)

        def decimal_encode(bbox):
            # Coerce coords to float and reorder to [x0, y0, x1, y1].
            # NOTE(review): the Decimal branch is unreachable — every element
            # is already a float after the comprehension.
            import decimal
            _bbox = []
            for o in [float(coord) for coord in bbox]:
                if isinstance(o, decimal.Decimal):
                    o = (str(o) for o in [o])
                _bbox.append(o)
            # Must be in the form : [x0, x1, y0, y1
            return [_bbox[0], _bbox[2], _bbox[1], _bbox[3]]

        def sld_definition(style):
            # Style descriptor (with GetLegendGraphic legend URL) for the
            # viewer config; closes over the current loop's ``layer``.
            from urllib import quote
            _sld = {
                "title": style.sld_title or style.name,
                "legend": {
                    "height": "40",
                    "width": "22",
                    "href": layer.ows_url +
                    "?service=wms&request=GetLegendGraphic&format=image%2Fpng&width=20&height=20&layer=" +
                    quote(layer.service_typename, safe=''),
                    "format": "image/png"
                },
                "name": style.name
            }
            return _sld

        config = layer.attribute_config()
        if hasattr(layer, 'srid'):
            config['crs'] = {
                'type': 'name',
                'properties': layer.srid
            }
        # Add required parameters for GXP lazy-loading
        attribution = "%s %s" % (layer.owner.first_name, layer.owner.last_name) if layer.owner.first_name or layer.owner.last_name else str(
            layer.owner)
        srs = getattr(settings, 'DEFAULT_MAP_CRS', 'EPSG:3857')
        srs_srid = int(srs.split(":")[1]) if srs != "EPSG:900913" else 3857
        config["attribution"] = "<span class='gx-attribution-title'>%s</span>" % attribution
        config["format"] = getattr(
            settings, 'DEFAULT_LAYER_FORMAT', 'image/png')
        config["title"] = layer.title
        config["wrapDateLine"] = True
        config["visibility"] = True
        config["srs"] = srs
        config["bbox"] = decimal_encode(
            bbox_to_projection([float(coord) for coord in layer_bbox] +
                               [layer.srid, ],
                               target_srid=int(srs.split(":")[1]))[:4])

        # Capability dict advertised to the client: bboxes in the native
        # SRS, the default map SRS, EPSG:4326 and EPSG:900913.
        config["capability"] = {
            "abstract": layer.abstract,
            "name": layer.alternate,
            "title": layer.title,
            "queryable": True,
            "storeType": layer.storeType,
            "bbox": {
                layer.srid: {
                    "srs": layer.srid,
                    "bbox": decimal_encode(bbox)
                },
                srs: {
                    "srs": srs,
                    "bbox": decimal_encode(
                        bbox_to_projection(
                            [float(coord) for coord in layer_bbox] + [layer.srid, ],
                            target_srid=srs_srid)[:4])
                },
                "EPSG:4326": {
                    "srs": "EPSG:4326",
                    "bbox": decimal_encode(bbox) if layer.srid == 'EPSG:4326' else
                    decimal_encode(bbox_to_projection(
                        [float(coord) for coord in layer_bbox] + [layer.srid, ],
                        target_srid=4326)[:4])
                },
                "EPSG:900913": {
                    "srs": "EPSG:900913",
                    "bbox": decimal_encode(bbox) if layer.srid == 'EPSG:900913' else
                    decimal_encode(bbox_to_projection(
                        [float(coord) for coord in layer_bbox] + [layer.srid, ],
                        target_srid=3857)[:4])
                }
            },
            "srs": {
                srs: True
            },
            "formats": ["image/png", "application/atom xml", "application/atom+xml",
                        "application/json;type=utfgrid", "application/openlayers",
                        "application/pdf", "application/rss xml", "application/rss+xml",
                        "application/vnd.google-earth.kml",
                        "application/vnd.google-earth.kml xml",
                        "application/vnd.google-earth.kml+xml",
                        "application/vnd.google-earth.kml+xml;mode=networklink",
                        "application/vnd.google-earth.kmz",
                        "application/vnd.google-earth.kmz xml",
                        "application/vnd.google-earth.kmz+xml",
                        "application/vnd.google-earth.kmz;mode=networklink",
                        "atom", "image/geotiff", "image/geotiff8", "image/gif",
                        "image/gif;subtype=animated", "image/jpeg", "image/png8",
                        "image/png; mode=8bit", "image/svg", "image/svg xml",
                        "image/svg+xml", "image/tiff", "image/tiff8",
                        "image/vnd.jpeg-png", "kml", "kmz", "openlayers", "rss",
                        "text/html; subtype=openlayers", "utfgrid"],
            "attribution": {
                "title": attribution
            },
            "infoFormats": ["text/plain", "application/vnd.ogc.gml", "text/xml",
                            "application/vnd.ogc.gml/3.1.1",
                            "text/xml; subtype=gml/3.1.1", "text/html",
                            "application/json"],
            "styles": [sld_definition(s) for s in layer.styles.all()],
            "prefix": layer.alternate.split(":")[0] if ":" in layer.alternate else "",
            "keywords": [k.name for k in layer.keywords.all()] if layer.keywords else [],
            "llbbox": decimal_encode(bbox) if layer.srid == 'EPSG:4326' else
            decimal_encode(bbox_to_projection(
                [float(coord) for coord in layer_bbox] + [layer.srid, ],
                target_srid=4326)[:4])
        }

        # Scan the cached WMS capabilities for a time dimension on this layer
        # (namespace-aware variant of the scan used in layer_detail).
        all_times = None
        if check_ogc_backend(geoserver.BACKEND_PACKAGE):
            from geonode.geoserver.views import get_capabilities
            workspace, layername = layer.alternate.split(
                ":") if ":" in layer.alternate else (None, layer.alternate)
            # WARNING Please make sure to have enabled DJANGO CACHE as per
            # https://docs.djangoproject.com/en/2.0/topics/cache/#filesystem-caching
            wms_capabilities_resp = get_capabilities(
                request, layer.id, tolerant=True)
            if wms_capabilities_resp.status_code >= 200 and wms_capabilities_resp.status_code < 400:
                wms_capabilities = wms_capabilities_resp.getvalue()
                if wms_capabilities:
                    import xml.etree.ElementTree as ET
                    namespaces = {'wms': 'http://www.opengis.net/wms',
                                  'xlink': 'http://www.w3.org/1999/xlink',
                                  'xsi': 'http://www.w3.org/2001/XMLSchema-instance'}
                    e = ET.fromstring(wms_capabilities)
                    for atype in e.findall(
                            "./[wms:Name='%s']/wms:Dimension[@name='time']" % (layer.alternate), namespaces):
                        dim_name = atype.get('name')
                        if dim_name:
                            dim_name = str(dim_name).lower()
                            if dim_name == 'time':
                                dim_values = atype.text
                                if dim_values:
                                    all_times = dim_values.split(",")
                                    break
        if all_times:
            config["capability"]["dimensions"] = {
                "time": {
                    "name": "time",
                    "units": "ISO8601",
                    "unitsymbol": None,
                    "nearestVal": False,
                    "multipleVal": False,
                    "current": False,
                    "default": "current",
                    "values": all_times
                }
            }

        if layer.storeType == "remoteStore":
            service = layer.remote_service
            source_params = {}
            if service.type in ('REST_MAP', 'REST_IMG'):
                source_params = {
                    "ptype": service.ptype,
                    "remote": True,
                    "url": service.service_url,
                    "name": service.name,
                    "title": "[R] %s" % service.title}
            maplayer = MapLayer(map=map_obj,
                                name=layer.alternate,
                                ows_url=layer.ows_url,
                                layer_params=json.dumps(config),
                                visibility=True,
                                source_params=json.dumps(source_params)
                                )
        else:
            # Local layer: append the session access_token to the OWS URL
            # only when it points at our own GeoServer.
            ogc_server_url = urlparse.urlsplit(
                ogc_server_settings.PUBLIC_LOCATION).netloc
            layer_url = urlparse.urlsplit(layer.ows_url).netloc
            access_token = request.session['access_token'] if request and 'access_token' in request.session else None
            if access_token and ogc_server_url == layer_url and 'access_token' not in layer.ows_url:
                url = '%s?access_token=%s' % (layer.ows_url, access_token)
            else:
                url = layer.ows_url
            maplayer = MapLayer(
                map=map_obj,
                name=layer.alternate,
                ows_url=url,
                # use DjangoJSONEncoder to handle Decimal values
                layer_params=json.dumps(config, cls=DjangoJSONEncoder),
                visibility=True
            )

        layers.append(maplayer)

    # Recentre/zoom on the last processed layer's bbox (bbox holds only the
    # final loop iteration's extent).
    if bbox and len(bbox) >= 4:
        minx, maxx, miny, maxy = [float(coord) for coord in bbox]
        x = (minx + maxx) / 2
        y = (miny + maxy) / 2

        if getattr(
                settings,
                'DEFAULT_MAP_CRS',
                'EPSG:3857') == "EPSG:4326":
            center = list((x, y))
        else:
            center = list(forward_mercator((x, y)))

        if center[1] == float('-inf'):
            center[1] = 0

        BBOX_DIFFERENCE_THRESHOLD = 1e-5

        # Check if the bbox is invalid
        valid_x = (maxx - minx) ** 2 > BBOX_DIFFERENCE_THRESHOLD
        valid_y = (maxy - miny) ** 2 > BBOX_DIFFERENCE_THRESHOLD

        if valid_x:
            width_zoom = math.log(360 / abs(maxx - minx), 2)
        else:
            width_zoom = 15
        if valid_y:
            height_zoom = math.log(360 / abs(maxy - miny), 2)
        else:
            height_zoom = 15

        map_obj.center_x = center[0]
        map_obj.center_y = center[1]
        map_obj.zoom = math.ceil(min(width_zoom, height_zoom))

    map_obj.handle_moderated_uploads()

    if add_base_layers:
        layers_to_add = DEFAULT_BASE_LAYERS + layers
    else:
        layers_to_add = layers
    config = map_obj.viewer_json(
        request, *layers_to_add)

    config['fromLayer'] = True
    return config
def test_layer_links(self):
    """Verify default resource links are (re)generated with the expected counts.

    For one vector (dataStore) and one raster (coverageStore) layer:
    delete the default links, regenerate them via
    ``set_resource_default_links`` and check the per-type counts.
    Only exercised fully on the GeoServer backend.
    """
    # (store_type, expected "data" links, expected "image" links)
    for store_type, data_count, image_count in (
            ("dataStore", 6, 3),
            ("coverageStore", 2, 7)):
        lyr = Layer.objects.filter(storeType=store_type).first()
        self.assertEqual(lyr.storeType, store_type)
        if check_ogc_backend(geoserver.BACKEND_PACKAGE):
            self._assert_layer_link_counts(lyr, data_count, image_count)

def _assert_layer_link_counts(self, lyr, data_count, image_count):
    """Delete default links for *lyr*, regenerate them, assert counts.

    ``data_count``/``image_count`` are the expected number of "data" and
    "image" links after regeneration; "metadata" links are always 7 and
    are never deleted here.
    """
    links = Link.objects.filter(resource=lyr.resourcebase_ptr,
                                link_type="metadata")
    self.assertIsNotNone(links)
    self.assertEqual(len(links), 7)
    for ll in links:
        self.assertEqual(ll.link_type, "metadata")

    # Wipe every regenerable link type, then confirm "data" links are gone.
    _def_link_types = (
        'data', 'image', 'original', 'html',
        'OGC:WMS', 'OGC:WFS', 'OGC:WCS')
    Link.objects.filter(resource=lyr.resourcebase_ptr,
                        link_type__in=_def_link_types).delete()
    links = Link.objects.filter(resource=lyr.resourcebase_ptr,
                                link_type="data")
    self.assertIsNotNone(links)
    self.assertEqual(len(links), 0)

    # Regenerate and re-check all the counts.
    set_resource_default_links(lyr, lyr)
    links = Link.objects.filter(resource=lyr.resourcebase_ptr,
                                link_type="metadata")
    self.assertIsNotNone(links)
    self.assertEqual(len(links), 7)
    for ll in links:
        self.assertEqual(ll.link_type, "metadata")
    links = Link.objects.filter(resource=lyr.resourcebase_ptr,
                                link_type="data")
    self.assertIsNotNone(links)
    self.assertEqual(len(links), data_count)
    links = Link.objects.filter(resource=lyr.resourcebase_ptr,
                                link_type="image")
    self.assertIsNotNone(links)
    self.assertEqual(len(links), image_count)
class BaseApiTests(APITestCase, URLPatternsTestCase):
    """API v2 integration tests for users, groups and ResourceBase endpoints.

    Uses an isolated ``urlpatterns`` (via ``URLPatternsTestCase``) so the
    API routes under test do not depend on the project-level URL conf.
    The exact counts asserted below depend on the fixtures and on the
    models created in ``setUp``.
    """

    fixtures = [
        'initial_data.json',
        'group_test_data.json',
        'default_oauth_apps.json'
    ]

    urlpatterns = [
        url(r'^home/$', TemplateView.as_view(
            template_name='index.html'), name='home'),
        url(r'^help/$', TemplateView.as_view(
            template_name='help.html'), name='help'),
        url(r"^account/", include("allauth.urls")),
        url(r'^people/', include('geonode.people.urls')),
        url(r'^api/v2/', include(router.urls)),
        url(r'^api/v2/', include('geonode.api.urls')),
        url(r'^api/v2/api-auth/', include('rest_framework.urls',
                                          namespace='geonode_rest_framework')),
        url(r'^$', TemplateView.as_view(template_name='layers/layer_list.html'),
            {'facet_type': 'layers', 'is_layer': True}, name='layer_browse'),
        url(r'^$', TemplateView.as_view(template_name='maps/map_list.html'),
            {'facet_type': 'maps', 'is_map': True}, name='maps_browse'),
        url(r'^$', TemplateView.as_view(template_name='documents/document_list.html'),
            {'facet_type': 'documents', 'is_document': True}, name='document_browse'),
        url(r'^$', TemplateView.as_view(
            template_name='groups/group_list.html'), name='group_list'),
        url(r'^search/$', TemplateView.as_view(
            template_name='search/search.html'), name='search'),
        url(r'^$', services, name='services'),
        url(r'^invitations/', include(
            'geonode.invitations.urls', namespace='geonode.invitations')),
        url(r'^i18n/', include(django.conf.urls.i18n), name="i18n"),
        url(r'^jsi18n/$', JavaScriptCatalog.as_view(), {},
            name='javascript-catalog'),
        url(r'^(?P<mapid>[^/]+)/embed$', map_embed, name='map_embed'),
        url(r'^(?P<layername>[^/]+)/embed$', layer_embed, name='layer_embed'),
        url(r'^(?P<geoappid>[^/]+)/embed$', geoapp_edit,
            {'template': 'apps/app_embed.html'}, name='geoapp_embed'),
    ]

    # GeoServer-only endpoints are appended conditionally at class-body time.
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        from geonode.geoserver.views import layer_acls, resolve_user
        urlpatterns += [
            url(r'^acls/?$', layer_acls, name='layer_acls'),
            url(r'^acls_dep/?$', layer_acls, name='layer_acls_dep'),
            url(r'^resolve_user/?$', resolve_user, name='layer_resolve_user'),
            url(r'^resolve_user_dep/?$', resolve_user,
                name='layer_resolve_user_dep'),
        ]

    def setUp(self):
        # Populate the test DB with the standard GeoNode sample objects.
        create_models(b'document')
        create_models(b'map')
        create_models(b'layer')

    # NOTE(review): method name has a typo ("gropus" -> "groups"); kept as-is
    # because renaming would change which test identifier runs.
    def test_gropus_list(self):
        """
        Ensure we can access the group profiles list.
        """
        url = reverse('group-profiles-list')
        # Unauthorized
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, 403)
        # Authorized
        self.assertTrue(self.client.login(username='******', password='******'))
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        logger.debug(response.data)
        self.assertEqual(response.data['total'], 2)
        self.assertEqual(len(response.data['group_profiles']), 2)
        url = reverse('group-profiles-detail', kwargs={'pk': 1})
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, 200)
        logger.debug(response.data)
        self.assertEqual(response.data['group_profile']['title'], 'Registered Members')
        self.assertEqual(response.data['group_profile']['description'], 'Registered Members')
        self.assertEqual(response.data['group_profile']['access'], 'private')
        self.assertEqual(response.data['group_profile']['group']['name'],
                         response.data['group_profile']['slug'])

    def test_users_list(self):
        """
        Ensure we can access the users list.
        """
        url = reverse('users-list')
        # Anonymous
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        logger.debug(response.data)
        self.assertEqual(response.data['total'], 0)
        self.assertEqual(len(response.data['users']), 0)
        # Authorized
        self.assertTrue(self.client.login(username='******', password='******'))
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        logger.debug(response.data)
        self.assertEqual(response.data['total'], 10)
        self.assertEqual(len(response.data['users']), 10)
        url = reverse('users-detail', kwargs={'pk': 1})
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, 200)
        logger.debug(response.data)
        self.assertEqual(response.data['user']['username'], 'admin')
        self.assertIsNotNone(response.data['user']['avatar'])
        # Bobby
        self.assertTrue(self.client.login(username='******', password='******'))
        # Bobby cannot access other users' details
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, 404)
        # Bobby can see himself in the list
        url = reverse('users-list')
        # NOTE(review): this asserts on the PREVIOUS (404) response, not on a
        # fresh GET of the list URL — likely meant to come after the GET below.
        self.assertEqual(len(response.data), 1)
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, 200)
        logger.debug(response.data)
        self.assertEqual(response.data['total'], 1)
        self.assertEqual(len(response.data['users']), 1)
        # Bobby can access its own details
        bobby = get_user_model().objects.filter(username='******').get()
        url = reverse('users-detail', kwargs={'pk': bobby.id})
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, 200)
        logger.debug(response.data)
        self.assertEqual(response.data['user']['username'], 'bobby')
        self.assertIsNotNone(response.data['user']['avatar'])

    def test_base_resources(self):
        """
        Ensure we can access the Resource Base list.
        """
        url = reverse('base-resources-list')
        # Anonymous
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        self.assertEqual(response.data['total'], 26)
        # Pagination
        self.assertEqual(len(response.data['resources']), 10)
        logger.debug(response.data)
        # Remove public permissions to Layers
        from geonode.layers.utils import set_layers_permissions
        set_layers_permissions(
            "read",  # permissions_name
            None,  # resources_names == None (all layers)
            [get_anonymous_user()],  # users_usernames
            None,  # groups_names
            True,   # delete_flag
        )
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        self.assertEqual(response.data['total'], 18)
        # Pagination
        self.assertEqual(len(response.data['resources']), 10)
        # Admin
        self.assertTrue(self.client.login(username='******', password='******'))
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        self.assertEqual(response.data['total'], 18)
        # Pagination
        self.assertEqual(len(response.data['resources']), 10)
        logger.debug(response.data)
        # Bobby
        self.assertTrue(self.client.login(username='******', password='******'))
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        self.assertEqual(response.data['total'], 18)
        # Pagination
        self.assertEqual(len(response.data['resources']), 10)
        logger.debug(response.data)
        # Norman
        self.assertTrue(self.client.login(username='******', password='******'))
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        self.assertEqual(response.data['total'], 18)
        # Pagination
        self.assertEqual(len(response.data['resources']), 10)
        logger.debug(response.data)
        # Pagination
        # Admin
        self.assertTrue(self.client.login(username='******', password='******'))
        response = self.client.get(f"{url}?page_size=17", format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        self.assertEqual(response.data['total'], 18)
        # Pagination
        self.assertEqual(len(response.data['resources']), 17)
        # Check user permissions
        resource = ResourceBase.objects.filter(owner__username='******').first()
        # Admin
        response = self.client.get(f"{url}/{resource.id}/", format='json')
        self.assertTrue(
            'change_resourcebase' in list(response.data['resource']['perms']))
        # Anonymous
        self.assertIsNone(self.client.logout())
        response = self.client.get(f"{url}/{resource.id}/", format='json')
        self.assertFalse(
            'change_resourcebase' in list(response.data['resource']['perms']))
        # user owner
        self.assertTrue(self.client.login(username='******', password='******'))
        response = self.client.get(f"{url}/{resource.id}/", format='json')
        self.assertTrue(
            'change_resourcebase' in list(response.data['resource']['perms']))
        # user not owner and not assigned
        self.assertTrue(self.client.login(username='******', password='******'))
        response = self.client.get(f"{url}/{resource.id}/", format='json')
        self.assertFalse(
            'change_resourcebase' in list(response.data['resource']['perms']))

    def test_search_resources(self):
        """
        Ensure we can search across the Resource Base list.
        """
        url = reverse('base-resources-list')
        # Admin
        self.assertTrue(self.client.login(username='******', password='******'))
        response = self.client.get(
            f"{url}?search=ca&search_fields=title&search_fields=abstract",
            format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        self.assertEqual(response.data['total'], 1)
        # Pagination
        self.assertEqual(len(response.data['resources']), 1)

    def test_filter_resources(self):
        """
        Ensure we can filter across the Resource Base list.
        """
        url = reverse('base-resources-list')
        # Admin
        self.assertTrue(self.client.login(username='******', password='******'))
        # Filter by owner == bobby
        response = self.client.get(f"{url}?filter{{owner.username}}=bobby",
                                   format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        self.assertEqual(response.data['total'], 3)
        # Pagination
        self.assertEqual(len(response.data['resources']), 3)
        # Filter by resource_type == document
        response = self.client.get(f"{url}?filter{{resource_type}}=document",
                                   format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        self.assertEqual(response.data['total'], 9)
        # Pagination
        self.assertEqual(len(response.data['resources']), 9)
        # Filter by resource_type == layer and title like 'common morx'
        response = self.client.get(
            f"{url}?filter{{resource_type}}=layer&filter{{title.icontains}}=common morx",
            format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        self.assertEqual(response.data['total'], 1)
        # Pagination
        self.assertEqual(len(response.data['resources']), 1)
        # Filter by Keywords
        response = self.client.get(f"{url}?filter{{keywords.name}}=here",
                                   format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        self.assertEqual(response.data['total'], 1)
        # Pagination
        self.assertEqual(len(response.data['resources']), 1)
        # Filter by Metadata Regions
        response = self.client.get(
            f"{url}?filter{{regions.name.icontains}}=Italy", format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        self.assertEqual(response.data['total'], 0)
        # Pagination
        self.assertEqual(len(response.data['resources']), 0)
        # Filter by Metadata Categories
        response = self.client.get(
            f"{url}?filter{{category.identifier}}=elevation", format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        self.assertEqual(response.data['total'], 9)
        # Pagination
        self.assertEqual(len(response.data['resources']), 9)
        # Extent Filter
        response = self.client.get(
            f"{url}?page_size=26&extent=-180,-90,180,90", format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        self.assertEqual(response.data['total'], 26)
        # Pagination
        self.assertEqual(len(response.data['resources']), 26)
        response = self.client.get(f"{url}?page_size=26&extent=0,0,100,100",
                                   format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        self.assertEqual(response.data['total'], 26)
        # Pagination
        self.assertEqual(len(response.data['resources']), 26)
        response = self.client.get(f"{url}?page_size=26&extent=-10,-10,-1,-1",
                                   format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        self.assertEqual(response.data['total'], 12)
        # Pagination
        self.assertEqual(len(response.data['resources']), 12)
        # Extent Filter: Crossing Dateline
        extent = "-180.0000,56.9689,-162.5977,70.7435,155.9180,56.9689,180.0000,70.7435"
        response = self.client.get(f"{url}?page_size=26&extent={extent}",
                                   format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        self.assertEqual(response.data['total'], 12)
        # Pagination
        self.assertEqual(len(response.data['resources']), 12)

    def test_sort_resources(self):
        """
        Ensure we can sort the Resource Base list.
        """
        url = reverse('base-resources-list')
        # Admin
        self.assertTrue(self.client.login(username='******', password='******'))
        response = self.client.get(f"{url}?sort[]=title", format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        self.assertEqual(response.data['total'], 26)
        # Pagination
        self.assertEqual(len(response.data['resources']), 10)
        resource_titles = []
        for _r in response.data['resources']:
            resource_titles.append(_r['title'])
        sorted_resource_titles = sorted(resource_titles.copy())
        self.assertEqual(resource_titles, sorted_resource_titles)
        response = self.client.get(f"{url}?sort[]=-title", format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        self.assertEqual(response.data['total'], 26)
        # Pagination
        self.assertEqual(len(response.data['resources']), 10)
        resource_titles = []
        for _r in response.data['resources']:
            resource_titles.append(_r['title'])
        # NOTE(review): despite the name, this is sorted ascending — the test
        # only checks the descending page is NOT in ascending order.
        reversed_resource_titles = sorted(resource_titles.copy())
        self.assertNotEqual(resource_titles, reversed_resource_titles)

    def test_perms_resources(self):
        """
        Ensure we can Get & Set Permissions across the Resource Base list.
        """
        url = reverse('base-resources-list')
        # Admin
        self.assertTrue(self.client.login(username='******', password='******'))
        resource = ResourceBase.objects.filter(owner__username='******').first()
        set_perms_url = urljoin(
            f"{reverse('base-resources-detail', kwargs={'pk': resource.pk})}/",
            'set_perms/')
        get_perms_url = urljoin(
            f"{reverse('base-resources-detail', kwargs={'pk': resource.pk})}/",
            'get_perms/')
        url = reverse('base-resources-detail', kwargs={'pk': resource.pk})
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(int(response.data['resource']['pk']), int(resource.pk))
        response = self.client.get(get_perms_url, format='json')
        self.assertEqual(response.status_code, 200)
        resource_perm_spec = response.data
        self.assertTrue('bobby' in resource_perm_spec['users'])
        self.assertFalse('norman' in resource_perm_spec['users'])
        # Add perms to Norman
        resource_perm_spec['users']['norman'] = resource_perm_spec['users'][
            'bobby']
        response = self.client.put(set_perms_url, data=resource_perm_spec,
                                   format='json')
        self.assertEqual(response.status_code, 200)
        response = self.client.get(get_perms_url, format='json')
        self.assertEqual(response.status_code, 200)
        resource_perm_spec = response.data
        self.assertTrue('norman' in resource_perm_spec['users'])
        # Remove perms to Norman
        resource_perm_spec['users']['norman'] = []
        response = self.client.put(set_perms_url, data=resource_perm_spec,
                                   format='json')
        self.assertEqual(response.status_code, 200)
        response = self.client.get(get_perms_url, format='json')
        self.assertEqual(response.status_code, 200)
        resource_perm_spec = response.data
        self.assertFalse('norman' in resource_perm_spec['users'])
        # Ensure get_perms and set_perms are done by users with correct permissions.
        # logout admin user
        self.assertIsNone(self.client.logout())
        # get perms
        response = self.client.get(get_perms_url, format='json')
        self.assertEqual(response.status_code, 403)
        # set perms
        response = self.client.put(set_perms_url, data=resource_perm_spec,
                                   format='json')
        self.assertEqual(response.status_code, 403)
        # login resource owner
        # get perms
        self.assertTrue(self.client.login(username='******', password='******'))
        response = self.client.get(get_perms_url, format='json')
        self.assertEqual(response.status_code, 200)
        # set perms
        response = self.client.put(set_perms_url, data=resource_perm_spec,
                                   format='json')
        self.assertEqual(response.status_code, 200)

    def test_featured_and_published_resources(self):
        """
        Ensure the 'featured/' endpoint only returns resources flagged
        as featured.
        """
        url = reverse('base-resources-list')
        # Admin
        self.assertTrue(self.client.login(username='******', password='******'))
        resources = ResourceBase.objects.filter(owner__username='******')
        url = urljoin(f"{reverse('base-resources-list')}/", 'featured/')
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        self.assertEqual(response.data['total'], 0)
        # Pagination
        self.assertEqual(len(response.data['resources']), 0)
        resources.filter(resource_type='map').update(featured=True)
        url = urljoin(f"{reverse('base-resources-list')}/", 'featured/')
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        self.assertEqual(response.data['total'], 2)
        # Pagination
        self.assertEqual(len(response.data['resources']), 2)

    def test_resource_types(self):
        """
        Ensure the 'resource_types/' endpoint lists the registered
        resource types.
        """
        url = urljoin(f"{reverse('base-resources-list')}/", 'resource_types/')
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, 200)
        self.assertTrue('resource_types' in response.data)
        self.assertTrue('layer' in response.data['resource_types'])
        self.assertTrue('map' in response.data['resource_types'])
        self.assertTrue('document' in response.data['resource_types'])
        self.assertTrue('service' in response.data['resource_types'])

    @patch('PIL.Image.open', return_value=test_image)
    def test_thumbnail_urls(self, img):
        """
        Ensure the thumbnail url reflects the current active Thumb on the resource.
        """
        # Admin
        self.assertTrue(self.client.login(username='******', password='******'))
        resource = ResourceBase.objects.filter(owner__username='******').first()
        url = reverse('base-resources-detail', kwargs={'pk': resource.pk})
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(int(response.data['resource']['pk']), int(resource.pk))
        thumbnail_url = response.data['resource']['thumbnail_url']
        self.assertIsNone(thumbnail_url)
        f = BytesIO(test_image.tobytes())
        f.name = 'test_image.jpeg'
        curated_thumbnail = CuratedThumbnail.objects.create(resource=resource,
                                                            img=File(f))
        url = reverse('base-resources-detail', kwargs={'pk': resource.pk})
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(int(response.data['resource']['pk']), int(resource.pk))
        thumbnail_url = response.data['resource']['thumbnail_url']
        self.assertTrue(curated_thumbnail.thumbnail_url in thumbnail_url)

    def test_embed_urls(self):
        """
        Ensure the embed urls reflect the concrete instance ones.
        """
        # Admin
        self.assertTrue(self.client.login(username='******', password='******'))
        resources = ResourceBase.objects.all()
        for resource in resources:
            url = reverse('base-resources-detail', kwargs={'pk': resource.pk})
            response = self.client.get(url, format='json')
            if resource.title.endswith('metadata true'):
                self.assertEqual(response.status_code, 404)
            else:
                self.assertEqual(response.status_code, 200)
                self.assertEqual(int(response.data['resource']['pk']), int(resource.pk))
                embed_url = response.data['resource']['embed_url']
                self.assertIsNotNone(embed_url)
                instance = resource.get_real_instance()
                if hasattr(instance, 'embed_url'):
                    if instance.embed_url != NotImplemented:
                        self.assertEqual(instance.embed_url, embed_url)
                    else:
                        self.assertEqual("", embed_url)
def proxy(request, url=None, response_callback=None,
          sec_chk_hosts=True, sec_chk_rules=True, **kwargs):
    """Forward *request* to a remote URL and relay the response.

    The target comes from ``url`` or the ``url`` GET parameter. When DEBUG
    is off and ``sec_chk_hosts`` is set, the target host must be in
    ``PROXY_ALLOWED_HOSTS`` (the site host, the OGC server host and hosts
    serving a plain OWS GetCapabilities request are whitelisted on the fly).
    Session cookies, a CSRF token and OAuth2 ``access_token`` credentials
    are propagated to the upstream request. Returns the upstream response
    wrapped in an ``HttpResponse``, or the result of ``response_callback``
    when one is given; redirects are reported, not followed.
    """
    # Security rules and settings
    PROXY_ALLOWED_HOSTS = getattr(settings, 'PROXY_ALLOWED_HOSTS', ())

    # Sanity url checks
    if 'url' not in request.GET and not url:
        return HttpResponse(
            "The proxy service requires a URL-encoded URL as a parameter.",
            status=400,
            content_type="text/plain")

    raw_url = url or request.GET['url']
    # Relative URLs are resolved against this very site.
    raw_url = urljoin(settings.SITEURL, raw_url) if raw_url.startswith("/") else raw_url
    url = urlsplit(raw_url)
    # 'locator' is the path (+ query/fragment) sent on the upstream connection.
    locator = str(url.path)
    if url.query != "":
        locator += '?' + url.query
    if url.fragment != "":
        locator += '#' + url.fragment

    access_token = None
    if request and 'access_token' in request.session:
        access_token = request.session['access_token']

    # White-Black Listing Hosts
    if sec_chk_hosts and not settings.DEBUG:
        site_url = urlsplit(settings.SITEURL)
        if site_url.hostname not in PROXY_ALLOWED_HOSTS:
            PROXY_ALLOWED_HOSTS += (site_url.hostname, )

        if check_ogc_backend(geoserver.BACKEND_PACKAGE):
            from geonode.geoserver.helpers import ogc_server_settings
            hostname = (
                ogc_server_settings.hostname,
            ) if ogc_server_settings else ()
            if hostname not in PROXY_ALLOWED_HOSTS:
                PROXY_ALLOWED_HOSTS += hostname

        # Whitelist any host for a plain OWS GetCapabilities request.
        if url.query and ows_regexp.match(url.query):
            ows_tokens = ows_regexp.match(url.query).groups()
            if len(
                ows_tokens
            ) == 4 and 'version' == ows_tokens[0] and StrictVersion(
                ows_tokens[1]) >= StrictVersion("1.0.0") and StrictVersion(
                    ows_tokens[1]
            ) <= StrictVersion("3.0.0") and ows_tokens[2].lower() in (
                    'getcapabilities') and ows_tokens[3].upper() in (
                    'OWS', 'WCS', 'WFS', 'WMS', 'WPS', 'CSW'):
                if url.hostname not in PROXY_ALLOWED_HOSTS:
                    PROXY_ALLOWED_HOSTS += (url.hostname, )

        if not validate_host(url.hostname, PROXY_ALLOWED_HOSTS):
            return HttpResponse(
                "DEBUG is set to False but the host of the path provided to the proxy service"
                " is not in the PROXY_ALLOWED_HOSTS setting.",
                status=403,
                content_type="text/plain")

    # Security checks based on rules; allow only specific requests
    if sec_chk_rules:
        # TODO: Not yet implemented
        pass

    # Collecting headers and cookies
    headers = {}
    cookies = None
    csrftoken = None
    if settings.SESSION_COOKIE_NAME in request.COOKIES and is_safe_url(
            url=raw_url, host=url.hostname):
        cookies = request.META["HTTP_COOKIE"]

    for cook in request.COOKIES:
        name = str(cook)
        value = request.COOKIES.get(name)
        if name == 'csrftoken':
            csrftoken = value
        cook = "%s=%s" % (name, value)
        cookies = cook if not cookies else (cookies + '; ' + cook)

    csrftoken = get_token(request) if not csrftoken else csrftoken
    if csrftoken:
        headers['X-Requested-With'] = "XMLHttpRequest"
        headers['X-CSRFToken'] = csrftoken
        cook = "%s=%s" % ('csrftoken', csrftoken)
        cookies = cook if not cookies else (cookies + '; ' + cook)

    if cookies:
        if 'JSESSIONID' in request.session and request.session['JSESSIONID']:
            cookies = cookies + '; JSESSIONID=' + \
                request.session['JSESSIONID']
        headers['Cookie'] = cookies

    if request.method in ("POST", "PUT") and "CONTENT_TYPE" in request.META:
        headers["Content-Type"] = request.META["CONTENT_TYPE"]

    # 'access_token' was already read from the session above; no need to
    # read it again here (the session has not changed meanwhile).
    if access_token:
        # TODO: the Bearer header is currently cut off by Django / GeoServer
        if request.method in ("POST", "PUT"):
            headers['Authorization'] = 'Bearer %s' % access_token
        if access_token and 'access_token' not in locator:
            query_separator = '&' if '?' in locator else '?'
            locator = ('%s%saccess_token=%s' %
                       (locator, query_separator, access_token))
    elif 'HTTP_AUTHORIZATION' in request.META:
        auth = request.META.get(
            'HTTP_AUTHORIZATION',
            request.META.get('HTTP_AUTHORIZATION2'))
        if auth:
            headers['Authorization'] = auth

    site_url = urlsplit(settings.SITEURL)
    pragma = "no-cache"
    referer = request.META[
        "HTTP_REFERER"] if "HTTP_REFERER" in request.META else \
        "{scheme}://{netloc}/".format(scheme=site_url.scheme,
                                      netloc=site_url.netloc)
    encoding = request.META[
        "HTTP_ACCEPT_ENCODING"] if "HTTP_ACCEPT_ENCODING" in request.META else "gzip"
    headers.update({
        "Pragma": pragma,
        "Referer": referer,
        "Accept-encoding": encoding,
    })

    if url.scheme == 'https':
        conn = HTTPSConnection(url.hostname, url.port)
    else:
        conn = HTTPConnection(url.hostname, url.port)
    conn.request(request.method, locator.encode('utf8'), request.body, headers)
    response = conn.getresponse()
    content = response.read()
    status = response.status
    content_type = response.getheader("Content-Type", "text/plain")

    # decompress GZipped responses if not enabled
    if content and response.getheader('Content-Encoding') == 'gzip':
        # BUGFIX: the Python 2-only 'from StringIO import StringIO' raised
        # ImportError on Python 3; gzip payloads are bytes, so use io.BytesIO.
        from io import BytesIO
        import gzip
        buf = BytesIO(content)
        f = gzip.GzipFile(fileobj=buf)
        content = f.read()

    if response_callback:
        kwargs = {} if not kwargs else kwargs
        kwargs.update({
            'response': response,
            'content': content,
            'status': status,
            'content_type': content_type
        })
        return response_callback(**kwargs)
    else:
        # If we get a redirect, let's add a useful message.
        if status in (301, 302, 303, 307):
            _response = HttpResponse(
                ('This proxy does not support redirects. The server in "%s" '
                 'asked for a redirect to "%s"' %
                 (url, response.getheader('Location'))),
                status=status,
                content_type=content_type)
            _response['Location'] = response.getheader('Location')
            return _response
        else:
            return HttpResponse(
                content=content,
                status=status,
                content_type=content_type)
url(r'^i18n/', include(django.conf.urls.i18n), name="i18n"), url(r'^jsi18n/$', JavaScriptCatalog.as_view(), js_info_dict, name='javascript-catalog') ] urlpatterns += [ # '', url(r'^showmetadata/', include('geonode.catalogue.metadataxsl.urls')), ] if settings.FAVORITE_ENABLED: urlpatterns += [ # '', url(r'^favorite/', include('geonode.favorite.urls')), ] if check_ogc_backend(geoserver.BACKEND_PACKAGE): if settings.CREATE_LAYER: urlpatterns += [ # '', url(r'^createlayer/', include('geonode.geoserver.createlayer.urls')), ] from geonode.geoserver.views import get_capabilities # GeoServer Helper Views urlpatterns += [ # '', # Upload views url(r'^upload/', include('geonode.upload.urls')), # capabilities url(r'^capabilities/layer/(?P<layerid>\d+)/$', get_capabilities, name='capabilities_layer'), url(r'^capabilities/map/(?P<mapid>\d+)/$',
class BaseApiTests(APITestCase, URLPatternsTestCase):
    """REST API tests for the GeoApps endpoints.

    Mixes in ``URLPatternsTestCase`` so the test client resolves URLs
    against the class-level ``urlpatterns`` below instead of the project
    ``ROOT_URLCONF``.
    """

    # Fixtures loaded before each test: baseline data, test groups/users
    # and the default OAuth2 applications.
    fixtures = [
        'initial_data.json',
        'group_test_data.json',
        'default_oauth_apps.json'
    ]

    # URLconf used by the test client. NOTE(review): order matters for URL
    # resolution — the repeated r'^$' entries after the first one are only
    # reachable through reverse() by name, never by a plain request to '/'.
    urlpatterns = [
        url(r'^home/$',
            TemplateView.as_view(template_name='index.html'),
            name='home'),
        url(r'^help/$',
            TemplateView.as_view(template_name='help.html'),
            name='help'),
        url(r'^developer/$',
            TemplateView.as_view(template_name='developer.html'),
            name='developer'),
        url(r'^about/$',
            TemplateView.as_view(template_name='about.html'),
            name='about'),
        url(r'^privacy_cookies/$',
            TemplateView.as_view(template_name='privacy-cookies.html'),
            name='privacy-cookies'),
        url(r"^account/", include("allauth.urls")),
        url(r'^people/', include('geonode.people.urls')),
        url(r'^api/v2/', include(router.urls)),
        url(r'^api/v2/', include('geonode.api.urls')),
        url(r'^api/v2/api-auth/',
            include('rest_framework.urls', namespace='geonode_rest_framework')),
        url(r'^upload$', layer_upload, name='layer_upload'),
        url(r'^$',
            TemplateView.as_view(template_name='layers/layer_list.html'),
            {
                'facet_type': 'layers',
                'is_layer': True
            },
            name='layer_browse'),
        url(r'^$',
            TemplateView.as_view(template_name='maps/map_list.html'),
            {
                'facet_type': 'maps',
                'is_map': True
            },
            name='maps_browse'),
        url(r'^$',
            TemplateView.as_view(template_name='documents/document_list.html'),
            {
                'facet_type': 'documents',
                'is_document': True
            },
            name='document_browse'),
        url(r'^$',
            TemplateView.as_view(template_name='groups/group_list.html'),
            name='group_list'),
        url(r'^search/$',
            TemplateView.as_view(template_name='search/search.html'),
            name='search'),
        url(r'^$', services, name='services'),
        url(
            r'^invitations/',
            include('geonode.invitations.urls',
                    namespace='geonode.invitations')),
        url(r'^i18n/', include(django.conf.urls.i18n), name="i18n"),
        url(r'^jsi18n/$', JavaScriptCatalog.as_view(), {},
            name='javascript-catalog'),
        url(r'^(?P<mapid>[^/]+)/embed$', map_embed, name='map_embed'),
        url(r'^(?P<layername>[^/]+)/embed$', layer_embed, name='layer_embed'),
        url(r'^(?P<geoappid>[^/]+)/embed$',
            geoapp_edit, {'template': 'apps/app_embed.html'},
            name='geoapp_embed'),
    ]

    # When GeoServer is the active OGC backend, also expose its helper
    # views (ACLs and user resolution) under this test URLconf.
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        from geonode.geoserver.views import layer_acls, resolve_user
        urlpatterns += [
            url(r'^acls/?$', layer_acls, name='layer_acls'),
            url(r'^acls_dep/?$', layer_acls, name='layer_acls_dep'),
            url(r'^resolve_user/?$', resolve_user,
                name='layer_resolve_user'),
            url(r'^resolve_user_dep/?$', resolve_user,
                name='layer_resolve_user_dep'),
        ]

    def setUp(self) -> None:
        """Create baseline resources plus one sample GeoApp owned by bobby."""
        create_models(b'document')
        create_models(b'map')
        create_models(b'layer')
        # Users expected to exist after the fixtures / create_models() above
        # (usernames are redacted in this source dump).
        self.admin = get_user_model().objects.get(username='******')
        self.bobby = get_user_model().objects.get(username='******')
        self.norman = get_user_model().objects.get(username='******')
        # One GeoApp with an attached JSON blob; the tests below assert on
        # this exact payload.
        self.gep_app = GeoApp.objects.create(title="Test GeoApp",
                                             owner=self.bobby)
        self.gep_app_data = GeoAppData.objects.create(
            blob='{"test_data": {"test": ["test_1","test_2","test_3"]}}',
            resource=self.gep_app)
        self.gep_app.set_default_permissions()

    def test_geoapps_list(self) -> None:
        """
        Ensure we can access the GeoApps list.
        """
        url = reverse('geoapps-list')
        # Anonymous
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        self.assertEqual(response.data['total'], 1)
        # Pagination
        self.assertEqual(len(response.data['geoapps']), 1)
        # The 'data' blob must be omitted unless explicitly requested.
        self.assertTrue('data' not in response.data['geoapps'][0])

        # Opting in with ?include[]=data exposes the stored blob.
        response = self.client.get(f"{url}?include[]=data", format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        self.assertEqual(response.data['total'], 1)
        # Pagination
        self.assertEqual(len(response.data['geoapps']), 1)
        self.assertTrue('data' in response.data['geoapps'][0])
        self.assertEqual(
            response.data['geoapps'][0]['data'],
            {
                "test_data": {
                    "test": ['test_1', 'test_2', 'test_3']
                }
            })

    def test_geoapps_crud(self) -> None:
        """
        Ensure we can create/update GeoApps.
        """
        # Bobby (credentials redacted in this source dump)
        self.assertTrue(
            self.client.login(username='******', password='******'))
        # Create
        url = reverse('geoapps-list')
        data = {
            "geoapp": {
                "name": "Test Create",
                "title": "Test Create",
                "owner": "bobby"
            }
        }
        response = self.client.post(url, data=data, format='json')
        self.assertEqual(response.status_code, 201)  # 201 - Created

        # The new GeoApp must show up next to the one made in setUp().
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 5)
        self.assertEqual(response.data['total'], 2)
        # Pagination
        self.assertEqual(len(response.data['geoapps']), 2)

        # Update: PATCH replaces the stored blob.
        url = reverse('geoapps-detail', kwargs={'pk': self.gep_app.pk})
        data = {
            "blob": {
                "test_data": {
                    "test": ['test_4', 'test_5', 'test_6']
                }
            }
        }
        response = self.client.patch(url, data=json.dumps(data),
                                     format='json')
        self.assertEqual(response.status_code, 200)

        response = self.client.get(f"{url}?include[]=data", format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data), 1)
        # Pagination
        self.assertTrue('data' in response.data['geoapp'])
        self.assertEqual(
            response.data['geoapp']['data'],
            {
                "test_data": {
                    "test": ['test_4', 'test_5', 'test_6']
                }
            })

        # Update: POST is rejected on the detail endpoint.
        data = {"test_data": {"test": ['test_1', 'test_2', 'test_3']}}
        response = self.client.post(url, data=json.dumps(data),
                                    format='json')
        self.assertEqual(response.status_code, 405)  # 405 - Method not allowed

        # Delete
        response = self.client.delete(url, format='json')
        self.assertEqual(response.status_code, 204)  # 204 - No Content

        # The deleted GeoApp must no longer be retrievable.
        response = self.client.get(f"{url}?include[]=data", format='json')
        self.assertEqual(response.status_code, 404)  # 404 - Not Found