def test_cascading_delete(self):
    """Verify that the helpers.cascading_delete() method is working properly
    """
    gs_cat = gs_catalog

    # Upload a Shapefile
    shp_file = os.path.join(
        gisdata.VECTOR_DATA, 'san_andres_y_providencia_poi.shp')
    shp_layer = file_upload(shp_file)

    # Save the names of the Resource/Store/Styles
    resource_name = shp_layer.name
    ws = gs_cat.get_workspace(shp_layer.workspace)
    store = gs_cat.get_store(shp_layer.store, ws)
    store_name = store.name
    layer = gs_cat.get_layer(resource_name)
    styles = layer.styles + [layer.default_style]

    # Delete the Layer using cascading_delete()
    cascading_delete(gs_cat, shp_layer.typename)

    # Verify that the styles were deleted
    for style in styles:
        s = gs_cat.get_style(style.name)
        assert s == None

    # Verify that the resource was deleted
    self.assertRaises(
        FailedRequestError,
        lambda: gs_cat.get_resource(resource_name, store=store))

    # Verify that the store was deleted
    self.assertRaises(
        FailedRequestError,
        lambda: gs_cat.get_store(store_name))

    # Clean up by deleting the layer from GeoNode's DB and GeoNetwork
    shp_layer.delete()

def geoserver_pre_delete(instance, sender, **kwargs):
    """Removes the layer from GeoServer
    """
    # cascading_delete should only be called if
    # ogc_server_settings.BACKEND_WRITE_ENABLED == True
    if getattr(ogc_server_settings, "BACKEND_WRITE_ENABLED", True):
        cascading_delete(gs_catalog, instance.typename)

def geoserver_pre_delete(instance, sender, **kwargs):
    """Removes the layer from GeoServer
    """
    ct = ContentType.objects.get_for_model(instance)
    OverallRating.objects.filter(content_type=ct, object_id=instance.id).delete()
    # cascading_delete should only be called if
    # ogc_server_settings.BACKEND_WRITE_ENABLED == True
    if getattr(ogc_server_settings, "BACKEND_WRITE_ENABLED", True):
        cascading_delete(Layer.objects.gs_catalog, instance.typename)

def geoserver_pre_delete(instance, sender, **kwargs):
    """Removes the layer from GeoServer
    """
    # cascading_delete should only be called if
    # ogc_server_settings.BACKEND_WRITE_ENABLED == True
    if getattr(ogc_server_settings, "BACKEND_WRITE_ENABLED", True):
        if instance.remote_service is None or instance.remote_service.method == CASCADED:
            if instance.alternate:
                cascading_delete(gs_catalog, instance.alternate)

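The geoserver_pre_delete variants above are Django pre_delete receivers. A minimal sketch of how such a receiver is typically registered, assuming the GeoNode Layer model as the sender (this wiring is illustrative, not copied from any particular GeoNode release):

from django.db.models import signals

from geonode.layers.models import Layer

# Call geoserver_pre_delete just before a Layer row is removed from the database,
# so the matching GeoServer store, resource and styles are cascade-deleted first.
signals.pre_delete.connect(geoserver_pre_delete, sender=Layer)
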
def layer_replace(request, layername, template="layers/layer_replace.html"):
    layer = _resolve_layer(
        request, layername, "base.change_resourcebase", _PERMISSION_MSG_MODIFY)
    if request.method == "GET":
        ctx = {
            "charsets": CHARSETS,
            "layer": layer,
            "is_featuretype": layer.is_vector(),
            "is_layer": True,
        }
        return render_to_response(template, RequestContext(request, ctx))
    elif request.method == "POST":
        form = LayerUploadForm(request.POST, request.FILES)
        tempdir = None
        out = {}
        if form.is_valid():
            try:
                tempdir, base_file = form.write_files()
                if layer.is_vector() and is_raster(base_file):
                    out["success"] = False
                    out["errors"] = _("You are attempting to replace a vector layer with a raster.")
                elif (not layer.is_vector()) and is_vector(base_file):
                    out["success"] = False
                    out["errors"] = _("You are attempting to replace a raster layer with a vector.")
                else:
                    # delete geoserver's store before upload
                    cat = gs_catalog
                    cascading_delete(cat, layer.typename)
                    saved_layer = file_upload(
                        base_file,
                        name=layer.name,
                        user=request.user,
                        overwrite=True,
                        charset=form.cleaned_data["charset"],
                    )
                    out["success"] = True
                    out["url"] = reverse("layer_detail", args=[saved_layer.service_typename])
            except Exception as e:
                out["success"] = False
                out["errors"] = str(e)
            finally:
                if tempdir is not None:
                    shutil.rmtree(tempdir)
        else:
            errormsgs = []
            for e in form.errors.values():
                errormsgs.append([escape(v) for v in e])
            out["errors"] = form.errors
            out["errormsgs"] = errormsgs
        if out["success"]:
            status_code = 200
        else:
            status_code = 400
        return HttpResponse(
            json.dumps(out), mimetype="application/json", status=status_code)

def test_time(self):
    """Verify that uploading time based shapefile works properly"""
    cascading_delete(self.catalog, 'boxes_with_date')

    timedir = os.path.join(GOOD_DATA, 'time')
    layer_name = 'boxes_with_date'
    shp = os.path.join(timedir, '%s.shp' % layer_name)

    # get to time step
    resp, data = self.client.upload_file(shp)
    self.assertEquals(resp.code, 200)
    if not isinstance(data, basestring):
        self.wait_for_progress(data.get('progress'))
        self.assertTrue(data['success'])
        self.assertTrue(data['redirect_to'], upload_step('time'))
        redirect_to = data['redirect_to']
        resp, data = self.client.get_html(upload_step('time'))
        self.assertEquals(resp.code, 200)
        data = dict(csrfmiddlewaretoken=self.client.get_csrf_token(),
                    time_attribute='date',
                    presentation_strategy='LIST',
                    )
        resp = self.client.make_request(redirect_to, data)
        self.assertEquals(resp.code, 200)
        resp_js = json.loads(resp.read())
        if resp_js['success']:
            url = resp_js['redirect_to']
            resp = self.client.make_request(url, data)
            url = json.loads(resp.read())['url']
            self.assertTrue(
                url.endswith(layer_name),
                'expected url to end with %s, but got %s' % (layer_name, url))
            self.assertEquals(resp.code, 200)
            url = urllib.unquote(url)

            self.check_layer_complete(url, layer_name)
            wms = get_wms(type_name='geonode:%s' % layer_name)
            layer_info = wms.items()[0][1]
            self.assertEquals(100, len(layer_info.timepositions))
        else:
            self.assertTrue('error_msg' in resp_js)
            self.assertTrue(
                'Source SRS is not valid' in resp_js['error_msg'])

def test_time(self):
    """Verify that uploading time based shapefile works properly"""
    cascading_delete(layer_name='boxes_with_date', catalog=self.catalog)

    timedir = os.path.join(GOOD_DATA, 'time')
    layer_name = 'boxes_with_date'
    shp = os.path.join(timedir, f'{layer_name}.shp')

    # get to time step
    resp, data = self.client.upload_file(shp)
    self.assertEqual(resp.status_code, 200)
    if not isinstance(data, str):
        # self.wait_for_progress(data.get('progress'))
        self.assertTrue(data['success'])
        self.assertTrue(data['redirect_to'], upload_step('time'))
        redirect_to = data['redirect_to']
        resp, data = self.client.get_html(upload_step('time'))
        self.assertEqual(resp.status_code, 200)
        data = dict(csrfmiddlewaretoken=self.client.get_csrf_token(),
                    time_attribute='date',
                    presentation_strategy='LIST',
                    )
        resp = self.client.make_request(redirect_to, data)
        self.assertEqual(resp.status_code, 200)
        resp_js = resp.json()
        if resp_js['success']:
            url = resp_js['redirect_to']
            resp = self.client.make_request(url, data)
            url = resp.json()['url']
            self.assertTrue(
                url.endswith(layer_name),
                f'expected url to end with {layer_name}, but got {url}')
            self.assertEqual(resp.status_code, 200)
            url = unquote(url)

            self.check_layer_complete(url, layer_name)
            wms = get_wms(
                type_name=f'geonode:{layer_name}',
                username=GEOSERVER_USER,
                password=GEOSERVER_PASSWD)
            layer_info = list(wms.items())[0][1]
            self.assertEqual(100, len(layer_info.timepositions))
        else:
            self.assertTrue('error_msg' in resp_js)

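The two test_time variants above also document how the helper's calling convention changed between GeoNode releases: older code passes the catalog and the qualified layer name positionally, newer code uses keyword arguments (later renamed again to dataset_name, as in the last snippets below). A short illustrative sketch; only one form exists in any given release, and the layer name is a placeholder:

from geonode.geoserver.helpers import cascading_delete, gs_catalog

# Older releases: positional (catalog, typename).
cascading_delete(gs_catalog, 'geonode:boxes_with_date')

# Newer releases: keyword arguments.
cascading_delete(layer_name='boxes_with_date', catalog=gs_catalog)
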
def test_cascading_delete(self):
    """Verify that the helpers.cascading_delete() method is working properly
    """
    gs_cat = gs_catalog

    # Upload a Shapefile
    shp_file = os.path.join(
        gisdata.VECTOR_DATA, 'san_andres_y_providencia_poi.shp')
    shp_layer = file_upload(shp_file)
    shp_layer = geoserver_post_save2(shp_layer.id)
    time.sleep(20)

    # Save the names of the Resource/Store/Styles
    self.assertIsNotNone(shp_layer.name)
    resource_name = shp_layer.name
    self.assertIsNotNone(shp_layer.workspace)
    ws = gs_cat.get_workspace(shp_layer.workspace)
    self.assertIsNotNone(shp_layer.store)
    store = gs_cat.get_store(shp_layer.store, ws)
    store_name = store.name
    self.assertIsNotNone(resource_name)
    layer = gs_cat.get_layer(resource_name)
    styles = layer.styles + [layer.default_style]

    # Delete the Layer using cascading_delete()
    cascading_delete(gs_cat, shp_layer.typename)

    # Verify that the styles were deleted
    for style in styles:
        s = gs_cat.get_style(style.name)
        assert s is None

    # Verify that the resource was deleted
    self.assertRaises(
        FailedRequestError,
        lambda: gs_cat.get_resource(resource_name, store=store))

    # Verify that the store was deleted
    self.assertRaises(
        FailedRequestError,
        lambda: gs_cat.get_store(store_name))

    # Clean up by deleting the layer from GeoNode's DB and GeoNetwork
    shp_layer.delete()

def test_time(self):
    """Verify that uploading time based shapefile works properly"""
    cascading_delete(self.catalog, 'boxes_with_date')

    timedir = os.path.join(GOOD_DATA, 'time')
    layer_name = 'boxes_with_date'
    shp = os.path.join(timedir, '%s.shp' % layer_name)

    # get to time step
    resp, data = self.client.upload_file(shp)
    self.wait_for_progress(data.get('progress'))
    self.assertEquals(resp.code, 200)
    self.assertTrue(data['success'])
    self.assertTrue(data['redirect_to'], upload_step('time'))

    resp, data = self.client.get_html(upload_step('time'))
    self.assertEquals(resp.code, 200)
    data = dict(csrfmiddlewaretoken=self.client.get_csrf_token(),
                time_attribute='date',
                presentation_strategy='LIST',
                )
    resp = self.client.make_request(upload_step('time'), data)
    url = json.loads(resp.read())['redirect_to']

    resp = self.client.make_request(url, data)
    url = json.loads(resp.read())['url']
    self.assertTrue(
        url.endswith(layer_name),
        'expected url to end with %s, but got %s' % (layer_name, url))
    self.assertEquals(resp.code, 200)
    url = urllib.unquote(url)

    self.check_layer_complete(url, layer_name)
    wms = get_wms(type_name='geonode:%s' % layer_name)
    layer_info = wms.items()[0][1]
    self.assertEquals(100, len(layer_info.timepositions))

def test_configure_time(self):
    # make sure it's not there (and configured)
    cascading_delete(gs_catalog, 'boxes_with_end_date')

    def get_wms_timepositions():
        metadata = get_wms().contents['geonode:boxes_with_end_date']
        self.assertTrue(metadata is not None)
        return metadata.timepositions

    thefile = os.path.join(
        GOOD_DATA, 'time', 'boxes_with_end_date.shp'
    )
    uploaded = file_upload(thefile, overwrite=True)
    check_layer(uploaded)

    # initial state is no positions or info
    self.assertTrue(get_wms_timepositions() is None)
    self.assertTrue(get_time_info(uploaded) is None)

    # enable using interval and single attribute
    set_time_info(uploaded, 'date', None, 'DISCRETE_INTERVAL', 3, 'days')
    self.assertEquals(
        ['2000-03-01T00:00:00.000Z/2000-06-08T00:00:00.000Z/P3D'],
        get_wms_timepositions()
    )
    self.assertEquals(
        {'end_attribute': None, 'presentation': 'DISCRETE_INTERVAL',
         'attribute': 'date', 'enabled': True, 'precision_value': '3',
         'precision_step': 'days'},
        get_time_info(uploaded)
    )

    # disable but configure to use enddate attribute in list
    set_time_info(uploaded, 'date', 'enddate', 'LIST', None, None, enabled=False)
    # verify disabled
    self.assertTrue(get_wms_timepositions() is None)

    # test enabling now
    info = get_time_info(uploaded)
    info['enabled'] = True
    set_time_info(uploaded, **info)
    self.assertEquals(100, len(get_wms_timepositions()))

def setUp(self):
    super(UploaderBase, self).setUp()
    self._tempfiles = []
    self.client = Client(GEONODE_URL, GEONODE_USER, GEONODE_PASSWD)
    self.catalog = Catalog(GEOSERVER_URL + 'rest',
                           GEOSERVER_USER, GEOSERVER_PASSWD)

    # @todo - this is obviously the brute force approach - ideally,
    # these cases would be more declarative and delete only the things
    # they mess with
    for l in Layer.objects.all():
        try:
            l.delete()
        except:
            print 'unable to delete layer', l

    # and destroy anything left dangling on geoserver
    cat = gs_catalog
    map(lambda name: cascading_delete(cat, name),
        [l.name for l in cat.get_layers()])

def setUp(self):
    super(UploaderBase, self).setUp()
    self._tempfiles = []
    self.client = Client(
        GEONODE_URL, GEONODE_USER, GEONODE_PASSWD
    )
    self.catalog = Catalog(
        GEOSERVER_URL + 'rest',
        GEOSERVER_USER, GEOSERVER_PASSWD
    )

    # @todo - this is obviously the brute force approach - ideally,
    # these cases would be more declarative and delete only the things
    # they mess with
    for l in Layer.objects.all():
        try:
            l.delete()
        except:
            print 'unable to delete layer', l

    # and destroy anything left dangling on geoserver
    cat = Layer.objects.gs_catalog
    map(lambda name: cascading_delete(cat, name),
        [l.name for l in cat.get_layers()])

def test_configure_time(self):
    layer_name = 'boxes_with_end_date'
    # make sure it's not there (and configured)
    cascading_delete(layer_name=layer_name, catalog=gs_catalog)

    def get_wms_timepositions():
        alternate_name = 'geonode:%s' % layer_name
        if alternate_name in get_wms().contents:
            metadata = get_wms().contents[alternate_name]
            self.assertTrue(metadata is not None)
            return metadata.timepositions
        else:
            return None

    thefile = os.path.join(
        GOOD_DATA, 'time', '%s.shp' % layer_name
    )
    resp, data = self.client.upload_file(thefile)

    # initial state is no positions or info
    self.assertTrue(get_wms_timepositions() is None)
    self.assertEqual(resp.status_code, 200)

    # enable using interval and single attribute
    if not isinstance(data, string_types):
        self.wait_for_progress(data.get('progress'))
        self.assertTrue(data['success'])
        self.assertTrue(data['redirect_to'], upload_step('time'))
        redirect_to = data['redirect_to']
        resp, data = self.client.get_html(upload_step('time'))
        self.assertEqual(resp.status_code, 200)
        data = dict(csrfmiddlewaretoken=self.client.get_csrf_token(),
                    time_attribute='date',
                    time_end_attribute='enddate',
                    presentation_strategy='LIST',
                    )
        resp = self.client.make_request(redirect_to, data)
        self.assertEqual(resp.status_code, 200)
        resp_js = resp.json()
        if resp_js['success']:
            url = resp_js['redirect_to']
            resp = self.client.make_request(url, data)
            url = resp.json()['url']
            self.assertTrue(
                url.endswith(layer_name),
                'expected url to end with %s, but got %s' % (layer_name, url))
            self.assertEqual(resp.status_code, 200)
            url = unquote(url)

            self.check_layer_complete(url, layer_name)
            wms = get_wms(
                type_name='geonode:%s' % layer_name,
                username=GEOSERVER_USER,
                password=GEOSERVER_PASSWD)
            layer_info = list(wms.items())[0][1]
            self.assertEqual(100, len(layer_info.timepositions))
        else:
            self.assertTrue('error_msg' in resp_js)

if -180 <= minx <= 180 and -180 <= maxx <= 180 and \
   -90 <= miny <= 90 and -90 <= maxy <= 90:
    logger.info(
        'GeoServer failed to detect the projection for layer '
        '[%s]. Guessing EPSG:4326', name)
    # If GeoServer couldn't figure out the projection, we just
    # assume it's lat/lon to avoid a bad GeoServer configuration
    gs_resource.latlon_bbox = gs_resource.native_bbox
    gs_resource.projection = "EPSG:4326"
    cat.save(gs_resource)
else:
    msg = ('GeoServer failed to detect the projection for layer '
           '[%s]. It doesn\'t look like EPSG:4326, so backing out '
           'the layer.')
    logger.info(msg, name)
    cascading_delete(cat, name)
    raise GeoNodeException(msg % name)

# Step 7. Create the style and assign it to the created resource
# FIXME: Put this in gsconfig.py
logger.info('>>> Step 7. Creating style for [%s]' % name)
publishing = cat.get_layer(name)

if 'sld' in files:
    f = open(files['sld'], 'r')
    sld = f.read()
    f.close()
else:
    sld = get_sld_for(publishing)

if sld is not None:

def layer_replace(request, layername, template='layers/layer_replace.html'):
    layer = _resolve_layer(
        request,
        layername,
        'base.change_resourcebase',
        _PERMISSION_MSG_MODIFY)
    if request.method == 'GET':
        ctx = {
            'charsets': CHARSETS,
            'layer': layer,
            'is_featuretype': layer.is_vector(),
            'is_layer': True,
        }
        return render_to_response(template, RequestContext(request, ctx))
    elif request.method == 'POST':
        form = LayerUploadForm(request.POST, request.FILES)
        tempdir = None
        out = {}

        if form.is_valid():
            try:
                tempdir, base_file = form.write_files()
                if layer.is_vector() and is_raster(base_file):
                    out['success'] = False
                    out['errors'] = _("You are attempting to replace a vector layer with a raster.")
                elif (not layer.is_vector()) and is_vector(base_file):
                    out['success'] = False
                    out['errors'] = _("You are attempting to replace a raster layer with a vector.")
                else:
                    try:  #^^
                        main = Main.objects.get(layer=layer)  #^^
                        main_id = main.id  #^^
                    except:  #^^
                        main_id = None  #^^
                    # delete geoserver's store before upload
                    cat = gs_catalog
                    cascading_delete(cat, layer.typename)
                    saved_layer = file_upload(
                        base_file,
                        name=layer.name,
                        user=request.user,
                        overwrite=True,
                        charset=form.cleaned_data["charset"],
                        main_id=main_id,  #^^
                    )
                    print 'debug file_upload success'
                    out['success'] = True
                    out['url'] = reverse(
                        'layer_detail', args=[
                            saved_layer.service_typename])
            except Exception as e:
                out['success'] = False
                out['errors'] = str(e)
            finally:
                if tempdir is not None:
                    shutil.rmtree(tempdir)
        else:
            errormsgs = []
            for e in form.errors.values():
                errormsgs.append([escape(v) for v in e])

            out['errors'] = form.errors
            out['errormsgs'] = errormsgs

        if out['success']:
            status_code = 200
        else:
            status_code = 400
        return HttpResponse(
            json.dumps(out),
            mimetype='application/json',
            status=status_code)

def geoserver_delete(typename):
    # cascading_delete should only be called if
    # ogc_server_settings.BACKEND_WRITE_ENABLED == True
    if getattr(ogc_server_settings, "BACKEND_WRITE_ENABLED", True):
        cascading_delete(gs_catalog, typename)

def test_configure_time(self):
    layer_name = 'boxes_with_end_date'
    # make sure it's not there (and configured)
    cascading_delete(gs_catalog, layer_name)

    def get_wms_timepositions():
        alternate_name = 'geonode:%s' % layer_name
        if alternate_name in get_wms().contents:
            metadata = get_wms().contents[alternate_name]
            self.assertTrue(metadata is not None)
            return metadata.timepositions
        else:
            return None

    thefile = os.path.join(
        GOOD_DATA, 'time', '%s.shp' % layer_name
    )
    resp, data = self.client.upload_file(thefile)

    # initial state is no positions or info
    self.assertTrue(get_wms_timepositions() is None)
    self.assertEquals(resp.code, 200)

    # enable using interval and single attribute
    if not isinstance(data, basestring):
        self.wait_for_progress(data.get('progress'))
        self.assertTrue(data['success'])
        self.assertTrue(data['redirect_to'], upload_step('time'))
        redirect_to = data['redirect_to']
        resp, data = self.client.get_html(upload_step('time'))
        self.assertEquals(resp.code, 200)
        data = dict(csrfmiddlewaretoken=self.client.get_csrf_token(),
                    time_attribute='date',
                    time_end_attribute='enddate',
                    presentation_strategy='LIST',
                    )
        resp = self.client.make_request(redirect_to, data)
        self.assertEquals(resp.code, 200)
        resp_js = json.loads(resp.read())
        if resp_js['success']:
            url = resp_js['redirect_to']
            resp = self.client.make_request(url, data)
            url = json.loads(resp.read())['url']
            self.assertTrue(
                url.endswith(layer_name),
                'expected url to end with %s, but got %s' % (layer_name, url))
            self.assertEquals(resp.code, 200)
            url = urllib.unquote(url)

            self.check_layer_complete(url, layer_name)
            wms = get_wms(type_name='geonode:%s' % layer_name)
            layer_info = wms.items()[0][1]
            self.assertEquals(100, len(layer_info.timepositions))
        else:
            self.assertTrue('error_msg' in resp_js)
            self.assertTrue(
                'Source SRS is not valid' in resp_js['error_msg'])

def test_configure_time(self):
    dataset_name = 'boxes_with_end_date'
    # make sure it's not there (and configured)
    cascading_delete(dataset_name=dataset_name, catalog=gs_catalog)

    def get_wms_timepositions():
        alternate_name = f'geonode:{dataset_name}'
        if alternate_name in get_wms().contents:
            metadata = get_wms().contents[alternate_name]
            self.assertTrue(metadata is not None)
            return metadata.timepositions
        else:
            return None

    thefile = os.path.join(
        GOOD_DATA, 'time', f'{dataset_name}.shp'
    )

    # Test upload with custom permissions
    resp, data = self.client.upload_file(
        thefile,
        perms='{"users": {"AnonymousUser": []}, "groups":{}}'
    )
    _dataset = Dataset.objects.get(name=dataset_name)
    _user = get_user_model().objects.get(username='******')
    self.assertEqual(_dataset.get_user_perms(_user).count(), 0)

    # initial state is no positions or info
    self.assertTrue(get_wms_timepositions() is None)
    self.assertEqual(resp.status_code, 200)

    # enable using interval and single attribute
    if not isinstance(data, str):
        # self.wait_for_progress(data.get('progress'))
        self.assertTrue(data['success'])
        self.assertTrue(data['redirect_to'], upload_step('time'))
        redirect_to = data['redirect_to']
        resp, data = self.client.get_html(upload_step('time'))
        self.assertEqual(resp.status_code, 200)
        data = dict(csrfmiddlewaretoken=self.client.get_csrf_token(),
                    time_attribute='date',
                    time_end_attribute='enddate',
                    presentation_strategy='LIST',
                    )
        resp = self.client.make_request(redirect_to, data)
        self.assertEqual(resp.status_code, 200)
        resp_js = resp.json()
        if resp_js['success']:
            url = resp_js['redirect_to']
            resp = self.client.make_request(url, data)
            url = resp.json()['url']
            self.assertTrue(
                url.endswith(dataset_name),
                f'expected url to end with {dataset_name}, but got {url}')
            self.assertEqual(resp.status_code, 200)
            url = unquote(url)

            self.check_dataset_complete(url, dataset_name)
            wms = get_wms(
                type_name=f'geonode:{dataset_name}',
                username=GEOSERVER_USER,
                password=GEOSERVER_PASSWD)
            dataset_info = list(wms.items())[0][1]
            self.assertEqual(100, len(dataset_info.timepositions))
        else:
            self.assertTrue('error_msg' in resp_js)

def layer_replace(request, layername, template='layers/layer_replace.html'):
    layer = _resolve_layer(
        request,
        layername,
        'base.change_resourcebase',
        _PERMISSION_MSG_MODIFY)
    if request.method == 'GET':
        ctx = {
            'charsets': CHARSETS,
            'layer': layer,
            'is_featuretype': layer.is_vector(),
            'is_layer': True,
        }
        return render_to_response(template, RequestContext(request, ctx))
    elif request.method == 'POST':
        form = LayerUploadForm(request.POST, request.FILES)
        tempdir = None
        out = {}

        if form.is_valid():
            try:
                tempdir, base_file = form.write_files()
                if layer.is_vector() and is_raster(base_file):
                    out['success'] = False
                    out['errors'] = _("You are attempting to replace a vector layer with a raster.")
                elif (not layer.is_vector()) and is_vector(base_file):
                    out['success'] = False
                    out['errors'] = _("You are attempting to replace a raster layer with a vector.")
                else:
                    # delete geoserver's store before upload
                    cat = gs_catalog
                    cascading_delete(cat, layer.typename)
                    saved_layer = file_upload(
                        base_file,
                        name=layer.name,
                        user=request.user,
                        overwrite=True,
                        charset=form.cleaned_data["charset"],
                    )
                    out['success'] = True
                    out['url'] = reverse(
                        'layer_detail', args=[
                            saved_layer.service_typename])
            except Exception as e:
                out['success'] = False
                out['errors'] = str(e)
            finally:
                if tempdir is not None:
                    shutil.rmtree(tempdir)
        else:
            errormsgs = []
            for e in form.errors.values():
                errormsgs.append([escape(v) for v in e])

            out['errors'] = form.errors
            out['errormsgs'] = errormsgs

        if out['success']:
            status_code = 200
        else:
            status_code = 400
        return HttpResponse(
            json.dumps(out),
            content_type='application/json',
            status=status_code)

def geoserver_pre_save(instance, sender, **kwargs):
    """Send information to geoserver.

       The attributes sent include:

        * Title
        * Abstract
        * Name
        * Keywords
        * Metadata Links,
        * Point of Contact name and url
    """
    base_file = instance.get_base_file()

    # There is no need to process it if there is not file.
    if base_file is None:
        return

    # delete geoserver's store before upload
    cascading_delete(gs_catalog, instance.typename)

    gs_name, workspace, values = geoserver_upload(
        instance,
        base_file.file.path,
        instance.owner,
        instance.name,
        overwrite=True,
        title=instance.title,
        abstract=instance.abstract,
        # keywords=instance.keywords,
        charset=instance.charset)

    # Set fields obtained via the geoserver upload.
    instance.name = gs_name
    instance.workspace = workspace

    # Iterate over values from geoserver.
    for key in ['typename', 'store', 'storeType']:
        setattr(instance, key, values[key])

    gs_resource = gs_catalog.get_resource(
        instance.name,
        store=instance.store,
        workspace=instance.workspace)

    gs_resource.title = instance.title
    gs_resource.abstract = instance.abstract
    gs_resource.name = instance.name

    # Get metadata links
    metadata_links = []
    for link in instance.link_set.metadata():
        metadata_links.append((link.mime, link.name, link.url))

    gs_resource.metadata_links = metadata_links
    # gs_resource should only be called if
    # ogc_server_settings.BACKEND_WRITE_ENABLED == True
    if getattr(ogc_server_settings, "BACKEND_WRITE_ENABLED", True):
        gs_catalog.save(gs_resource)

    gs_layer = gs_catalog.get_layer(instance.name)

    if instance.poc and instance.poc:
        gs_layer.attribution = str(instance.poc)
        profile = Profile.objects.get(username=instance.poc.username)
        gs_layer.attribution_link = settings.SITEURL[:-1] + profile.get_absolute_url()
        # gs_layer should only be called if
        # ogc_server_settings.BACKEND_WRITE_ENABLED == True
        if getattr(ogc_server_settings, "BACKEND_WRITE_ENABLED", True):
            gs_catalog.save(gs_layer)

    """Get information from geoserver.

       The attributes retrieved include:

       * Bounding Box
       * SRID
       * Download links (WMS, WCS or WFS and KML)
       * Styles (SLD)
    """
    gs_resource = gs_catalog.get_resource(instance.name)

    bbox = gs_resource.latlon_bbox

    # FIXME(Ariel): Correct srid setting below
    # self.srid = gs_resource.src

    instance.srid_url = "http://www.spatialreference.org/ref/" + \
        instance.srid.replace(':', '/').lower() + "/"

    # Set bounding box values
    instance.bbox_x0 = bbox[0]
    instance.bbox_x1 = bbox[1]
    instance.bbox_y0 = bbox[2]
    instance.bbox_y1 = bbox[3]

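The srid_url assembled near the end of geoserver_pre_save is plain string manipulation; as a worked example, a layer whose srid is 'EPSG:4326' ends up pointing at the spatialreference.org page shown below:

srid = "EPSG:4326"
srid_url = "http://www.spatialreference.org/ref/" + srid.replace(':', '/').lower() + "/"
# -> "http://www.spatialreference.org/ref/epsg/4326/"
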
def geoserver_pre_delete(instance, sender, **kwargs):
    """Removes the layer from GeoServer
    """
    # cascading_delete should only be called if
    # ogc_server_settings.BACKEND_WRITE_ENABLED == True
    if getattr(ogc_server_settings, "BACKEND_WRITE_ENABLED", True):
        cascading_delete(gs_catalog, instance.typename)

if -180 <= minx <= 180 and -180 <= maxx <= 180 and \
   -90 <= miny <= 90 and -90 <= maxy <= 90:
    logger.info('GeoServer failed to detect the projection for layer '
                '[%s]. Guessing EPSG:4326', name)
    # If GeoServer couldn't figure out the projection, we just
    # assume it's lat/lon to avoid a bad GeoServer configuration
    gs_resource.latlon_bbox = gs_resource.native_bbox
    gs_resource.projection = "EPSG:4326"
    cat.save(gs_resource)
else:
    msg = ('GeoServer failed to detect the projection for layer '
           '[%s]. It doesn\'t look like EPSG:4326, so backing out '
           'the layer.')
    logger.info(msg, name)
    cascading_delete(cat, name)
    raise GeoNodeException(msg % name)

# Step 7. Create the style and assign it to the created resource
# FIXME: Put this in gsconfig.py
logger.info('>>> Step 7. Creating style for [%s]' % name)
publishing = cat.get_layer(name)

if 'sld' in files:
    f = open(files['sld'], 'r')
    sld = f.read()
    f.close()
else:
    sld = get_sld_for(publishing)

if sld is not None:

def geoserver_pre_delete(instance, sender, **kwargs):
    """Removes the layer from GeoServer
    """
    ct = ContentType.objects.get_for_model(instance)
    OverallRating.objects.filter(content_type=ct, object_id=instance.id).delete()
    cascading_delete(Layer.objects.gs_catalog, instance.typename)

def delete_layer(self, layername):
    try:
        cascading_delete(gs_catalog, "{}:{}".format(self.workspace, layername))
    except Exception as e:
        logger.error(e.message)

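A hypothetical call to the wrapper above, assuming an object whose workspace attribute is 'geonode' (both the instance and the layer name are placeholders, not taken from the source):

# Builds the qualified name "geonode:roads" and cascade-deletes it from GeoServer;
# any failure is logged instead of being raised to the caller.
service.delete_layer('roads')
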