def _check_geoserver_store(store_name, layer_type, overwrite):
    """Check if the store exists in geoserver"""
    try:
        store = gs_catalog.get_store(store_name)
    except geoserver.catalog.FailedRequestError:
        # No store with this name exists, ergo the road is clear.
        return
    if not store:
        return
    resources = store.get_resources()
    if not resources:
        # Empty store: either clear it out or refuse to proceed.
        if not overwrite:
            raise GeoNodeException("Layer already exists")
        logger.debug("Deleting previously existing store")
        store.delete()
        return
    # Non-empty store: validate any resource that collides by name.
    for resource in resources:
        if resource.name != store_name:
            continue
        if not overwrite:
            raise GeoNodeException(
                "Name already in use and overwrite is False")
        existing_type = resource.resource_type
        if existing_type != layer_type:
            msg = ("Type of uploaded file {} ({}) does not "
                   "match type of existing resource type "
                   "{}".format(store_name, layer_type, existing_type))
            logger.error(msg)
            raise GeoNodeException(msg)
def get_files(filename):
    """Converts the data to Shapefiles or Geotiffs and returns
       a dictionary with all the required files

    :param filename: path to the base file (e.g. the ``.shp``)
    :returns: dict mapping a short key ('base', 'shp', 'dbf', 'shx',
        'prj', 'sld', 'xml') to the matching file path
    :raises GeoNodeException: when a required sidecar file is missing
        or when several case-variant sidecar files are ambiguous

    NOTE(review): uses dict.iteritems() — this is Python 2 code.
    """
    files = {'base': filename}
    base_name, extension = os.path.splitext(filename)
    #Replace special characters in filenames - []{}()
    # (escape glob metacharacters so they match literally)
    glob_name = re.sub(r'([\[\]\(\)\{\}])', r'[\g<1>]', base_name)
    if extension.lower() == '.shp':
        # A shapefile requires its .dbf and .shx companions; match any
        # case variant of the extension on disk.
        required_extensions = dict(
            shp='.[sS][hH][pP]', dbf='.[dD][bB][fF]', shx='.[sS][hH][xX]')
        for ext, pattern in required_extensions.iteritems():
            matches = glob.glob(glob_name + pattern)
            if len(matches) == 0:
                msg = ('Expected helper file %s does not exist; a Shapefile '
                       'requires helper files with the following extensions: '
                       '%s') % (base_name + "." + ext,
                                required_extensions.keys())
                raise GeoNodeException(msg)
            elif len(matches) > 1:
                msg = ('Multiple helper files for %s exist; they need to be '
                       'distinct by spelling and not just case.') % filename
                raise GeoNodeException(msg)
            else:
                files[ext] = matches[0]
        # .prj (projection) is optional but must be unambiguous.
        matches = glob.glob(glob_name + ".[pP][rR][jJ]")
        if len(matches) == 1:
            files['prj'] = matches[0]
        elif len(matches) > 1:
            msg = ('Multiple helper files for %s exist; they need to be '
                   'distinct by spelling and not just case.') % filename
            raise GeoNodeException(msg)
    # Optional style file (.sld).
    matches = glob.glob(glob_name + ".[sS][lL][dD]")
    if len(matches) == 1:
        files['sld'] = matches[0]
    elif len(matches) > 1:
        msg = ('Multiple style files for %s exist; they need to be '
               'distinct by spelling and not just case.') % filename
        raise GeoNodeException(msg)
    matches = glob.glob(base_name + ".[xX][mM][lL]")

    # shapefile XML metadata is sometimes named base_name.shp.xml
    # try looking for filename.xml if base_name.xml does not exist
    if len(matches) == 0:
        matches = glob.glob(filename + ".[xX][mM][lL]")

    if len(matches) == 1:
        files['xml'] = matches[0]
    elif len(matches) > 1:
        msg = ('Multiple XML files for %s exist; they need to be '
               'distinct by spelling and not just case.') % filename
        raise GeoNodeException(msg)
    return files
def get_attributes(geometry_type, json_attrs=None):
    """
    Convert a json representation of attributes to a Python representation.

    parameters:

    json_attrs
    {
      "field_str": "string",
      "field_int": "integer",
      "field_date": "date",
      "field_float": "float"
    }

    geometry_type: a string which can be "Point", "LineString" or "Polygon"

    Output:
    [
        ['the_geom', u'com.vividsolutions.jts.geom.Polygon', {'nillable': False}],
        ['field_str', 'java.lang.String', {'nillable': True}],
        ['field_int', 'java.lang.Integer', {'nillable': True}],
        ['field_date', 'java.util.Date', {'nillable': True}],
        ['field_float', 'java.lang.Float', {'nillable': True}]
    ]

    :raises GeoNodeException: when an attribute name slugifies to empty
        or when an attribute type is not float/date/string/integer
    """
    # The geometry attribute always comes first and is mandatory.
    lattrs = [
        ['the_geom',
         'com.vividsolutions.jts.geom.%s' % geometry_type,
         {'nillable': False}],
    ]
    if not json_attrs:
        return lattrs
    for raw_name, raw_type in json.loads(json_attrs).items():
        attr_name = slugify(raw_name)
        attr_type = raw_type.lower()
        if not attr_name:
            msg = 'You must provide an attribute name for attribute of type %s' % (
                attr_type)
            logger.error(msg)
            raise GeoNodeException(msg)
        if attr_type not in ('float', 'date', 'string', 'integer'):
            msg = '%s is not a valid type for attribute %s' % (
                attr_type, attr_name)
            logger.error(msg)
            raise GeoNodeException(msg)
        # Java class: Date lives in java.util, the rest in java.lang.
        # (The original built this with "'%s' % x[:1].upper() + x[1:]",
        # which only worked because % binds tighter than + — fragile.)
        package = 'java.util' if attr_type == 'date' else 'java.lang'
        lattrs.append([attr_name,
                       '%s.%s' % (package, attr_type.capitalize()),
                       {'nillable': True}])
    return lattrs
def save(layer, base_file, user, overwrite=True, title=None, abstract=None,
         permissions=None, keywords=(), charset='UTF-8'):
    """Upload layer data to Geoserver and registers it with Geonode.

       If specified, the layer given is overwritten, otherwise a new layer
       is created.

    NOTE(review): this is Python 2 code (``except X, e`` syntax). Only the
    steps up to the GeoServer store lookup are visible here.
    """
    logger.info(_separator)
    # Step -1. Verify if the filename is in ascii format.
    try:
        base_file.decode('ascii')
    except UnicodeEncodeError:
        msg = "Please use only characters from the english alphabet for the filename. '%s' is not yet supported." % os.path.basename(
            base_file).encode('UTF-8')
        raise GeoNodeException(msg)

    logger.info('Uploading layer: [%s], base filename: [%s]', layer, base_file)
    # Step 0. Verify the file exists
    logger.info('>>> Step 0. Verify if the file %s exists so we can create '
                'the layer [%s]' % (base_file, layer))
    if not os.path.exists(base_file):
        msg = ('Could not open %s to save %s. Make sure you are using a '
               'valid file' % (base_file, layer))
        logger.warn(msg)
        raise GeoNodeException(msg)

    # Step 1. Figure out a name for the new layer, the one passed might not
    # be valid or being used.
    logger.info('>>> Step 1. Figure out a name for %s', layer)
    name = get_valid_layer_name(layer, overwrite)

    # Step 2. Check that it is uploading to the same resource type as
    # the existing resource
    logger.info('>>> Step 2. Make sure we are not trying to overwrite a '
                'existing resource named [%s] with the wrong type', name)
    the_layer_type = layer_type(base_file)

    # Get a short handle to the gsconfig geoserver catalog
    cat = Layer.objects.gs_catalog

    # Check if the store exists in geoserver
    try:
        store = cat.get_store(name)
    except geoserver.catalog.FailedRequestError, e:
        # There is no store, ergo the road is clear
        pass
def layer_type(filename):
    """Finds out if a filename is a Feature or a Vector
       returns a gsconfig resource_type string
       that can be either 'featureType' or 'coverage'
    """
    extension = os.path.splitext(filename)[1]
    if extension.lower() == '.zip':
        # Peek inside the archive for a recognizable payload extension.
        zf = ZipFile(filename)
        # ZipFile doesn't support with statement in 2.6, so don't do it
        try:
            for member in zf.namelist():
                member_ext = os.path.splitext(member.lower())[1]
                if (member_ext in shp_exts or member_ext in cov_exts
                        or member_ext in csv_exts):
                    extension = member_ext
        finally:
            zf.close()

    lowered = extension.lower()
    if lowered in vec_exts:
        return 'vector'
    if lowered in cov_exts:
        return 'raster'
    raise GeoNodeException(
        'Saving of extension [%s] is not implemented' % extension)
def harvest_resource(self, resource_id, geonode_service):
    """Harvest a single resource from the service

    This method creates new ``geonode.layers.models.Dataset`` instances
    (and their related objects too) and save them in the database.

    :arg resource_id: The resource's identifier
    :type resource_id: str
    :arg geonode_service: The already saved service instance
    :type geonode_service: geonode.services.models.Service

    :raises GeoNodeException: when neither a service instance nor a stored
        service id is available to resolve the harvester.
    """
    if geonode_service or self.geonode_service_id:
        try:
            # Prefer the explicitly passed service; fall back to the id
            # remembered on this handler.
            _service = geonode_service or models.Service.objects.get(
                id=self.geonode_service_id)
            if _service.harvester:
                _h = _service.harvester
                # Flag just this resource for harvesting.
                _h.harvestable_resources.filter(id=resource_id).update(
                    should_be_harvested=True)
                _h.status = _h.STATUS_PERFORMING_HARVESTING
                _h.save()
                _h_session = AsynchronousHarvestingSession.objects.create(
                    harvester=_h,
                    session_type=AsynchronousHarvestingSession.TYPE_HARVESTING)
                # Kick off the actual harvesting asynchronously (Celery task).
                harvest_resources.apply_async(
                    args=([resource_id, ], _h_session.pk))
        except Exception as e:
            # Best-effort: harvesting failures are logged, not propagated.
            logger.exception(e)
    else:
        raise GeoNodeException(
            f"Could not harvest resource id {resource_id} for service {self.name}"
        )
def set_metadata(xml):
    """Generate dict of model properties based on XML metadata

    :param xml: metadata document as a string
    :returns: ``[identifier, vals, regions, keywords]``
    :raises GeoNodeException: when the document cannot be parsed as XML
    :raises RuntimeError: when the metadata format is not ISO, FGDC or
        Dublin Core
    """
    # check if document is XML
    try:
        exml = dlxml.fromstring(xml.encode())
    except Exception as err:
        raise GeoNodeException(f'Uploaded XML document is not XML: {str(err)}')

    # check if document is an accepted XML metadata format
    tagname = get_tagname(exml)

    if tagname == 'GetRecordByIdResponse':  # strip CSW element
        LOGGER.debug('stripping CSW root element')
        # Element.getchildren() was deprecated and removed from
        # xml.etree.ElementTree in Python 3.9; list(element) is the
        # supported equivalent.
        exml = list(exml)[0]
        tagname = get_tagname(exml)

    if tagname == 'MD_Metadata':  # ISO
        identifier, vals, regions, keywords = iso2dict(exml)
    elif tagname == 'metadata':  # FGDC
        identifier, vals, regions, keywords = fgdc2dict(exml)
    elif tagname == 'Record':  # Dublin Core
        identifier, vals, regions, keywords = dc2dict(exml)
    else:
        raise RuntimeError('Unsupported metadata format')
    if not vals.get("date"):
        # Fall back to "now" in the active timezone when the document
        # carries no date.
        vals["date"] = datetime.datetime.now(
            timezone.get_current_timezone()).strftime("%Y-%m-%dT%H:%M:%S")

    return [identifier, vals, regions, keywords]
def layer_type(filename):
    """Finds out if a filename is a Feature or a Vector
       returns a gsconfig resource_type string
       that can be either 'featureType' or 'coverage'
    """
    extension = os.path.splitext(filename)[1]

    if extension.lower() == '.zip':
        # Look inside the archive for a recognizable payload extension.
        with ZipFile(filename, allowZip64=True) as archive:
            for member in archive.namelist():
                member_ext = os.path.splitext(member.lower())[1]
                if (member_ext in shp_exts or member_ext in cov_exts
                        or member_ext in csv_exts):
                    extension = member_ext

    if extension.lower() == '.tar' or filename.endswith('.tar.gz'):
        # Same probing for tarballs (plain or gzipped).
        with tarfile.open(filename) as archive:
            for member in archive.getnames():
                member_ext = os.path.splitext(member.lower())[1]
                if (member_ext in shp_exts or member_ext in cov_exts
                        or member_ext in csv_exts):
                    extension = member_ext

    lowered = extension.lower()
    if lowered in vec_exts:
        return 'vector'
    if lowered in cov_exts:
        return 'raster'
    msg = f'Saving of extension [{extension}] is not implemented'
    raise GeoNodeException(msg)
def rndt_parser(xml, uuid="", vals=None, regions=None, keywords=None,
                custom=None):
    """Parse an RNDT metadata XML document.

    :param xml: metadata document as a string
    :param uuid: identifier passed through unchanged
    :param vals: dict of model properties to augment (new dict if None)
    :param regions: list of regions to pass through (new list if None)
    :param keywords: replaced by the keywords resolved from the document
    :param custom: dict of RNDT-specific extras to augment (new dict if None)
    :returns: ``(uuid, vals, regions, keywords, custom)``
    :raises GeoNodeException: when the document cannot be parsed as XML
    """
    # BUG FIX: the original signature used mutable default arguments
    # (vals={}, regions=[], keywords=[], custom={}), which are shared
    # across calls and silently accumulate state. Use None sentinels.
    vals = {} if vals is None else vals
    regions = [] if regions is None else regions
    keywords = [] if keywords is None else keywords
    custom = {} if custom is None else custom

    # check if document is XML
    try:
        exml = dlxml.fromstring(xml.encode())
    except Exception as err:
        raise GeoNodeException(f"Uploaded XML document is not XML: {str(err)}")

    # check if document is an accepted XML metadata format
    tagname = get_tagname(exml)
    if tagname == "GetRecordByIdResponse":  # strip CSW element
        # list(element) replaces Element.getchildren(), which was removed
        # from xml.etree.ElementTree in Python 3.9.
        exml = list(exml)[0]
        tagname = get_tagname(exml)

    # Renamed from `rndt_parser` — the original shadowed this function's
    # own name with the parser instance.
    parser = RNDTMetadataParser(exml)
    keywords, discarded = parser.resolve_keywords()
    custom["rejected_keywords"] = discarded
    use_constr = parser.get_access_costraints(custom)
    parser.get_use_costraints(vals, use_constr)
    parser.get_resolutions(custom)
    parser.get_accuracy(custom)
    return uuid, vals, regions, keywords, custom
def fetch_gs_resource(values, tries):
    """Fetch the GeoServer resource for the enclosing layer, retrying later.

    NOTE(review): relies on outer-scope names `instance`, `gs_catalog`,
    `ogc_server_settings`, `logger`, `sleep` and `GeoNodeException` —
    presumably a closure inside a signal handler; confirm against caller.

    :param values: dict of layer properties (built here on first success)
    :param tries: number of attempts made so far by the caller
    :returns: ``(values, gs_resource)`` — gs_resource is None on failure
    :raises GeoNodeException: once `tries` reaches MAX_RETRIES
    """
    _max_tries = getattr(ogc_server_settings, "MAX_RETRIES", 5)
    # First lookup by name/store/workspace, then fall back to the
    # fully-qualified alternate name.
    gs_resource = gs_catalog.get_resource(name=instance.name,
                                          store=instance.store,
                                          workspace=instance.workspace)
    if not gs_resource:
        gs_resource = gs_catalog.get_resource(name=instance.alternate)
    if gs_resource:
        # Push current metadata onto the (not yet saved) remote resource.
        gs_resource.title = instance.title or ""
        gs_resource.abstract = instance.abstract or ""
        gs_resource.name = instance.name or ""
        if not values:
            values = dict(store=gs_resource.store.name,
                          storeType=gs_resource.store.resource_type,
                          alternate=gs_resource.store.workspace.name + ':' + gs_resource.name,
                          title=gs_resource.title or gs_resource.store.name,
                          abstract=gs_resource.abstract or '',
                          owner=instance.owner)
    else:
        msg = "There isn't a geoserver resource for this layer: %s" % instance.name
        logger.exception(msg)
        if tries >= _max_tries:
            raise GeoNodeException(msg)
        gs_resource = None
        # Back off before the caller retries.
        sleep(3.00)
    return (values, gs_resource)
def set_metadata(xml): """Generate dict of model properties based on XML metadata""" # check if document is XML try: exml = etree.fromstring(xml) except Exception, err: raise GeoNodeException('Uploaded XML document is not XML: %s' % str(err))
def wcs_links(wcs_url, identifier, bbox=None, crs=None, height=None,
              width=None, exclude_formats=True, quiet=True, version='1.0.0'):
    """Build download links for a coverage exposed through WCS.

    :param wcs_url: base URL of the WCS endpoint
    :param identifier: coverage identifier to look up in the capabilities
    :param bbox, crs, height, width: forwarded into the getCoverage URL
    :param exclude_formats: skip formats listed in DEFAULT_EXCLUDE_FORMATS
    :param quiet: when False, raise instead of (only) logging problems
    :returns: list of ``(ext, name, mime, url)`` tuples (format used for all
        three of ext/name/mime — see FIXME below)
    """
    # FIXME(Ariel): This would only work for layers marked for public view,
    # what about the ones with permissions enabled?
    try:
        wcs = WebCoverageService(wcs_url, version=version)
    except ServiceException as err:
        err_msg = 'WCS server returned exception: %s' % err
        if not quiet:
            logger.warn(err_msg)
        raise GeoNodeException(err_msg)
    msg = ('Could not create WCS links for layer "%s",'
           ' it was not in the WCS catalog,'
           ' the available layers were: "%s"' % (
               identifier, wcs.contents.keys()))
    output = []
    formats = []
    if identifier not in wcs.contents:
        if not quiet:
            raise RuntimeError(msg)
        else:
            logger.warn(msg)
    else:
        coverage = wcs.contents[identifier]
        formats = coverage.supportedFormats
        for f in formats:
            if exclude_formats and f in DEFAULT_EXCLUDE_FORMATS:
                continue
            # roundabout, hacky way to accomplish getting a getCoverage url.
            # nonetheless, it's better than having to load an entire large
            # coverage just to generate a URL
            fakeUrl = wcs.getCoverage(identifier=coverage.id,
                                      format=f,
                                      bbox=bbox,
                                      crs=crs,
                                      height=20,
                                      width=20).geturl()
            # Swap the placeholder 20x20 size for the requested dimensions.
            url = sub(r'(height=)20(\&width=)20',
                      r'\g<1>{0}\g<2>{1}',
                      fakeUrl).format(height, width)
            # The outputs are: (ext, name, mime, url)
            # FIXME(Ariel): Find a way to get proper ext, name and mime
            # using format as a default for all is not good enough
            output.append((f, f, f, url))
    return output
def get_default_user():
    """Return the default user: the earliest-created superuser.

    :returns: the superuser with the lowest id
    :raises GeoNodeException: when no superuser account exists yet
    """
    # .first() issues a single LIMIT 1 query; the original .count() > 0
    # followed by indexing performed two queries.
    superuser = User.objects.filter(is_superuser=True).order_by('id').first()
    if superuser is not None:
        return superuser
    raise GeoNodeException('You must have an admin account configured '
                           'before importing data. '
                           'Try: django-admin.py createsuperuser')
def update_from_session(self, upload_session, resource: ResourceBase = None):
    """Synchronize this Upload record from an import session.

    Pickles the session into ``self.session``, links the target resource,
    pushes any spatial files into storage, cleans the temp upload dir and
    advances the state machine before saving.

    :param upload_session: the upload/import session to mirror
    :param resource: optional resource to attach (ResourceBase or an object
        exposing ``resourcebase_ptr``)
    :raises GeoNodeException: when `resource` is neither
    """
    # Persist the whole session object as base64-encoded pickle.
    self.session = base64.encodebytes(
        pickle.dumps(upload_session)).decode('UTF-8')
    self.name = upload_session.name
    self.user = upload_session.user
    self.date = now()
    if not self.upload_dir:
        self.upload_dir = os.path.dirname(upload_session.base_file)
    if resource and not self.resource:
        # Accept either a ResourceBase or a subclass exposing its pointer.
        if not isinstance(resource, ResourceBase) and hasattr(
                resource, 'resourcebase_ptr'):
            self.resource = resource.resourcebase_ptr
        elif not isinstance(resource, ResourceBase):
            # NOTE(review): message contains the typo "plase" — left as-is
            # because it is a runtime string.
            raise GeoNodeException(
                "Invalid resource uploaded, plase select one of the available"
            )
        else:
            self.resource = resource
    if upload_session.base_file and self.resource and self.resource.title:
        uploaded_files = upload_session.base_file[0]
        aux_files = uploaded_files.auxillary_files
        sld_files = uploaded_files.sld_files
        xml_files = uploaded_files.xml_files
        if self.store_spatial_files and self.resource and not self.resource.files:
            files_to_upload = aux_files + sld_files + xml_files + [
                uploaded_files.base_file
            ]
            if len(files_to_upload):
                ResourceBase.objects.upload_files(
                    resource_id=self.resource.id, files=files_to_upload)
                self.resource.refresh_from_db()
            # Now we delete the files from local file system
            # only if it does not match with the default temporary path
            if os.path.exists(self.upload_dir):
                if settings.STATIC_ROOT != os.path.dirname(
                        os.path.abspath(self.upload_dir)):
                    shutil.rmtree(self.upload_dir, ignore_errors=True)
    if "COMPLETE" == self.state:
        self.complete = True
    # Advance the state machine: a processed resource keeps running,
    # otherwise mirror the import session's state.
    if self.resource and self.resource.processed:
        self.state = enumerations.STATE_RUNNING
    elif self.state in (enumerations.STATE_READY, enumerations.STATE_PENDING):
        self.state = upload_session.import_session.state
    self.save()
def verify(self):
    """Makes sure the state of the layer is consistent in GeoServer and Catalogue.
    """
    # Check the layer is in the wms get capabilities record
    # FIXME: Implement caching of capabilities record site wide
    _local_wms = get_wms()
    if _local_wms.contents.get(self.typename) is None:
        raise GeoNodeException(
            "WMS Record missing for layer [%s]" % self.typename.encode('utf-8'))
def get_or_create_datastore(cat, workspace=None, charset="UTF-8"): """ Get a PostGIS database store or create it in GeoServer if does not exist. """ # TODO refactor this and geoserver.helpers._create_db_featurestore # dsname = ogc_server_settings.DATASTORE dsname = ogc_server_settings.datastore_db['NAME'] if not ogc_server_settings.DATASTORE: msg = ( "To use the createlayer application you must set ogc_server_settings.datastore_db['ENGINE']" " to 'django.contrib.gis.db.backends.postgis") logger.error(msg) raise GeoNodeException(msg) try: ds = cat.get_store(dsname, workspace=workspace) except FailedRequestError: ds = cat.create_datastore(dsname, workspace=workspace) db = ogc_server_settings.datastore_db ds.connection_parameters.update({ 'validate connections': 'true', 'max connections': '10', 'min connections': '1', 'fetch size': '1000', 'host': db['HOST'], 'port': db['PORT'] if isinstance(db['PORT'], string_types) else str(db['PORT']) or '5432', 'database': db['NAME'], 'user': db['USER'], 'passwd': db['PASSWORD'], 'dbtype': 'postgis' }) cat.save(ds) # we need to reload the ds as gsconfig-1.0.6 apparently does not populate ds.type # using create_datastore (TODO fix this in gsconfig) ds = cat.get_store(dsname, workspace=workspace) return ds
def layer_type(filename):
    """Finds out if a filename is a Feature or a Vector
       returns a gsconfig resource_type string
       that can be either 'featureType' or 'coverage'
    """
    extension = os.path.splitext(filename)[1]
    lowered = extension.lower()
    # Shapefiles map to vector feature types, GeoTIFF variants to coverages.
    if lowered == '.shp':
        return FeatureType.resource_type
    if lowered in ('.tif', '.tiff', '.geotiff', '.geotif'):
        return Coverage.resource_type
    raise GeoNodeException(
        'Saving of extension [%s] is not implemented' % extension)
def create_dataset(name, title, owner_name, geometry_type, attributes=None):
    """
    Create an empty layer in GeoServer and register it in GeoNode.
    """
    # Validate the geometry type up front, before touching GeoServer.
    if geometry_type not in ('Point', 'LineString', 'Polygon'):
        msg = 'geometry must be Point, LineString or Polygon'
        logger.error(msg)
        raise GeoNodeException(msg)

    name = get_valid_name(name)

    # First create the backing layer in GeoServer, then register the
    # resulting workspace/datastore pair with GeoNode.
    logger.debug('Creating the layer in GeoServer')
    gs_workspace, gs_datastore = create_gs_dataset(
        name, title, geometry_type, attributes)

    logger.debug('Creating the layer in GeoNode')
    return create_gn_dataset(
        gs_workspace, gs_datastore, name, title, owner_name)
def get_valid_layer_name(layer, overwrite):
    """Checks if the layer is a string and fetches it from the database.
    """
    # Accept either a Layer model instance or a plain name string.
    if isinstance(layer, Layer):
        layer_name = layer.name
    elif isinstance(layer, basestring):
        layer_name = layer
    else:
        raise GeoNodeException(
            'You must pass either a filename or a GeoNode layer object')

    # When overwriting we keep the existing name; otherwise derive a
    # name that is not yet taken.
    return layer_name if overwrite else get_valid_name(layer_name)
def get_wcs_record(instance, retry=True):
    """Look up the WCS capabilities entry for a layer, retrying once."""
    wcs = WebCoverageService(ogc_server_settings.public_url + 'wcs', '1.0.0')
    key = instance.workspace + ':' + instance.name
    if key in wcs.contents:
        return wcs.contents[key]

    msg = ("Layer '%s' was not found in WCS service at %s." %
           (key, ogc_server_settings.public_url))
    if not retry:
        raise GeoNodeException(msg)
    # Give the service a moment to catch up, then try exactly once more.
    logger.debug(msg + ' Waiting a couple of seconds before trying again.')
    time.sleep(2)
    return get_wcs_record(instance, retry=False)
def wcs_links(wcs_url, identifier, exclude_formats=True, quiet=True,
              version='1.0.0'):
    """Build WCS download links for a coverage.

    NOTE(review): Python 2 syntax (``except X, err``). Only the initial
    service-connection step is visible here.
    """
    #FIXME(Ariel): This would only work for layers marked for public view,
    # what about the ones with permissions enabled?
    try:
        wcs = WebCoverageService(wcs_url, version=version)
    except ServiceException, err:
        err_msg = 'WCS server returned exception: %s' % err
        if not quiet:
            logger.warn(err_msg)
        raise GeoNodeException(err_msg)
def set_workflow_perms(self, approved=False, published=False):
    """
                  |  N/PUBLISHED   | PUBLISHED
      --------------------------------------------
        N/APPROVED|     GM/OWR     |     -
          APPROVED|   registerd    |    all
      --------------------------------------------

    Grant VIEW permissions according to the approved/published workflow
    state, then mirror them into GeoFence for layers.

    :raises GeoNodeException: wrapping any failure inside the transaction
    """
    try:
        with transaction.atomic():
            anonymous_group = Group.objects.get(name='anonymous')
            members_group = None
            if approved:
                if groups_settings.AUTO_ASSIGN_REGISTERED_MEMBERS_TO_REGISTERED_MEMBERS_GROUP_NAME:
                    # Approved-but-not-public: visible to registered members.
                    _members_group_name = groups_settings.REGISTERED_MEMBERS_GROUP_NAME
                    members_group = Group.objects.get(
                        name=_members_group_name)
                    for perm in VIEW_PERMISSIONS:
                        assign_perm(perm, members_group,
                                    self.get_self_resource())
                else:
                    for perm in VIEW_PERMISSIONS:
                        assign_perm(perm, anonymous_group,
                                    self.get_self_resource())
            if published:
                # Published resources are visible to everyone.
                for perm in VIEW_PERMISSIONS:
                    assign_perm(perm, anonymous_group,
                                self.get_self_resource())

            # Set the GeoFence Rules (user = None)
            if approved or published:
                if self.polymorphic_ctype.name == 'layer':
                    if settings.OGC_SERVER['default'].get(
                            "GEOFENCE_SECURITY_ENABLED", False):
                        if not getattr(settings, 'DELAYED_SECURITY_SIGNALS',
                                       False):
                            if approved and members_group:
                                sync_geofence_with_guardian(
                                    self.layer, VIEW_PERMISSIONS,
                                    group=members_group)
                            sync_geofence_with_guardian(
                                self.layer, VIEW_PERMISSIONS)
                        else:
                            # Security sync deferred: mark for later.
                            self.set_dirty_state()
    except Exception as e:
        # NOTE(review): re-raising without `from e` loses the original
        # traceback chain context.
        raise GeoNodeException(e)
def create_geonode_service(self, owner, parent=None):
    """Create a new geonode.service.models.Service instance

    :arg owner: The user who will own the service instance
    :type owner: geonode.people.models.Profile

    Also creates and boots the companion Harvester for the WMS endpoint.
    The encode/decode round-trips strip any non-UTF-8 bytes from the
    remote-supplied strings.
    """
    cleaned_url, service, version, request = WmsServiceHandler.get_cleaned_url_params(
        self.url)
    with transaction.atomic():
        instance = models.Service.objects.create(
            uuid=str(uuid4()),
            base_url=f"{cleaned_url.scheme}://{cleaned_url.netloc}{cleaned_url.path}"
            .encode("utf-8", "ignore").decode('utf-8'),
            extra_queryparams=cleaned_url.query,
            type=self.service_type,
            method=self.indexing_method,
            owner=owner,
            metadata_only=True,
            version=str(self.parsed_service.identification.version).encode(
                "utf-8", "ignore").decode('utf-8'),
            name=self.name,
            title=str(self.parsed_service.identification.title).encode(
                "utf-8", "ignore").decode('utf-8') or self.name,
            abstract=str(self.parsed_service.identification.abstract).encode(
                "utf-8", "ignore").decode('utf-8') or _("Not provided"),
            operations=OgcWmsHarvester.get_wms_operations(
                self.parsed_service.url, version=version))
        service_harvester = Harvester.objects.create(
            name=self.name,
            default_owner=owner,
            scheduling_enabled=False,
            remote_url=instance.service_url,
            harvester_type=enumerations.HARVESTER_TYPES[self.service_type],
            harvester_type_specific_configuration=self.
            get_harvester_configuration_options())
        if service_harvester.update_availability():
            # Remote endpoint reachable: enumerate its resources.
            service_harvester.initiate_update_harvestable_resources()
        else:
            # Best-effort: log the unreachable endpoint, keep the service.
            logger.exception(
                GeoNodeException("Could not reach remote endpoint."))
        instance.harvester = service_harvester
        self.geonode_service_id = instance.id
        return instance
def get_valid_user(user=None):
    """Gets the default user or creates it if it does not exist
    """
    if user is None:
        resolved = get_default_user()
    elif isinstance(user, str):
        # A username string: look the account up.
        resolved = get_user_model().objects.get(username=user)
    else:
        if user == user.get_anonymous():
            raise GeoNodeException('The user uploading files must not '
                                   'be anonymous')
        resolved = user

    # FIXME: Pass a user in the unit tests that is not yet saved ;)
    assert isinstance(resolved, get_user_model())

    return resolved
def get_valid_layer_name(layer=None, overwrite=False):
    """Checks if the layer is a string and fetches it from the database.
    """
    # Resolve the layer argument to a bare name string.
    if isinstance(layer, Layer):
        layer_name = layer.name
    elif isinstance(layer, basestring):
        layer_name = layer
    else:
        raise GeoNodeException(
            'You must pass either a filename or a GeoNode layer object')

    if not overwrite:
        return get_valid_name(layer_name)

    #FIXME: What happens if there is a store in GeoServer with that name
    # that is not registered in GeoNode?
    return layer_name
def create_geonode_service(self, owner, parent=None):
    """Create a new geonode.service.models.Service instance

    :arg owner: The user who will own the service instance
    :type owner: geonode.people.models.Profile

    ArcGIS REST variant: reads version/description from the service's
    raw JSON structure and boots the companion Harvester.
    """
    with transaction.atomic():
        instance = models.Service.objects.create(
            uuid=str(uuid4()),
            base_url=self.url,
            type=self.service_type,
            method=self.indexing_method,
            owner=owner,
            metadata_only=True,
            # Encode/decode round-trip strips non-UTF-8 bytes from the
            # remote-supplied values.
            version=str(
                self.parsed_service._json_struct.get(
                    "currentVersion",
                    0.0)).encode("utf-8", "ignore").decode('utf-8'),
            name=self.name,
            title=self.title,
            abstract=str(
                self.parsed_service._json_struct.get("serviceDescription")
            ).encode("utf-8", "ignore").decode('utf-8')
            or _("Not provided"))
        service_harvester = Harvester.objects.create(
            name=self.name,
            default_owner=owner,
            scheduling_enabled=False,
            remote_url=instance.service_url,
            harvester_type=enumerations.HARVESTER_TYPES[self.service_type],
            harvester_type_specific_configuration=self.
            get_harvester_configuration_options())
        if service_harvester.update_availability():
            # Remote endpoint reachable: enumerate its resources.
            service_harvester.initiate_update_harvestable_resources()
        else:
            # Best-effort: log the unreachable endpoint, keep the service.
            logger.exception(
                GeoNodeException("Could not reach remote endpoint."))
        instance.harvester = service_harvester
        self.geonode_service_id = instance.id
        return instance
def geoserver_post_save_local(instance, *args, **kwargs):
    """Send information to geoserver.

       The attributes sent include:

        * Title
        * Abstract
        * Name
        * Keywords
        * Metadata Links,
        * Point of Contact name and url

    NOTE(review): signal handler; mutates both the GeoServer resource and
    the GeoNode Layer/ResourceBase rows.
    """
    instance.refresh_from_db()

    # Don't run this signal if is a Layer from a remote service
    if getattr(instance, "remote_service", None) is not None:
        return

    # Don't run this signal handler if it is a tile layer or a remote store (Service)
    # Currently only gpkg files containing tiles will have this type & will be served via MapProxy.
    if hasattr(instance, 'storeType') and getattr(
            instance, 'storeType') in ['tileStore', 'remoteStore']:
        return instance

    gs_resource = None
    values = None
    _tries = 0
    _max_tries = getattr(ogc_server_settings, "MAX_RETRIES", 2)

    # If the store in None then it's a new instance from an upload,
    # only in this case run the geoserver_upload method
    if not instance.store or getattr(instance, 'overwrite', False):
        base_file, info = instance.get_base_file()

        # There is no need to process it if there is not file.
        if base_file is None:
            return
        gs_name, workspace, values, gs_resource = geoserver_upload(
            instance,
            base_file.file.path,
            instance.owner,
            instance.name,
            overwrite=True,
            title=instance.title,
            abstract=instance.abstract,
            # keywords=instance.keywords,
            charset=instance.charset)

    def fetch_gs_resource(values, tries):
        # Try increasingly loose lookups: name+store+workspace, then the
        # alternate name, then alternate-or-typename.
        try:
            gs_resource = gs_catalog.get_resource(
                name=instance.name,
                store=instance.store,
                workspace=instance.workspace)
        except Exception:
            try:
                gs_resource = gs_catalog.get_resource(
                    name=instance.alternate,
                    store=instance.store,
                    workspace=instance.workspace)
            except Exception:
                try:
                    gs_resource = gs_catalog.get_resource(
                        name=instance.alternate or instance.typename)
                except Exception:
                    gs_resource = None
        if gs_resource:
            gs_resource.title = instance.title or ""
            gs_resource.abstract = instance.abstract or ""
            gs_resource.name = instance.name or ""
            if not values:
                values = dict(store=gs_resource.store.name,
                              storeType=gs_resource.store.resource_type,
                              alternate=gs_resource.store.workspace.name + ':' + gs_resource.name,
                              title=gs_resource.title or gs_resource.store.name,
                              abstract=gs_resource.abstract or '',
                              owner=instance.owner)
        else:
            msg = "There isn't a geoserver resource for this layer: %s" % instance.name
            logger.exception(msg)
            if tries >= _max_tries:
                # raise GeoNodeException(msg)
                return (values, None)
            gs_resource = None
            # Back off before the enclosing while-loop retries.
            sleep(3.00)
        return (values, gs_resource)

    while not gs_resource and _tries < _max_tries:
        values, gs_resource = fetch_gs_resource(values, _tries)
        _tries += 1

    # Get metadata links
    metadata_links = []
    for link in instance.link_set.metadata():
        metadata_links.append((link.mime, link.name, link.url))

    if gs_resource:
        logger.debug(
            "Found geoserver resource for this layer: %s" % instance.name)
        gs_resource.metadata_links = metadata_links

        # gs_resource should only be called if
        # ogc_server_settings.BACKEND_WRITE_ENABLED == True
        if getattr(ogc_server_settings, "BACKEND_WRITE_ENABLED", True):
            try:
                gs_catalog.save(gs_resource)
            except geoserver.catalog.FailedRequestError as e:
                msg = ('Error while trying to save resource named %s in GeoServer, '
                       'try to use: "%s"' % (gs_resource, str(e)))
                e.args = (msg,)
                logger.exception(e)

        # Update Attribution link
        if instance.poc:
            # gsconfig now utilizes an attribution dictionary
            gs_resource.attribution = {'title': str(instance.poc),
                                       'width': None,
                                       'height': None,
                                       'href': None,
                                       'url': None,
                                       'type': None}
            profile = get_user_model().objects.get(
                username=instance.poc.username)
            site_url = settings.SITEURL.rstrip('/') if settings.SITEURL.startswith('http') else settings.SITEURL
            gs_resource.attribution_link = site_url + profile.get_absolute_url()
            # gs_resource should only be called if
            # ogc_server_settings.BACKEND_WRITE_ENABLED == True
            if getattr(ogc_server_settings, "BACKEND_WRITE_ENABLED", True):
                try:
                    gs_catalog.save(gs_resource)
                except geoserver.catalog.FailedRequestError as e:
                    msg = ('Error while trying to save layer named %s in GeoServer, '
                           'try to use: "%s"' % (gs_resource, str(e)))
                    e.args = (msg,)
                    logger.exception(e)
    else:
        msg = "There isn't a geoserver resource for this layer: %s" % instance.name
        logger.warn(msg)

    # Narrow a ResourceBase down to its concrete Layer, if any.
    if isinstance(instance, ResourceBase):
        if hasattr(instance, 'layer'):
            instance = instance.layer
        else:
            return

    # Save layer attributes
    set_attributes_from_geoserver(instance)

    # Save layer styles
    set_styles(instance, gs_catalog)

    # Invalidate GeoWebCache for the updated resource
    try:
        _stylefilterparams_geowebcache_layer(instance.alternate)
        _invalidate_geowebcache_layer(instance.alternate)
    except Exception:
        pass

    if instance.storeType == "remoteStore":
        return

    if gs_resource:
        """Get information from geoserver.

           The attributes retrieved include:

           * Bounding Box
           * SRID
           * Download links (WMS, WCS or WFS and KML)
           * Styles (SLD)
        """
        try:
            instance.abstract = gs_resource.abstract or ''
        except Exception as e:
            logger.exception(e)
            instance.abstract = ''
        instance.workspace = gs_resource.store.workspace.name
        instance.store = gs_resource.store.name

        try:
            bbox = gs_resource.native_bbox

            # Set bounding box values
            instance.bbox_x0 = bbox[0]
            instance.bbox_x1 = bbox[1]
            instance.bbox_y0 = bbox[2]
            instance.bbox_y1 = bbox[3]
            instance.srid = bbox[4]
        except Exception as e:
            logger.exception(e)

        if instance.srid:
            instance.srid_url = "http://www.spatialreference.org/ref/" + \
                instance.srid.replace(':', '/').lower() + "/"
        elif instance.bbox_x0 and instance.bbox_x1 and instance.bbox_y0 and instance.bbox_y1:
            # Guessing 'EPSG:4326' by default
            instance.srid = 'EPSG:4326'
        else:
            raise GeoNodeException("Invalid Projection. Layer is missing CRS!")

        # Iterate over values from geoserver.
        if gs_resource:
            for key in ['alternate', 'store', 'storeType']:
                # attr_name = key if 'typename' not in key else 'alternate'
                # print attr_name
                setattr(instance, key, values[key])

        if gs_resource:
            try:
                if settings.RESOURCE_PUBLISHING:
                    if instance.is_published != gs_resource.advertised:
                        if getattr(ogc_server_settings,
                                   "BACKEND_WRITE_ENABLED", True):
                            gs_resource.advertised = 'true'
                            gs_catalog.save(gs_resource)

                if not settings.FREETEXT_KEYWORDS_READONLY:
                    # AF: Warning - this won't allow people to have empty keywords on GeoNode
                    if len(instance.keyword_list()) == 0 and gs_resource.keywords:
                        for keyword in gs_resource.keywords:
                            if keyword not in instance.keyword_list():
                                instance.keywords.add(keyword)

                if any(instance.keyword_list()):
                    keywords = instance.keyword_list()
                    gs_resource.keywords = [kw for kw in list(set(keywords))]

                    # gs_resource should only be called if
                    # ogc_server_settings.BACKEND_WRITE_ENABLED == True
                    if getattr(ogc_server_settings,
                               "BACKEND_WRITE_ENABLED", True):
                        gs_catalog.save(gs_resource)
            except Exception as e:
                msg = ('Error while trying to save resource named %s in GeoServer, '
                       'try to use: "%s"' % (gs_resource, str(e)))
                e.args = (msg,)
                logger.exception(e)

    to_update = {
        'title': instance.title or instance.name,
        'abstract': instance.abstract or "",
        'alternate': instance.alternate,
        'bbox_x0': instance.bbox_x0,
        'bbox_x1': instance.bbox_x1,
        'bbox_y0': instance.bbox_y0,
        'bbox_y1': instance.bbox_y1,
        'srid': instance.srid
    }

    # Update ResourceBase
    resources = ResourceBase.objects.filter(id=instance.resourcebase_ptr.id)
    resources.update(**to_update)

    # to_update['name'] = instance.name,
    to_update['workspace'] = instance.workspace
    to_update['store'] = instance.store
    to_update['storeType'] = instance.storeType
    to_update['typename'] = instance.alternate

    # Save all the modified information in the instance without triggering signals.
    Layer.objects.filter(id=instance.id).update(**to_update)

    # Refresh from DB
    instance.refresh_from_db()

    # Updating the Catalogue
    catalogue_post_save(instance=instance, sender=instance.__class__)

    # store the resource to avoid another geoserver call in the post_save
    if gs_resource:
        instance.gs_resource = gs_resource

    # some thumbnail generators will update thumbnail_url. If so, don't
    # immediately re-generate the thumbnail here. use layer#save(update_fields=['thumbnail_url'])
    if gs_resource:
        logger.debug("... Creating Default Resource Links for Layer [%s]" % (
            instance.alternate))
        set_resource_default_links(instance, instance, prune=True)

        if 'update_fields' in kwargs and kwargs['update_fields'] is not None and \
                'thumbnail_url' in kwargs['update_fields']:
            logger.debug("... Creating Thumbnail for Layer [%s]" % (
                instance.alternate))
            create_gs_thumbnail(instance, overwrite=True)

    # Updating HAYSTACK Indexes if needed
    if settings.HAYSTACK_SEARCH:
        from django.core.management import call_command
        call_command('update_index')
def get_files(filename):
    """Converts the data to Shapefiles or Geotiffs and returns
       a dictionary with all the required files.

       :param filename: absolute path to the base spatial file
                        (e.g. ``foo.shp`` or ``foo.tif``).
       :return: dict mapping role names (``shp``, ``dbf``, ``shx``,
                ``prj``, ``sld``, ``xml``, or a coverage extension such
                as ``tif``) to the matching file paths found on disk.
       :raises GeoNodeException: if the filename contains non-ASCII
                characters, the file does not exist, a required
                Shapefile sidecar is missing, or several candidate
                sidecars differ only by case.
    """
    files = {}

    # Verify if the filename is in ascii format.
    try:
        filename.decode('ascii')
    except (UnicodeDecodeError, UnicodeEncodeError):
        # FIX: a non-ASCII *byte* string raises UnicodeDecodeError here
        # (unicode input raises UnicodeEncodeError); the previous handler
        # only caught the latter, letting a raw codec error escape instead
        # of the friendly message below.
        msg = "Please use only characters from the english alphabet for the filename. '%s' is not yet supported." \
            % os.path.basename(filename).encode('UTF-8')
        raise GeoNodeException(msg)

    # Make sure the file exists.
    if not os.path.exists(filename):
        msg = ('Could not open %s. Make sure you are using a '
               'valid file' % filename)
        logger.warn(msg)
        raise GeoNodeException(msg)

    base_name, extension = os.path.splitext(filename)

    # Replace special characters in filenames - []{}()
    # (escape glob metacharacters so they match literally below)
    glob_name = re.sub(r'([\[\]\(\)\{\}])', r'[\g<1>]', base_name)

    if extension.lower() == '.shp':
        # A Shapefile is only usable together with its .dbf/.shx sidecars;
        # the character-class patterns make the extension match
        # case-insensitively on case-sensitive filesystems.
        required_extensions = dict(shp='.[sS][hH][pP]',
                                   dbf='.[dD][bB][fF]',
                                   shx='.[sS][hH][xX]')
        for ext, pattern in required_extensions.iteritems():
            matches = glob.glob(glob_name + pattern)
            if len(matches) == 0:
                msg = ('Expected helper file %s does not exist; a Shapefile '
                       'requires helper files with the following extensions: '
                       '%s') % (base_name + "." + ext,
                                required_extensions.keys())
                raise GeoNodeException(msg)
            elif len(matches) > 1:
                msg = ('Multiple helper files for %s exist; they need to be '
                       'distinct by spelling and not just case.') % filename
                raise GeoNodeException(msg)
            else:
                files[ext] = matches[0]

        # .prj (projection) is optional for a Shapefile.
        matches = glob.glob(glob_name + ".[pP][rR][jJ]")
        if len(matches) == 1:
            files['prj'] = matches[0]
        elif len(matches) > 1:
            msg = ('Multiple helper files for %s exist; they need to be '
                   'distinct by spelling and not just case.') % filename
            raise GeoNodeException(msg)

    elif extension.lower() in cov_exts:
        # Coverage (raster) upload: the base file itself is the payload,
        # keyed by its extension without the leading dot.
        files[extension.lower().replace('.', '')] = filename

    # Optional SLD style file alongside the data file.
    matches = glob.glob(glob_name + ".[sS][lL][dD]")
    if len(matches) == 1:
        files['sld'] = matches[0]
    elif len(matches) > 1:
        msg = ('Multiple style files for %s exist; they need to be '
               'distinct by spelling and not just case.') % filename
        raise GeoNodeException(msg)

    matches = glob.glob(base_name + ".[xX][mM][lL]")

    # shapefile XML metadata is sometimes named base_name.shp.xml
    # try looking for filename.xml if base_name.xml does not exist
    if len(matches) == 0:
        matches = glob.glob(filename + ".[xX][mM][lL]")

    if len(matches) == 1:
        files['xml'] = matches[0]
    elif len(matches) > 1:
        msg = ('Multiple XML files for %s exist; they need to be '
               'distinct by spelling and not just case.') % filename
        raise GeoNodeException(msg)

    return files
def upload(incoming, user=None, overwrite=False, keywords=(), skip=True, ignore_errors=True, verbosity=1, console=None): """Upload a directory of spatial data files to GeoNode This function also verifies that each layer is in GeoServer. Supported extensions are: .shp, .tif, and .zip (of a shapefile). It catches GeoNodeExceptions and gives a report per file """ if verbosity > 1: print >> console, "Verifying that GeoNode is running ..." if console is None: console = open(os.devnull, 'w') potential_files = [] if os.path.isfile(incoming): ___, short_filename = os.path.split(incoming) basename, extension = os.path.splitext(short_filename) filename = incoming if extension in ['.tif', '.shp', '.zip']: potential_files.append((basename, filename)) elif not os.path.isdir(incoming): msg = ('Please pass a filename or a directory name as the "incoming" ' 'parameter, instead of %s: %s' % (incoming, type(incoming))) logger.exception(msg) raise GeoNodeException(msg) else: datadir = incoming for root, dirs, files in os.walk(datadir): for short_filename in files: basename, extension = os.path.splitext(short_filename) filename = os.path.join(root, short_filename) if extension in ['.tif', '.shp', '.zip']: potential_files.append((basename, filename)) # After gathering the list of potential files, # let's process them one by one. number = len(potential_files) if verbosity > 1: msg = "Found %d potential layers." 
% number print >> console, msg output = [] for i, file_pair in enumerate(potential_files): basename, filename = file_pair existing_layers = Layer.objects.filter(name=basename) if existing_layers.count() > 0: existed = True else: existed = False if existed and skip: save_it = False status = 'skipped' layer = existing_layers[0] if verbosity > 0: msg = ('Stopping process because ' '--overwrite was not set ' 'and a layer with this name already exists.') print >> sys.stderr, msg else: save_it = True if save_it: try: layer = file_upload( filename, user=user, overwrite=overwrite, keywords=keywords, ) if not existed: status = 'created' else: status = 'updated' except Exception as e: if ignore_errors: status = 'failed' exception_type, error, traceback = sys.exc_info() else: if verbosity > 0: msg = ('Stopping process because ' '--ignore-errors was not set ' 'and an error was found.') print >> sys.stderr, msg msg = 'Failed to process %s' % filename raise Exception(msg, e), None, sys.exc_info()[2] msg = "[%s] Layer for '%s' (%d/%d)" % (status, filename, i + 1, number) info = {'file': filename, 'status': status} if status == 'failed': info['traceback'] = traceback info['exception_type'] = exception_type info['error'] = error else: info['name'] = layer.name output.append(info) if verbosity > 0: print >> console, msg return output
def geoserver_post_save_local(instance, *args, **kwargs):
    """Send information to geoserver.

       The attributes sent include:

        * Title
        * Abstract
        * Name
        * Keywords
        * Metadata Links,
        * Point of Contact name and url

       Also pulls the authoritative bounding box / SRID back from
       GeoServer, persists them on the Layer (without re-triggering
       signals) and (re)creates all the download/OGC ``Link`` rows.
       NOTE(review): runs as a post-save handler, so DB writes here must
       avoid ``instance.save()`` — hence the queryset ``.update()`` calls.
    """
    # Don't run this signal if is a Layer from a remote service
    if getattr(instance, "remote_service", None) is not None:
        return

    # Don't run this signal handler if it is a tile layer or a remote store (Service)
    # Currently only gpkg files containing tiles will have this type & will be served via MapProxy.
    if hasattr(instance, 'storeType') and getattr(
            instance, 'storeType') in ['tileStore', 'remoteStore']:
        return instance

    gs_resource = None
    values = None

    # If the store in None then it's a new instance from an upload,
    # only in this case run the geoserver_upload method
    if not instance.store or getattr(instance, 'overwrite', False):
        base_file, info = instance.get_base_file()

        # There is no need to process it if there is not file.
        if base_file is None:
            return
        gs_name, workspace, values, gs_resource = geoserver_upload(
            instance,
            base_file.file.path,
            instance.owner,
            instance.name,
            overwrite=True,
            title=instance.title,
            abstract=instance.abstract,
            # keywords=instance.keywords,
            charset=instance.charset)

    # For pre-existing layers, look the resource up in the catalog —
    # first by (name, store, workspace), then by the qualified alternate.
    if not gs_resource:
        gs_resource = gs_catalog.get_resource(
            instance.name,
            store=instance.store,
            workspace=instance.workspace)
        if not gs_resource:
            gs_resource = gs_catalog.get_resource(instance.alternate)

    if gs_resource:
        # Push the local metadata onto the GeoServer resource object.
        gs_resource.title = instance.title or ""
        gs_resource.abstract = instance.abstract or ""
        gs_resource.name = instance.name or ""

        # ``values`` is only pre-populated by geoserver_upload above;
        # otherwise derive it from the catalog resource.
        if not values:
            values = dict(store=gs_resource.store.name,
                          storeType=gs_resource.store.resource_type,
                          alternate=gs_resource.store.workspace.name + ':' + gs_resource.name,
                          title=gs_resource.title or gs_resource.store.name,
                          abstract=gs_resource.abstract or '',
                          owner=instance.owner)
    else:
        # Without a backing GeoServer resource nothing below can work.
        msg = "There isn't a geoserver resource for this layer: %s" % instance.name
        logger.exception(msg)
        raise Exception(msg)

    # Get metadata links
    metadata_links = []
    for link in instance.link_set.metadata():
        metadata_links.append((link.mime, link.name, link.url))

    gs_resource.metadata_links = metadata_links
    # gs_resource should only be called if
    # ogc_server_settings.BACKEND_WRITE_ENABLED == True
    if getattr(ogc_server_settings, "BACKEND_WRITE_ENABLED", True):
        try:
            gs_catalog.save(gs_resource)
        except geoserver.catalog.FailedRequestError as e:
            # Best-effort: log and continue rather than abort the save.
            msg = ('Error while trying to save resource named %s in GeoServer, '
                   'try to use: "%s"' % (gs_resource, str(e)))
            e.args = (msg,)
            logger.exception(e)

    gs_layer = gs_catalog.get_layer(instance.name)
    if not gs_layer:
        gs_layer = gs_catalog.get_layer(instance.alternate)

    if gs_layer and instance.poc:
        # gsconfig now utilizes an attribution dictionary
        gs_layer.attribution = {'title': str(instance.poc),
                                'width': None,
                                'height': None,
                                'href': None,
                                'url': None,
                                'type': None}
        profile = Profile.objects.get(username=instance.poc.username)
        # SITEURL is assumed to end with '/'; strip it before appending
        # the profile path.
        gs_layer.attribution_link = settings.SITEURL[:-1] + profile.get_absolute_url()
        # gs_layer should only be called if
        # ogc_server_settings.BACKEND_WRITE_ENABLED == True
        if getattr(ogc_server_settings, "BACKEND_WRITE_ENABLED", True):
            try:
                gs_catalog.save(gs_layer)
            except geoserver.catalog.FailedRequestError as e:
                msg = ('Error while trying to save layer named %s in GeoServer, '
                       'try to use: "%s"' % (gs_layer, str(e)))
                e.args = (msg,)
                logger.exception(e)

    # The signal may fire with the ResourceBase parent; narrow down to
    # the concrete Layer if one is attached, otherwise bail out.
    if type(instance) is ResourceBase:
        if hasattr(instance, 'layer'):
            instance = instance.layer
        else:
            return

    if instance.storeType == "remoteStore":
        # Save layer attributes
        set_attributes_from_geoserver(instance)
        return

    """Get information from geoserver.

       The attributes retrieved include:

       * Bounding Box
       * SRID
       * Download links (WMS, WCS or WFS and KML)
       * Styles (SLD)
    """
    # instance.name = instance.name or gs_layer.name
    # instance.title = instance.title or gs_resource.title
    instance.abstract = gs_resource.abstract or ''
    instance.workspace = gs_resource.store.workspace.name
    instance.store = gs_resource.store.name

    bbox = gs_resource.native_bbox

    # Set bounding box values
    # NOTE(review): native_bbox appears to be (x0, x1, y0, y1, srid) —
    # confirm against the gsconfig version in use.
    instance.bbox_x0 = bbox[0]
    instance.bbox_x1 = bbox[1]
    instance.bbox_y0 = bbox[2]
    instance.bbox_y1 = bbox[3]
    instance.srid = bbox[4]

    if instance.srid:
        instance.srid_url = "http://www.spatialreference.org/ref/" + \
            instance.srid.replace(':', '/').lower() + "/"
    else:
        raise GeoNodeException("Invalid Projection. Layer is missing CRS!")

    # Iterate over values from geoserver.
    for key in ['alternate', 'store', 'storeType']:
        # attr_name = key if 'typename' not in key else 'alternate'
        # print attr_name
        setattr(instance, key, values[key])

    if settings.RESOURCE_PUBLISHING:
        if instance.is_published != gs_resource.advertised:
            if getattr(ogc_server_settings, "BACKEND_WRITE_ENABLED", True):
                # GeoServer expects the advertised flag as a string.
                gs_resource.advertised = 'true' if instance.is_published else 'false'
                gs_catalog.save(gs_resource)

    if not settings.FREETEXT_KEYWORDS_READONLY:
        # Merge GeoServer-side keywords into the local keyword set.
        if gs_resource.keywords:
            for keyword in gs_resource.keywords:
                if keyword not in instance.keyword_list():
                    instance.keywords.add(keyword)

    if any(instance.keyword_list()):
        # Push the (de-duplicated) local keywords back to GeoServer.
        keywords = instance.keyword_list()
        gs_resource.keywords = list(set(keywords))

        # gs_resource should only be called if
        # ogc_server_settings.BACKEND_WRITE_ENABLED == True
        if getattr(ogc_server_settings, "BACKEND_WRITE_ENABLED", True):
            try:
                gs_catalog.save(gs_resource)
            except geoserver.catalog.FailedRequestError as e:
                msg = ('Error while trying to save resource named %s in GeoServer, '
                       'try to use: "%s"' % (gs_resource, str(e)))
                e.args = (msg,)
                logger.exception(e)

    to_update = {
        'title': instance.title or instance.name,
        'abstract': instance.abstract or "",
        'alternate': instance.alternate,
        'bbox_x0': instance.bbox_x0,
        'bbox_x1': instance.bbox_x1,
        'bbox_y0': instance.bbox_y0,
        'bbox_y1': instance.bbox_y1,
        'srid': instance.srid
    }

    # Update ResourceBase
    resources = ResourceBase.objects.filter(id=instance.resourcebase_ptr.id)
    resources.update(**to_update)

    # to_update['name'] = instance.name,
    to_update['workspace'] = instance.workspace
    to_update['store'] = instance.store
    to_update['storeType'] = instance.storeType
    to_update['typename'] = instance.alternate

    # Save all the modified information in the instance without triggering signals.
    Layer.objects.filter(id=instance.id).update(**to_update)

    # Refresh from DB
    instance.refresh_from_db()

    # store the resource to avoid another geoserver call in the post_save
    instance.gs_resource = gs_resource

    # Derive a thumbnail-sized width from the layer's aspect ratio
    # (fixed 550px height; guard against zero-height bboxes).
    bbox = gs_resource.native_bbox
    dx = float(bbox[1]) - float(bbox[0])
    dy = float(bbox[3]) - float(bbox[2])

    dataAspect = 1 if dy == 0 else dx / dy

    height = 550
    width = int(height * dataAspect)

    # Set download links for WMS, WCS or WFS and KML
    links = wms_links(ogc_server_settings.public_url + 'wms?',
                      instance.alternate.encode('utf-8'),
                      instance.bbox_string,
                      instance.srid,
                      height,
                      width)

    for ext, name, mime, wms_url in links:
        Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                                   name=ugettext(name),
                                   defaults=dict(
                                       extension=ext,
                                       url=wms_url,
                                       mime=mime,
                                       link_type='image',
                                   ))

    if instance.storeType == "dataStore":
        # Vector layers: add WFS download links.
        links = wfs_links(ogc_server_settings.public_url + 'wfs?',
                          instance.alternate.encode('utf-8'))
        for ext, name, mime, wfs_url in links:
            if mime == 'SHAPE-ZIP':
                name = 'Zipped Shapefile'
            Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                                       url=wfs_url,
                                       defaults=dict(
                                           extension=ext,
                                           name=name,
                                           mime=mime,
                                           url=wfs_url,
                                           link_type='data',
                                       ))

        gs_store_type = gs_resource.store.type.lower() if gs_resource.store.type else None
        geogig_repository = gs_resource.store.connection_parameters.get(
            'geogig_repository', '')
        geogig_repo_name = geogig_repository.replace('geoserver://', '')

        if gs_store_type == 'geogig' and geogig_repo_name:
            # GeoGig-backed store: expose clone/log/statistics endpoints.
            repo_url = '{url}geogig/repos/{repo_name}'.format(
                url=ogc_server_settings.public_url,
                repo_name=geogig_repo_name)

            path = gs_resource.dom.findall('nativeName')
            if path:
                path = 'path={path}'.format(path=path[0].text)

            Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                                       url=repo_url,
                                       defaults=dict(extension='html',
                                                     name='Clone in GeoGig',
                                                     mime='text/xml',
                                                     link_type='html'))

            def command_url(command):
                # Build a GeoGig web-API command URL scoped to this layer.
                return "{repo_url}/{command}.json?{path}".format(
                    repo_url=repo_url,
                    path=path,
                    command=command)

            Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                                       url=command_url('log'),
                                       defaults=dict(extension='json',
                                                     name='GeoGig log',
                                                     mime='application/json',
                                                     link_type='html'))

            Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                                       url=command_url('statistics'),
                                       defaults=dict(extension='json',
                                                     name='GeoGig statistics',
                                                     mime='application/json',
                                                     link_type='html'))

    elif instance.storeType == 'coverageStore':
        # Raster layers: add WCS download links.
        links = wcs_links(ogc_server_settings.public_url + 'wcs?',
                          instance.alternate.encode('utf-8'),
                          ','.join(str(x) for x in instance.bbox[0:4]),
                          instance.srid)

        for ext, name, mime, wcs_url in links:
            Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                                       url=wcs_url,
                                       defaults=dict(
                                           extension=ext,
                                           name=name,
                                           mime=mime,
                                           link_type='data',
                                       ))

    # KML links via the GeoServer KML reflector (download + live refresh).
    kml_reflector_link_download = ogc_server_settings.public_url + "wms/kml?" + \
        urllib.urlencode({'layers': instance.alternate.encode('utf-8'),
                          'mode': "download"})

    Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                               url=kml_reflector_link_download,
                               defaults=dict(
                                   extension='kml',
                                   name="KML",
                                   mime='text/xml',
                                   link_type='data',
                               ))

    kml_reflector_link_view = ogc_server_settings.public_url + "wms/kml?" + \
        urllib.urlencode({'layers': instance.alternate.encode('utf-8'),
                          'mode': "refresh"})

    Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                               url=kml_reflector_link_view,
                               defaults=dict(
                                   extension='kml',
                                   name="View in Google Earth",
                                   mime='text/xml',
                                   link_type='data',
                               ))

    # Permalink back to the layer's detail page on this GeoNode site.
    html_link_url = '%s%s' % (
        settings.SITEURL[:-1], instance.get_absolute_url())

    Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                               url=html_link_url,
                               defaults=dict(
                                   extension='html',
                                   name=instance.alternate,
                                   mime='text/html',
                                   link_type='html',
                               ))

    # some thumbnail generators will update thumbnail_url.  If so, don't
    # immediately re-generate the thumbnail here.  use layer#save(update_fields=['thumbnail_url'])
    if not ('update_fields' in kwargs and
            kwargs['update_fields'] is not None and
            'thumbnail_url' in kwargs['update_fields']):
        logger.info("Creating Thumbnail for Layer [%s]" % (instance.alternate))
        create_gs_thumbnail(instance, overwrite=True)

    legend_url = ogc_server_settings.PUBLIC_LOCATION + \
        'wms?request=GetLegendGraphic&format=image/png&WIDTH=20&HEIGHT=20&LAYER=' + \
        instance.alternate + '&legend_options=fontAntiAliasing:true;fontSize:12;forceLabels:on'

    Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                               url=legend_url,
                               defaults=dict(
                                   extension='png',
                                   name='Legend',
                                   url=legend_url,
                                   mime='image/png',
                                   link_type='image',
                               ))

    # Generic per-workspace OGC service endpoints.
    ogc_wms_path = '%s/ows' % instance.workspace
    ogc_wms_url = urljoin(ogc_server_settings.public_url, ogc_wms_path)
    ogc_wms_name = 'OGC WMS: %s Service' % instance.workspace
    Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                               url=ogc_wms_url,
                               defaults=dict(
                                   extension='html',
                                   name=ogc_wms_name,
                                   url=ogc_wms_url,
                                   mime='text/html',
                                   link_type='OGC:WMS',
                               ))

    if instance.storeType == "dataStore":
        ogc_wfs_path = '%s/wfs' % instance.workspace
        ogc_wfs_url = urljoin(ogc_server_settings.public_url, ogc_wfs_path)
        ogc_wfs_name = 'OGC WFS: %s Service' % instance.workspace
        Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                                   url=ogc_wfs_url,
                                   defaults=dict(
                                       extension='html',
                                       name=ogc_wfs_name,
                                       url=ogc_wfs_url,
                                       mime='text/html',
                                       link_type='OGC:WFS',
                                   ))

    if instance.storeType == "coverageStore":
        ogc_wcs_path = '%s/wcs' % instance.workspace
        ogc_wcs_url = urljoin(ogc_server_settings.public_url, ogc_wcs_path)
        ogc_wcs_name = 'OGC WCS: %s Service' % instance.workspace
        Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                                   url=ogc_wcs_url,
                                   defaults=dict(
                                       extension='html',
                                       name=ogc_wcs_name,
                                       url=ogc_wcs_url,
                                       mime='text/html',
                                       link_type='OGC:WCS',
                                   ))

    # remove links that belong to and old address
    # (keep only links whose host matches either the site or the OGC server)
    for link in instance.link_set.all():
        if not urlparse(settings.SITEURL).hostname == urlparse(
                link.url).hostname and not urlparse(
                ogc_server_settings.public_url).hostname == urlparse(
                link.url).hostname:
            link.delete()

    # Define the link after the cleanup, we should use this more rather then remove
    # potential parasites
    tile_url = ('%sgwc/service/gmaps?' % ogc_server_settings.public_url +
                'layers=%s' % instance.alternate.encode('utf-8') +
                '&zoom={z}&x={x}&y={y}' +
                '&format=image/png8')

    link, created = Link.objects.get_or_create(
        resource=instance.resourcebase_ptr,
        extension='tiles',
        name="Tiles",
        mime='image/png',
        link_type='image',
    )
    if created:
        # Only set the URL on first creation; an existing tiles link is
        # left untouched.
        Link.objects.filter(pk=link.pk).update(url=tile_url)

    # Save layer attributes
    set_attributes_from_geoserver(instance)

    # Save layer styles
    set_styles(instance, gs_catalog)

    # NOTTODO by simod: we should not do this!
    # need to be removed when fixing #2015
    catalogue_post_save(instance, Layer)