def _get_indexed_layer_fields(self, layer_meta):
    """Map a remote service layer's metadata onto GeoNode resource fields.

    :param layer_meta: parsed layer metadata from the remote service
        (provides ``name``, ``title``, ``abstract``, ``keywords`` and a
        ``boundingBox`` sequence)
    :return: dict of field values used to index the remote layer
    :raises RuntimeError: if the advertised bounding box has fewer than
        four ordinates and therefore cannot form a polygon
    """
    bbox = utils.decimal_encode(layer_meta.boundingBox)
    # Fail early on a malformed BBOX instead of raising an opaque
    # IndexError below; mirrors the check in _get_indexed_dataset_fields.
    if len(bbox) < 4:
        raise RuntimeError(f"Resource BBOX is not valid: {bbox}")
    return {
        "name": layer_meta.name,
        "store": self.name,
        "storeType": "remoteStore",
        "workspace": "remoteWorkspace",
        "typename": layer_meta.name,
        "alternate": layer_meta.name,
        "title": layer_meta.title,
        "abstract": layer_meta.abstract,
        # from_xy expects [xmin, xmax, ymin, ymax], hence the 0/2/1/3 reorder.
        "bbox_polygon": BBOXHelper.from_xy([bbox[0], bbox[2], bbox[1], bbox[3]]).as_polygon(),
        # An optional 5th element carries the CRS identifier.
        "srid": bbox[4] if len(bbox) > 4 else "EPSG:4326",
        # Truncate each keyword to 100 chars to fit the keyword field.
        "keywords": [keyword[:100] for keyword in layer_meta.keywords],
    }
def _get_indexed_layer_fields(self, layer_meta):
    """Translate an ArcGIS REST layer description into GeoNode resource fields."""
    extent = layer_meta.extent
    srid = f"EPSG:{extent.spatialReference.wkid}"
    # Encode the extent corners in xmin, ymin, xmax, ymax order.
    bbox = utils.decimal_encode([
        extent.xmin,
        extent.ymin,
        extent.xmax,
        extent.ymax
    ])
    # Drop non-ASCII characters from the title before building the slug.
    ascii_title = ''.join(ch for ch in layer_meta.title if ord(ch) < 128)
    typename = slugify(f"{layer_meta.id}-{ascii_title}")
    fields = {
        "name": layer_meta.title,
        "store": self.name,
        "storeType": "remoteStore",
        "workspace": "remoteWorkspace",
        "typename": typename,
        "alternate": f"{slugify(self.url)}:{layer_meta.id}",
        "title": layer_meta.title,
        "abstract": layer_meta.abstract,
        # from_xy expects [xmin, xmax, ymin, ymax], hence the 0/2/1/3 reorder.
        "bbox_polygon": BBOXHelper.from_xy([bbox[0], bbox[2], bbox[1], bbox[3]]).as_polygon(),
        "srid": srid,
        "keywords": ['ESRI', 'ArcGIS REST MapServer', layer_meta.title],
    }
    return fields
def _get_cascaded_layer_fields(self, geoserver_resource):
    """Map a cascaded GeoServer resource onto GeoNode resource fields.

    :param geoserver_resource: resource object returned by the GeoServer
        catalog; ``workspace``, ``store`` and ``native_bbox`` are optional
    :return: dict of field values used to register the cascaded layer
    """
    name = geoserver_resource.name
    workspace = geoserver_resource.workspace.name if hasattr(
        geoserver_resource, 'workspace') else None
    store = geoserver_resource.store if hasattr(geoserver_resource, 'store') else None
    # Prefer the native bbox when GeoServer exposes it.
    bbox = utils.decimal_encode(geoserver_resource.native_bbox) if hasattr(geoserver_resource, 'native_bbox') else \
        utils.decimal_encode(geoserver_resource.boundingBox)
    # Prefix the workspace only when it is known and not already embedded in
    # the name. BUGFIX: the original evaluated `workspace not in name` even
    # when workspace was None, raising TypeError for workspace-less resources.
    typename = "{}:{}".format(workspace, name) if workspace and workspace not in name else name
    return {
        "name": name,
        "workspace": workspace or "remoteWorkspace",
        "store": store.name if store and hasattr(store, 'name') else self.name,
        "typename": typename,
        "alternate": typename,
        "storeType": "remoteStore",
        "title": geoserver_resource.title,
        "abstract": geoserver_resource.abstract,
        # from_xy expects [xmin, xmax, ymin, ymax], hence the 0/2/1/3 reorder.
        "bbox_polygon": BBOXHelper.from_xy([bbox[0], bbox[2], bbox[1], bbox[3]]).as_polygon(),
        # An optional 5th element carries the CRS identifier.
        "srid": bbox[4] if len(bbox) > 4 else "EPSG:4326",
    }
def _get_indexed_dataset_fields(self, dataset_meta):
    """Translate a remote dataset description into GeoNode resource fields.

    Raises ``RuntimeError`` when the advertised bounding box does not carry
    at least four ordinates.
    """
    coords = utils.decimal_encode(dataset_meta.boundingBox)
    # A usable bounding box needs all four corner ordinates.
    if len(coords) < 4:
        raise RuntimeError(f"Resource BBOX is not valid: {coords}")
    # Reorder indices 0/2/1/3 into the x-x-y-y layout from_xy expects.
    polygon = BBOXHelper.from_xy([coords[0], coords[2], coords[1], coords[3]]).as_polygon()
    # An optional 5th element carries the CRS identifier.
    srid = coords[4] if len(coords) > 4 else "EPSG:4326"
    truncated_keywords = [kw[:100] for kw in dataset_meta.keywords]
    return {
        "name": dataset_meta.name,
        "store": self.name,
        "subtype": "remote",
        "workspace": "remoteWorkspace",
        "typename": dataset_meta.name,
        "alternate": dataset_meta.name,
        "title": dataset_meta.title,
        "abstract": dataset_meta.abstract,
        "bbox_polygon": polygon,
        "srid": srid,
        "keywords": truncated_keywords,
    }
def form_valid(self, form):
    """
    If the form is valid, save the associated model.

    Creates the Document through the resource manager (moving the uploaded
    file into managed storage when one was supplied), applies moderation /
    publishing flags and permissions, optionally enriches the resource with
    EXIF metadata, then redirects (or returns JSON when ``no__redirect`` is
    set in the query string).
    """
    doc_form = form.cleaned_data
    file = doc_form.pop('doc_file', None)
    if file:
        # Stage the upload in a temp dir, then hand it to the storage manager.
        tempdir = tempfile.mkdtemp(dir=settings.STATIC_ROOT)
        dirname = os.path.basename(tempdir)
        filepath = storage_manager.save(f"{dirname}/{file.name}", file)
        storage_path = storage_manager.path(filepath)
        self.object = resource_manager.create(
            None,
            resource_type=Document,
            defaults=dict(owner=self.request.user,
                          doc_url=doc_form.pop('doc_url', None),
                          title=doc_form.pop('title', file.name),
                          files=[storage_path]))
        # Only remove the temp dir if the storage manager copied the file
        # somewhere else (i.e. the temp dir is no longer the file's home).
        if tempdir != os.path.dirname(storage_path):
            shutil.rmtree(tempdir, ignore_errors=True)
    else:
        # No file: create a URL-only (remote) document.
        self.object = resource_manager.create(
            None,
            resource_type=Document,
            defaults=dict(owner=self.request.user,
                          doc_url=doc_form.pop('doc_url', None),
                          title=doc_form.pop('title', None)))
    # Honour site-wide moderation/publishing settings for fresh uploads.
    if settings.ADMIN_MODERATE_UPLOADS:
        self.object.is_approved = False
    if settings.RESOURCE_PUBLISHING:
        self.object.is_published = False
    resource_manager.set_permissions(
        None, instance=self.object, permissions=form.cleaned_data["permissions"], created=True)
    abstract = None
    date = None
    regions = []
    keywords = []
    bbox = None
    url = hookset.document_detail_url(self.object)
    out = {'success': False}
    if getattr(settings, 'EXIF_ENABLED', False):
        # Best-effort EXIF enrichment; failures only produce a debug log.
        try:
            from geonode.documents.exif.utils import exif_extract_metadata_doc
            exif_metadata = exif_extract_metadata_doc(self.object)
            if exif_metadata:
                date = exif_metadata.get('date', None)
                keywords.extend(exif_metadata.get('keywords', []))
                bbox = exif_metadata.get('bbox', None)
                abstract = exif_metadata.get('abstract', None)
        except Exception:
            logger.debug("Exif extraction failed.")
    resource_manager.update(
        self.object.uuid,
        instance=self.object,
        keywords=keywords,
        regions=regions,
        vals=dict(abstract=abstract,
                  date=date,
                  date_type="Creation",
                  bbox_polygon=BBOXHelper.from_xy(bbox).as_polygon() if bbox else None),
        notify=True)
    resource_manager.set_thumbnail(self.object.uuid, instance=self.object, overwrite=False)
    register_event(self.request, EventType.EVENT_UPLOAD, self.object)
    if self.request.GET.get('no__redirect', False):
        # API-style response for callers that asked not to be redirected.
        out['success'] = True
        out['url'] = url
        if out['success']:
            status_code = 200
        else:
            status_code = 400
        return HttpResponse(json.dumps(out), content_type='application/json', status=status_code)
    else:
        return HttpResponseRedirect(url)
def file_upload(filename,
                layer=None,
                gtype=None,
                name=None,
                user=None,
                title=None,
                abstract=None,
                license=None,
                category=None,
                keywords=None,
                regions=None,
                date=None,
                skip=True,
                overwrite=False,
                charset='UTF-8',
                is_approved=True,
                is_published=True,
                metadata_uploaded_preserve=False,
                metadata_upload_form=False):
    """Saves a layer in GeoNode asking as little information as possible.
       Only filename is required, user and title are optional.

    :param filename: path to the main file holding the layer data (required)
    :param layer: existing Layer being replaced, if any
    :param gtype: geometry type used when validating a replacement source
    :param name: layer name; derived from ``title`` when omitted
    :param user: uploader; resolved to a valid user via ``get_valid_user``
    :param title: display title; derived from the file name when omitted
    :param overwrite: replace an existing layer with the same name
    :param metadata_uploaded_preserve: keep the uploaded XML verbatim on the layer
    :param metadata_upload_form: upload originates from the metadata form
    :return: Uploaded layer
    :rtype: Layer

    NOTE(review): ``skip`` is accepted but never read in this body — presumably
    kept for signature compatibility; confirm against callers.
    """
    if keywords is None:
        keywords = []
    if regions is None:
        regions = []
    # Get a valid user
    theuser = get_valid_user(user)
    # Create a new upload session
    if layer:
        # Replacing: reuse the most recent session for this layer if several
        # exist, otherwise create/fetch one, and drop its previous files.
        latest_uploads = UploadSession.objects.filter(
            resource=layer).order_by('-date')
        if latest_uploads.count() > 1:
            upload_session = latest_uploads.first()
        else:
            upload_session, _created = UploadSession.objects.get_or_create(
                resource=layer)
        upload_session.user = theuser
        upload_session.layerfile_set.all().delete()
    else:
        upload_session = UploadSession.objects.create(user=theuser)
    # Get all the files uploaded with the layer
    if os.path.exists(filename):
        files = get_files(filename)
    else:
        raise Exception(
            _("You are attempting to replace a vector layer with an unknown format."
              ))
    # We are going to replace an existing Layer...
    if layer and overwrite:
        validate_input_source(layer, filename, files, gtype, action_type='replace')
    # Set a default title that looks nice ...
    if title is None:
        basename = os.path.splitext(os.path.basename(filename))[0]
        title = basename.title().replace('_', ' ')
    # Create a name from the title if it is not passed.
    if name is None:
        name = slugify(title).replace('-', '_')
    elif not overwrite:
        name = slugify(name)  # assert that name is slugified
    # Resolve a License row by any of its identifying columns; ambiguous or
    # unknown values fall back to None.
    if license is not None:
        licenses = License.objects.filter(
            Q(name__iexact=license) |
            Q(abbreviation__iexact=license) |
            Q(url__iexact=license) |
            Q(description__iexact=license))
        if len(licenses) == 1:
            license = licenses[0]
        else:
            license = None
    # Same best-effort resolution for the topic category.
    if category is not None:
        try:
            categories = TopicCategory.objects.filter(
                Q(identifier__iexact=category) |
                Q(gn_description__iexact=category))
            if len(categories) == 1:
                category = categories[0]
            else:
                category = None
        except Exception:
            pass
    # Generate a name that is not taken if overwrite is False.
    valid_name = get_valid_layer_name(name, overwrite)
    # Add them to the upload session (new file fields are created).
    assigned_name = None
    for type_name, fn in files.items():
        with open(fn, 'rb') as f:
            upload_session.layerfile_set.create(
                name=type_name,
                file=File(f, name=f'{assigned_name or valid_name}.{type_name}'))
            # save the system assigned name for the remaining files
            if not assigned_name:
                the_file = upload_session.layerfile_set.all()[0].file.name
                assigned_name = os.path.splitext(os.path.basename(the_file))[0]
    # Get a bounding box
    *bbox, srid = get_bbox(filename)
    bbox_polygon = BBOXHelper.from_xy(bbox).as_polygon()
    if srid:
        # NOTE(review): srid_url is computed but never used below.
        srid_url = f"http://www.spatialreference.org/ref/{srid.replace(':', '/').lower()}/"  # noqa
        bbox_polygon.srid = int(srid.split(':')[1])
    # by default, if RESOURCE_PUBLISHING=True then layer.is_published
    # must be set to False
    if not overwrite:
        if settings.RESOURCE_PUBLISHING:
            is_published = False
        if settings.ADMIN_MODERATE_UPLOADS:
            is_approved = False
    defaults = {
        'upload_session': upload_session,
        'title': title,
        'abstract': abstract,
        'owner': user,
        'charset': charset,
        'bbox_polygon': bbox_polygon,
        'srid': 'EPSG:4326',
        'is_approved': is_approved,
        'is_published': is_published,
        'license': license,
        'category': category
    }
    # set metadata
    if 'xml' in files:
        with open(files['xml']) as f:
            xml_file = f.read()
        defaults['metadata_uploaded'] = True
        defaults['metadata_uploaded_preserve'] = metadata_uploaded_preserve
        # get model properties from XML
        identifier, vals, regions, keywords = set_metadata(xml_file)
        if defaults['metadata_uploaded_preserve']:
            defaults['metadata_xml'] = xml_file
        if identifier:
            # Never allow two resources to share a UUID.
            if ResourceBase.objects.filter(uuid=identifier).count():
                logger.error(
                    "The UUID identifier from the XML Metadata is already in use in this system."
                )
                raise GeoNodeException(
                    _("The UUID identifier from the XML Metadata is already in use in this system."
                      ))
            else:
                defaults['uuid'] = identifier
        for key, value in vals.items():
            if key == 'spatial_representation_type':
                value = SpatialRepresentationType(identifier=value)
            elif key == 'topic_category':
                value, created = TopicCategory.objects.get_or_create(
                    identifier=value,
                    defaults={
                        'description': '',
                        'gn_description': value
                    })
                key = 'category'
            defaults[key] = value
    regions_resolved, regions_unresolved = resolve_regions(regions)
    if keywords and regions_unresolved:
        # Regions that could not be matched become plain keywords.
        keywords.extend(convert_keyword(regions_unresolved))
    # If it is a vector file, create the layer in postgis.
    if is_vector(filename):
        defaults['storeType'] = 'dataStore'
    # If it is a raster file, get the resolution.
    if is_raster(filename):
        defaults['storeType'] = 'coverageStore'
    # Create a Django object.
    created = False
    layer = None
    try:
        with transaction.atomic():
            if overwrite:
                try:
                    layer = Layer.objects.get(name=valid_name)
                except Layer.DoesNotExist:
                    layer = None
            if not layer:
                if not metadata_upload_form:
                    layer = Layer.objects.filter(
                        name=valid_name,
                        workspace=settings.DEFAULT_WORKSPACE).first()
                    if not layer:
                        layer = Layer.objects.create(
                            name=valid_name,
                            workspace=settings.DEFAULT_WORKSPACE)
                        created = True
                # NOTE(review): `identifier` is only bound when an XML file was
                # uploaded above — this branch would raise NameError otherwise;
                # presumably metadata_upload_form implies an XML file. Confirm.
                elif identifier:
                    layer = Layer.objects.filter(uuid=identifier).first()
                    if not layer:
                        layer = Layer.objects.create(uuid=identifier)
                        created = True
    except IntegrityError:
        raise
    # Delete the old layers if overwrite is true
    # and the layer was not just created
    # process the layer again after that by
    # doing a layer.save()
    if not created and overwrite:
        # update with new information
        defaults['title'] = defaults.get('title', None) or layer.title
        defaults['abstract'] = defaults.get('abstract', None) or layer.abstract
        defaults['bbox_polygon'] = defaults.get('bbox_polygon', None) or layer.bbox_polygon
        defaults['ll_bbox_polygon'] = defaults.get(
            'll_bbox_polygon', None) or layer.ll_bbox_polygon
        defaults['is_approved'] = defaults.get(
            'is_approved', is_approved) or layer.is_approved
        defaults['is_published'] = defaults.get(
            'is_published', is_published) or layer.is_published
        defaults['license'] = defaults.get('license', None) or layer.license
        defaults['category'] = defaults.get('category', None) or layer.category
        if upload_session:
            # Carry the new session's bookkeeping over to the layer's
            # existing session rather than replacing it.
            if layer.upload_session:
                layer.upload_session.date = upload_session.date
                layer.upload_session.user = upload_session.user
                layer.upload_session.error = upload_session.error
                layer.upload_session.traceback = upload_session.traceback
                layer.upload_session.context = upload_session.context
                upload_session = layer.upload_session
            else:
                layer.upload_session = upload_session
    if upload_session:
        defaults['upload_session'] = upload_session
        upload_session.resource = layer
        upload_session.processed = False
        upload_session.save()
    layer = KeywordHandler(layer, keywords).set_keywords()
    # Assign the regions (needs to be done after saving)
    regions_resolved = list(set(regions_resolved))
    if regions_resolved:
        if len(regions_resolved) > 0:
            if not layer.regions:
                layer.regions = regions_resolved
            else:
                layer.regions.clear()
                layer.regions.add(*regions_resolved)
    # Assign and save the charset using the Layer class' object (layer)
    if charset != 'UTF-8':
        layer.charset = charset
    if not defaults.get('title', title):
        defaults['title'] = layer.title or layer.name
    if not defaults.get('abstract', abstract):
        defaults['abstract'] = layer.abstract or ''
    # Split the pending values between Layer columns (to_update) and
    # ResourceBase columns (what remains in defaults).
    to_update = {}
    to_update['upload_session'] = defaults.pop('upload_session', layer.upload_session)
    to_update['storeType'] = defaults.pop('storeType', layer.storeType)
    to_update['charset'] = defaults.pop('charset', layer.charset)
    to_update.update(defaults)
    if defaults.get('date', date) is not None:
        to_update['date'] = defaults.get(
            'date',
            datetime.strptime(date, '%Y-%m-%d %H:%M:%S') if date else None)
    # Update ResourceBase
    if not to_update:
        pass
    else:
        try:
            with transaction.atomic():
                if 'spatial_representation_type' in defaults:
                    # Persist the unsaved SpatialRepresentationType first.
                    _spatial_ref_type = defaults.pop(
                        'spatial_representation_type')
                    _spatial_ref_type.save()
                    defaults['spatial_representation_type'] = _spatial_ref_type
                ResourceBase.objects.filter(
                    id=layer.resourcebase_ptr.id).update(**defaults)
                Layer.objects.filter(id=layer.id).update(**to_update)
                # Refresh from DB
                layer.refresh_from_db()
                # Pass the parameter overwrite to tell whether the
                # geoserver_post_save_signal should upload the new file or not
                layer.overwrite = overwrite
                # Blank out the store if overwrite is true.
                # geoserver_post_save_signal should upload the new file if needed
                layer.store = '' if overwrite else layer.store
        except IntegrityError:
            raise
    try:
        with transaction.atomic():
            layer.save(notify=True)
    except IntegrityError:
        raise
    return layer
def form_valid(self, form):
    """
    If the form is valid, save the associated model.

    Saves the Document with the requesting user as owner, applies
    moderation/publishing flags and permissions, optionally enriches the
    object with EXIF metadata, then redirects to the detail page (or
    returns JSON when ``no__redirect`` is present in the query string).
    """
    self.object = form.save(commit=False)
    self.object.owner = self.request.user
    # Honour site-wide moderation/publishing settings for fresh uploads.
    if settings.ADMIN_MODERATE_UPLOADS:
        self.object.is_approved = False
    if settings.RESOURCE_PUBLISHING:
        self.object.is_published = False
    self.object.save()
    form.save_many2many()
    self.object.set_permissions(form.cleaned_data['permissions'])
    abstract = None
    date = None
    regions = []
    keywords = []
    bbox = None
    out = {'success': False}
    if getattr(settings, 'EXIF_ENABLED', False):
        # Best-effort EXIF enrichment; failures are logged, not raised.
        try:
            from geonode.documents.exif.utils import exif_extract_metadata_doc
            exif_metadata = exif_extract_metadata_doc(self.object)
            if exif_metadata:
                date = exif_metadata.get('date', None)
                keywords.extend(exif_metadata.get('keywords', []))
                bbox = exif_metadata.get('bbox', None)
                abstract = exif_metadata.get('abstract', None)
        except Exception:
            logger.error("Exif extraction failed.")
    if abstract:
        self.object.abstract = abstract
    if date:
        self.object.date = date
        self.object.date_type = "Creation"
    if len(regions) > 0:
        self.object.regions.add(*regions)
    if len(keywords) > 0:
        self.object.keywords.add(*keywords)
    if bbox:
        bbox = BBOXHelper.from_xy(bbox)
        self.object.bbox_polygon = bbox.as_polygon()
    self.object.save(notify=True)
    register_event(self.request, EventType.EVENT_UPLOAD, self.object)
    if self.request.GET.get('no__redirect', False):
        # API-style response for callers that asked not to be redirected.
        out['success'] = True
        out['url'] = reverse('document_detail', args=(self.object.id, ))
        if out['success']:
            status_code = 200
        else:
            status_code = 400
        return HttpResponse(json.dumps(out),
                            content_type='application/json',
                            status=status_code)
    else:
        return HttpResponseRedirect(
            reverse('document_detail', args=(self.object.id, )))
def set_geonode_map(self, caller, serializer, map_obj=None, data=None, attributes=None):
    """Create or update a GeoNode Map from a MapStore2 configuration.

    Resolves name/title/abstract/thumbnail from ``attributes`` (or from an
    existing ``map_obj``), rebuilds per-layer params and sources from the
    GeoNode catalog, accumulates the map BBOX from the layers' capabilities,
    then saves the map through ``serializer``. Raises ``APIException`` when
    ``data`` is missing or any step fails.
    """

    def decode_base64(data):
        """Decode base64, padding being optional.

        :param data: Base64 data as an ASCII byte string
        :returns: The decoded byte string.
        """
        _thumbnail_format = 'png'
        _invalid_padding = data.find(';base64,')
        # NOTE(review): str.find returns -1 (truthy) when the marker is
        # absent and 0 (falsy) when it starts the string — presumably inputs
        # are data URIs where the marker never appears at index 0; confirm.
        if _invalid_padding:
            _thumbnail_format = data[data.find('image/') + len('image/'):_invalid_padding]
            data = data[_invalid_padding + len(';base64,'):]
        missing_padding = len(data) % 4
        if missing_padding != 0:
            data += b'=' * (4 - missing_padding)
        return (base64.b64decode(data), _thumbnail_format)

    _map_name = None
    _map_title = None
    _map_abstract = None
    _map_thumbnail = None
    _map_thumbnail_format = 'png'
    if attributes:
        # Pull name/title/abstract/thumbnail out of the MS2 attribute list.
        for _a in attributes:
            if _a['name'] == 'name' and 'value' in _a:
                _map_name = _a['value']
            if _a['name'] == 'title' and 'value' in _a:
                _map_title = _a['value']
            if _a['name'] == 'abstract' and 'value' in _a:
                _map_abstract = _a['value']
            if 'thumb' in _a['name'] and 'value' in _a:
                try:
                    (_map_thumbnail, _map_thumbnail_format) = decode_base64(_a['value'])
                except Exception:
                    # Not base64: treat the value as a plain thumbnail link.
                    if _a['value']:
                        _map_thumbnail = _a['value']
                        _map_thumbnail_format = 'link'
    elif map_obj:
        _map_title = map_obj.title
        _map_abstract = map_obj.abstract
    _map_name = _map_name or None
    if not _map_name and 'name' in serializer.validated_data:
        _map_name = serializer.validated_data['name']
    _map_title = _map_title or _map_name
    _map_abstract = _map_abstract or ""
    if data:
        try:
            _map_conf = dict(data)
            _map_conf["about"] = {
                "name": _map_name,
                "title": _map_title,
                "abstract": _map_abstract
            }
            _map_conf['sources'] = {}
            from geonode.layers.views import layer_detail
            _map_obj = data.pop('map', None)
            if _map_obj:
                _map_bbox = []
                for _lyr in _map_obj['layers']:
                    _lyr_context = {}
                    _lyr_store = _lyr['store'] if 'store' in _lyr else None
                    if not _lyr_store:
                        # Resolve the store: first via the catalog URL's uuid,
                        # then via alternate + remote service URL.
                        try:
                            _url = urlparse(_lyr['catalogURL'])
                            _lyr_store = Layer.objects.get(
                                uuid=parse_qs(_url.query)['id'][0]).store
                        except Exception:
                            try:
                                _lyr_store = Layer.objects.get(
                                    alternate=_lyr['name'],
                                    remote_service__base_url=_lyr['url']
                                ).store
                            except Exception:
                                _lyr_store = None
                    _lyr_name = "%s:%s" % (
                        _lyr_store, _lyr['name']) if _lyr_store else _lyr['name']
                    try:
                        # Retrieve the Layer Params back from GeoNode
                        _gn_layer = layer_detail(caller.request, _lyr_name)
                        if _gn_layer and _gn_layer.context_data:
                            _context_data = json.loads(
                                _gn_layer.context_data['viewer'])
                            for _gn_layer_ctx in _context_data['map']['layers']:
                                if 'name' in _gn_layer_ctx and _gn_layer_ctx['name'] == _lyr['name']:
                                    _lyr['store'] = _lyr_store
                                    if 'style' in _lyr:
                                        _lyr_context['style'] = _lyr['style']
                                    # NOTE(review): this rebinding discards the
                                    # 'style' just set above — confirm intended.
                                    _lyr_context = _gn_layer_ctx
                                    _src_idx = _lyr_context['source']
                                    _map_conf['sources'][_src_idx] = _context_data['sources'][_src_idx]
                    except Http404:
                        tb = traceback.format_exc()
                        logger.debug(tb)
                    except Exception:
                        raise
                    # Store ms2 layer idq
                    if "id" in _lyr and _lyr["id"]:
                        _lyr['extraParams'] = {"msId": _lyr["id"]}
                    # Store the Capabilities Document into the Layer Params of GeoNode
                    if _lyr_context:
                        if 'ftInfoTemplate' in _lyr_context:
                            _lyr['ftInfoTemplate'] = _lyr_context['ftInfoTemplate']
                        if 'getFeatureInfo' in _lyr_context:
                            _lyr['getFeatureInfo'] = _lyr_context['getFeatureInfo']
                        if 'capability' in _lyr_context:
                            _lyr['capability'] = _lyr_context['capability']
                            if 'bbox' in _lyr_context['capability']:
                                _lyr_bbox = _lyr_context['capability']['bbox']
                                if _map_obj['projection'] in _lyr_bbox:
                                    x0 = _lyr_bbox[_map_obj['projection']]['bbox'][0]
                                    x1 = _lyr_bbox[_map_obj['projection']]['bbox'][2]
                                    y0 = _lyr_bbox[_map_obj['projection']]['bbox'][1]
                                    y1 = _lyr_bbox[_map_obj['projection']]['bbox'][3]
                                    if len(_map_bbox) == 0:
                                        _map_bbox = [x0, x1, y0, y1]
                                    else:
                                        # Grow the accumulated map BBOX by the
                                        # union with this layer's BBOX.
                                        from geonode.utils import bbox_to_wkt
                                        from django.contrib.gis.geos import GEOSGeometry
                                        _l_wkt = bbox_to_wkt(
                                            x0, x1, y0, y1,
                                            srid=_map_obj['projection'])
                                        _m_wkt = bbox_to_wkt(
                                            _map_bbox[0], _map_bbox[2],
                                            _map_bbox[1], _map_bbox[3],
                                            srid=_map_obj['projection'])
                                        _map_srid = int(_map_obj['projection'][5:])
                                        _l_poly = GEOSGeometry(
                                            _l_wkt, srid=_map_srid)
                                        _m_poly = GEOSGeometry(
                                            _m_wkt, srid=_map_srid).union(_l_poly)
                                        _map_bbox = _m_poly.extent
                        if 'source' in _lyr_context:
                            _source = _map_conf['sources'][_lyr_context['source']]
                            if 'remote' in _source and _source['remote'] is True:
                                _lyr['source'] = _lyr_context['source']
                    elif 'source' in _lyr:
                        _map_conf['sources'][_lyr['source']] = {}
                event_type = None
                if is_analytics_enabled:
                    event_type = EventType.EVENT_CHANGE
                if not map_obj:
                    # Update Map BBox
                    if 'bbox' not in _map_obj and (not _map_bbox or len(_map_bbox) != 4):
                        _map_bbox = _map_obj['maxExtent']
                    # Must be in the form : [x0, x1, y0, y1]
                    _map_obj['bbox'] = [
                        _map_bbox[0], _map_bbox[1],
                        _map_bbox[2], _map_bbox[3]
                    ]
                    # Create a new GeoNode Map
                    from geonode.maps.models import Map
                    map_obj = Map(title=_map_title,
                                  owner=caller.request.user,
                                  center_x=_map_obj['center']['x'],
                                  center_y=_map_obj['center']['y'],
                                  projection=_map_obj['projection'],
                                  zoom=_map_obj['zoom'],
                                  srid=_map_obj['projection'])
                    if 'bbox' in _map_obj:
                        # Newer models expose a bbox_polygon field; older ones
                        # keep four scalar bbox columns.
                        if hasattr(map_obj, 'bbox_polygon'):
                            map_obj.bbox_polygon = BBOXHelper.from_xy(
                                _map_obj['bbox']).as_polygon()
                        else:
                            map_obj.bbox_x0 = _map_obj['bbox'][0]
                            map_obj.bbox_y0 = _map_obj['bbox'][1]
                            map_obj.bbox_x1 = _map_obj['bbox'][2]
                            map_obj.bbox_y1 = _map_obj['bbox'][3]
                    map_obj.save()
                    if is_analytics_enabled:
                        event_type = EventType.EVENT_CREATE
                # Dumps thumbnail from MapStore2 Interface
                if _map_thumbnail:
                    # note: dumping a thumbnail should be performed before update_from_viewer(), to remove
                    # a race hazard. update_from_viewer() saves an existing map without a thumbnail,
                    # triggering asynchronous generation of a default thumbnail, which may overwrite uploaded thumb
                    if _map_thumbnail_format == 'link':
                        map_obj.thumbnail_url = _map_thumbnail
                    else:
                        _map_thumbnail_filename = "map-%s-thumb.%s" % (
                            map_obj.uuid, _map_thumbnail_format)
                        map_obj.save_thumbnail(_map_thumbnail_filename, _map_thumbnail)
                # Update GeoNode Map
                _map_conf['map'] = _map_obj
                map_obj.update_from_viewer(_map_conf, context={'config': _map_conf})
                if is_analytics_enabled:
                    register_event(caller.request, event_type, map_obj)
                serializer.validated_data['id'] = map_obj.id
                serializer.save(user=caller.request.user)
        except Exception as e:
            tb = traceback.format_exc()
            logger.error(tb)
            raise APIException(e)
    else:
        raise APIException("Map Configuration (data) is Mandatory!")
def set_geonode_map(self, caller, serializer, map_obj=None, data=None):
    """Create or update a GeoNode Map from a MapStore2 configuration.

    Derives name/title/abstract from the serializer (falling back to the
    existing ``map_obj``), rebuilds per-layer params and sources from the
    GeoNode catalog, stores the raw configuration as the map ``blob`` and
    saves it through ``serializer``. Raises ``APIException`` when ``data``
    is missing or any step fails.

    NOTE(review): the title fallbacks dereference ``map_obj.title`` — this
    assumes ``map_obj`` is provided whenever the validated title is empty;
    confirm against callers.
    """
    _map_name = serializer.validated_data['title'] or map_obj.title
    _map_title = serializer.validated_data['title'] or map_obj.title
    _map_abstract = serializer.validated_data.get(
        'abstract', '') or '' if not hasattr(map_obj, 'abstract') else map_obj.abstract
    if data:
        try:
            # Keep an untouched copy of the config for the map blob.
            data_blob = data.copy()
            _map_conf = dict(data)
            _map_conf["about"] = {
                "name": _map_name,
                "title": _map_title,
                "abstract": _map_abstract
            }
            _map_conf['sources'] = {}
            from geonode.layers.views import layer_detail
            _map_obj = data.pop('map', None)
            if _map_obj:
                _map_bbox = []
                for _lyr in _map_obj['layers']:
                    _lyr_context = {}
                    _lyr_store = _lyr['store'] if 'store' in _lyr else None
                    if not _lyr_store:
                        # Resolve the store: first via the catalog URL's uuid,
                        # then via alternate + remote service URL.
                        try:
                            _url = urlparse(_lyr['catalogURL'])
                            _lyr_store = Layer.objects.get(
                                uuid=parse_qs(_url.query)['id'][0]).store
                        except Exception:
                            try:
                                _lyr_store = Layer.objects.get(
                                    alternate=_lyr['name'],
                                    remote_service__base_url=_lyr['url']
                                ).store
                            except Exception:
                                _lyr_store = None
                    _lyr_name = f"{_lyr_store}:{_lyr['name']}" if _lyr_store else _lyr['name']
                    try:
                        # Retrieve the Layer Params back from GeoNode
                        _gn_layer = layer_detail(caller.request, _lyr_name)
                        if _gn_layer and _gn_layer.context_data:
                            _context_data = json.loads(
                                _gn_layer.context_data['viewer'])
                            for _gn_layer_ctx in _context_data['map']['layers']:
                                if 'name' in _gn_layer_ctx and _gn_layer_ctx['name'] == _lyr['name']:
                                    _lyr['store'] = _lyr_store
                                    if 'style' in _lyr:
                                        _lyr_context['style'] = _lyr['style']
                                        if 'capability' in _gn_layer_ctx:
                                            # Add selected style to capability in layer_params
                                            _gn_layer_ctx['capability']['style'] = _lyr['style']
                                    _lyr_context = _gn_layer_ctx
                                    _src_idx = _lyr_context['source']
                                    _map_conf['sources'][_src_idx] = _context_data['sources'][_src_idx]
                    except Http404:
                        tb = traceback.format_exc()
                        logger.debug(tb)
                    except Exception:
                        raise
                    # Store ms2 layer idq
                    if "id" in _lyr and _lyr["id"]:
                        _lyr['extraParams'] = {"msId": _lyr["id"]}
                    # Store the Capabilities Document into the Layer Params of GeoNode
                    if _lyr_context:
                        if 'ftInfoTemplate' in _lyr_context:
                            _lyr['ftInfoTemplate'] = _lyr_context['ftInfoTemplate']
                        if 'getFeatureInfo' in _lyr_context:
                            _lyr['getFeatureInfo'] = _lyr_context['getFeatureInfo']
                        if 'capability' in _lyr_context:
                            _lyr['capability'] = _lyr_context['capability']
                            if 'bbox' in _lyr_context['capability']:
                                _lyr_bbox = _lyr_context['capability']['bbox']
                                if _map_obj['projection'] in _lyr_bbox:
                                    x0 = _lyr_bbox[_map_obj['projection']]['bbox'][0]
                                    x1 = _lyr_bbox[_map_obj['projection']]['bbox'][2]
                                    y0 = _lyr_bbox[_map_obj['projection']]['bbox'][1]
                                    y1 = _lyr_bbox[_map_obj['projection']]['bbox'][3]
                                    if len(_map_bbox) == 0:
                                        _map_bbox = [x0, x1, y0, y1]
                                    else:
                                        # Grow the accumulated map BBOX by the
                                        # union with this layer's BBOX.
                                        from geonode.utils import bbox_to_wkt
                                        from django.contrib.gis.geos import GEOSGeometry
                                        _l_wkt = bbox_to_wkt(
                                            x0, x1, y0, y1,
                                            srid=_map_obj['projection'])
                                        _m_wkt = bbox_to_wkt(
                                            _map_bbox[0], _map_bbox[2],
                                            _map_bbox[1], _map_bbox[3],
                                            srid=_map_obj['projection'])
                                        _map_srid = int(_map_obj['projection'][5:])
                                        _l_poly = GEOSGeometry(
                                            _l_wkt, srid=_map_srid)
                                        _m_poly = GEOSGeometry(
                                            _m_wkt, srid=_map_srid).union(_l_poly)
                                        _map_bbox = _m_poly.extent
                        if 'source' in _lyr_context:
                            _source = _map_conf['sources'][_lyr_context['source']]
                            if 'remote' in _source and _source['remote'] is True:
                                _lyr['source'] = _lyr_context['source']
                    elif 'source' in _lyr:
                        _map_conf['sources'][_lyr['source']] = {}
                event_type = None
                if is_analytics_enabled:
                    event_type = EventType.EVENT_CHANGE
                if not map_obj:
                    # Create a new GeoNode Map
                    from geonode.maps.models import Map
                    map_obj = Map(title=_map_title,
                                  abstract=_map_abstract,
                                  owner=caller.request.user,
                                  center_x=_map_obj['center']['x'],
                                  center_y=_map_obj['center']['y'],
                                  projection=_map_obj['projection'],
                                  zoom=_map_obj['zoom'],
                                  srid=_map_obj['projection'],
                                  resource_type="map")
                    if 'bbox' in _map_obj:
                        # Newer models expose a bbox_polygon field; older ones
                        # keep four scalar bbox columns.
                        if hasattr(map_obj, 'bbox_polygon'):
                            map_obj.bbox_polygon = BBOXHelper.from_xy(
                                _map_obj['bbox']).as_polygon()
                        else:
                            map_obj.bbox_x0 = _map_obj['bbox'][0]
                            map_obj.bbox_y0 = _map_obj['bbox'][1]
                            map_obj.bbox_x1 = _map_obj['bbox'][2]
                            map_obj.bbox_y1 = _map_obj['bbox'][3]
                    elif hasattr(map_obj, 'bbox_polygon') and map_obj.bbox_polygon is None:
                        # set the bbox_polygon to the obj and then to serializer instance
                        map_obj.set_bounds_from_center_and_zoom(
                            _map_obj['center']['x'],
                            _map_obj['center']['y'],
                            _map_obj['zoom'])
                    if is_analytics_enabled:
                        event_type = EventType.EVENT_CREATE
                else:
                    # Existing map: only refresh title/abstract here.
                    map_obj.title = _map_title
                    map_obj.abstract = _map_abstract
                if not map_obj.uuid:
                    map_obj.uuid = str(uuid4())
                map_obj.blob = data_blob
                # Update GeoNode Map
                _map_conf['map'] = _map_obj
                if is_analytics_enabled:
                    register_event(caller.request, event_type, map_obj)
                serializer.instance = map_obj
                serializer.save()
                serializer.instance.update_from_viewer(
                    _map_conf, context={'config': _map_conf})
                return serializer
        except Exception as e:
            tb = traceback.format_exc()
            logger.error(tb)
            raise APIException(e)
    else:
        raise APIException("Map Configuration (data) is Mandatory!")
def create_thumbnail(
    instance: Union[Dataset, Map],
    wms_version: str = settings.OGC_SERVER["default"].get("WMS_VERSION", "1.1.1"),
    bbox: Optional[Union[List, Tuple]] = None,
    forced_crs: Optional[str] = None,
    styles: Optional[List] = None,
    overwrite: bool = False,
    background_zoom: Optional[int] = None,
) -> None:
    """
    Function generating and saving a thumbnail of the given instance (Dataset or Map), which
    is composed of outcomes of WMS GetMap queries to the instance's datasets providers, and
    an outcome of querying background provider for thumbnail's background (by default Slippy Map provider).

    :param instance: instance of Dataset or Map models
    :param wms_version: WMS version of the query
    :param bbox: bounding box of the thumbnail in format: (west, east, south, north, CRS), where CRS is in format
                 "EPSG:XXXX"
    :param forced_crs: CRS which should be used to fetch data from WMS services in format "EPSG:XXXX". By default
                       all data is translated and retrieved in EPSG:3857, since this enables background fetching from
                       Slippy Maps providers. Forcing another CRS can cause skipping background generation in
                       the thumbnail
    :param styles: styles, which OGC server should use for rendering an image
    :param overwrite: overwrite existing thumbnail
    :param background_zoom: zoom of the XYZ Slippy Map used to retrieve background image,
                            if Slippy Map is used as background
    """
    instance.refresh_from_db()
    default_thumbnail_name = _generate_thumbnail_name(instance)
    mime_type = "image/png"
    width = settings.THUMBNAIL_SIZE["width"]
    height = settings.THUMBNAIL_SIZE["height"]
    if default_thumbnail_name is None:
        # instance is Map and has no datasets defined
        utils.assign_missing_thumbnail(instance)
        return
    # handle custom, uploaded thumbnails, which may have different extensions from the default thumbnail
    thumbnail_exists = False
    if instance.thumbnail_url and instance.thumbnail_url != static(utils.MISSING_THUMB):
        thumbnail_exists = utils.thumb_exists(instance.thumbnail_url.rsplit('/')[-1])
    if (thumbnail_exists or utils.thumb_exists(default_thumbnail_name)) and not overwrite:
        logger.debug(f"Thumbnail for {instance.name} already exists. Skipping thumbnail generation.")
        return
    # --- determine target CRS and bbox ---
    target_crs = forced_crs.upper() if forced_crs is not None else "EPSG:3857"
    compute_bbox_from_datasets = False
    is_map_with_datasets = True
    if isinstance(instance, Map):
        # Only maps with at least one local dataset can produce WMS imagery.
        is_map_with_datasets = MapLayer.objects.filter(map=instance, local=True).exclude(dataset=None).count() > 0
    if bbox:
        bbox = utils.clean_bbox(bbox, target_crs)
    elif instance.ll_bbox_polygon:
        # Fall back to the instance's lat/lon bbox, reprojected to target_crs.
        _bbox = BBOXHelper(instance.ll_bbox_polygon.extent)
        srid = instance.ll_bbox_polygon.srid
        bbox = [_bbox.xmin, _bbox.xmax, _bbox.ymin, _bbox.ymax, f"EPSG:{srid}"]
        bbox = utils.clean_bbox(bbox, target_crs)
    else:
        compute_bbox_from_datasets = True
    # --- define dataset locations ---
    locations, datasets_bbox = _datasets_locations(instance, compute_bbox=compute_bbox_from_datasets, target_crs=target_crs)
    if compute_bbox_from_datasets and is_map_with_datasets:
        if not datasets_bbox:
            raise ThumbnailError(f"Thumbnail generation couldn't determine a BBOX for: {instance}.")
        else:
            bbox = datasets_bbox
    # --- expand the BBOX to match the set thumbnail's ratio (prevent thumbnail's distortions) ---
    bbox = utils.expand_bbox_to_ratio(bbox) if bbox else None
    # --- add default style ---
    if not styles and hasattr(instance, "default_style"):
        if instance.default_style:
            styles = [instance.default_style.name]
    # --- fetch WMS datasets ---
    partial_thumbs = []
    for ogc_server, datasets, _styles in locations:
        # For maps, prefer the per-dataset styles when they align 1:1.
        if isinstance(instance, Map) and len(datasets) == len(_styles):
            styles = _styles
        try:
            partial_thumbs.append(
                utils.get_map(
                    ogc_server,
                    datasets,
                    wms_version=wms_version,
                    bbox=bbox,
                    mime_type=mime_type,
                    styles=styles,
                    width=width,
                    height=height,
                )
            )
        except Exception as e:
            # A failed partial fetch is logged; the rest may still compose.
            logger.error(f"Exception occurred while fetching partial thumbnail for {instance.title}.")
            logger.exception(e)
    if not partial_thumbs and is_map_with_datasets:
        utils.assign_missing_thumbnail(instance)
        raise ThumbnailError("Thumbnail generation failed - no image retrieved from WMS services.")
    # --- merge retrieved WMS images ---
    merged_partial_thumbs = Image.new("RGBA", (width, height), (255, 255, 255, 0))
    for image in partial_thumbs:
        if image:
            content = BytesIO(image)
            try:
                img = Image.open(content)
                img.verify()  # verify that it is, in fact an image
                img = Image.open(BytesIO(image))  # "re-open" the file (required after running verify method)
                merged_partial_thumbs.paste(img, mask=img.convert('RGBA'))
            except UnidentifiedImageError as e:
                logger.error(f"Thumbnail generation. Error occurred while fetching dataset image: {image}")
                logger.exception(e)
    # --- fetch background image ---
    try:
        BackgroundGenerator = import_string(settings.THUMBNAIL_BACKGROUND["class"])
        background = BackgroundGenerator(width, height).fetch(bbox, background_zoom) if bbox else None
    except Exception as e:
        # Background is optional — continue with a plain backdrop on failure.
        logger.error(f"Thumbnail generation. Error occurred while fetching background image: {e}")
        logger.exception(e)
        background = None
    # --- overlay image with background ---
    thumbnail = Image.new("RGB", (width, height), (250, 250, 250))
    if background is not None:
        thumbnail.paste(background, (0, 0))
    thumbnail.paste(merged_partial_thumbs, (0, 0), merged_partial_thumbs)
    # convert image to the format required by save_thumbnail
    with BytesIO() as output:
        thumbnail.save(output, format="PNG")
        content = output.getvalue()
    # save thumbnail
    instance.save_thumbnail(default_thumbnail_name, image=content)
    return instance.thumbnail_url