def _create_dataset(self, geonode_service, **resource_fields):
    """Create a local ``Dataset`` mirroring a layer of a remote service.

    Note: ``geonode.layers.models`` connects a ``pre_save_dataset``
    handler to the ``pre_save`` signal of the Dataset model; it checks
    common fields (such as abstract and title) and adds sensible
    default values.
    """
    dataset_keywords = resource_fields.pop("keywords", [])
    creation_defaults = dict(
        owner=geonode_service.owner,
        remote_service=geonode_service,
        remote_typename=geonode_service.name,
        sourcetype=base_enumerations.SOURCE_TYPE_REMOTE,
        ptype=getattr(geonode_service, "ptype", "gxp_wmscsource"),
        **resource_fields)
    # Indexed services carry their own OWS endpoint.
    if geonode_service.method == INDEXED:
        creation_defaults['ows_url'] = geonode_service.service_url
    new_dataset = resource_manager.create(
        None, resource_type=Dataset, defaults=creation_defaults)
    resource_manager.update(
        new_dataset.uuid, instance=new_dataset,
        keywords=dataset_keywords, notify=True)
    resource_manager.set_permissions(new_dataset.uuid, instance=new_dataset)
    return new_dataset
def create_gn_dataset(workspace, datastore, name, title, owner_name):
    """
    Associate a layer in GeoNode for a given layer in GeoServer.
    """
    owner = get_user_model().objects.get(username=owner_name)

    dataset_defaults = dict(
        name=name,
        workspace=workspace.name,
        store=datastore.name,
        subtype='vector',
        alternate=f'{workspace.name}:{name}',
        title=title,
        owner=owner,
        srid='EPSG:4326',
        bbox_polygon=Polygon.from_bbox(BBOX),
        ll_bbox_polygon=Polygon.from_bbox(BBOX),
        data_quality_statement=DATA_QUALITY_MESSAGE)
    layer = resource_manager.create(
        str(uuid.uuid4()), resource_type=Dataset, defaults=dataset_defaults)

    # Freshly created resources may need moderation/publication gating.
    moderation_flags = {}
    if settings.ADMIN_MODERATE_UPLOADS:
        moderation_flags['is_approved'] = moderation_flags['was_approved'] = False
    if settings.RESOURCE_PUBLISHING:
        moderation_flags['is_published'] = moderation_flags['was_published'] = False
    resource_manager.update(layer.uuid, instance=layer, vals=moderation_flags)
    resource_manager.set_thumbnail(None, instance=layer)
    return layer
def form_valid(self, form):
    """
    If the form is valid, save the associated model.

    When a new ``doc_file`` was uploaded, stage it in a temporary
    directory under ``STATIC_ROOT``, store it through the storage
    manager and attach the stored path to the resource.
    """
    doc_form = form.cleaned_data
    file = doc_form.pop('doc_file', None)
    if file:
        tempdir = tempfile.mkdtemp(dir=settings.STATIC_ROOT)
        storage_path = None
        try:
            dirname = os.path.basename(tempdir)
            filepath = storage_manager.save(f"{dirname}/{file.name}", file)
            storage_path = storage_manager.path(filepath)
            self.object = resource_manager.update(
                self.object.uuid,
                instance=self.object,
                vals=dict(
                    owner=self.request.user,
                    files=[storage_path])
            )
        finally:
            # Remove the staging directory unless the storage manager kept
            # the file inside it. Previously an exception raised by the
            # storage/resource managers leaked the directory on disk.
            if storage_path is None or tempdir != os.path.dirname(storage_path):
                shutil.rmtree(tempdir, ignore_errors=True)
    register_event(self.request, EventType.EVENT_CHANGE, self.object)
    url = hookset.document_detail_url(self.object)
    return HttpResponseRedirect(url)
def _create_and_update(self, validated_data, instance=None):
    """Create ``instance`` when missing, then apply ``validated_data``.

    The ``blob`` entry is detached first and re-attached at the end so
    that it is persisted through the resource manager's ``update`` call,
    which also notifies the users.
    """
    # Detach the JSON blob (defaults to an empty dict when absent).
    blob = validated_data.pop('blob', {})

    # Reuse the given instance or create a fresh one.
    target = instance or resource_manager.create(
        None, resource_type=self.Meta.model, defaults=validated_data)

    try:
        self.Meta.model.objects.filter(pk=target.id).update(**validated_data)
        target.refresh_from_db()
    except Exception as e:
        raise ValidationError(e)

    # Finalize the instance and notify the users.
    validated_data['blob'] = blob
    return resource_manager.update(
        target.uuid, instance=target, vals=validated_data, notify=True)
def update_geonode_resource(
        self,
        harvested_info: HarvestedResourceInfo,
        harvestable_resource: "HarvestableResource",  # noqa
        harvesting_session_id: int,
):
    """Create or update a local GeoNode resource with the input harvested information."""
    defaults = self.get_geonode_resource_defaults(
        harvested_info, harvestable_resource)
    remote_uuid = str(harvested_info.resource_descriptor.uuid)
    geonode_resource = harvestable_resource.geonode_resource
    if geonode_resource is None:
        # First harvest of this resource: create it locally under the
        # remote descriptor's UUID.
        geonode_resource = resource_manager.create(
            remote_uuid,
            self.get_geonode_resource_type(
                harvestable_resource.remote_resource_type),
            defaults
        )
    elif not geonode_resource.uuid == remote_uuid:
        raise RuntimeError(
            f"Resource {geonode_resource!r} already exists locally but its "
            f"UUID ({geonode_resource.uuid}) does not match the one found on "
            f"the remote resource {harvested_info.resource_descriptor.uuid!r}")
    else:
        geonode_resource = resource_manager.update(remote_uuid, vals=defaults)

    # Merge the remote keywords with the ones already on the resource.
    keywords = list(
        harvested_info.resource_descriptor.identification.other_keywords
    ) + geonode_resource.keyword_list()
    harvester = models.Harvester.objects.get(pk=self.harvester_id)
    # Tag with the harvester's name, stripped of "harvester" noise.
    harvester_tag = harvester.name.lower()
    for noise in ('harvester ', 'harvester_', 'harvester'):
        harvester_tag = harvester_tag.replace(noise, '')
    keywords.append(harvester_tag.strip())
    regions = harvested_info.resource_descriptor.identification.place_keywords
    resource_manager.update(
        remote_uuid, regions=regions, keywords=list(set(keywords)))
    harvestable_resource.geonode_resource = geonode_resource
    harvestable_resource.save()
    self.finalize_resource_update(
        geonode_resource,
        harvested_info,
        harvestable_resource,
        harvesting_session_id
    )
def _post_change_routines(self, instance: Map, create_action_perfomed: bool, additional_data: dict):
    """Run the shared after-save steps for a Map create/update."""
    # Step 1: on updates, fire the datasets-changed signal when the set
    # of map layers actually changed.
    if not create_action_perfomed:
        previous_names = additional_data.pop(
            "dataset_names_before_changes", [])
        current_names = [lyr.alternate for lyr in instance.datasets]
        if previous_names != current_names:
            map_changed_signal.send_robust(sender=instance, what_changed="datasets")
    # Step 2: register the audit event.
    if create_action_perfomed:
        event_type = EventType.EVENT_CREATE
    else:
        event_type = EventType.EVENT_CHANGE
    register_event(self.request, event_type, instance)
    # Step 3: let the resource manager finalize and notify.
    resource_manager.update(instance.uuid, instance=instance, notify=True)
def _update_existing_geonode_resource(self, geonode_resource: ResourceBase, defaults: typing.Dict):
    """Refresh a previously harvested resource.

    When the harvested payload carries files, run the full ``replace``
    workflow; otherwise a plain metadata ``update`` is enough.
    """
    vals = defaults.copy()
    if len(vals.get("files", [])) > 0:
        return resource_manager.replace(geonode_resource, vals=vals)
    return resource_manager.update(geonode_resource.uuid, vals=vals)
def _create_layer(self, geonode_service, **resource_fields):
    """Create a local ``Layer`` for a remote-service layer.

    Note: ``geonode.layers.models`` connects a ``pre_save_layer``
    handler to the ``pre_save`` signal of the Layer model; it checks
    common fields (such as abstract and title) and adds sensible
    default values.
    """
    layer_keywords = resource_fields.pop("keywords", [])
    layer_defaults = dict(
        owner=geonode_service.owner,
        remote_service=geonode_service,
        **resource_fields)
    new_layer = resource_manager.create(
        None, resource_type=Layer, defaults=layer_defaults)
    resource_manager.update(
        new_layer.uuid, instance=new_layer,
        keywords=layer_keywords, notify=True)
    resource_manager.set_permissions(new_layer.uuid, instance=new_layer)
    return new_layer
def update_geonode_resource(
        self,
        harvested_info: HarvestedResourceInfo,
        harvestable_resource: "HarvestableResource",  # noqa
):
    """Create or update a local GeoNode resource with the input harvested information.

    If the underlying harvestable resource already exists as a local GeoNode resource, then
    it is updated. Otherwise it is created locally.

    If something goes wrong with the update of the geonode resource this method should raise
    a `RuntimeError`. This will be caught by the harvesting task and handled appropriately.
    """
    defaults = self.get_geonode_resource_defaults(harvested_info, harvestable_resource)
    geonode_resource = harvestable_resource.geonode_resource
    if geonode_resource is None:
        # First harvest of this resource: create it locally.
        geonode_resource_type = self.get_geonode_resource_type(
            harvestable_resource.remote_resource_type)
        geonode_resource = self._create_new_geonode_resource(
            geonode_resource_type, defaults)
    elif not geonode_resource.uuid == str(
            harvested_info.resource_descriptor.uuid):
        # The local record points at a different remote resource; refuse
        # to overwrite it.
        raise RuntimeError(
            f"Resource {geonode_resource!r} already exists locally but its "
            f"UUID ({geonode_resource.uuid}) does not match the one found on "
            f"the remote resource {harvested_info.resource_descriptor.uuid!r}"
        )
    else:
        geonode_resource = self._update_existing_geonode_resource(
            geonode_resource, defaults)
    # Consolidate keywords/regions (helper defined elsewhere in this module).
    keywords = _consolidate_resource_keywords(
        harvested_info.resource_descriptor, geonode_resource, self.harvester_id)
    regions = harvested_info.resource_descriptor.identification.place_keywords
    # Make sure you set the "harvestable_resource.geonode_resource" before calling the "resource_manager"
    harvestable_resource.geonode_resource = geonode_resource
    harvestable_resource.save()
    geonode_resource = resource_manager.update(str(
        harvested_info.resource_descriptor.uuid),
        regions=regions, keywords=keywords)
    self.finalize_resource_update(
        geonode_resource,
        harvested_info,
        harvestable_resource,
    )
def create(self, validated_data):
    """Create a new GeoApp instance from the API payload.

    Raises ``ValidationError`` when mandatory fields are missing, when
    the name is already taken, or when a referenced user does not exist.
    """
    # Sanity checks
    if 'name' not in validated_data or \
            'owner' not in validated_data:
        raise ValidationError("No valid data: 'name' and 'owner' are mandatory fields!")
    # exists() avoids counting every matching row just for a boolean test.
    if self.Meta.model.objects.filter(name=validated_data['name']).exists():
        raise ValidationError("A GeoApp with the same 'name' already exists!")

    # Extract users' profiles: these fields arrive as usernames and must
    # be resolved to actual user objects.
    _user_profiles = {}
    for _key, _value in validated_data.items():
        if _key in ('owner', 'poc', 'metadata_owner'):
            _user_profiles[_key] = _value
    for _key, _value in _user_profiles.items():
        validated_data.pop(_key)
        _u = get_user_model().objects.filter(username=_value).first()
        if _u:
            validated_data[_key] = _u
        else:
            raise ValidationError(f"The specified '{_key}' does not exist!")

    # Extract JSON blob; it is applied through the resource manager below.
    _data = {}
    if 'blob' in validated_data:
        _data = validated_data.pop('blob')

    # Create a new instance
    _instance = resource_manager.create(
        None, resource_type=self.Meta.model, defaults=validated_data)

    return resource_manager.update(
        _instance.uuid,
        instance=_instance,
        vals=dict(
            blob=_data
        ),
        notify=True)
def update_geonode_resource(
        self,
        harvested_info: HarvestedResourceInfo,
        harvestable_resource: "HarvestableResource",  # noqa
        harvesting_session_id: int,
):
    """Create or update a local GeoNode resource with the input harvested information."""
    harvester = models.Harvester.objects.get(pk=self.harvester_id)
    defaults = self.get_geonode_resource_defaults(
        harvested_info.resource_descriptor, harvestable_resource)
    geonode_resource = harvestable_resource.geonode_resource
    if geonode_resource is None:
        # First harvest of this resource: create it locally, reusing the
        # remote descriptor's UUID as the local identifier.
        geonode_resource = resource_manager.create(
            str(harvested_info.resource_descriptor.uuid),
            self.get_geonode_resource_type(
                harvestable_resource.remote_resource_type),
            defaults
        )
    else:
        # Guard against UUID drift between the local record and the remote.
        if not geonode_resource.uuid == str(harvested_info.resource_descriptor.uuid):
            raise RuntimeError(
                f"Resource {geonode_resource!r} already exists locally but its "
                f"UUID ({geonode_resource.uuid}) does not match the one found on "
                f"the remote resource {harvested_info.resource_descriptor.uuid!r}")
        geonode_resource = resource_manager.update(
            str(harvested_info.resource_descriptor.uuid), vals=defaults)
    # Re-apply the harvester's configured default permissions on every run.
    resource_manager.set_permissions(
        str(harvested_info.resource_descriptor.uuid),
        instance=geonode_resource,
        permissions=harvester.default_access_permissions)
    harvestable_resource.geonode_resource = geonode_resource
    harvestable_resource.save()
    self.finalize_resource_update(
        geonode_resource,
        harvested_info,
        harvestable_resource,
        harvesting_session_id
    )
def update_from_viewer(self, conf, context=None):
    """
    Update this Map's details by parsing a JSON object as produced by
    a GXP Viewer. This method automatically persists to the database!
    """
    template_name = hookset.update_from_viewer(conf, context=context)
    if not isinstance(context, dict):
        try:
            context = json.loads(ensure_string(context))
        except Exception:
            pass

    # NOTE(review): the incoming ``conf`` argument is discarded here in
    # favour of the viewer config embedded in ``context``; if ``context``
    # could not be parsed into a dict above this raises AttributeError -
    # confirm callers always pass a parseable context.
    conf = context.get("config", {})

    if not isinstance(conf, dict) or isinstance(conf, bytes):
        try:
            conf = json.loads(ensure_string(conf))
        except Exception:
            conf = {}

    # Title/abstract may live at the top level or under "about".
    about = conf.get("about", {})
    self.title = conf.get("title", about.get("title", ""))
    self.abstract = conf.get("abstract", about.get("abstract", ""))

    _map = conf.get("map", {})
    center = _map.get("center", settings.DEFAULT_MAP_CENTER)
    self.zoom = _map.get("zoom", settings.DEFAULT_MAP_ZOOM)

    # Center is either an {"x": ..., "y": ...} mapping or an (x, y) pair.
    if isinstance(center, dict):
        self.center_x = center.get('x')
        self.center_y = center.get('y')
    else:
        self.center_x, self.center_y = center

    projection = _map.get("projection", settings.DEFAULT_MAP_CRS)
    bbox = _map.get("bbox", None)

    if bbox:
        self.set_bounds_from_bbox(bbox, projection)
    else:
        self.set_bounds_from_center_and_zoom(
            self.center_x,
            self.center_y,
            self.zoom)

    if self.projection is None or self.projection == '':
        self.projection = projection

    if self.uuid is None or self.uuid == '':
        self.uuid = str(uuid.uuid1())

    def source_for(layer):
        # Resolve the layer's source from the viewer config; fall back to
        # a bare {"url": ...} (or an empty dict) when it is not declared.
        try:
            return conf["sources"][layer["source"]]
        except Exception:
            if 'url' in layer:
                return {'url': layer['url']}
            else:
                return {}

    layers = [lyr for lyr in _map.get("layers", [])]
    dataset_names = {lyr.alternate for lyr in self.local_datasets}

    # Rebuild the map's layer set from scratch out of the viewer config.
    self.dataset_set.all().delete()
    self.keywords.add(*_map.get('keywords', []))

    for ordering, layer in enumerate(layers):
        self.dataset_set.add(
            dataset_from_viewer_config(
                self.id, MapLayer, layer, source_for(layer), ordering))

    from geonode.resource.manager import resource_manager
    resource_manager.update(self.uuid, instance=self, notify=True)
    resource_manager.set_thumbnail(
        self.uuid, instance=self, overwrite=False)

    if dataset_names != {lyr.alternate for lyr in self.local_datasets}:
        map_changed_signal.send_robust(sender=self, what_changed='datasets')

    return template_name
def final_step(upload_session, user, charset="UTF-8", dataset_id=None):
    """Finalize a GeoServer import session and create the Django record.

    Reloads and commits the importer session, then creates (or reuses)
    the corresponding ``Dataset``, applying uploaded XML metadata and
    SLD style when present. Fix in this revision: the ``zipfile.ZipFile``
    handles and the metadata file descriptor are now closed via context
    managers instead of being leaked.
    """
    import_session = upload_session.import_session
    import_id = import_session.id
    _log(f'Reloading session {import_id} to check validity')
    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)

    if Upload.objects.filter(import_id=import_id).count():
        Upload.objects.filter(import_id=import_id).update(complete=False)
        upload = Upload.objects.filter(import_id=import_id).get()
        if upload.state == enumerations.STATE_RUNNING:
            return

    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)

    # Create the style and assign it to the created resource
    # FIXME: Put this in gsconfig.py
    task = import_session.tasks[0]
    task.set_charset(charset)

    # @todo see above in save_step, regarding computed unique name
    name = task.layer.name

    if dataset_id:
        name = Dataset.objects.get(resourcebase_ptr_id=dataset_id).name

    _log(f'Getting from catalog [{name}]')
    try:
        # the importer chooses an available featuretype name late in the game need
        # to verify the resource.name otherwise things will fail. This happens
        # when the same data is uploaded a second time and the default name is
        # chosen
        gs_catalog.get_layer(name)
    except Exception:
        Upload.objects.invalidate_from_session(upload_session)
        raise LayerNotReady(
            _(f"Expected to find layer named '{name}' in geoserver"))

    if import_session.state == 'READY' or (import_session.state == 'PENDING' and task.state == 'READY'):
        import_session.commit()
    elif import_session.state == 'INCOMPLETE' and task.state != 'ERROR':
        Upload.objects.invalidate_from_session(upload_session)
        raise Exception(f'unknown item state: {task.state}')
    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)
    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)

    _log(f'Creating Django record for [{name}]')
    target = task.target
    alternate = task.get_target_layer_name()
    dataset_uuid = None
    title = upload_session.dataset_title
    abstract = upload_session.dataset_abstract

    metadata_uploaded = False
    xml_file = upload_session.base_file[0].xml_files
    if xml_file:
        try:
            # get model properties from XML
            # If it's contained within a zip, need to extract it
            if upload_session.base_file.archive:
                archive = upload_session.base_file.archive
                # context manager so the zip handle is not leaked
                with zipfile.ZipFile(archive, 'r', allowZip64=True) as zf:
                    zf.extract(xml_file[0], os.path.dirname(archive))
                # Assign the absolute path to this file
                xml_file = f"{os.path.dirname(archive)}/{xml_file[0]}"

            # Sanity checks
            if isinstance(xml_file, list):
                if len(xml_file) > 0:
                    xml_file = xml_file[0]
                else:
                    xml_file = None
            elif not isinstance(xml_file, str):
                xml_file = None

            if xml_file and os.path.exists(xml_file) and os.access(
                    xml_file, os.R_OK):
                # read via a context manager so the descriptor is closed
                with open(xml_file) as _md_file:
                    _md_xml = _md_file.read()
                dataset_uuid, vals, regions, keywords, custom = parse_metadata(_md_xml)
                metadata_uploaded = True
        except Exception as e:
            Upload.objects.invalidate_from_session(upload_session)
            logger.error(e)
            raise GeoNodeException(
                _("Exception occurred while parsing the provided Metadata file."), e)

    # look for SLD
    sld_file = upload_session.base_file[0].sld_files
    sld_uploaded = False
    if sld_file:
        # If it's contained within a zip, need to extract it
        if upload_session.base_file.archive:
            archive = upload_session.base_file.archive
            logger.error(f'using uploaded sld file from {archive}')
            # context manager so the zip handle is not leaked
            with zipfile.ZipFile(archive, 'r', allowZip64=True) as zf:
                zf.extract(sld_file[0], os.path.dirname(archive),
                           path=upload_session.tempdir)
            # Assign the absolute path to this file
            sld_file[0] = f"{os.path.dirname(archive)}/{sld_file[0]}"
        else:
            _sld_file = f"{os.path.dirname(upload_session.tempdir)}/{os.path.basename(sld_file[0])}"
            logger.error(f"copying [{sld_file[0]}] to [{_sld_file}]")
            try:
                shutil.copyfile(sld_file[0], _sld_file)
                sld_file = _sld_file
            except (IsADirectoryError, shutil.SameFileError) as e:
                logger.exception(e)
                sld_file = sld_file[0]
            except Exception as e:
                raise UploadException.from_exc(_('Error uploading Dataset'), e)
        sld_uploaded = True
    else:
        # get_files will not find the sld if it doesn't match the base name
        # so we've worked around that in the view - if provided, it will be here
        if upload_session.import_sld_file:
            logger.error('using provided sld file from importer')
            base_file = upload_session.base_file
            sld_file = base_file[0].sld_files[0]
        sld_uploaded = False
    logger.error(f'[sld_uploaded: {sld_uploaded}] sld_file: {sld_file}')

    # Make sure the layer does not exists already
    if dataset_uuid and Dataset.objects.filter(uuid=dataset_uuid).count():
        Upload.objects.invalidate_from_session(upload_session)
        logger.error("The UUID identifier from the XML Metadata is already in use in this system.")
        raise GeoNodeException(
            _("The UUID identifier from the XML Metadata is already in use in this system."))

    # Is it a regular file or an ImageMosaic?
    # if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
    saved_dataset = None
    has_time = has_elevation = False
    start = end = None
    if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
        has_elevation = True
        start = datetime.datetime.strptime(
            upload_session.mosaic_time_value,
            TIME_REGEX_FORMAT[upload_session.mosaic_time_regex])
        start = pytz.utc.localize(start, is_dst=False)
        end = start
    if upload_session.time and upload_session.time_info and upload_session.time_transforms:
        has_time = True

    if upload_session.append_to_mosaic_opts:
        try:
            with transaction.atomic():
                saved_dataset_filter = Dataset.objects.filter(
                    name=upload_session.append_to_mosaic_name)
                if not saved_dataset_filter.exists():
                    # NOTE(review): resource_manager.create is called
                    # elsewhere as (uuid, resource_type=..., defaults=...);
                    # confirm this keyword-only call is a valid signature.
                    saved_dataset = resource_manager.create(
                        name=upload_session.append_to_mosaic_name)
                    created = True
                else:
                    saved_dataset = saved_dataset_filter.get()
                    created = False
                saved_dataset.set_dirty_state()
                if saved_dataset.temporal_extent_start and end:
                    if pytz.utc.localize(saved_dataset.temporal_extent_start, is_dst=False) < end:
                        saved_dataset.temporal_extent_end = end
                        Dataset.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                            temporal_extent_end=end)
                    else:
                        saved_dataset.temporal_extent_start = end
                        Dataset.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                            temporal_extent_start=end)
        except Exception as e:
            _log(f"There was an error updating the mosaic temporal extent: {str(e)}")
    else:
        try:
            with transaction.atomic():
                saved_dataset_filter = Dataset.objects.filter(
                    store=target.name,
                    alternate=alternate,
                    workspace=target.workspace_name,
                    name=task.layer.name)
                if not saved_dataset_filter.exists():
                    saved_dataset = resource_manager.create(
                        dataset_uuid,
                        resource_type=Dataset,
                        defaults=dict(
                            store=target.name,
                            subtype=get_dataset_storetype(target.store_type),
                            alternate=alternate,
                            workspace=target.workspace_name,
                            title=title,
                            name=task.layer.name,
                            abstract=abstract or _('No abstract provided'),
                            owner=user,
                            temporal_extent_start=start,
                            temporal_extent_end=end,
                            is_mosaic=has_elevation,
                            has_time=has_time,
                            has_elevation=has_elevation,
                            time_regex=upload_session.mosaic_time_regex))
                    created = True
                else:
                    saved_dataset = saved_dataset_filter.get()
                    created = False
                saved_dataset.set_dirty_state()
        except Exception as e:
            Upload.objects.invalidate_from_session(upload_session)
            raise UploadException.from_exc(_('Error configuring Dataset'), e)

    assert saved_dataset

    if not created:
        return saved_dataset

    try:
        saved_dataset.set_dirty_state()
        with transaction.atomic():
            Upload.objects.update_from_session(upload_session, resource=saved_dataset)
            # Set default permissions on the newly created layer and send notifications
            permissions = upload_session.permissions

            # Finalize Upload
            resource_manager.set_permissions(
                None, instance=saved_dataset, permissions=permissions, created=created)
            resource_manager.update(
                None, instance=saved_dataset, xml_file=xml_file,
                metadata_uploaded=metadata_uploaded)
            resource_manager.exec(
                'set_style', None, instance=saved_dataset,
                sld_uploaded=sld_uploaded, sld_file=sld_file,
                tempdir=upload_session.tempdir)
            resource_manager.exec(
                'set_time_info', None, instance=saved_dataset,
                time_info=upload_session.time_info)
            resource_manager.set_thumbnail(None, instance=saved_dataset)
        saved_dataset.set_processing_state(enumerations.STATE_PROCESSED)
    except Exception as e:
        saved_dataset.set_processing_state(enumerations.STATE_INVALID)
        raise GeoNodeException(e)
    finally:
        saved_dataset.clear_dirty_state()

    try:
        logger.debug(
            f"... Cleaning up the temporary folders {upload_session.tempdir}")
        if upload_session.tempdir and os.path.exists(upload_session.tempdir):
            shutil.rmtree(upload_session.tempdir)
    except Exception as e:
        logger.warning(e)
    finally:
        Upload.objects.filter(import_id=import_id).update(complete=True)

    return saved_dataset
def dataset_metadata(
        request,
        layername,
        template='datasets/dataset_metadata.html',
        ajax=True):
    """Display and process the metadata-edit page of a Dataset.

    GET renders the metadata forms; POST validates and applies them.
    Fix in this revision: the "Invalid thesaurus keywords" response used
    to pass ``status_code=400`` to ``json.dumps`` (a TypeError at
    runtime) - the status now goes on the ``HttpResponse``.
    """
    try:
        layer = _resolve_dataset(
            request, layername, 'base.change_resourcebase_metadata',
            _PERMISSION_MSG_METADATA)
    except PermissionDenied as e:
        return HttpResponse(Exception(_("Not allowed"), e), status=403)
    except Exception as e:
        raise Http404(Exception(_("Not found"), e))
    if not layer:
        raise Http404(_("Not found"))

    dataset_attribute_set = inlineformset_factory(
        Dataset,
        Attribute,
        extra=0,
        form=LayerAttributeForm,
    )
    current_keywords = [keyword.name for keyword in layer.keywords.all()]
    topic_category = layer.category
    topic_thesaurus = layer.tkeywords.all()

    # Add metadata_author or poc if missing
    layer.add_missing_metadata_author_or_poc()
    poc = layer.poc
    metadata_author = layer.metadata_author

    # assert False, str(dataset_bbox)
    config = layer.attribute_config()

    # Add required parameters for GXP lazy-loading
    dataset_bbox = layer.bbox
    bbox = [float(coord) for coord in list(dataset_bbox[0:4])]

    if hasattr(layer, 'srid'):
        config['crs'] = {'type': 'name', 'properties': layer.srid}
    config["srs"] = getattr(settings, 'DEFAULT_MAP_CRS', 'EPSG:3857')
    config["bbox"] = bbox if config["srs"] != 'EPSG:3857' \
        else llbbox_to_mercator([float(coord) for coord in bbox])
    config["title"] = layer.title
    config["queryable"] = True

    # Update count for popularity ranking,
    # but do not includes admins or resource owners
    if request.user != layer.owner and not request.user.is_superuser:
        Dataset.objects.filter(id=layer.id).update(
            popular_count=F('popular_count') + 1)

    if request.method == "POST":
        if layer.metadata_uploaded_preserve:
            # layer metadata cannot be edited
            out = {
                'success': False,
                'errors': METADATA_UPLOADED_PRESERVE_ERROR
            }
            return HttpResponse(json.dumps(out), content_type='application/json', status=400)

        thumbnail_url = layer.thumbnail_url
        dataset_form = DatasetForm(request.POST, instance=layer, prefix="resource", user=request.user)
        if not dataset_form.is_valid():
            logger.error(f"Dataset Metadata form is not valid: {dataset_form.errors}")
            out = {
                'success': False,
                'errors': [
                    f"{x}: {y[0].messages[0]}" for x, y in dataset_form.errors.as_data().items()
                ]
            }
            return HttpResponse(json.dumps(out), content_type='application/json', status=400)
        if not layer.thumbnail_url:
            layer.thumbnail_url = thumbnail_url
        attribute_form = dataset_attribute_set(
            request.POST,
            instance=layer,
            prefix="dataset_attribute_set",
            queryset=Attribute.objects.order_by('display_order'))
        if not attribute_form.is_valid():
            logger.error(f"Dataset Attributes form is not valid: {attribute_form.errors}")
            out = {
                'success': False,
                "errors": [
                    re.sub(re.compile('<.*?>'), '', str(err)) for err in attribute_form.errors
                ]
            }
            return HttpResponse(json.dumps(out), content_type='application/json', status=400)
        category_form = CategoryForm(
            request.POST,
            prefix="category_choice_field",
            initial=int(request.POST["category_choice_field"])
            if "category_choice_field" in request.POST and
            request.POST["category_choice_field"] else None)
        if not category_form.is_valid():
            logger.error(f"Dataset Category form is not valid: {category_form.errors}")
            out = {
                'success': False,
                'errors': [
                    re.sub(re.compile('<.*?>'), '', str(err)) for err in category_form.errors
                ]
            }
            return HttpResponse(json.dumps(out), content_type='application/json', status=400)
        if hasattr(settings, 'THESAURUS'):
            tkeywords_form = TKeywordForm(request.POST)
        else:
            tkeywords_form = ThesaurusAvailableForm(request.POST, prefix='tkeywords')
            # set initial values for thesaurus form
        if not tkeywords_form.is_valid():
            logger.error(f"Dataset Thesauri Keywords form is not valid: {tkeywords_form.errors}")
            out = {
                'success': False,
                'errors': [
                    re.sub(re.compile('<.*?>'), '', str(err)) for err in tkeywords_form.errors
                ]
            }
            return HttpResponse(json.dumps(out), content_type='application/json', status=400)
    else:
        dataset_form = DatasetForm(instance=layer, prefix="resource", user=request.user)
        dataset_form.disable_keywords_widget_for_non_superuser(request.user)
        attribute_form = dataset_attribute_set(
            instance=layer,
            prefix="dataset_attribute_set",
            queryset=Attribute.objects.order_by('display_order'))
        category_form = CategoryForm(
            prefix="category_choice_field",
            initial=topic_category.id if topic_category else None)

        # Create THESAURUS widgets
        lang = settings.THESAURUS_DEFAULT_LANG if hasattr(
            settings, 'THESAURUS_DEFAULT_LANG') else 'en'
        if hasattr(settings, 'THESAURUS') and settings.THESAURUS:
            warnings.warn(
                'The settings for Thesaurus has been moved to Model, \
this feature will be removed in next releases',
                DeprecationWarning)
            dataset_tkeywords = layer.tkeywords.all()
            tkeywords_list = ''
            if dataset_tkeywords and len(dataset_tkeywords) > 0:
                tkeywords_ids = dataset_tkeywords.values_list('id', flat=True)
                if hasattr(settings, 'THESAURUS') and settings.THESAURUS:
                    el = settings.THESAURUS
                    thesaurus_name = el['name']
                    try:
                        t = Thesaurus.objects.get(identifier=thesaurus_name)
                        for tk in t.thesaurus.filter(pk__in=tkeywords_ids):
                            tkl = tk.keyword.filter(lang=lang)
                            if len(tkl) > 0:
                                tkl_ids = ",".join(
                                    map(str, tkl.values_list('id', flat=True)))
                                tkeywords_list += f",{tkl_ids}" if len(
                                    tkeywords_list) > 0 else tkl_ids
                    except Exception:
                        tb = traceback.format_exc()
                        logger.error(tb)
            tkeywords_form = TKeywordForm(instance=layer)
        else:
            tkeywords_form = ThesaurusAvailableForm(prefix='tkeywords')
            # set initial values for thesaurus form
            for tid in tkeywords_form.fields:
                values = []
                values = [
                    keyword.id for keyword in topic_thesaurus if int(tid) == keyword.thesaurus.id
                ]
                tkeywords_form.fields[tid].initial = values

    if request.method == "POST" and dataset_form.is_valid(
    ) and attribute_form.is_valid() and category_form.is_valid(
    ) and tkeywords_form.is_valid():
        new_poc = dataset_form.cleaned_data['poc']
        new_author = dataset_form.cleaned_data['metadata_author']

        if new_poc is None:
            if poc is None:
                poc_form = ProfileForm(request.POST, prefix="poc", instance=poc)
            else:
                poc_form = ProfileForm(request.POST, prefix="poc")
            if poc_form.is_valid():
                if len(poc_form.cleaned_data['profile']) == 0:
                    # FIXME use form.add_error in django > 1.7
                    errors = poc_form._errors.setdefault('profile', ErrorList())
                    errors.append(
                        _('You must set a point of contact for this resource'))
                    poc = None
            if poc_form.has_changed and poc_form.is_valid():
                new_poc = poc_form.save()

        if new_author is None:
            if metadata_author is None:
                author_form = ProfileForm(request.POST, prefix="author", instance=metadata_author)
            else:
                author_form = ProfileForm(request.POST, prefix="author")
            if author_form.is_valid():
                if len(author_form.cleaned_data['profile']) == 0:
                    # FIXME use form.add_error in django > 1.7
                    errors = author_form._errors.setdefault('profile', ErrorList())
                    errors.append(
                        _('You must set an author for this resource'))
                    metadata_author = None
            if author_form.has_changed and author_form.is_valid():
                new_author = author_form.save()

        new_category = None
        if category_form and 'category_choice_field' in category_form.cleaned_data and \
                category_form.cleaned_data['category_choice_field']:
            new_category = TopicCategory.objects.get(
                id=int(category_form.cleaned_data['category_choice_field']))

        for form in attribute_form.cleaned_data:
            la = Attribute.objects.get(id=int(form['id'].id))
            la.description = form["description"]
            la.attribute_label = form["attribute_label"]
            la.visible = form["visible"]
            la.display_order = form["display_order"]
            la.featureinfo_type = form["featureinfo_type"]
            la.save()

        if new_poc is not None or new_author is not None:
            if new_poc is not None:
                layer.poc = new_poc
            if new_author is not None:
                layer.metadata_author = new_author

        new_keywords = current_keywords if request.keyword_readonly else dataset_form.cleaned_data['keywords']
        new_regions = [x.strip() for x in dataset_form.cleaned_data['regions']]

        layer.keywords.clear()
        if new_keywords:
            layer.keywords.add(*new_keywords)
        layer.regions.clear()
        if new_regions:
            layer.regions.add(*new_regions)
        layer.category = new_category

        from geonode.upload.models import Upload

        up_sessions = Upload.objects.filter(resource_id=layer.resourcebase_ptr_id)
        if up_sessions.count() > 0 and up_sessions[0].user != layer.owner:
            up_sessions.update(user=layer.owner)

        register_event(request, EventType.EVENT_CHANGE_METADATA, layer)
        if not ajax:
            return HttpResponseRedirect(layer.get_absolute_url())

        message = layer.alternate

        try:
            if not tkeywords_form.is_valid():
                # BUGFIX: the 400 status was previously passed to
                # json.dumps (invalid kwarg, raised TypeError); it belongs
                # on the HttpResponse.
                return HttpResponse(
                    json.dumps({'message': "Invalid thesaurus keywords"}),
                    status=400)

            thesaurus_setting = getattr(settings, 'THESAURUS', None)
            if thesaurus_setting:
                tkeywords_data = tkeywords_form.cleaned_data['tkeywords']
                tkeywords_data = tkeywords_data.filter(
                    thesaurus__identifier=thesaurus_setting['name'])
                layer.tkeywords.set(tkeywords_data)
            elif Thesaurus.objects.all().exists():
                fields = tkeywords_form.cleaned_data
                layer.tkeywords.set(tkeywords_form.cleanx(fields))
        except Exception:
            tb = traceback.format_exc()
            logger.error(tb)

        resource_manager.update(
            layer.uuid,
            instance=layer,
            notify=True,
            extra_metadata=json.loads(
                dataset_form.cleaned_data['extra_metadata']))

        return HttpResponse(json.dumps({'message': message}))

    if settings.ADMIN_MODERATE_UPLOADS:
        if not request.user.is_superuser:
            can_change_metadata = request.user.has_perm(
                'change_resourcebase_metadata', layer.get_self_resource())
            try:
                is_manager = request.user.groupmember_set.all().filter(
                    role='manager').exists()
            except Exception:
                is_manager = False

            if not is_manager or not can_change_metadata:
                if settings.RESOURCE_PUBLISHING:
                    dataset_form.fields['is_published'].widget.attrs.update(
                        {'disabled': 'true'})
                dataset_form.fields['is_approved'].widget.attrs.update(
                    {'disabled': 'true'})

    if poc is not None:
        dataset_form.fields['poc'].initial = poc.id
        poc_form = ProfileForm(prefix="poc")
        poc_form.hidden = True
    else:
        poc_form = ProfileForm(prefix="poc")
        poc_form.hidden = False

    if metadata_author is not None:
        dataset_form.fields['metadata_author'].initial = metadata_author.id
        author_form = ProfileForm(prefix="author")
        author_form.hidden = True
    else:
        author_form = ProfileForm(prefix="author")
        author_form.hidden = False

    metadata_author_groups = get_user_visible_groups(request.user)

    register_event(request, 'view_metadata', layer)
    return render(
        request, template, context={
            "resource": layer,
            "dataset": layer,
            "dataset_form": dataset_form,
            "poc_form": poc_form,
            "author_form": author_form,
            "attribute_form": attribute_form,
            "category_form": category_form,
            "tkeywords_form": tkeywords_form,
            "preview": getattr(settings, 'GEONODE_CLIENT_LAYER_PREVIEW_LIBRARY', 'mapstore'),
            "crs": getattr(settings, 'DEFAULT_MAP_CRS', 'EPSG:3857'),
            "metadataxsl": getattr(settings, 'GEONODE_CATALOGUE_METADATA_XSL', True),
            "freetext_readonly": getattr(settings, 'FREETEXT_KEYWORDS_READONLY', False),
            "metadata_author_groups": metadata_author_groups,
            "TOPICCATEGORY_MANDATORY": getattr(settings, 'TOPICCATEGORY_MANDATORY', False),
            "GROUP_MANDATORY_RESOURCES": getattr(settings, 'GROUP_MANDATORY_RESOURCES', False),
            "UI_MANDATORY_FIELDS": list(
                set(getattr(settings, 'UI_DEFAULT_MANDATORY_FIELDS', []))
                | set(getattr(settings, 'UI_REQUIRED_FIELDS', [])))
        })
def document_metadata(request, docid, template='documents/document_metadata.html', ajax=True):
    """Display and process the metadata edit form for a Document.

    GET renders the metadata form pre-populated from the document; POST
    validates and persists the changes through ``resource_manager``.

    :param request: the Django request object.
    :param docid: primary key of the document being edited.
    :param template: template used to render the metadata page.
    :param ajax: when True (default), successful POSTs answer with a JSON
                 payload instead of a redirect to the detail page.
    :returns: an ``HttpResponse`` (rendered page, JSON message, or redirect).
    :raises Http404: when the document cannot be resolved.
    """
    document = None
    try:
        document = _resolve_document(
            request,
            docid,
            'base.change_resourcebase_metadata',
            _PERMISSION_MSG_METADATA)
    except PermissionDenied:
        return HttpResponse(_("Not allowed"), status=403)
    except Exception:
        raise Http404(_("Not found"))
    if not document:
        raise Http404(_("Not found"))

    # Add metadata_author or poc if missing, so the GET branch below can
    # assume both exist when building the hidden profile forms.
    document.add_missing_metadata_author_or_poc()

    poc = document.poc
    metadata_author = document.metadata_author
    topic_category = document.category
    current_keywords = [keyword.name for keyword in document.keywords.all()]

    if request.method == "POST":
        document_form = DocumentForm(
            request.POST,
            instance=document,
            prefix="resource")
        category_form = CategoryForm(
            request.POST,
            prefix="category_choice_field",
            initial=int(request.POST["category_choice_field"])
            if "category_choice_field" in request.POST and
            request.POST["category_choice_field"] else None)
        if hasattr(settings, 'THESAURUS'):
            tkeywords_form = TKeywordForm(request.POST)
        else:
            tkeywords_form = ThesaurusAvailableForm(request.POST, prefix='tkeywords')
    else:
        document_form = DocumentForm(instance=document, prefix="resource")
        document_form.disable_keywords_widget_for_non_superuser(request.user)
        category_form = CategoryForm(
            prefix="category_choice_field",
            initial=topic_category.id if topic_category else None)

        # Keywords from THESAURUS management
        doc_tkeywords = document.tkeywords.all()
        if hasattr(settings, 'THESAURUS') and settings.THESAURUS:
            warnings.warn(
                'The settings for Thesaurus has been moved to Model, \
this feature will be removed in next releases', DeprecationWarning)
            tkeywords_list = ''
            lang = 'en'  # TODO: use user's language
            if doc_tkeywords and len(doc_tkeywords) > 0:
                tkeywords_ids = doc_tkeywords.values_list('id', flat=True)
                if hasattr(settings, 'THESAURUS') and settings.THESAURUS:
                    el = settings.THESAURUS
                    thesaurus_name = el['name']
                    try:
                        t = Thesaurus.objects.get(identifier=thesaurus_name)
                        for tk in t.thesaurus.filter(pk__in=tkeywords_ids):
                            tkl = tk.keyword.filter(lang=lang)
                            if len(tkl) > 0:
                                tkl_ids = ",".join(
                                    map(str, tkl.values_list('id', flat=True)))
                                # build a comma-separated id list
                                tkeywords_list += f",{tkl_ids}" if len(
                                    tkeywords_list) > 0 else tkl_ids
                    except Exception:
                        tb = traceback.format_exc()
                        logger.error(tb)
            tkeywords_form = TKeywordForm(instance=document)
        else:
            tkeywords_form = ThesaurusAvailableForm(prefix='tkeywords')
            # set initial values for thesaurus form
            for tid in tkeywords_form.fields:
                values = []
                values = [
                    keyword.id for keyword in doc_tkeywords
                    if int(tid) == keyword.thesaurus.id]
                tkeywords_form.fields[tid].initial = values

    if request.method == "POST" and document_form.is_valid(
    ) and category_form.is_valid() and tkeywords_form.is_valid():
        new_poc = document_form.cleaned_data['poc']
        new_author = document_form.cleaned_data['metadata_author']
        new_keywords = current_keywords if request.keyword_readonly else document_form.cleaned_data[
            'keywords']
        new_regions = document_form.cleaned_data['regions']

        new_category = None
        if category_form and 'category_choice_field' in category_form.cleaned_data and \
                category_form.cleaned_data['category_choice_field']:
            new_category = TopicCategory.objects.get(
                id=int(category_form.cleaned_data['category_choice_field']))

        if new_poc is None:
            # NOTE(review): both branches build an unbound-equivalent form
            # (``instance=poc`` is only taken when ``poc`` is None); kept as-is.
            if poc is None:
                poc_form = ProfileForm(
                    request.POST,
                    prefix="poc",
                    instance=poc)
            else:
                poc_form = ProfileForm(request.POST, prefix="poc")
            if poc_form.is_valid():
                if len(poc_form.cleaned_data['profile']) == 0:
                    # FIXME use form.add_error in django > 1.7
                    errors = poc_form._errors.setdefault(
                        'profile', ErrorList())
                    errors.append(
                        _('You must set a point of contact for this resource'))
            # BUGFIX: ``has_changed`` was referenced without calling it, so the
            # guard was always truthy and the form was saved unconditionally.
            if poc_form.has_changed() and poc_form.is_valid():
                new_poc = poc_form.save()

        if new_author is None:
            if metadata_author is None:
                author_form = ProfileForm(
                    request.POST,
                    prefix="author",
                    instance=metadata_author)
            else:
                author_form = ProfileForm(request.POST, prefix="author")
            if author_form.is_valid():
                if len(author_form.cleaned_data['profile']) == 0:
                    # FIXME use form.add_error in django > 1.7
                    errors = author_form._errors.setdefault(
                        'profile', ErrorList())
                    errors.append(
                        _('You must set an author for this resource'))
            # BUGFIX: call has_changed() (see poc_form above).
            if author_form.has_changed() and author_form.is_valid():
                new_author = author_form.save()

        document = document_form.instance
        resource_manager.update(
            document.uuid,
            instance=document,
            keywords=new_keywords,
            regions=new_regions,
            vals=dict(
                poc=new_poc or document.poc,
                metadata_author=new_author or document.metadata_author,
                category=new_category),
            notify=True)
        resource_manager.set_thumbnail(
            document.uuid, instance=document, overwrite=False)
        document_form.save_many2many()
        register_event(request, EventType.EVENT_CHANGE_METADATA, document)

        url = hookset.document_detail_url(document)
        if not ajax:
            return HttpResponseRedirect(url)

        message = document.id

        try:
            # Keywords from THESAURUS management
            # Rewritten to work with updated autocomplete
            if not tkeywords_form.is_valid():
                # BUGFIX: ``status_code=400`` was previously passed to
                # json.dumps(), which raised TypeError (swallowed below) and
                # never produced a 400; the status belongs on HttpResponse.
                return HttpResponse(
                    json.dumps({'message': "Invalid thesaurus keywords"}),
                    status=400)

            thesaurus_setting = getattr(settings, 'THESAURUS', None)
            if thesaurus_setting:
                tkeywords_data = tkeywords_form.cleaned_data['tkeywords']
                tkeywords_data = tkeywords_data.filter(
                    thesaurus__identifier=thesaurus_setting['name'])
                document.tkeywords.set(tkeywords_data)
            elif Thesaurus.objects.all().exists():
                fields = tkeywords_form.cleaned_data
                document.tkeywords.set(tkeywords_form.cleanx(fields))
        except Exception:
            # best-effort: thesaurus keyword assignment failures are logged
            # but do not block the metadata update response
            tb = traceback.format_exc()
            logger.error(tb)

        return HttpResponse(json.dumps({'message': message}))

    # - POST Request Ends here -

    # Request.GET
    if poc is not None:
        document_form.fields['poc'].initial = poc.id
        poc_form = ProfileForm(prefix="poc")
        poc_form.hidden = True

    if metadata_author is not None:
        document_form.fields['metadata_author'].initial = metadata_author.id
        author_form = ProfileForm(prefix="author")
        author_form.hidden = True

    metadata_author_groups = get_user_visible_groups(request.user)

    if settings.ADMIN_MODERATE_UPLOADS:
        if not request.user.is_superuser:
            can_change_metadata = request.user.has_perm(
                'change_resourcebase_metadata',
                document.get_self_resource())
            try:
                is_manager = request.user.groupmember_set.all().filter(
                    role='manager').exists()
            except Exception:
                is_manager = False
            if not is_manager or not can_change_metadata:
                if settings.RESOURCE_PUBLISHING:
                    document_form.fields['is_published'].widget.attrs.update(
                        {'disabled': 'true'})
                document_form.fields['is_approved'].widget.attrs.update(
                    {'disabled': 'true'})

    register_event(request, EventType.EVENT_VIEW_METADATA, document)
    return render(
        request,
        template,
        context={
            "resource": document,
            "document": document,
            "document_form": document_form,
            "poc_form": poc_form,
            "author_form": author_form,
            "category_form": category_form,
            "tkeywords_form": tkeywords_form,
            "metadata_author_groups": metadata_author_groups,
            "TOPICCATEGORY_MANDATORY": getattr(settings, 'TOPICCATEGORY_MANDATORY', False),
            "GROUP_MANDATORY_RESOURCES": getattr(settings, 'GROUP_MANDATORY_RESOURCES', False),
            "UI_MANDATORY_FIELDS": list(
                set(getattr(settings, 'UI_DEFAULT_MANDATORY_FIELDS', [])) |
                set(getattr(settings, 'UI_REQUIRED_FIELDS', [])))
        })
def form_valid(self, form):
    """Create a new Document from a validated upload form.

    Saves an uploaded file (if any) through the storage manager, creates the
    Document via ``resource_manager``, applies permissions, optionally enriches
    metadata from EXIF, then updates the resource and generates a thumbnail.

    Returns a JSON payload when ``no__redirect`` is passed in the query string,
    otherwise redirects to the document detail page.
    """
    doc_form = form.cleaned_data
    file = doc_form.pop('doc_file', None)
    if file:
        # Stage the upload in a temp dir so the storage manager can pick it
        # up under a unique "<tempdir-name>/<filename>" key.
        # NOTE(review): the temp dir is created under STATIC_ROOT — presumably
        # so it lives on the same filesystem as managed storage; confirm.
        tempdir = tempfile.mkdtemp(dir=settings.STATIC_ROOT)
        dirname = os.path.basename(tempdir)
        filepath = storage_manager.save(f"{dirname}/{file.name}", file)
        storage_path = storage_manager.path(filepath)
        self.object = resource_manager.create(
            None,
            resource_type=Document,
            defaults=dict(
                owner=self.request.user,
                doc_url=doc_form.pop('doc_url', None),
                # fall back to the uploaded file's name when no title given
                title=doc_form.pop('title', file.name),
                files=[storage_path]))
        # Clean up the staging dir unless storage kept the file in place.
        if tempdir != os.path.dirname(storage_path):
            shutil.rmtree(tempdir, ignore_errors=True)
    else:
        # No file: create a URL-only (remote) document.
        self.object = resource_manager.create(
            None,
            resource_type=Document,
            defaults=dict(
                owner=self.request.user,
                doc_url=doc_form.pop('doc_url', None),
                title=doc_form.pop('title', None)))

    # Moderation/publishing workflow flags; persisted by the
    # resource_manager.update call further below.
    if settings.ADMIN_MODERATE_UPLOADS:
        self.object.is_approved = False
    if settings.RESOURCE_PUBLISHING:
        self.object.is_published = False

    resource_manager.set_permissions(
        None, instance=self.object, permissions=form.cleaned_data["permissions"], created=True)

    abstract = None
    date = None
    regions = []
    keywords = []
    bbox = None

    url = hookset.document_detail_url(self.object)
    out = {'success': False}

    if getattr(settings, 'EXIF_ENABLED', False):
        # Best-effort EXIF enrichment: failures are logged and ignored.
        try:
            from geonode.documents.exif.utils import exif_extract_metadata_doc
            exif_metadata = exif_extract_metadata_doc(self.object)
            if exif_metadata:
                date = exif_metadata.get('date', None)
                keywords.extend(exif_metadata.get('keywords', []))
                bbox = exif_metadata.get('bbox', None)
                abstract = exif_metadata.get('abstract', None)
        except Exception:
            logger.debug("Exif extraction failed.")

    resource_manager.update(
        self.object.uuid,
        instance=self.object,
        keywords=keywords,
        regions=regions,
        vals=dict(
            abstract=abstract,
            date=date,
            date_type="Creation",
            bbox_polygon=BBOXHelper.from_xy(bbox).as_polygon() if bbox else None),
        notify=True)

    resource_manager.set_thumbnail(
        self.object.uuid, instance=self.object, overwrite=False)

    register_event(self.request, EventType.EVENT_UPLOAD, self.object)

    if self.request.GET.get('no__redirect', False):
        out['success'] = True
        out['url'] = url
        if out['success']:
            status_code = 200
        else:
            status_code = 400
        return HttpResponse(
            json.dumps(out),
            content_type='application/json',
            status=status_code)
    else:
        return HttpResponseRedirect(url)
def geoapp_edit(request, geoappid, template='apps/app_edit.html'):
    """Open the app composer for the GeoApp identified by ``geoappid``.

    Resolves the app (checking view permission), optionally persists changes
    on POST/PATCH/PUT, and renders the composer template with the app's
    configuration blob and the requesting user's access token.
    """
    try:
        geoapp_obj = _resolve_geoapp(
            request,
            geoappid,
            'base.view_resourcebase',
            _PERMISSION_MSG_VIEW)
    except PermissionDenied:
        return HttpResponse(_("Not allowed"), status=403)
    except Exception:
        raise Http404(_("Not found"))
    if not geoapp_obj:
        raise Http404(_("Not found"))

    # Call this first in order to be sure "perms_list" is correct
    permissions_json = _perms_info_json(geoapp_obj)

    own_perms = geoapp_obj.get_self_resource().get_user_perms(request.user)
    resource_perms = geoapp_obj.get_user_perms(request.user)
    perms_list = list(own_perms.union(resource_perms))

    group = None
    if geoapp_obj.group:
        try:
            group = GroupProfile.objects.get(slug=geoapp_obj.group.name)
        except GroupProfile.DoesNotExist:
            group = None

    rendered_resource = geoapp_obj
    if request.method in ('POST', 'PATCH', 'PUT'):
        # Persist the edit, then re-apply the permissions captured above
        # and refresh the thumbnail if one is missing.
        rendered_resource = resource_manager.update(
            geoapp_obj.uuid, instance=geoapp_obj, notify=True)
        resource_manager.set_permissions(
            geoapp_obj.uuid,
            instance=geoapp_obj,
            permissions=ast.literal_eval(permissions_json))
        resource_manager.set_thumbnail(
            geoapp_obj.uuid, instance=geoapp_obj, overwrite=False)

    # Hand the composer a non-expired OAuth token, or None.
    access_token = None
    if request and request.user:
        token_obj = get_or_create_token(request.user)
        if token_obj and not token_obj.is_expired():
            access_token = token_obj.token

    context = {
        'appId': geoappid,
        'appType': geoapp_obj.resource_type,
        'config': json.dumps(rendered_resource.blob),
        'user': request.user,
        'access_token': access_token,
        'resource': geoapp_obj,
        'group': group,
        'perms_list': perms_list,
        "permissions_json": permissions_json,
        'preview': getattr(
            settings,
            'GEONODE_CLIENT_LAYER_PREVIEW_LIBRARY',
            'mapstore')
    }
    return render(request, template, context=context)