def test_moderated_upload(self):
    """Verify that ADMIN_MODERATE_UPLOADS controls the approval flag on new maps."""
    # With moderation off, a freshly created map is both approved and published.
    with self.settings(ADMIN_MODERATE_UPLOADS=False):
        new_map = resource_manager.create(None, resource_type=Map, defaults=dict(owner=self.u))
        resource_manager.set_permissions(None, instance=new_map, permissions=None, created=True)
        self.assertTrue(new_map.is_approved)
        self.assertTrue(new_map.is_published)
    # With moderation on, approval is withheld while publication stays on.
    with self.settings(ADMIN_MODERATE_UPLOADS=True):
        new_map = resource_manager.create(None, resource_type=Map, defaults=dict(owner=self.u))
        resource_manager.set_permissions(None, instance=new_map, permissions=None, created=True)
        self.assertFalse(new_map.is_approved)
        self.assertTrue(new_map.is_published)
def _create_dataset(self, geonode_service, **resource_fields):
    """Create a remote-service Dataset, push keywords/notifications, set perms.

    Note: ``geonode.layers.models`` connects a ``pre_save_dataset`` handler to
    the Dataset ``pre_save`` signal which validates common fields (title,
    abstract, ...) and fills sensible defaults, so none of that happens here.
    """
    keywords = resource_fields.pop("keywords", [])
    defaults = dict(
        owner=geonode_service.owner,
        remote_service=geonode_service,
        remote_typename=geonode_service.name,
        sourcetype=base_enumerations.SOURCE_TYPE_REMOTE,
        ptype=getattr(geonode_service, "ptype", "gxp_wmscsource"),
        **resource_fields,
    )
    # Indexed services additionally record the OWS endpoint on the dataset.
    if geonode_service.method == INDEXED:
        defaults["ows_url"] = geonode_service.service_url
    dataset = resource_manager.create(None, resource_type=Dataset, defaults=defaults)
    resource_manager.update(dataset.uuid, instance=dataset, keywords=keywords, notify=True)
    resource_manager.set_permissions(dataset.uuid, instance=dataset)
    return dataset
def create_gn_dataset(workspace, datastore, name, title, owner_name):
    """Associate a layer in GeoNode for a given layer in GeoServer."""
    owner = get_user_model().objects.get(username=owner_name)
    dataset_defaults = dict(
        name=name,
        workspace=workspace.name,
        store=datastore.name,
        subtype='vector',
        alternate=f'{workspace.name}:{name}',
        title=title,
        owner=owner,
        srid='EPSG:4326',
        bbox_polygon=Polygon.from_bbox(BBOX),
        ll_bbox_polygon=Polygon.from_bbox(BBOX),
        data_quality_statement=DATA_QUALITY_MESSAGE,
    )
    layer = resource_manager.create(
        str(uuid.uuid4()), resource_type=Dataset, defaults=dataset_defaults)
    # Honour the site-wide moderation / publishing settings on the new layer.
    updates = {}
    if settings.ADMIN_MODERATE_UPLOADS:
        updates['is_approved'] = updates['was_approved'] = False
    if settings.RESOURCE_PUBLISHING:
        updates['is_published'] = updates['was_published'] = False
    resource_manager.update(layer.uuid, instance=layer, vals=updates)
    resource_manager.set_thumbnail(None, instance=layer)
    return layer
def _create_and_update(self, validated_data, instance=None):
    """Create (or reuse) an instance, apply the validated fields, then finalize.

    Raises ValidationError if the queryset update fails for any reason.
    """
    # Detach the JSON blob so the queryset update below only touches model fields.
    blob = validated_data.pop('blob', {})
    # Reuse the given instance when updating; otherwise create a fresh one.
    if instance is None:
        target = resource_manager.create(
            None, resource_type=self.Meta.model, defaults=validated_data)
    else:
        target = instance
    try:
        self.Meta.model.objects.filter(pk=target.id).update(**validated_data)
        target.refresh_from_db()
    except Exception as e:
        raise ValidationError(e)
    # Re-attach the blob and let the resource manager finalize + notify users.
    validated_data['blob'] = blob
    return resource_manager.update(
        target.uuid, instance=target, vals=validated_data, notify=True)
def test_ajax_document_permissions(self, create_thumb):
    """Verify that the ajax_document_permissions view is behaving as expected.

    Walks the permission endpoint through: invalid id, anonymous GET/POST,
    an authenticated user without perms, and finally a user with perms.
    ``create_thumb`` is a mocked thumbnail generator injected by the decorator.
    """
    create_thumb.return_value = True
    # Setup some document names to work with
    f = [f"{settings.MEDIA_ROOT}/img.gif"]
    superuser = get_user_model().objects.get(pk=2)
    document = resource_manager.create(
        None,
        resource_type=Document,
        defaults=dict(files=f, owner=superuser, title='theimg', is_approved=True))
    document_id = document.id
    # An id that is expected not to exist in the test database.
    invalid_document_id = 20
    # Test that an invalid document is handled for properly
    # NOTE(review): a missing document yields 401 (not 404) here — confirm
    # that is the view's intended behavior.
    response = self.client.post(
        reverse('resource_permissions', args=(invalid_document_id, )),
        data=json.dumps(self.perm_spec),
        content_type="application/json")
    self.assertEqual(response.status_code, 401)
    # Test that GET returns permissions
    response = self.client.get(
        reverse('resource_permissions', args=(document_id, )))
    assert ('permissions' in ensure_string(response.content))
    # Test that a user is required to have
    # documents.change_dataset_permissions
    # First test un-authenticated
    response = self.client.post(
        reverse('resource_permissions', args=(document_id, )),
        data=json.dumps(self.perm_spec),
        content_type="application/json")
    self.assertEqual(response.status_code, 401)
    # Next Test with a user that does NOT have the proper perms
    logged_in = self.client.login(username='******', password='******')
    self.assertEqual(logged_in, True)
    response = self.client.post(
        reverse('resource_permissions', args=(document_id, )),
        data=json.dumps(self.perm_spec),
        content_type="application/json")
    self.assertEqual(response.status_code, 401)
    # Login as a user with the proper permission and test the endpoint
    logged_in = self.client.login(username='******', password='******')
    self.assertEqual(logged_in, True)
    response = self.client.post(
        reverse('resource_permissions', args=(document_id, )),
        data=json.dumps(self.perm_spec),
        content_type="application/json")
    # Test that the method returns 200
    self.assertEqual(response.status_code, 200)
def setUp(self):
    """Create the GeoApp fixture owned by bobby used throughout these tests."""
    super().setUp()
    self.bobby = get_user_model().objects.get(username='******')
    app_defaults = dict(
        title="Testing GeoApp",
        owner=self.bobby,
        blob='{"test_data": {"test": ["test_1","test_2","test_3"]}}',
    )
    self.geo_app = resource_manager.create(
        None, resource_type=GeoApp, defaults=app_defaults)
def test_generate_thumbnail_name_document(self):
    """Thumbnail names for documents must match ``document-<uuid>-thumb.png``."""
    owner = get_user_model().objects.get(username='******')
    doc = resource_manager.create(
        None,
        resource_type=Document,
        defaults=dict(
            doc_url="http://geonode.org/map.pdf",
            owner=owner,
            title="Test doc",
        ))
    thumb_name = thumbnails._generate_thumbnail_name(doc)
    self.assertIsNotNone(
        re.match(f"document-{self.re_uuid}-thumb.png", thumb_name, re.I),
        "Document name should meet a provided pattern")
def test_create_document_url(self):
    """Tests creating an external document instead of a file."""
    superuser = get_user_model().objects.get(pk=2)
    created = resource_manager.create(
        None,
        resource_type=Document,
        defaults=dict(
            doc_url="http://geonode.org/map.pdf",
            owner=superuser,
            title="GeoNode Map",
        ))
    # Re-fetch from the database to check what was actually persisted.
    doc = Document.objects.get(pk=created.id)
    self.assertEqual(doc.title, "GeoNode Map")
    self.assertEqual(doc.extension, "pdf")
def setUp(self):
    """Create a non-admin user, an approved test document, and a perm spec."""
    self.not_admin = get_user_model().objects.create(username='******', is_active=True)
    self.not_admin.set_password('very-secret')
    self.not_admin.save()
    self.files = [f"{settings.MEDIA_ROOT}/img.gif"]
    doc_defaults = dict(
        files=self.files,
        owner=self.not_admin,
        title='test',
        is_approved=True,
    )
    self.test_doc = resource_manager.create(
        None, resource_type=Document, defaults=doc_defaults)
    # Spec that strips all permissions from anonymous users.
    self.perm_spec = {"users": {"AnonymousUser": []}}
    self.doc_link_url = reverse('document_link', args=(self.test_doc.pk, ))
def test_generate_thumbnail_name_geoapp(self):
    """Thumbnail names for geoapps must match ``geoapp-<uuid>-thumb.png``."""
    owner = get_user_model().objects.get(username='******')
    geo_app = resource_manager.create(
        None,
        resource_type=GeoApp,
        defaults=dict(
            title="Test GeoApp",
            owner=owner,
            blob='{"test_data": {"test": ["test_1","test_2","test_3"]}}',
        ))
    thumb_name = thumbnails._generate_thumbnail_name(geo_app)
    self.assertIsNotNone(
        re.match(f"geoapp-{self.re_uuid}-thumb.png", thumb_name, re.I),
        "GeoApp name should meet a provided pattern")
def setUp(self):
    """Create the GeoApp fixtures and the form class under test."""
    super().setUp()
    self.bobby = get_user_model().objects.get(username='******')
    # One GeoApp created through the resource manager...
    self.geo_app = resource_manager.create(
        None,
        resource_type=GeoApp,
        defaults=dict(
            title="Testing GeoApp",
            owner=self.bobby,
            blob='{"test_data": {"test": ["test_1","test_2","test_3"]}}',
        ))
    # ...and one created directly through the ORM for form-level tests.
    self.user = get_user_model().objects.get(username='******')
    self.geoapp = GeoApp.objects.create(
        name="name",
        title="geoapp_titlte",
        thumbnail_url='initial',
        owner=self.user)
    self.sut = GeoAppForm
def update_geonode_resource(
    self,
    harvested_info: HarvestedResourceInfo,
    harvestable_resource: "HarvestableResource",  # noqa
    harvesting_session_id: int,
):
    """Create or update a local GeoNode resource with the input harvested information."""
    defaults = self.get_geonode_resource_defaults(
        harvested_info, harvestable_resource)
    geonode_resource = harvestable_resource.geonode_resource
    if geonode_resource is None:
        # No local counterpart yet: create one keyed on the remote UUID.
        geonode_resource = resource_manager.create(
            str(harvested_info.resource_descriptor.uuid),
            self.get_geonode_resource_type(
                harvestable_resource.remote_resource_type),
            defaults
        )
    else:
        # Guard against UUID drift between the local record and the remote.
        if not geonode_resource.uuid == str(harvested_info.resource_descriptor.uuid):
            raise RuntimeError(
                f"Resource {geonode_resource!r} already exists locally but its "
                f"UUID ({geonode_resource.uuid}) does not match the one found on "
                f"the remote resource {harvested_info.resource_descriptor.uuid!r}")
        geonode_resource = resource_manager.update(
            str(harvested_info.resource_descriptor.uuid), vals=defaults)
    # Merge remote keywords with the ones already on the local resource.
    keywords = list(
        harvested_info.resource_descriptor.identification.other_keywords
    ) + geonode_resource.keyword_list()
    harvester = models.Harvester.objects.get(pk=self.harvester_id)
    # Tag the resource with the harvester's name, stripped of any
    # "harvester"/"harvester_"/"harvester " prefix variants.
    keywords.append(
        harvester.name.lower().replace(
            'harvester ', '').replace(
            'harvester_', '').replace(
            'harvester', '').strip()
    )
    regions = harvested_info.resource_descriptor.identification.place_keywords
    # Second update pass: regions + de-duplicated keywords.
    resource_manager.update(
        str(harvested_info.resource_descriptor.uuid),
        regions=regions,
        keywords=list(set(keywords)))
    # Remember the link between the harvestable record and the local resource.
    harvestable_resource.geonode_resource = geonode_resource
    harvestable_resource.save()
    self.finalize_resource_update(
        geonode_resource,
        harvested_info,
        harvestable_resource,
        harvesting_session_id
    )
def new_map_json(request):
    """Map JSON endpoint: GET returns a new-map config, POST saves a new map.

    POST requires authentication (401 otherwise); other methods get 405.
    """
    if request.method == 'GET':
        map_obj, config = new_map_config(request)
        # new_map_config may short-circuit with a ready HttpResponse.
        if isinstance(config, HttpResponse):
            return config
        else:
            return HttpResponse(config)
    elif request.method == 'POST':
        if not request.user.is_authenticated:
            return HttpResponse(
                'You must be logged in to save new maps',
                content_type="text/plain",
                status=401
            )
        # Create an empty map shell; the viewer payload fills it in below.
        map_obj = resource_manager.create(
            None,
            resource_type=Map,
            defaults=dict(
                zoom=0,
                center_x=0,
                center_y=0,
                owner=request.user
            )
        )
        resource_manager.set_permissions(
            None, instance=map_obj, permissions=None, created=True)
        # If the body has been read already, use an empty string.
        # See https://github.com/django/django/commit/58d555caf527d6f1bdfeab14527484e4cca68648
        # for a better exception to catch when we move to Django 1.7.
        try:
            body = request.body
        except Exception:
            body = ''
        try:
            map_obj.update_from_viewer(
                body,
                context={'request': request, 'mapId': map_obj.id, 'map': map_obj})
        except ValueError as e:
            # Malformed viewer payload -> 400 with the parse error text.
            return HttpResponse(str(e), status=400)
        else:
            register_event(request, EventType.EVENT_UPLOAD, map_obj)
            return HttpResponse(
                json.dumps({'id': map_obj.id}),
                status=200,
                content_type='application/json'
            )
    else:
        # Any method other than GET/POST is not allowed.
        return HttpResponse(status=405)
def test_document_link_with_permissions(self):
    """Document links honour permissions; URL docs expose their address via ``href``."""
    self.test_doc.set_permissions(self.perm_spec)
    # Anonymous users are rejected outright.
    response = self.client.get(self.doc_link_url)
    self.assertEqual(response.status_code, 401)
    # A logged-in user without access gets a 404 instead.
    self.client.login(username=self.not_admin.username, password='******')
    response = self.client.get(self.doc_link_url)
    self.assertEqual(response.status_code, 404)
    # A URL-backed document surfaces the external address through ``href``.
    external_doc = resource_manager.create(
        None,
        resource_type=Document,
        defaults=dict(
            doc_url="http://geonode.org/map.pdf",
            owner=self.not_admin,
            title="GeoNode Map Doc",
        ))
    self.assertEqual(external_doc.href, 'http://geonode.org/map.pdf')
def _create_layer(self, geonode_service, **resource_fields):
    """Create a remote-service Layer, push keywords/notifications, set perms.

    Note: ``geonode.layers.models`` connects a ``pre_save_layer`` handler to
    the Layer ``pre_save`` signal which validates common fields (title,
    abstract, ...) and fills sensible defaults, so none of that happens here.
    """
    keywords = resource_fields.pop("keywords", [])
    layer = resource_manager.create(
        None,
        resource_type=Layer,
        defaults=dict(
            owner=geonode_service.owner,
            remote_service=geonode_service,
            **resource_fields
        )
    )
    resource_manager.update(layer.uuid, instance=layer, keywords=keywords, notify=True)
    resource_manager.set_permissions(layer.uuid, instance=layer)
    return layer
def _create_new_geonode_resource(self, geonode_resource_type, defaults: typing.Dict):
    """Create a GeoNode resource, ingesting files when ``defaults`` carries any.

    ``defaults`` must contain a ``uuid`` entry; ``files`` is optional and, when
    present and non-empty, routes creation through ``resource_manager.ingest``.
    """
    logger.debug(
        f"Creating a new GeoNode resource for resource with uuid: {defaults['uuid']!r}..."
    )
    # Work on a copy so the caller's dict is not mutated by the pop below.
    resource_defaults = defaults.copy()
    resource_files = resource_defaults.pop("files", [])
    if len(resource_files) > 0:
        logger.debug("calling resource_manager.ingest...")
        geonode_resource = resource_manager.ingest(
            resource_files,
            uuid=resource_defaults["uuid"],
            resource_type=geonode_resource_type,
            defaults=resource_defaults,
            importer_session_opts={"name": resource_defaults["uuid"]},
        )
    else:
        logger.debug("calling resource_manager.create...")
        # NOTE(review): this branch passes the original ``defaults`` (which may
        # still contain an empty "files" entry) instead of ``resource_defaults``
        # — confirm whether that asymmetry with the ingest branch is intended.
        geonode_resource = resource_manager.create(
            resource_defaults["uuid"], geonode_resource_type, defaults)
    return geonode_resource
def create(self, validated_data):
    """Create a new GeoApp from validated serializer data.

    Raises ValidationError when mandatory fields are missing, the name is
    already taken, or a referenced user profile does not exist.
    """
    # Sanity checks
    if 'name' not in validated_data or 'owner' not in validated_data:
        raise ValidationError("No valid data: 'name' and 'owner' are mandatory fields!")
    if self.Meta.model.objects.filter(name=validated_data['name']).count():
        raise ValidationError("A GeoApp with the same 'name' already exists!")
    # Resolve usernames into user instances for the ownership-style fields.
    profile_fields = {
        key: value for key, value in validated_data.items()
        if key in ('owner', 'poc', 'metadata_owner')
    }
    for key, username in profile_fields.items():
        validated_data.pop(key)
        profile = get_user_model().objects.filter(username=username).first()
        if profile is None:
            raise ValidationError(f"The specified '{key}' does not exist!")
        validated_data[key] = profile
    # Detach the JSON blob; it is applied via the update call below.
    blob = validated_data.pop('blob', {})
    new_app = resource_manager.create(
        None, resource_type=self.Meta.model, defaults=validated_data)
    return resource_manager.update(
        new_app.uuid,
        instance=new_app,
        vals=dict(blob=blob),
        notify=True)
def update_geonode_resource(
    self,
    harvested_info: HarvestedResourceInfo,
    harvestable_resource: "HarvestableResource",  # noqa
    harvesting_session_id: int,
):
    """Create or update a local GeoNode resource with the input harvested information."""
    harvester = models.Harvester.objects.get(pk=self.harvester_id)
    defaults = self.get_geonode_resource_defaults(
        harvested_info.resource_descriptor, harvestable_resource)
    geonode_resource = harvestable_resource.geonode_resource
    if geonode_resource is None:
        # No local counterpart yet: create one keyed on the remote UUID.
        geonode_resource = resource_manager.create(
            str(harvested_info.resource_descriptor.uuid),
            self.get_geonode_resource_type(
                harvestable_resource.remote_resource_type),
            defaults
        )
    else:
        # Guard against UUID drift between the local record and the remote.
        if not geonode_resource.uuid == str(harvested_info.resource_descriptor.uuid):
            raise RuntimeError(
                f"Resource {geonode_resource!r} already exists locally but its "
                f"UUID ({geonode_resource.uuid}) does not match the one found on "
                f"the remote resource {harvested_info.resource_descriptor.uuid!r}")
        geonode_resource = resource_manager.update(
            str(harvested_info.resource_descriptor.uuid), vals=defaults)
    # Apply the harvester's configured default access permissions.
    resource_manager.set_permissions(
        str(harvested_info.resource_descriptor.uuid),
        instance=geonode_resource,
        permissions=harvester.default_access_permissions)
    # Remember the link between the harvestable record and the local resource.
    harvestable_resource.geonode_resource = geonode_resource
    harvestable_resource.save()
    self.finalize_resource_update(
        geonode_resource,
        harvested_info,
        harvestable_resource,
        harvesting_session_id
    )
def final_step(upload_session, user, charset="UTF-8", dataset_id=None):
    """Finalize a GeoServer import session and create/update the Django Dataset.

    Commits the gsimporter session, parses any uploaded XML metadata and SLD
    style, creates (or reuses) the Dataset record inside a transaction, then
    hands off permissions/metadata/style/thumbnail to the resource manager.
    Returns the saved Dataset (or None/early-return when already running or
    the dataset pre-existed).
    """
    import_session = upload_session.import_session
    import_id = import_session.id
    _log(f'Reloading session {import_id} to check validity')
    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)
    if Upload.objects.filter(import_id=import_id).count():
        Upload.objects.filter(import_id=import_id).update(complete=False)
        upload = Upload.objects.filter(import_id=import_id).get()
        # Another worker is already processing this import: bail out.
        if upload.state == enumerations.STATE_RUNNING:
            return
    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)
    # Create the style and assign it to the created resource
    # FIXME: Put this in gsconfig.py
    task = import_session.tasks[0]
    task.set_charset(charset)
    # @todo see above in save_step, regarding computed unique name
    name = task.layer.name
    if dataset_id:
        name = Dataset.objects.get(resourcebase_ptr_id=dataset_id).name
    _log(f'Getting from catalog [{name}]')
    try:
        # the importer chooses an available featuretype name late in the game need
        # to verify the resource.name otherwise things will fail. This happens
        # when the same data is uploaded a second time and the default name is
        # chosen
        gs_catalog.get_layer(name)
    except Exception:
        Upload.objects.invalidate_from_session(upload_session)
        raise LayerNotReady(
            _(f"Expected to find layer named '{name}' in geoserver"))
    if import_session.state == 'READY' or (import_session.state == 'PENDING' and task.state == 'READY'):
        import_session.commit()
    elif import_session.state == 'INCOMPLETE' and task.state != 'ERROR':
        Upload.objects.invalidate_from_session(upload_session)
        raise Exception(f'unknown item state: {task.state}')
    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)
    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)
    _log(f'Creating Django record for [{name}]')
    target = task.target
    alternate = task.get_target_layer_name()
    dataset_uuid = None
    title = upload_session.dataset_title
    abstract = upload_session.dataset_abstract
    metadata_uploaded = False
    xml_file = upload_session.base_file[0].xml_files
    if xml_file:
        try:
            # get model properties from XML
            # If it's contained within a zip, need to extract it
            if upload_session.base_file.archive:
                archive = upload_session.base_file.archive
                zf = zipfile.ZipFile(archive, 'r', allowZip64=True)
                zf.extract(xml_file[0], os.path.dirname(archive))
                # Assign the absolute path to this file
                xml_file = f"{os.path.dirname(archive)}/{xml_file[0]}"
            # Sanity checks: normalize xml_file to a single path string or None.
            if isinstance(xml_file, list):
                if len(xml_file) > 0:
                    xml_file = xml_file[0]
                else:
                    xml_file = None
            elif not isinstance(xml_file, str):
                xml_file = None
            if xml_file and os.path.exists(xml_file) and os.access(
                    xml_file, os.R_OK):
                dataset_uuid, vals, regions, keywords, custom = parse_metadata(
                    open(xml_file).read())
                metadata_uploaded = True
        except Exception as e:
            Upload.objects.invalidate_from_session(upload_session)
            logger.error(e)
            raise GeoNodeException(
                _("Exception occurred while parsing the provided Metadata file."), e)
    # look for SLD
    sld_file = upload_session.base_file[0].sld_files
    sld_uploaded = False
    if sld_file:
        # If it's contained within a zip, need to extract it
        if upload_session.base_file.archive:
            archive = upload_session.base_file.archive
            logger.error(f'using uploaded sld file from {archive}')
            zf = zipfile.ZipFile(archive, 'r', allowZip64=True)
            zf.extract(sld_file[0], os.path.dirname(archive), path=upload_session.tempdir)
            # Assign the absolute path to this file
            sld_file[0] = f"{os.path.dirname(archive)}/{sld_file[0]}"
        else:
            _sld_file = f"{os.path.dirname(upload_session.tempdir)}/{os.path.basename(sld_file[0])}"
            logger.error(f"copying [{sld_file[0]}] to [{_sld_file}]")
            try:
                shutil.copyfile(sld_file[0], _sld_file)
                sld_file = _sld_file
            except (IsADirectoryError, shutil.SameFileError) as e:
                # Best effort: fall back to the original path on benign copy errors.
                logger.exception(e)
                sld_file = sld_file[0]
            except Exception as e:
                raise UploadException.from_exc(_('Error uploading Dataset'), e)
        sld_uploaded = True
    else:
        # get_files will not find the sld if it doesn't match the base name
        # so we've worked around that in the view - if provided, it will be here
        if upload_session.import_sld_file:
            logger.error('using provided sld file from importer')
            base_file = upload_session.base_file
            sld_file = base_file[0].sld_files[0]
        sld_uploaded = False
    logger.error(f'[sld_uploaded: {sld_uploaded}] sld_file: {sld_file}')
    # Make sure the layer does not exists already
    if dataset_uuid and Dataset.objects.filter(uuid=dataset_uuid).count():
        Upload.objects.invalidate_from_session(upload_session)
        logger.error(
            "The UUID identifier from the XML Metadata is already in use in this system.")
        raise GeoNodeException(
            _("The UUID identifier from the XML Metadata is already in use in this system."))
    # Is it a regular file or an ImageMosaic?
    # if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
    saved_dataset = None
    has_time = has_elevation = False
    start = end = None
    if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
        has_elevation = True
        start = datetime.datetime.strptime(
            upload_session.mosaic_time_value,
            TIME_REGEX_FORMAT[upload_session.mosaic_time_regex])
        start = pytz.utc.localize(start, is_dst=False)
        end = start
    if upload_session.time and upload_session.time_info and upload_session.time_transforms:
        has_time = True
    if upload_session.append_to_mosaic_opts:
        # Appending a new granule to an existing ImageMosaic dataset.
        try:
            with transaction.atomic():
                saved_dataset_filter = Dataset.objects.filter(
                    name=upload_session.append_to_mosaic_name)
                if not saved_dataset_filter.exists():
                    # NOTE(review): every other create() call site in this module
                    # passes a uuid as the first positional argument; this call
                    # passes only ``name=`` — confirm resource_manager.create
                    # accepts that signature, otherwise this raises TypeError.
                    saved_dataset = resource_manager.create(
                        name=upload_session.append_to_mosaic_name)
                    created = True
                else:
                    saved_dataset = saved_dataset_filter.get()
                    created = False
                saved_dataset.set_dirty_state()
                # Widen the mosaic's temporal extent to include the new granule.
                if saved_dataset.temporal_extent_start and end:
                    if pytz.utc.localize(saved_dataset.temporal_extent_start, is_dst=False) < end:
                        saved_dataset.temporal_extent_end = end
                        Dataset.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                            temporal_extent_end=end)
                    else:
                        saved_dataset.temporal_extent_start = end
                        Dataset.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                            temporal_extent_start=end)
        except Exception as e:
            _log(
                f"There was an error updating the mosaic temporal extent: {str(e)}")
    else:
        # Regular upload: create (or reuse) the Dataset record atomically.
        try:
            with transaction.atomic():
                saved_dataset_filter = Dataset.objects.filter(
                    store=target.name,
                    alternate=alternate,
                    workspace=target.workspace_name,
                    name=task.layer.name)
                if not saved_dataset_filter.exists():
                    saved_dataset = resource_manager.create(
                        dataset_uuid,
                        resource_type=Dataset,
                        defaults=dict(
                            store=target.name,
                            subtype=get_dataset_storetype(target.store_type),
                            alternate=alternate,
                            workspace=target.workspace_name,
                            title=title,
                            name=task.layer.name,
                            abstract=abstract or _('No abstract provided'),
                            owner=user,
                            temporal_extent_start=start,
                            temporal_extent_end=end,
                            is_mosaic=has_elevation,
                            has_time=has_time,
                            has_elevation=has_elevation,
                            time_regex=upload_session.mosaic_time_regex))
                    created = True
                else:
                    saved_dataset = saved_dataset_filter.get()
                    created = False
                saved_dataset.set_dirty_state()
        except Exception as e:
            Upload.objects.invalidate_from_session(upload_session)
            raise UploadException.from_exc(_('Error configuring Dataset'), e)
    assert saved_dataset
    # A pre-existing dataset needs no finalization pass.
    if not created:
        return saved_dataset
    try:
        saved_dataset.set_dirty_state()
        with transaction.atomic():
            Upload.objects.update_from_session(upload_session, resource=saved_dataset)
            # Set default permissions on the newly created layer and send notifications
            permissions = upload_session.permissions
            # Finalize Upload
            resource_manager.set_permissions(
                None, instance=saved_dataset, permissions=permissions, created=created)
            resource_manager.update(
                None, instance=saved_dataset, xml_file=xml_file, metadata_uploaded=metadata_uploaded)
            resource_manager.exec(
                'set_style', None, instance=saved_dataset,
                sld_uploaded=sld_uploaded, sld_file=sld_file, tempdir=upload_session.tempdir)
            resource_manager.exec(
                'set_time_info', None, instance=saved_dataset, time_info=upload_session.time_info)
            resource_manager.set_thumbnail(None, instance=saved_dataset)
        saved_dataset.set_processing_state(enumerations.STATE_PROCESSED)
    except Exception as e:
        saved_dataset.set_processing_state(enumerations.STATE_INVALID)
        raise GeoNodeException(e)
    finally:
        # Always release the dirty flag, whether finalization succeeded or not.
        saved_dataset.clear_dirty_state()
    try:
        logger.debug(
            f"... Cleaning up the temporary folders {upload_session.tempdir}")
        if upload_session.tempdir and os.path.exists(upload_session.tempdir):
            shutil.rmtree(upload_session.tempdir)
    except Exception as e:
        logger.warning(e)
    finally:
        # Mark the Upload record complete regardless of cleanup outcome.
        Upload.objects.filter(import_id=import_id).update(complete=True)
    return saved_dataset
def form_valid(self, form):
    """
    If the form is valid, save the associated model.

    Creates a Document from either an uploaded file (stored via the storage
    manager) or an external URL, applies moderation/publishing settings and
    permissions, optionally extracts EXIF metadata, and responds with JSON
    (when ``no__redirect`` is set) or a redirect to the document detail page.
    """
    doc_form = form.cleaned_data
    file = doc_form.pop('doc_file', None)
    if file:
        # Stage the upload in a temp dir, persist it through the storage
        # manager, then remove the temp dir if the storage moved the file.
        tempdir = tempfile.mkdtemp(dir=settings.STATIC_ROOT)
        dirname = os.path.basename(tempdir)
        filepath = storage_manager.save(f"{dirname}/{file.name}", file)
        storage_path = storage_manager.path(filepath)
        self.object = resource_manager.create(
            None,
            resource_type=Document,
            defaults=dict(
                owner=self.request.user,
                doc_url=doc_form.pop('doc_url', None),
                title=doc_form.pop('title', file.name),
                files=[storage_path]))
        if tempdir != os.path.dirname(storage_path):
            shutil.rmtree(tempdir, ignore_errors=True)
    else:
        # URL-only document: no files attached.
        self.object = resource_manager.create(
            None,
            resource_type=Document,
            defaults=dict(
                owner=self.request.user,
                doc_url=doc_form.pop('doc_url', None),
                title=doc_form.pop('title', None)))
    # NOTE(review): these flags are set on the in-memory instance only; no
    # explicit save() follows before set_permissions — confirm the resource
    # manager persists them downstream.
    if settings.ADMIN_MODERATE_UPLOADS:
        self.object.is_approved = False
    if settings.RESOURCE_PUBLISHING:
        self.object.is_published = False
    resource_manager.set_permissions(
        None,
        instance=self.object,
        permissions=form.cleaned_data["permissions"],
        created=True)
    abstract = None
    date = None
    regions = []
    keywords = []
    bbox = None
    url = hookset.document_detail_url(self.object)
    out = {'success': False}
    if getattr(settings, 'EXIF_ENABLED', False):
        # Best-effort EXIF extraction; failures only log at debug level.
        try:
            from geonode.documents.exif.utils import exif_extract_metadata_doc
            exif_metadata = exif_extract_metadata_doc(self.object)
            if exif_metadata:
                date = exif_metadata.get('date', None)
                keywords.extend(exif_metadata.get('keywords', []))
                bbox = exif_metadata.get('bbox', None)
                abstract = exif_metadata.get('abstract', None)
        except Exception:
            logger.debug("Exif extraction failed.")
    resource_manager.update(
        self.object.uuid,
        instance=self.object,
        keywords=keywords,
        regions=regions,
        vals=dict(
            abstract=abstract,
            date=date,
            date_type="Creation",
            bbox_polygon=BBOXHelper.from_xy(bbox).as_polygon() if bbox else None),
        notify=True)
    resource_manager.set_thumbnail(self.object.uuid, instance=self.object, overwrite=False)
    register_event(self.request, EventType.EVENT_UPLOAD, self.object)
    if self.request.GET.get('no__redirect', False):
        out['success'] = True
        out['url'] = url
        if out['success']:
            status_code = 200
        else:
            status_code = 400
        return HttpResponse(
            json.dumps(out),
            content_type='application/json',
            status=status_code)
    else:
        return HttpResponseRedirect(url)