def test_moderated_upload(self):
    """Verify that ADMIN_MODERATE_UPLOADS controls the approval flag on new maps."""
    # Run the same create-and-check cycle under both moderation settings:
    # moderation off -> auto-approved; moderation on -> pending approval.
    for moderate, expect_approved in ((False, True), (True, False)):
        with self.settings(ADMIN_MODERATE_UPLOADS=moderate):
            # Create a fresh map owned by the test user and apply default perms.
            new_map = resource_manager.create(None, resource_type=Map, defaults=dict(owner=self.u))
            resource_manager.set_permissions(None, instance=new_map, permissions=None, created=True)
            if expect_approved:
                self.assertTrue(new_map.is_approved)
            else:
                self.assertFalse(new_map.is_approved)
            # Publication state is unaffected by the moderation flag.
            self.assertTrue(new_map.is_published)
def _create_dataset(self, geonode_service, **resource_fields):
    """Create, enrich and secure a remote ``Dataset`` for ``geonode_service``.

    Returns the newly created ``Dataset`` instance.
    """
    # NOTE: ``geonode.layers.models`` wires a ``pre_save_dataset`` handler on the
    # Dataset ``pre_save`` signal; it checks common fields (abstract, title, ...)
    # and fills in sensible defaults, so we do not do that here.
    dataset_keywords = resource_fields.pop("keywords", [])
    creation_defaults = dict(
        owner=geonode_service.owner,
        remote_service=geonode_service,
        remote_typename=geonode_service.name,
        sourcetype=base_enumerations.SOURCE_TYPE_REMOTE,
        ptype=getattr(geonode_service, "ptype", "gxp_wmscsource"),
        **resource_fields)
    # Indexed services expose their own OWS endpoint.
    if geonode_service.method == INDEXED:
        creation_defaults['ows_url'] = geonode_service.service_url
    dataset = resource_manager.create(None, resource_type=Dataset, defaults=creation_defaults)
    # Attach keywords (with notification) and assign default permissions.
    resource_manager.update(dataset.uuid, instance=dataset, keywords=dataset_keywords, notify=True)
    resource_manager.set_permissions(dataset.uuid, instance=dataset)
    return dataset
def set_permissions(self, perm_spec=None, created=False):
    """
    Sets an object's permission levels based on the perm_spec JSON.

    The mapping looks like:
    {
        'users': {
            'AnonymousUser': ['view'],
            'username': ['perm1','perm2','perm3'],
            'username2': ['perm1','perm2','perm3']
            ...
        },
        'groups': {
            'groupname': ['perm1','perm2','perm3'],
            'groupname2': ['perm1','perm2','perm3'],
            ...
        }
    }
    """
    # Imported locally to avoid a circular import at module load time.
    from geonode.resource.manager import resource_manager

    # Fixup Advanced Workflow permissions: snapshot the current permissions,
    # then let the resource manager merge workflow-derived permissions into
    # the requested spec.
    prev_perm_spec = copy.deepcopy(self.get_all_level_info())
    perm_spec = resource_manager.get_workflow_permissions(
        self.uuid, instance=self, permissions=perm_spec)

    # Avoid setting the permissions if nothing changed.
    # NOTE(review): this assumes ``compare_perms`` returns truthy when the two
    # specs differ — confirm against its definition.
    if not self.compare_perms(prev_perm_spec, perm_spec):
        return

    resource_manager.set_permissions(self.uuid, instance=self, permissions=perm_spec, created=created)
def new_map_json(request):
    """Handle the new-map JSON endpoint.

    GET returns a new map configuration; POST creates a map from the request
    body (401 when anonymous, 400 on invalid viewer config); any other method
    gets a 405.
    """
    if request.method == 'GET':
        map_obj, config = new_map_config(request)
        # new_map_config may short-circuit with a ready-made HttpResponse.
        if isinstance(config, HttpResponse):
            return config
        return HttpResponse(config)

    if request.method != 'POST':
        return HttpResponse(status=405)

    # POST: only authenticated users may save new maps.
    if not request.user.is_authenticated:
        return HttpResponse(
            'You must be logged in to save new maps',
            content_type="text/plain",
            status=401
        )

    map_obj = resource_manager.create(
        None,
        resource_type=Map,
        defaults=dict(
            zoom=0,
            center_x=0,
            center_y=0,
            owner=request.user
        )
    )
    resource_manager.set_permissions(None, instance=map_obj, permissions=None, created=True)

    # If the body has been read already, use an empty string.
    # See https://github.com/django/django/commit/58d555caf527d6f1bdfeab14527484e4cca68648
    # for a better exception to catch when we move to Django 1.7.
    try:
        body = request.body
    except Exception:
        body = ''

    try:
        map_obj.update_from_viewer(body, context={'request': request, 'mapId': map_obj.id, 'map': map_obj})
    except ValueError as e:
        return HttpResponse(str(e), status=400)

    register_event(request, EventType.EVENT_UPLOAD, map_obj)
    return HttpResponse(
        json.dumps({'id': map_obj.id}),
        status=200,
        content_type='application/json'
    )
def test_map_view(self, thumbnail_mock):
    """Test that map view can be properly rendered.

    Fixes over the previous version: the map object was fetched twice with
    identical ``Map.objects.get(id=map_id)`` queries, and a ``reverse`` of
    ``map_metadata`` was assigned to ``url`` only to be immediately
    overwritten by the ``map_embed`` URL — both removed.
    """
    # first create a map with one layer and default permissions
    map_created = Map.objects.create(owner=self.u)
    MapLayer.objects.create(
        map=map_created,
        name='base:nic_admin',
        ows_url='http://localhost:8080/geoserver/wms',
    )
    resource_manager.set_permissions(None, instance=map_created, permissions=None, created=True)
    map_id = map_created.id

    self.client.logout()
    url = reverse('map_embed', args=(map_id, ))

    # test unauthenticated user to view map
    response = self.client.get(url)
    self.assertEqual(response.status_code, 200)
    # TODO: unauthenticated user can still access the map view

    # test a user without map view permission
    self.client.login(username='******', password='******')
    response = self.client.get(url)
    self.assertEqual(response.status_code, 200)
    self.client.logout()
    # TODO: the user can still access the map view without permission

    # Now test with a valid user using GET method
    self.client.login(username=self.user, password=self.passwd)
    response = self.client.get(url)
    self.assertEqual(response.status_code, 200)

    # Config equals to that of the map whose id is given
    map_obj = Map.objects.get(id=map_id)
    self.assertEqual(response.context['resource'], map_obj)
    self.assertIsNotNone(response.context['access_token'])
    self.assertEqual(response.context['is_embed'], 'true')
def _create_layer(self, geonode_service, **resource_fields):
    """Create, enrich and secure a remote ``Layer`` for ``geonode_service``.

    Returns the newly created ``Layer`` instance.
    """
    # NOTE: ``geonode.layers.models`` registers a ``pre_save_layer`` handler on
    # the Layer ``pre_save`` signal; it checks common fields (abstract, title,
    # ...) and fills in sensible defaults, so we do not do that here.
    layer_keywords = resource_fields.pop("keywords", [])
    creation_defaults = dict(
        owner=geonode_service.owner,
        remote_service=geonode_service,
        **resource_fields)
    layer = resource_manager.create(None, resource_type=Layer, defaults=creation_defaults)
    # Attach keywords (with notification) and assign default permissions.
    resource_manager.update(layer.uuid, instance=layer, keywords=layer_keywords, notify=True)
    resource_manager.set_permissions(layer.uuid, instance=layer)
    return layer
def set_default_permissions(self, owner=None):
    """
    Removes all the permissions except for the owner and assign the
    view permission to the anonymous group.

    :param owner: optional user to treat as the owner; falls back to
        ``self.owner`` when not provided.
    :return: the result of ``resource_manager.set_permissions``.
    """
    # Imported locally to avoid a circular import at module load time.
    from geonode.resource.manager import resource_manager

    # default permissions for anonymous users
    anonymous_group, created = Group.objects.get_or_create(
        name='anonymous')

    if not anonymous_group:
        raise Exception("Could not acquire 'anonymous' Group.")

    # Start from a deep copy of the current permissions and make sure both
    # top-level sections exist before we write into them.
    perm_spec = copy.deepcopy(self.get_all_level_info())
    if "users" not in perm_spec:
        perm_spec["users"] = {}
    if "groups" not in perm_spec:
        perm_spec["groups"] = {}

    # default permissions for owner and owner's groups
    _owner = owner or self.owner
    user_groups = Group.objects.filter(name__in=_owner.groupmember_set.all(
    ).values_list("group__slug", flat=True))

    # Anonymous: grant view either to everyone (anonymous group) or, when
    # anonymous view is disabled, only to the owner's groups.
    anonymous_can_view = settings.DEFAULT_ANONYMOUS_VIEW_PERMISSION
    if anonymous_can_view:
        perm_spec["groups"][anonymous_group] = ['view_resourcebase']
    else:
        for user_group in user_groups:
            if not skip_registered_members_common_group(user_group):
                perm_spec["groups"][user_group] = ['view_resourcebase']

    # Download: note this deliberately OVERWRITES the entry written above for
    # the same group key, widening it to view + download.
    anonymous_can_download = settings.DEFAULT_ANONYMOUS_DOWNLOAD_PERMISSION
    if anonymous_can_download:
        perm_spec["groups"][anonymous_group] = [
            'view_resourcebase', 'download_resourcebase'
        ]
    else:
        for user_group in user_groups:
            if not skip_registered_members_common_group(user_group):
                perm_spec["groups"][user_group] = [
                    'view_resourcebase', 'download_resourcebase'
                ]

    # Fixup Advanced Workflow permissions
    perm_spec = resource_manager.get_workflow_permissions(
        self.uuid, instance=self, permissions=perm_spec)

    return resource_manager.set_permissions(self.uuid, instance=self, owner=owner, permissions=perm_spec)
def update_geonode_resource(
    self,
    harvested_info: HarvestedResourceInfo,
    harvestable_resource: "HarvestableResource",  # noqa
    harvesting_session_id: int,
):
    """Create or update a local GeoNode resource with the input harvested information."""
    harvester = models.Harvester.objects.get(pk=self.harvester_id)
    defaults = self.get_geonode_resource_defaults(
        harvested_info.resource_descriptor, harvestable_resource)
    remote_uuid = str(harvested_info.resource_descriptor.uuid)
    geonode_resource = harvestable_resource.geonode_resource

    if geonode_resource is None:
        # No local counterpart yet: create one keyed on the remote UUID.
        geonode_resource = resource_manager.create(
            remote_uuid,
            self.get_geonode_resource_type(harvestable_resource.remote_resource_type),
            defaults
        )
    elif geonode_resource.uuid != remote_uuid:
        # A local resource exists but is bound to a different remote object.
        raise RuntimeError(
            f"Resource {geonode_resource!r} already exists locally but its "
            f"UUID ({geonode_resource.uuid}) does not match the one found on "
            f"the remote resource {harvested_info.resource_descriptor.uuid!r}")
    else:
        geonode_resource = resource_manager.update(remote_uuid, vals=defaults)

    # Apply the harvester's default permissions and link the local resource
    # back to the harvestable record before finalizing the update.
    resource_manager.set_permissions(
        remote_uuid,
        instance=geonode_resource,
        permissions=harvester.default_access_permissions)
    harvestable_resource.geonode_resource = geonode_resource
    harvestable_resource.save()
    self.finalize_resource_update(
        geonode_resource,
        harvested_info,
        harvestable_resource,
        harvesting_session_id
    )
def set_permissions(self, perm_spec=None, created=False):
    """
    Apply ``perm_spec`` to this object through the resource manager.

    ``perm_spec`` is a JSON-like mapping of the form:

    {
        'users': {
            'AnonymousUser': ['view'],
            'username': ['perm1','perm2','perm3'],
            'username2': ['perm1','perm2','perm3']
            ...
        },
        'groups': {
            'groupname': ['perm1','perm2','perm3'],
            'groupname2': ['perm1','perm2','perm3'],
            ...
        }
    }
    """
    # Imported here to avoid a circular import at module load time.
    from geonode.resource.manager import resource_manager
    return resource_manager.set_permissions(
        self.uuid,
        instance=self,
        permissions=perm_spec,
        created=created)
def final_step(upload_session, user, charset="UTF-8", dataset_id=None):
    """Finalize a GeoServer import session and materialize the Django Dataset.

    Reloads/commits the gsimporter session, parses optional uploaded XML
    metadata and SLD style files, creates (or reuses) the ``Dataset`` record,
    then applies permissions, metadata, style, time info and thumbnail through
    the resource manager.  Returns the saved ``Dataset`` (or ``None`` when an
    upload with the same import id is already running).
    """
    import_session = upload_session.import_session
    import_id = import_session.id

    _log(f'Reloading session {import_id} to check validity')
    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)

    # Mark the Upload record incomplete; bail out if it is already running.
    if Upload.objects.filter(import_id=import_id).count():
        Upload.objects.filter(import_id=import_id).update(complete=False)
        upload = Upload.objects.filter(import_id=import_id).get()
        if upload.state == enumerations.STATE_RUNNING:
            return

    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)

    # Create the style and assign it to the created resource
    # FIXME: Put this in gsconfig.py
    task = import_session.tasks[0]
    task.set_charset(charset)

    # @todo see above in save_step, regarding computed unique name
    name = task.layer.name

    if dataset_id:
        name = Dataset.objects.get(resourcebase_ptr_id=dataset_id).name

    _log(f'Getting from catalog [{name}]')
    try:
        # the importer chooses an available featuretype name late in the game;
        # we need to verify the resource.name otherwise things will fail.  This
        # happens when the same data is uploaded a second time and the default
        # name is chosen.
        gs_catalog.get_layer(name)
    except Exception:
        Upload.objects.invalidate_from_session(upload_session)
        raise LayerNotReady(
            _(f"Expected to find layer named '{name}' in geoserver"))

    # Commit the import when the session (or its first task) is ready;
    # an INCOMPLETE session with a non-ERROR task state is unexpected.
    if import_session.state == 'READY' or (import_session.state == 'PENDING' and task.state == 'READY'):
        import_session.commit()
    elif import_session.state == 'INCOMPLETE' and task.state != 'ERROR':
        Upload.objects.invalidate_from_session(upload_session)
        raise Exception(f'unknown item state: {task.state}')

    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)

    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)

    _log(f'Creating Django record for [{name}]')
    target = task.target
    alternate = task.get_target_layer_name()

    dataset_uuid = None
    title = upload_session.dataset_title
    abstract = upload_session.dataset_abstract

    # --- Optional uploaded XML metadata -----------------------------------
    metadata_uploaded = False
    xml_file = upload_session.base_file[0].xml_files
    if xml_file:
        try:
            # get model properties from XML
            # If it's contained within a zip, need to extract it
            if upload_session.base_file.archive:
                archive = upload_session.base_file.archive
                zf = zipfile.ZipFile(archive, 'r', allowZip64=True)
                zf.extract(xml_file[0], os.path.dirname(archive))
                # Assign the absolute path to this file
                xml_file = f"{os.path.dirname(archive)}/{xml_file[0]}"

            # Sanity checks: normalize xml_file to a single path string or None.
            if isinstance(xml_file, list):
                if len(xml_file) > 0:
                    xml_file = xml_file[0]
                else:
                    xml_file = None
            elif not isinstance(xml_file, str):
                xml_file = None

            if xml_file and os.path.exists(xml_file) and os.access(
                    xml_file, os.R_OK):
                dataset_uuid, vals, regions, keywords, custom = parse_metadata(
                    open(xml_file).read())
                metadata_uploaded = True
        except Exception as e:
            Upload.objects.invalidate_from_session(upload_session)
            logger.error(e)
            raise GeoNodeException(
                _("Exception occurred while parsing the provided Metadata file."), e)

    # --- Optional uploaded SLD style --------------------------------------
    # look for SLD
    sld_file = upload_session.base_file[0].sld_files
    sld_uploaded = False
    if sld_file:
        # If it's contained within a zip, need to extract it
        if upload_session.base_file.archive:
            archive = upload_session.base_file.archive
            logger.error(f'using uploaded sld file from {archive}')
            zf = zipfile.ZipFile(archive, 'r', allowZip64=True)
            # NOTE(review): ``ZipFile.extract(member, path=None, pwd=None)`` —
            # passing ``os.path.dirname(archive)`` positionally AND ``path=``
            # by keyword supplies 'path' twice and raises TypeError at
            # runtime; likely should be just ``os.path.dirname(archive)``.
            zf.extract(sld_file[0], os.path.dirname(archive), path=upload_session.tempdir)
            # Assign the absolute path to this file
            sld_file[0] = f"{os.path.dirname(archive)}/{sld_file[0]}"
        else:
            # Copy the SLD next to the temp dir so the style engine can read it.
            _sld_file = f"{os.path.dirname(upload_session.tempdir)}/{os.path.basename(sld_file[0])}"
            logger.error(f"copying [{sld_file[0]}] to [{_sld_file}]")
            try:
                shutil.copyfile(sld_file[0], _sld_file)
                sld_file = _sld_file
            except (IsADirectoryError, shutil.SameFileError) as e:
                logger.exception(e)
                sld_file = sld_file[0]
            except Exception as e:
                raise UploadException.from_exc(_('Error uploading Dataset'), e)
        sld_uploaded = True
    else:
        # get_files will not find the sld if it doesn't match the base name
        # so we've worked around that in the view - if provided, it will be here
        if upload_session.import_sld_file:
            logger.error('using provided sld file from importer')
            base_file = upload_session.base_file
            sld_file = base_file[0].sld_files[0]
        sld_uploaded = False
    logger.error(f'[sld_uploaded: {sld_uploaded}] sld_file: {sld_file}')

    # Make sure the layer does not exists already
    if dataset_uuid and Dataset.objects.filter(uuid=dataset_uuid).count():
        Upload.objects.invalidate_from_session(upload_session)
        logger.error("The UUID identifier from the XML Metadata is already in use in this system.")
        raise GeoNodeException(
            _("The UUID identifier from the XML Metadata is already in use in this system."))

    # Is it a regular file or an ImageMosaic?
    # if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
    saved_dataset = None
    has_time = has_elevation = False
    start = end = None
    if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
        has_elevation = True
        start = datetime.datetime.strptime(
            upload_session.mosaic_time_value,
            TIME_REGEX_FORMAT[upload_session.mosaic_time_regex])
        start = pytz.utc.localize(start, is_dst=False)
        end = start
    if upload_session.time and upload_session.time_info and upload_session.time_transforms:
        has_time = True

    if upload_session.append_to_mosaic_opts:
        # Appending granules to an existing ImageMosaic: reuse (or create) the
        # mosaic Dataset and widen its temporal extent to cover this granule.
        try:
            with transaction.atomic():
                saved_dataset_filter = Dataset.objects.filter(
                    name=upload_session.append_to_mosaic_name)
                if not saved_dataset_filter.exists():
                    # NOTE(review): other ``resource_manager.create`` calls in
                    # this file pass a uuid and ``resource_type`` — confirm this
                    # keyword-only call matches the manager's signature.
                    saved_dataset = resource_manager.create(
                        name=upload_session.append_to_mosaic_name)
                    created = True
                else:
                    saved_dataset = saved_dataset_filter.get()
                    created = False
                saved_dataset.set_dirty_state()
                if saved_dataset.temporal_extent_start and end:
                    if pytz.utc.localize(saved_dataset.temporal_extent_start, is_dst=False) < end:
                        saved_dataset.temporal_extent_end = end
                        Dataset.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                            temporal_extent_end=end)
                    else:
                        saved_dataset.temporal_extent_start = end
                        Dataset.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                            temporal_extent_start=end)
        except Exception as e:
            _log(f"There was an error updating the mosaic temporal extent: {str(e)}")
    else:
        # Regular upload: create (or reuse) the Dataset record bound to the
        # GeoServer target store/workspace.
        try:
            with transaction.atomic():
                saved_dataset_filter = Dataset.objects.filter(
                    store=target.name,
                    alternate=alternate,
                    workspace=target.workspace_name,
                    name=task.layer.name)
                if not saved_dataset_filter.exists():
                    saved_dataset = resource_manager.create(
                        dataset_uuid,
                        resource_type=Dataset,
                        defaults=dict(
                            store=target.name,
                            subtype=get_dataset_storetype(target.store_type),
                            alternate=alternate,
                            workspace=target.workspace_name,
                            title=title,
                            name=task.layer.name,
                            abstract=abstract or _('No abstract provided'),
                            owner=user,
                            temporal_extent_start=start,
                            temporal_extent_end=end,
                            is_mosaic=has_elevation,
                            has_time=has_time,
                            has_elevation=has_elevation,
                            time_regex=upload_session.mosaic_time_regex))
                    created = True
                else:
                    saved_dataset = saved_dataset_filter.get()
                    created = False
                saved_dataset.set_dirty_state()
        except Exception as e:
            Upload.objects.invalidate_from_session(upload_session)
            raise UploadException.from_exc(_('Error configuring Dataset'), e)

    assert saved_dataset

    # Pre-existing datasets need no further finalization.
    if not created:
        return saved_dataset

    # Finalize: permissions, metadata, style, time info and thumbnail; the
    # dirty-state flag is always cleared, even on failure.
    try:
        saved_dataset.set_dirty_state()
        with transaction.atomic():
            Upload.objects.update_from_session(upload_session, resource=saved_dataset)
            # Set default permissions on the newly created layer and send notifications
            permissions = upload_session.permissions

            # Finalize Upload
            resource_manager.set_permissions(
                None, instance=saved_dataset, permissions=permissions, created=created)
            resource_manager.update(
                None, instance=saved_dataset, xml_file=xml_file, metadata_uploaded=metadata_uploaded)
            resource_manager.exec(
                'set_style', None, instance=saved_dataset,
                sld_uploaded=sld_uploaded, sld_file=sld_file, tempdir=upload_session.tempdir)
            resource_manager.exec(
                'set_time_info', None, instance=saved_dataset, time_info=upload_session.time_info)
            resource_manager.set_thumbnail(None, instance=saved_dataset)
        saved_dataset.set_processing_state(enumerations.STATE_PROCESSED)
    except Exception as e:
        saved_dataset.set_processing_state(enumerations.STATE_INVALID)
        raise GeoNodeException(e)
    finally:
        saved_dataset.clear_dirty_state()

    # Best-effort cleanup of temp folders; always mark the Upload complete.
    try:
        logger.debug(
            f"... Cleaning up the temporary folders {upload_session.tempdir}")
        if upload_session.tempdir and os.path.exists(upload_session.tempdir):
            shutil.rmtree(upload_session.tempdir)
    except Exception as e:
        logger.warning(e)
    finally:
        Upload.objects.filter(import_id=import_id).update(complete=True)

    return saved_dataset
def form_valid(self, form):
    """
    If the form is valid, save the associated model.

    Creates the Document (from an uploaded file or a doc_url), applies
    moderation/publishing flags and permissions, optionally extracts EXIF
    metadata, updates the resource and returns either a JSON payload or a
    redirect to the document detail page.
    """
    doc_form = form.cleaned_data

    file = doc_form.pop('doc_file', None)
    if file:
        # Stage the uploaded file through the storage manager, then create
        # the Document pointing at the stored path.
        tempdir = tempfile.mkdtemp(dir=settings.STATIC_ROOT)
        dirname = os.path.basename(tempdir)
        filepath = storage_manager.save(f"{dirname}/{file.name}", file)
        storage_path = storage_manager.path(filepath)
        self.object = resource_manager.create(
            None,
            resource_type=Document,
            defaults=dict(
                owner=self.request.user,
                doc_url=doc_form.pop('doc_url', None),
                title=doc_form.pop('title', file.name),
                files=[storage_path]))
        # Drop the staging dir unless the storage manager kept the file in it.
        if tempdir != os.path.dirname(storage_path):
            shutil.rmtree(tempdir, ignore_errors=True)
    else:
        # URL-only document: no file payload.
        self.object = resource_manager.create(
            None,
            resource_type=Document,
            defaults=dict(
                owner=self.request.user,
                doc_url=doc_form.pop('doc_url', None),
                title=doc_form.pop('title', None)))

    # Respect site-wide moderation / publishing workflow settings.
    if settings.ADMIN_MODERATE_UPLOADS:
        self.object.is_approved = False
    if settings.RESOURCE_PUBLISHING:
        self.object.is_published = False

    resource_manager.set_permissions(
        None, instance=self.object, permissions=form.cleaned_data["permissions"], created=True)

    abstract = None
    date = None
    regions = []
    keywords = []
    bbox = None

    url = hookset.document_detail_url(self.object)

    out = {'success': False}

    # Best-effort EXIF extraction; failures only produce a debug log.
    if getattr(settings, 'EXIF_ENABLED', False):
        try:
            from geonode.documents.exif.utils import exif_extract_metadata_doc
            exif_metadata = exif_extract_metadata_doc(self.object)
            if exif_metadata:
                date = exif_metadata.get('date', None)
                keywords.extend(exif_metadata.get('keywords', []))
                bbox = exif_metadata.get('bbox', None)
                abstract = exif_metadata.get('abstract', None)
        except Exception:
            logger.debug("Exif extraction failed.")

    resource_manager.update(
        self.object.uuid,
        instance=self.object,
        keywords=keywords,
        regions=regions,
        vals=dict(
            abstract=abstract,
            date=date,
            date_type="Creation",
            bbox_polygon=BBOXHelper.from_xy(bbox).as_polygon() if bbox else None),
        notify=True)

    resource_manager.set_thumbnail(self.object.uuid, instance=self.object, overwrite=False)

    register_event(self.request, EventType.EVENT_UPLOAD, self.object)

    if self.request.GET.get('no__redirect', False):
        out['success'] = True
        out['url'] = url
        # out['success'] was just set True, so this always yields 200; kept
        # as-is to preserve behavior.
        if out['success']:
            status_code = 200
        else:
            status_code = 400
        return HttpResponse(
            json.dumps(out),
            content_type='application/json',
            status=status_code)
    else:
        return HttpResponseRedirect(url)
def geoapp_edit(request, geoappid, template='apps/app_edit.html'):
    """
    The view that returns the app composer opened to
    the app with the given app ID.

    GET renders the composer; POST/PATCH/PUT additionally persist the app
    (update, re-apply permissions, refresh thumbnail) before rendering.
    """
    try:
        geoapp_obj = _resolve_geoapp(
            request, geoappid, 'base.view_resourcebase', _PERMISSION_MSG_VIEW)
    except PermissionDenied:
        return HttpResponse(_("Not allowed"), status=403)
    except Exception:
        raise Http404(_("Not found"))
    if not geoapp_obj:
        raise Http404(_("Not found"))

    # Call this first in order to be sure "perms_list" is correct
    permissions_json = _perms_info_json(geoapp_obj)

    perms_list = list(
        geoapp_obj.get_self_resource().get_user_perms(request.user)
        .union(geoapp_obj.get_user_perms(request.user)))

    group = None
    if geoapp_obj.group:
        try:
            group = GroupProfile.objects.get(slug=geoapp_obj.group.name)
        except GroupProfile.DoesNotExist:
            group = None

    r = geoapp_obj
    if request.method in ('POST', 'PATCH', 'PUT'):
        # Persist changes, then re-apply the permissions captured above and
        # refresh the thumbnail (without overwriting an existing one).
        r = resource_manager.update(geoapp_obj.uuid, instance=geoapp_obj, notify=True)
        resource_manager.set_permissions(
            geoapp_obj.uuid, instance=geoapp_obj, permissions=ast.literal_eval(permissions_json))
        resource_manager.set_thumbnail(geoapp_obj.uuid, instance=geoapp_obj, overwrite=False)

    # Expose a bare OAuth token string only when it is valid and unexpired.
    access_token = None
    if request and request.user:
        access_token = get_or_create_token(request.user)
        if access_token and not access_token.is_expired():
            access_token = access_token.token
        else:
            access_token = None

    _config = json.dumps(r.blob)
    _ctx = {
        'appId': geoappid,
        'appType': geoapp_obj.resource_type,
        'config': _config,
        'user': request.user,
        'access_token': access_token,
        'resource': geoapp_obj,
        'group': group,
        'perms_list': perms_list,
        "permissions_json": permissions_json,
        'preview': getattr(settings, 'GEONODE_CLIENT_LAYER_PREVIEW_LIBRARY', 'mapstore')
    }
    return render(request, template, context=_ctx)
def set_permissions(self, perm_spec, created=False):
    """Delegate permission assignment for this resource to the resource manager."""
    # Imported here to avoid a circular import at module load time.
    from geonode.resource.manager import resource_manager
    return resource_manager.set_permissions(
        self.uuid,
        instance=self,
        permissions=perm_spec,
        created=created)
def set_default_permissions(self, owner=None):
    """Reset this resource to its default permissions via the resource manager.

    Passing ``permissions=None`` asks the manager to compute the defaults.
    """
    # Imported here to avoid a circular import at module load time.
    from geonode.resource.manager import resource_manager
    return resource_manager.set_permissions(
        self.uuid,
        instance=self,
        owner=owner,
        permissions=None)
def set_datasets_permissions(permissions_name, resources_names=None, users_usernames=None, groups_names=None, delete_flag=False, verbose=False):
    """Bulk add or remove a named permission set on Datasets.

    :param permissions_name: one of read/r, write/w, download/d, owner/o.
    :param resources_names: dataset titles or names to touch (all when falsy).
    :param users_usernames: usernames (or user-like objects) to grant/revoke.
    :param groups_names: group names to grant/revoke.
    :param delete_flag: when True, remove the permissions instead of adding.
    :param verbose: when True, log and print the before/after perm specs.
    """
    # Processing information
    if not resources_names:
        # If resources is None we consider all the existing layer
        resources = Dataset.objects.all()
    else:
        try:
            resources = Dataset.objects.filter(Q(title__in=resources_names) | Q(name__in=resources_names))
        except Dataset.DoesNotExist:
            # NOTE(review): ``filter`` does not raise DoesNotExist; if this
            # branch ever ran, ``resources`` would be unbound below.
            logger.warning(
                f'No resources have been found with these names: {", ".join(resources_names)}.')
    if not resources:
        logger.warning("No resources have been found. No update operations have been executed.")
    else:
        # PERMISSIONS
        if not permissions_name:
            logger.error("No permissions have been provided.")
        else:
            # Map the permission short-hand to the concrete permission list;
            # note that deleting 'read' removes EVERY permission class.
            permissions = []
            if permissions_name.lower() in ('read', 'r'):
                if not delete_flag:
                    permissions = READ_PERMISSIONS
                else:
                    permissions = READ_PERMISSIONS + WRITE_PERMISSIONS \
                        + DOWNLOAD_PERMISSIONS + OWNER_PERMISSIONS
            elif permissions_name.lower() in ('write', 'w'):
                if not delete_flag:
                    permissions = READ_PERMISSIONS + WRITE_PERMISSIONS
                else:
                    permissions = WRITE_PERMISSIONS
            elif permissions_name.lower() in ('download', 'd'):
                if not delete_flag:
                    permissions = READ_PERMISSIONS + DOWNLOAD_PERMISSIONS
                else:
                    permissions = DOWNLOAD_PERMISSIONS
            elif permissions_name.lower() in ('owner', 'o'):
                if not delete_flag:
                    permissions = READ_PERMISSIONS + WRITE_PERMISSIONS \
                        + DOWNLOAD_PERMISSIONS + OWNER_PERMISSIONS
                else:
                    permissions = OWNER_PERMISSIONS
            if not permissions:
                logger.error(
                    "Permission must match one of these values: read (r), write (w), download (d), owner (o).")
            else:
                if not users_usernames and not groups_names:
                    logger.error(
                        "At least one user or one group must be provided.")
                else:
                    # USERS: resolve usernames (or user-like objects) to User
                    # instances; unknown names are skipped with a warning.
                    users = []
                    if users_usernames:
                        User = get_user_model()
                        for _user in users_usernames:
                            try:
                                if isinstance(_user, str):
                                    user = User.objects.get(username=_user)
                                else:
                                    user = User.objects.get(username=_user.username)
                                users.append(user)
                            except User.DoesNotExist:
                                logger.warning(
                                    f'The user {_user} does not exists. '
                                    'It has been skipped.')
                    # GROUPS: resolve group names similarly.
                    groups = []
                    if groups_names:
                        for group_name in groups_names:
                            try:
                                group = Group.objects.get(name=group_name)
                                groups.append(group)
                            except Group.DoesNotExist:
                                logger.warning(
                                    f'The group {group_name} does not exists. '
                                    'It has been skipped.')
                    if not users and not groups:
                        logger.error(
                            'Neither users nor groups corresponding to the typed names have been found. '
                            'No update operations have been executed.')
                    else:
                        # RESOURCES: apply the add/delete to every matched dataset.
                        for resource in resources:
                            # Existing permissions on the resource
                            perm_spec = resource.get_all_level_info()
                            if verbose:
                                logger.info(
                                    f"Initial permissions info for the resource {resource.title}: {perm_spec}")
                                print(
                                    f"Initial permissions info for the resource {resource.title}: {perm_spec}")
                            for u in users:
                                _user = u
                                # Add permissions
                                if not delete_flag:
                                    # Check the permission already exists
                                    # (perm_spec["users"] may be keyed by User
                                    # object or by username string — both are
                                    # checked, and username keys are migrated
                                    # to User-object keys below).
                                    if _user not in perm_spec["users"] and _user.username not in perm_spec["users"]:
                                        perm_spec["users"][_user] = permissions
                                    else:
                                        if _user.username in perm_spec["users"]:
                                            u_perms_list = perm_spec["users"][_user.username]
                                            del(perm_spec["users"][_user.username])
                                            perm_spec["users"][_user] = u_perms_list
                                        try:
                                            # Union the existing and requested
                                            # permissions for this user.
                                            u_perms_list = perm_spec["users"][_user]
                                            base_set = set(u_perms_list)
                                            target_set = set(permissions)
                                            perm_spec["users"][_user] = list(base_set | target_set)
                                        except KeyError:
                                            perm_spec["users"][_user] = permissions
                                # Delete permissions
                                else:
                                    # Skip resource owner
                                    if _user != resource.owner:
                                        if _user in perm_spec["users"]:
                                            # Keep only the permissions NOT in
                                            # the requested removal set.
                                            u_perms_set = set()
                                            for up in perm_spec["users"][_user]:
                                                if up not in permissions:
                                                    u_perms_set.add(up)
                                            perm_spec["users"][_user] = list(u_perms_set)
                                        else:
                                            logger.warning(
                                                f"The user {_user.username} does not have "
                                                f"any permission on the dataset {resource.title}. "
                                                "It has been skipped.")
                                    else:
                                        logger.warning(
                                            f"Warning! - The user {_user.username} is the "
                                            f"layer {resource.title} owner, "
                                            "so its permissions can't be changed. "
                                            "It has been skipped.")
                            for g in groups:
                                _group = g
                                # Add permissions
                                if not delete_flag:
                                    # Check the permission already exists
                                    # (same Group-object vs name-string key
                                    # migration as for users above).
                                    if _group not in perm_spec["groups"] and _group.name not in perm_spec["groups"]:
                                        perm_spec["groups"][_group] = permissions
                                    else:
                                        if _group.name in perm_spec["groups"]:
                                            g_perms_list = perm_spec["groups"][_group.name]
                                            del(perm_spec["groups"][_group.name])
                                            perm_spec["groups"][_group] = g_perms_list
                                        try:
                                            g_perms_list = perm_spec["groups"][_group]
                                            base_set = set(g_perms_list)
                                            target_set = set(permissions)
                                            perm_spec["groups"][_group] = list(base_set | target_set)
                                        except KeyError:
                                            perm_spec["groups"][_group] = permissions
                                # Delete permissions
                                else:
                                    if g in perm_spec["groups"]:
                                        g_perms_set = set()
                                        for gp in perm_spec["groups"][g]:
                                            if gp not in permissions:
                                                g_perms_set.add(gp)
                                        perm_spec["groups"][g] = list(g_perms_set)
                                    else:
                                        logger.warning(
                                            f"The group {g.name} does not have any permission "
                                            f"on the dataset {resource.title}. "
                                            "It has been skipped.")
                            # Set final permissions
                            # (imported locally to avoid a circular import)
                            from geonode.resource.manager import resource_manager
                            resource_manager.set_permissions(resource.uuid, instance=resource, permissions=perm_spec)
                            if verbose:
                                logger.info(
                                    f"Final permissions info for the resource {resource.title}: {perm_spec}")
                                print(
                                    f"Final permissions info for the resource {resource.title}: {perm_spec}")
                        if verbose:
                            logger.info("Permissions successfully updated!")
                            print("Permissions successfully updated!")