def set_geofence_all(instance):
    """Grant a GeoFence ALLOW rule on a Dataset for ANONYMOUS (all users).

    This method is only relevant to Dataset instances that have their
    underlying data managed by geoserver, meaning:

    * layers that are not associated with a Service
    * layers that are associated with a Service that is being CASCADED through
      geoserver

    """
    resource = instance.get_self_resource()
    logger.debug(f"Inside set_geofence_all for instance {instance}")
    workspace = get_dataset_workspace(resource.dataset)
    # Prefer the plain dataset name; fall back to the qualified "alternate".
    if resource.dataset and hasattr(resource.dataset, 'name'):
        dataset_name = resource.dataset.name
    else:
        dataset_name = resource.dataset.alternate
    logger.debug(f"going to work in workspace {workspace}")
    try:
        gs_conf = settings.OGC_SERVER['default']
        gs_url = gs_conf['LOCATION']
        gs_user = gs_conf['USER']
        gs_passwd = gs_conf['PASSWORD']
        # Create GeoFence Rules for ANONYMOUS to the Dataset, equivalent to:
        #   curl -X POST -u admin:geoserver -H "Content-Type: text/xml" -d \
        #     "<Rule><workspace>geonode</workspace><layer>{layer}</layer><access>ALLOW</access></Rule>" \
        #     http://<host>:<port>/geoserver/rest/geofence/rules
        rule_payload = _get_geofence_payload(
            layer=resource.dataset,
            dataset_name=dataset_name,
            workspace=workspace,
            access="ALLOW"
        )
        resp = requests.post(
            f"{gs_url}rest/geofence/rules",
            headers={'Content-type': 'application/xml'},
            data=rule_payload,
            auth=HTTPBasicAuth(gs_user, gs_passwd)
        )
        if resp.status_code not in (200, 201):
            logger.debug(f"Response {resp.status_code} : {resp.text}")
            raise RuntimeError("Could not ADD GeoServer ANONYMOUS Rule "
                               f"for Dataset {dataset_name}")
    except Exception:
        # Best effort: failures are logged, never propagated to the caller.
        logger.debug(traceback.format_exc())
    finally:
        # Either flush the GeoFence cache immediately, or defer the work by
        # marking the resource dirty when DELAYED_SECURITY_SIGNALS is enabled.
        if not getattr(settings, 'DELAYED_SECURITY_SIGNALS', False):
            set_geofence_invalidate_cache()
        else:
            resource.set_dirty_state()
def purge_geofence_dataset_rules(resource):
    """Purge all existing GeoFence rules associated with the resource's Dataset.

    Scans the GeoFence REST API for rules bound to the dataset and deletes
    each matching rule by id. Any failure is logged and swallowed so callers
    are never interrupted by a GeoServer outage.
    """
    # Scan GeoFence Rules associated to the Dataset:
    #   curl -u admin:geoserver \
    #     "http://<host>:<port>/geoserver/rest/geofence/rules.json?workspace=geonode&layer={layer}"
    url = settings.OGC_SERVER['default']['LOCATION']
    user = settings.OGC_SERVER['default']['USER']
    passwd = settings.OGC_SERVER['default']['PASSWORD']
    headers = {'Content-type': 'application/json'}
    workspace = get_dataset_workspace(resource.dataset)
    # Prefer the plain dataset name; fall back to the qualified "alternate".
    dataset_name = resource.dataset.name if resource.dataset and hasattr(resource.dataset, 'name') \
        else resource.dataset.alternate
    try:
        # NOTE: distinct names for the GET response / rule dicts / DELETE
        # responses — the original reused `r` for all three, shadowing the
        # response object while iterating the rules.
        resp = requests.get(
            f"{url}rest/geofence/rules.json?workspace={workspace}&layer={dataset_name}",
            headers=headers,
            auth=HTTPBasicAuth(user, passwd),
            timeout=10,
            verify=False  # NOTE(review): TLS verification disabled — internal GeoServer endpoint
        )
        if 200 <= resp.status_code < 300:
            gs_rules = resp.json()
            r_ids = []
            if gs_rules and gs_rules['rules']:
                # Collect the ids of the rules bound to this dataset only.
                for rule in gs_rules['rules']:
                    if rule['layer'] and rule['layer'] == dataset_name:
                        r_ids.append(rule['id'])
            # Delete GeoFence Rules associated to the Dataset:
            #   curl -X DELETE -u admin:geoserver \
            #     http://<host>:<port>/geoserver/rest/geofence/rules/id/{r_id}
            for r_id in r_ids:
                del_resp = requests.delete(
                    f"{url}rest/geofence/rules/id/{str(r_id)}",
                    headers=headers,
                    auth=HTTPBasicAuth(user, passwd),
                    timeout=10)  # match the GET's timeout so a dead server cannot hang us
                if del_resp.status_code < 200 or del_resp.status_code > 201:
                    msg = "Could not DELETE GeoServer Rule for Dataset "
                    msg = msg + str(dataset_name)
                    e = Exception(msg)
                    logger.debug(f"Response [{del_resp.status_code}] : {del_resp.text}")
                    raise e
    except Exception as e:
        logger.exception(e)
def sync_geofence_with_guardian(dataset, perms, user=None, group=None, group_perms=None):
    """
    Sync Guardian permissions to GeoFence.

    Translates the Guardian permission set `perms` for `dataset` into
    GeoFence rules on GeoServer, scoped to `user`, `group`, or anonymous
    ("*") depending on the arguments. `group_perms` (a mapping of group
    name -> perms) is only consulted to decide whether WFS write requests
    must be explicitly denied for `user`.
    """
    # Prefer the plain dataset name; otherwise strip the workspace prefix
    # from the qualified "alternate" identifier.
    _dataset_name = dataset.name if dataset and hasattr(
        dataset, 'name') else dataset.alternate.split(":")[0]
    _dataset_workspace = get_dataset_workspace(dataset)
    # Create new rule-set
    gf_services = _get_gf_services(dataset, perms)

    # When the grantee lacks 'change_dataset_data', explicitly DENY the WFS
    # write operations — unless one of the user's groups grants that perm
    # through group_perms, in which case no deny rules are emitted.
    gf_requests = {}
    if 'change_dataset_data' not in perms:
        _skip_perm = False
        if user and group_perms:
            if isinstance(user, str):
                user = get_user_model().objects.get(username=user)
            user_groups = list(user.groups.all().values_list('name', flat=True))
            for _group, _perm in group_perms.items():
                if 'change_dataset_data' in _perm and _group in user_groups:
                    _skip_perm = True
                    break
        if not _skip_perm:
            gf_requests["WFS"] = {
                "TRANSACTION": False,
                "LOCKFEATURE": False,
                "GETFEATUREWITHLOCK": False
            }

    _user = None
    _group = None
    users_geolimits = None
    groups_geolimits = None
    anonymous_geolimits = None
    # Resolve the effective grantee names, whether caching must be disabled,
    # and any geometry limits (WKT areas) attached to user/group/anonymous.
    _group, _user, _disable_cache, users_geolimits, groups_geolimits, anonymous_geolimits = get_user_geolimits(
        dataset, user, group, gf_services)

    if _disable_cache:
        # Re-order the services dict so the wildcard "*" entry comes first;
        # GeoFence rule priority follows insertion order here.
        gf_services_limits_first = {"*": gf_services.pop('*')}
        gf_services_limits_first.update(gf_services)
        gf_services = gf_services_limits_first

    for service, allowed in gf_services.items():
        if dataset and _dataset_name and allowed:
            if _user:
                # Per-user rules (optionally restricted to a WKT geo-limit).
                logger.debug(
                    f"Adding 'user' to geofence the rule: {dataset} {service} {_user}"
                )
                _wkt = None
                if users_geolimits and users_geolimits.count():
                    _wkt = users_geolimits.last().wkt
                if service in gf_requests:
                    # Emit the per-request (deny) rules before the service-wide ALLOW.
                    for request, enabled in gf_requests[service].items():
                        _update_geofence_rule(dataset, _dataset_name,
                                              _dataset_workspace, service,
                                              request=request, user=_user,
                                              allow=enabled)
                _update_geofence_rule(dataset, _dataset_name,
                                      _dataset_workspace, service,
                                      user=_user, geo_limit=_wkt)
            elif not _group:
                # No user and no group: anonymous ("*") rules.
                logger.debug(
                    f"Adding to geofence the rule: {dataset} {service} *")
                _wkt = None
                if anonymous_geolimits and anonymous_geolimits.count():
                    _wkt = anonymous_geolimits.last().wkt
                if service in gf_requests:
                    for request, enabled in gf_requests[service].items():
                        _update_geofence_rule(dataset, _dataset_name,
                                              _dataset_workspace, service,
                                              request=request, user=_user,
                                              allow=enabled)
                _update_geofence_rule(dataset, _dataset_name,
                                      _dataset_workspace, service,
                                      geo_limit=_wkt)
                # NOTE(review): this repeats the per-request rules emitted
                # just above (identical arguments) — looks redundant; confirm
                # whether GeoFence rule priority requires the duplication.
                if service in gf_requests:
                    for request, enabled in gf_requests[service].items():
                        _update_geofence_rule(dataset, _dataset_name,
                                              _dataset_workspace, service,
                                              request=request, user=_user,
                                              allow=enabled)
            if _group:
                # Per-group rules (optionally restricted to a WKT geo-limit).
                logger.debug(
                    f"Adding 'group' to geofence the rule: {dataset} {service} {_group}"
                )
                _wkt = None
                if groups_geolimits and groups_geolimits.count():
                    _wkt = groups_geolimits.last().wkt
                if service in gf_requests:
                    for request, enabled in gf_requests[service].items():
                        _update_geofence_rule(dataset, _dataset_name,
                                              _dataset_workspace, service,
                                              request=request, group=_group,
                                              allow=enabled)
                _update_geofence_rule(dataset, _dataset_name,
                                      _dataset_workspace, service,
                                      group=_group, geo_limit=_wkt)
                # NOTE(review): duplicated per-request rule emission, as in
                # the anonymous branch above — confirm intent.
                if service in gf_requests:
                    for request, enabled in gf_requests[service].items():
                        _update_geofence_rule(dataset, _dataset_name,
                                              _dataset_workspace, service,
                                              request=request, group=_group,
                                              allow=enabled)
    # Flush the GeoFence cache now, or defer by flagging the dataset dirty
    # when DELAYED_SECURITY_SIGNALS is enabled.
    if not getattr(settings, 'DELAYED_SECURITY_SIGNALS', False):
        set_geofence_invalidate_cache()
    else:
        dataset.set_dirty_state()
def _datasets_locations(
        instance: Union[Dataset, Map],
        compute_bbox: bool = False,
        target_crs: str = "EPSG:3857") -> Tuple[List[List], List]:
    """
    Function returning a list mapping instance's datasets to their locations, enabling to construct
    a minimum number of WMS request for multiple datasets of the same OGC source (ensuring datasets
    order for Maps)

    :param instance: instance of Dataset or Map models
    :param compute_bbox: flag determining whether a BBOX containing the instance should be computed,
                         based on instance's datasets
    :param target_crs: valid only when compute_bbox is True - CRS of the returned BBOX
    :return: a tuple with a list, which maps datasets to their locations in a correct datasets order
             e.g.
                [
                    ["http://localhost:8080/geoserver/", ["geonode:layer1", "geonode:layer2"], []]
                ]
             and a list optionally consisting of 5 elements containing west, east, south, north
             instance's boundaries and CRS
    """
    ogc_server_settings = OGC_Servers_Handler(settings.OGC_SERVER)["default"]

    locations = []  # each entry: [ows_location, [dataset alternates...], [styles...]]
    bbox = []
    if isinstance(instance, Dataset):
        # Single dataset: one location entry, no style override.
        locations.append([
            instance.ows_url or ogc_server_settings.LOCATION,
            [instance.alternate], []
        ])
        if compute_bbox:
            if instance.ll_bbox_polygon:
                bbox = utils.clean_bbox(instance.ll_bbox, target_crs)
            elif (instance.bbox[-1].upper() != 'EPSG:3857'
                  and target_crs.upper() == 'EPSG:3857'
                  and utils.exceeds_epsg3857_area_of_use(instance.bbox)):
                # handle exceeding the area of use of the default thumb's CRS
                bbox = utils.transform_bbox(
                    utils.crop_to_3857_area_of_use(instance.bbox), target_crs)
            else:
                bbox = utils.transform_bbox(instance.bbox, target_crs)
    elif isinstance(instance, Map):
        for map_dataset in instance.maplayers.iterator():
            if not map_dataset.local and not map_dataset.ows_url:
                logger.warning(
                    "Incorrectly defined remote dataset encountered (no OWS URL defined)."
                    "Skipping it in the thumbnail generation.")
                continue

            name = get_dataset_name(map_dataset)
            store = map_dataset.store
            workspace = get_dataset_workspace(map_dataset)
            map_dataset_style = map_dataset.current_style

            # Resolve the Dataset record: most specific lookup first
            # (store+workspace+name), then workspace+name, then alternate.
            if store and Dataset.objects.filter(
                    store=store, workspace=workspace, name=name).count() > 0:
                dataset = Dataset.objects.filter(
                    store=store, workspace=workspace, name=name).first()
            elif workspace and Dataset.objects.filter(
                    workspace=workspace, name=name).count() > 0:
                dataset = Dataset.objects.filter(
                    workspace=workspace, name=name).first()
            elif Dataset.objects.filter(alternate=map_dataset.name).count() > 0:
                dataset = Dataset.objects.filter(
                    alternate=map_dataset.name).first()
            else:
                logger.warning(
                    f"Dataset for MapLayer {name} was not found. Skipping it in the thumbnail."
                )
                continue

            if dataset.subtype in ['tileStore', 'remote']:
                # limit number of locations, ensuring dataset order
                if len(locations) and locations[-1][0] == dataset.remote_service.service_url:
                    # if previous dataset's location is the same as the current one - append current dataset there
                    locations[-1][1].append(dataset.alternate)
                    # update the styles too
                    if map_dataset_style:
                        locations[-1][2].append(map_dataset_style)
                else:
                    locations.append([
                        dataset.remote_service.service_url,
                        [dataset.alternate],
                        [map_dataset_style] if map_dataset_style else []
                    ])
            else:
                # limit number of locations, ensuring dataset order
                if len(locations) and locations[-1][0] == settings.OGC_SERVER["default"]["LOCATION"]:
                    # if previous dataset's location is the same as the current one - append current dataset there
                    locations[-1][1].append(dataset.alternate)
                    # update the styles too
                    if map_dataset_style:
                        locations[-1][2].append(map_dataset_style)
                else:
                    locations.append([
                        settings.OGC_SERVER["default"]["LOCATION"],
                        [dataset.alternate],
                        [map_dataset_style] if map_dataset_style else []
                    ])

            if compute_bbox:
                if dataset.ll_bbox_polygon:
                    dataset_bbox = utils.clean_bbox(dataset.ll_bbox, target_crs)
                elif (dataset.bbox[-1].upper() != 'EPSG:3857'
                      and target_crs.upper() == 'EPSG:3857'
                      and utils.exceeds_epsg3857_area_of_use(dataset.bbox)):
                    # handle exceeding the area of use of the default thumb's CRS
                    dataset_bbox = utils.transform_bbox(
                        utils.crop_to_3857_area_of_use(dataset.bbox), target_crs)
                else:
                    dataset_bbox = utils.transform_bbox(dataset.bbox, target_crs)

                if not bbox:
                    bbox = dataset_bbox
                else:
                    # Grow the running BBOX to also cover this dataset.
                    # dataset's BBOX: (left, right, bottom, top)
                    bbox = [
                        min(bbox[0], dataset_bbox[0]),
                        max(bbox[1], dataset_bbox[1]),
                        min(bbox[2], dataset_bbox[2]),
                        max(bbox[3], dataset_bbox[3]),
                    ]

    if bbox and len(bbox) < 5:
        bbox = list(bbox) + [target_crs]  # convert bbox to list, if it's tuple

    return locations, bbox
def dataset_style_manage(request, layername):
    """Manage the GeoServer styles associated with a dataset.

    GET renders the style-management form listing the available and the
    currently assigned styles (plus the default one). POST/PUT/DELETE
    persists the selected styles and default style to GeoServer and syncs
    them back to GeoNode's DB.

    :param request: the Django request
    :param layername: identifier used to resolve the Dataset (requires the
                      'layers.change_dataset_style' permission)
    """
    layer = _resolve_dataset(
        request,
        layername,
        'layers.change_dataset_style',
        _PERMISSION_MSG_MODIFY)

    if request.method == 'GET':
        try:
            cat = gs_catalog

            # First update the layer style info from GS to GeoNode's DB
            try:
                set_styles(layer, cat)
            except AttributeError:
                # fixed: logger.warn is a deprecated alias of logger.warning
                logger.warning(
                    'Unable to set the default style. Ensure Geoserver is running and that this layer exists.'
                )

            gs_styles = []
            # Temporary Hack to remove GeoServer temp styles from the list
            Style.objects.filter(
                name__iregex=r'\w{8}-\w{4}-\w{4}-\w{4}-\w{12}_(ms)_\d{13}'
            ).delete()
            for style in Style.objects.values('name', 'sld_title'):
                gs_styles.append((style['name'], style['sld_title']))

            # Build (name, title) pairs for the styles currently on the layer;
            # fall back to the style name when the SLD title is unavailable.
            current_dataset_styles = layer.styles.all()
            dataset_styles = []
            for style in current_dataset_styles:
                sld_title = style.name
                try:
                    if style.sld_title:
                        sld_title = style.sld_title
                except Exception:
                    tb = traceback.format_exc()
                    logger.debug(tb)
                dataset_styles.append((style.name, sld_title))

            # Render the form
            def_sld_name = None  # noqa
            def_sld_title = None  # noqa
            default_style = None
            if layer.default_style:
                def_sld_name = layer.default_style.name  # noqa
                def_sld_title = layer.default_style.name  # noqa
                try:
                    if layer.default_style.sld_title:
                        def_sld_title = layer.default_style.sld_title
                except Exception:
                    tb = traceback.format_exc()
                    logger.debug(tb)
                default_style = (def_sld_name, def_sld_title)

            return render(request,
                          'datasets/dataset_style_manage.html',
                          context={
                              "layer": layer,
                              "gs_styles": gs_styles,
                              "dataset_styles": dataset_styles,
                              "dataset_style_names": [s[0] for s in dataset_styles],
                              "default_style": default_style
                          })
        except (FailedRequestError, OSError):
            tb = traceback.format_exc()
            logger.debug(tb)
            # fixed: the two adjacent f-strings concatenated without a space,
            # yielding '..."to manage...' in the rendered message
            msg = (
                f'Could not connect to geoserver at "{ogc_server_settings.LOCATION}" '
                f'to manage style information for layer "{layer.name}"')
            logger.debug(msg)
            # If geoserver is not online, return an error
            return render(request,
                          'datasets/dataset_style_manage.html',
                          context={
                              "layer": layer,
                              "error": msg
                          })
    elif request.method in ('POST', 'PUT', 'DELETE'):
        try:
            workspace = get_dataset_workspace(
                layer) or settings.DEFAULT_WORKSPACE
            selected_styles = request.POST.getlist('style-select')
            default_style = request.POST['default_style']

            # Save to GeoServer
            cat = gs_catalog
            try:
                gs_dataset = cat.get_layer(layer.name)
            except Exception:
                gs_dataset = None
            if not gs_dataset:
                gs_dataset = cat.get_layer(layer.alternate)
            if gs_dataset:
                # Resolve the default style: global first, then the layer's
                # workspace, then the site-wide default workspace.
                _default_style = cat.get_style(default_style) or \
                    cat.get_style(default_style, workspace=workspace)
                if _default_style:
                    gs_dataset.default_style = _default_style
                elif cat.get_style(default_style, workspace=settings.DEFAULT_WORKSPACE):
                    gs_dataset.default_style = cat.get_style(
                        default_style, workspace=settings.DEFAULT_WORKSPACE)

                styles = []
                for style in selected_styles:
                    _gs_sld = cat.get_style(style) or cat.get_style(
                        style, workspace=workspace)
                    if _gs_sld:
                        styles.append(_gs_sld)
                    elif cat.get_style(style, workspace=settings.DEFAULT_WORKSPACE):
                        styles.append(
                            cat.get_style(
                                style, workspace=settings.DEFAULT_WORKSPACE))
                    else:
                        # Style no longer exists on GeoServer: drop the stale DB record.
                        Style.objects.filter(name=style).delete()
                gs_dataset.styles = styles
                cat.save(gs_dataset)

            # Save to Django
            set_styles(layer, cat)

            # Invalidate GeoWebCache for the updated resource
            try:
                _stylefilterparams_geowebcache_dataset(layer.alternate)
                _invalidate_geowebcache_dataset(layer.alternate)
            except Exception:
                # Best effort: a failed cache invalidation must not block the style update.
                pass

            return HttpResponseRedirect(layer.get_absolute_url())
        except (FailedRequestError, OSError, MultiValueDictKeyError):
            tb = traceback.format_exc()
            logger.debug(tb)
            msg = (f'Error Saving Styles for Dataset "{layer.name}"')
            # fixed: logger.warn is a deprecated alias of logger.warning
            logger.warning(msg)
            return render(request,
                          'datasets/dataset_style_manage.html',
                          context={
                              "layer": layer,
                              "error": msg
                          })
if anonymous_can_view: sync_geofence_with_guardian(instance, VIEW_PERMISSIONS, user=None, group=None) gf_services = _get_gf_services(instance, VIEW_PERMISSIONS) _, _, _disable_dataset_cache, _, _, _ = get_user_geolimits(instance, None, None, gf_services) _disable_cache.append(_disable_dataset_cache) if anonymous_can_download: sync_geofence_with_guardian(instance, DOWNLOAD_PERMISSIONS, user=None, group=None) gf_services = _get_gf_services(instance, DOWNLOAD_PERMISSIONS) _, _, _disable_dataset_cache, _, _, _ = get_user_geolimits(instance, None, None, gf_services) _disable_cache.append(_disable_dataset_cache) if _disable_cache: filters, formats = _get_gwc_filters_and_formats(_disable_cache) try: _dataset_workspace = get_dataset_workspace(instance.get_real_instance()) toggle_dataset_cache(f'{_dataset_workspace}:{instance.get_real_instance().name}', filters=filters, formats=formats) except Dataset.DoesNotExist: pass else: instance.set_dirty_state() except Exception as e: logger.exception(e) return False return True def set_thumbnail(self, uuid: str, /, instance: ResourceBase = None, overwrite: bool = True, check_bbox: bool = True) -> bool: if instance and (isinstance(instance.get_real_instance(), Dataset) or isinstance(instance.get_real_instance(), Map)): if overwrite or instance.thumbnail_url == static(MISSING_THUMB): create_gs_thumbnail(instance.get_real_instance(), overwrite=overwrite, check_bbox=check_bbox) return True