def test_get_resources(self, mock_wms_get_resource, mock_wms_get_resources, mock_wms_parsed_service, mock_wms):
    """Exercise the WMS handler's resource listing and indexed-field
    extraction against a fully mocked parsed WMS service.

    Creates (or reuses) a throwaway user, registers a GeoNode service for
    the phony URL, and verifies that exactly one remote resource is
    discovered and that its indexed dataset fields can be built.
    """
    # Wire every mock to return the canned parsed-WMS fixture/attributes.
    mock_wms.return_value = (self.phony_url, self.parsed_wms)
    mock_wms_parsed_service.return_value = self.parsed_wms
    mock_wms_parsed_service.provider.url = self.phony_url
    mock_wms_parsed_service.identification.title = self.phony_title
    mock_wms_parsed_service.identification.version = self.phony_version
    mock_wms_get_resource.return_value = list(
        self.parsed_wms.contents.values())[0]
    mock_wms_get_resources.return_value = self.parsed_wms.contents.values()

    handler = wms.WmsServiceHandler(self.phony_url)
    result = list(handler.get_resources())
    # The mocked service exposes a single layer.
    self.assertEqual(len(result), 1)

    test_user, created = get_user_model().objects.get_or_create(
        username="******")
    if created:
        test_user.set_password("somepassword")
        test_user.save()

    result = handler.create_geonode_service(test_user)
    try:
        geonode_service, created = Service.objects.get_or_create(
            base_url=result.base_url, owner=test_user)
        # Remove any datasets left over from previous runs of this service.
        for _d in Dataset.objects.filter(remote_service=geonode_service):
            resource_manager.delete(_d.uuid, instance=_d)
        result = list(handler.get_resources())
        dataset_meta = handler.get_resource(result[0].name)
        resource_fields = handler._get_indexed_dataset_fields(dataset_meta)
        # NOTE(review): pop followed by immediate re-assignment is a no-op;
        # presumably a leftover from code that transformed keywords — confirm.
        keywords = resource_fields.pop("keywords")
        resource_fields["keywords"] = keywords
        resource_fields["is_approved"] = True
        resource_fields["is_published"] = True
    except Service.DoesNotExist as e:
        # In the case the Service URL becomes inaccessible for some reason
        logger.error(e)
def map_remove(request, mapid, template='maps/map_remove.html'):
    '''
    Delete a map, and its constituent layers.

    GET renders a confirmation page; POST performs the deletion through the
    resource manager and redirects to the maps list. Permission failures
    yield 403, resolution failures 404, and any other HTTP method is
    rejected with 403.
    '''
    try:
        map_obj = _resolve_map(
            request, mapid, 'base.delete_resourcebase', _PERMISSION_MSG_VIEW)
    except PermissionDenied:
        return HttpResponse(_("Not allowed"), status=403)
    except Exception:
        raise Http404(_("Not found"))
    if not map_obj:
        raise Http404(_("Not found"))

    if request.method == 'GET':
        return render(request, template, context={"map": map_obj})
    elif request.method == 'POST':
        # Delete via the resource manager so linked resources are cleaned up.
        resource_manager.delete(map_obj.uuid, instance=map_obj)
        register_event(request, EventType.EVENT_REMOVE, map_obj)
        return HttpResponseRedirect(reverse("maps_browse"))
    else:
        # Fix: previously other methods fell through and returned None,
        # making Django raise a 500; reject them explicitly like
        # document_remove/geoapp_remove do.
        return HttpResponse(_("Not allowed"), status=403)
def _update_upload_session_state(self, upload_session_id: int):
    """Task invoked by 'upload_workflow.chord' in order to process all the 'PENDING' Upload tasks.

    Advances a single Upload session: queries the GeoServer importer for
    the next step, and either stores a resume URL (state WAITING) or runs
    the final step (state RUNNING). On failure the session is marked
    INVALID and its resource, if any, is deleted.
    """
    # Per-task lock keyed on the Celery request id; skip silently if the
    # lock cannot be acquired.
    lock_id = f'{self.request.id}'
    with AcquireLock(lock_id) as lock:
        if lock.acquire() is True:
            _upload = Upload.objects.get(id=upload_session_id)
            session = _upload.get_session.import_session
            # Refresh the importer session if the cached one is missing
            # or not yet COMPLETE.
            if not session or session.state != enumerations.STATE_COMPLETE:
                session = gs_uploader.get_session(_upload.import_id)
            if session:
                try:
                    # Ask the importer what the next workflow step is.
                    content = next_step_response(None, _upload.get_session).content
                    if isinstance(content, bytes):
                        content = content.decode('UTF-8')
                    response_json = json.loads(content)
                    _success = response_json.get('success', False)
                    _redirect_to = response_json.get('redirect_to', '')
                    if _success:
                        if 'upload/final' not in _redirect_to and 'upload/check' not in _redirect_to:
                            # Intermediate step: remember where to resume and wait.
                            _upload.set_resume_url(_redirect_to)
                            _upload.set_processing_state(enumerations.STATE_WAITING)
                        else:
                            # Final/check step: run it only for PENDING uploads
                            # whose importer session is COMPLETE.
                            if session.state == enumerations.STATE_COMPLETE and _upload.state == enumerations.STATE_PENDING:
                                if not _upload.resource or not _upload.resource.processed:
                                    final_step_view(None, _upload.get_session)
                                _upload.set_processing_state(enumerations.STATE_RUNNING)
                except (NotFound, Exception) as e:
                    # NOTE(review): '(NotFound, Exception)' is equivalent to
                    # catching every Exception — presumably intentional
                    # best-effort handling; confirm.
                    logger.exception(e)
                    if _upload.state not in (enumerations.STATE_COMPLETE, enumerations.STATE_PROCESSED):
                        _upload.set_processing_state(enumerations.STATE_INVALID)
                        if _upload.resource:
                            resource_manager.delete(_upload.resource.uuid)
def delete_queryset(self, request, queryset):
    """
    We need to invoke the 'ResourceBase.delete' method even when deleting
    through the admin batch action.
    """
    # Deferred import (presumably to avoid a circular import at module
    # load time), hoisted out of the loop so it executes once per batch
    # instead of once per object.
    from geonode.resource.manager import resource_manager

    for obj in queryset:
        resource_manager.delete(obj.uuid, instance=obj)
def _upload_session_cleanup(self, upload_session_id: int):
    """Task invoked by 'upload_workflow.chord' in order to remove and cleanup all the 'INVALID' stale Upload tasks."""
    try:
        # Fetch the stale session; remove its resource first (if any),
        # then the Upload row itself.
        _upload = Upload.objects.get(id=upload_session_id)
        if _upload.resource:
            resource_manager.delete(_upload.resource.uuid)
        _upload.delete()
    except Exception as e:
        # Best-effort cleanup: never let a single broken session crash the chord.
        logger.error(f"Upload {upload_session_id} errored with exception {e}.")
        return
    logger.debug(f"Upload {upload_session_id} deleted with state {_upload.state}.")
def delete_dataset(self, dataset_id):
    """
    Deletes a layer.

    Looks up the Dataset by primary key and removes it through the
    resource manager; a missing id is logged and ignored.
    """
    try:
        layer = Dataset.objects.get(id=dataset_id)
    except Dataset.DoesNotExist:
        # Fix: message previously read "Layers {id}" although a single
        # Dataset is being looked up.
        logger.warning(f"Dataset {dataset_id} does not exist!")
        return
    logger.debug(f'Deleting Dataset {layer}')
    resource_manager.delete(uuid=layer.uuid, instance=layer)
def delete_layer(self, layer_id):
    """
    Deletes a layer.

    Looks up the Layer by primary key and removes it through the
    resource manager; a missing id is logged and ignored.
    """
    try:
        layer = Layer.objects.get(id=layer_id)
    except Layer.DoesNotExist:
        # Fix: message previously read "Layers {id}" although a single
        # Layer is being looked up.
        logger.warning(f"Layer {layer_id} does not exist!")
        return
    logger.debug(f'Deleting Layer {layer}')
    resource_manager.delete(uuid=layer.uuid, instance=layer)
def test_get_resources(self, mock_wms):
    """End-to-end WMS harvesting round-trip against a mocked service.

    Discovers the phony dataset, creates the GeoNode service and dataset,
    verifies the harvest job bookkeeping, and checks the legend link.
    """
    mock_wms.return_value = (self.phony_url, self.parsed_wms)
    handler = wms.WmsServiceHandler(self.phony_url)
    result = list(handler.get_resources())
    self.assertEqual(result[0].name, self.phony_dataset_name)

    test_user, created = get_user_model().objects.get_or_create(
        username="******")
    if created:
        test_user.set_password("somepassword")
        test_user.save()

    result = handler.create_geonode_service(test_user)
    try:
        geonode_service, created = Service.objects.get_or_create(
            base_url=result.base_url, owner=test_user)
        # Start from a clean slate: drop any datasets/jobs left over from
        # a previous run against the same service.
        for _d in Dataset.objects.filter(remote_service=geonode_service):
            resource_manager.delete(_d.uuid, instance=_d)
        HarvestJob.objects.filter(service=geonode_service).delete()

        result = list(handler.get_resources())
        dataset_meta = handler.get_resource(result[0].name)
        resource_fields = handler._get_indexed_dataset_fields(dataset_meta)
        # NOTE(review): pop followed by immediate re-assignment is a no-op;
        # presumably a leftover from code that transformed keywords — confirm.
        keywords = resource_fields.pop("keywords")
        resource_fields["keywords"] = keywords
        resource_fields["is_approved"] = True
        resource_fields["is_published"] = True
        geonode_dataset = handler._create_dataset(geonode_service, **resource_fields)
        self.assertIsNotNone(geonode_dataset)
        self.assertNotEqual(geonode_dataset.srid, "EPSG:4326")
        self.assertEqual(geonode_dataset.sourcetype, base_enumerations.SOURCE_TYPE_REMOTE)

        harvest_job, created = HarvestJob.objects.get_or_create(
            service=geonode_service,
            resource_id=geonode_dataset.alternate)
        self.assertIsNotNone(harvest_job)
        # Deleting the datasets is expected to remove the matching
        # harvest jobs as well.
        for _d in Dataset.objects.filter(remote_service=geonode_service):
            resource_manager.delete(_d.uuid, instance=_d)
        self.assertEqual(
            HarvestJob.objects.filter(
                service=geonode_service,
                resource_id=geonode_dataset.alternate).count(), 0)

        legend_url = handler._create_dataset_legend_link(geonode_dataset)
        self.assertTrue('sld_version=1.1.0' in str(legend_url))
    except Service.DoesNotExist as e:
        # In the case the Service URL becomes inaccessible for some reason
        logger.error(e)
def document_remove(request, docid, template='documents/document_remove.html'):
    """Render a delete-confirmation page on GET; delete the document on POST.

    Permission failures yield 403, resolution failures 404, and any other
    HTTP method is rejected with 403.
    """
    try:
        doc = _resolve_document(
            request, docid, 'base.delete_resourcebase', _PERMISSION_MSG_DELETE)
    except PermissionDenied:
        return HttpResponse(_("Not allowed"), status=403)
    except Exception:
        raise Http404(_("Not found"))
    if not doc:
        raise Http404(_("Not found"))

    method = request.method
    if method == 'GET':
        return render(request, template, context={"document": doc})
    elif method == 'POST':
        # Delete via the resource manager, then record the removal event.
        resource_manager.delete(doc.uuid, instance=doc)
        register_event(request, EventType.EVENT_REMOVE, doc)
        return HttpResponseRedirect(reverse("document_browse"))
    else:
        return HttpResponse(_("Not allowed"), status=403)
def geoapp_remove(request, geoappid, template='apps/app_remove.html'):
    """Render a delete-confirmation page on GET; delete the geoapp on POST.

    Permission failures yield 403, resolution failures 404, and any other
    HTTP method is rejected with 403.
    """
    try:
        geoapp_obj = _resolve_geoapp(
            request, geoappid, 'base.delete_resourcebase', _PERMISSION_MSG_DELETE)
    except PermissionDenied:
        return HttpResponse(_("Not allowed"), status=403)
    except Exception:
        raise Http404(_("Not found"))
    if not geoapp_obj:
        raise Http404(_("Not found"))

    if request.method == 'GET':
        return render(request, template, context={"resource": geoapp_obj})
    elif request.method == 'POST':
        # Delete via the resource manager, then record the removal event.
        resource_manager.delete(geoapp_obj.uuid, instance=geoapp_obj)
        register_event(request, EventType.EVENT_REMOVE, geoapp_obj)
        return HttpResponseRedirect(reverse("apps_browse"))
    else:
        # Fix: mark the message translatable with _() for consistency with
        # the sibling removal views, which all localize "Not allowed".
        return HttpResponse(_("Not allowed"), status=403)
def finalize_incomplete_session_uploads(self, *args, **kwargs):
    """The task periodically checks for pending and stale Upload sessions.
    It runs every 600 seconds (see the PeriodTask on geonode.upload._init_),
    checks first for expired stale Upload sessions and schedule them for cleanup.
    We have to make sure To NOT Delete those Unprocessed Ones,
    which are in live sessions.
    After removing the stale ones, it collects all the unprocessed and runs them in parallel."""
    # Per-task lock keyed on the Celery request id; skip silently if the
    # lock cannot be acquired.
    lock_id = f'{self.request.id}'
    with AcquireLock(lock_id) as lock:
        if lock.acquire() is True:
            _upload_ids = []
            _upload_tasks = []

            # Check first if we need to delete stale sessions
            expiry_time = now() - timedelta(hours=UPLOAD_SESSION_EXPIRY_HOURS)
            for _upload in Upload.objects.exclude(
                    state=enumerations.STATE_PROCESSED).exclude(
                    date__gt=expiry_time):
                # Mark expired, not-yet-processed sessions INVALID and
                # schedule them for cleanup.
                _upload.set_processing_state(enumerations.STATE_INVALID)
                _upload_ids.append(_upload.id)
                _upload_tasks.append(
                    _upload_session_cleanup.signature(args=(_upload.id, )))

            # Run all cleanup tasks in parallel; the finalizer (and its
            # on_error fallback) receives the task name and the ids handled.
            upload_workflow_finalizer = _upload_workflow_finalizer.signature(
                args=(
                    '_upload_session_cleanup',
                    _upload_ids,
                ),
                immutable=True).on_error(
                _upload_workflow_error.signature(args=(
                    '_upload_session_cleanup',
                    _upload_ids,
                ), immutable=True))
            upload_workflow = chord(_upload_tasks, body=upload_workflow_finalizer)
            upload_workflow.apply_async()

            # Let's finish the valid ones
            _processing_states = (
                enumerations.STATE_RUNNING,
                enumerations.STATE_INVALID,
                enumerations.STATE_PROCESSED)
            # NOTE(review): _upload_ids and _upload_tasks are NOT reset here,
            # so the second chord below re-includes the cleanup signatures
            # and ids queued above — confirm this is intended.
            for _upload in Upload.objects.exclude(
                    state__in=_processing_states):
                session = None
                try:
                    if not _upload.import_id:
                        raise NotFound
                    session = _upload.get_session.import_session
                    # Refresh the importer session if missing or not COMPLETE.
                    if not session or session.state != enumerations.STATE_COMPLETE:
                        session = gs_uploader.get_session(_upload.import_id)
                except (NotFound, Exception) as e:
                    # NOTE(review): '(NotFound, Exception)' catches every
                    # Exception — presumably intentional best-effort; confirm.
                    logger.exception(e)
                    session = None
                    if _upload.state not in (enumerations.STATE_COMPLETE, enumerations.STATE_PROCESSED):
                        _upload.set_processing_state(
                            enumerations.STATE_INVALID)
                        if _upload.resource:
                            resource_manager.delete(_upload.resource.uuid)
                if session:
                    # Live session found: schedule a state-update task.
                    _upload_ids.append(_upload.id)
                    _upload_tasks.append(
                        _update_upload_session_state.signature(
                            args=(_upload.id, )))

            upload_workflow_finalizer = _upload_workflow_finalizer.signature(
                args=(
                    '_update_upload_session_state',
                    _upload_ids,
                ),
                immutable=True).on_error(
                _upload_workflow_error.signature(args=(
                    '_update_upload_session_state',
                    _upload_ids,
                ), immutable=True))
            upload_workflow = chord(_upload_tasks, body=upload_workflow_finalizer)
            upload_workflow.apply_async()
def test_get_arcgis_alternative_structure(self, mock_map_service):
    """Harvest a dataset from a mocked ArcGIS REST MapServer whose JSON
    payload uses an alternative structure, then verify the resulting
    GeoNode service / dataset / harvest-job lifecycle."""
    LayerESRIExtent = namedtuple('LayerESRIExtent', 'spatialReference xmin ymin ymax xmax')
    LayerESRIExtentSpatialReference = namedtuple('LayerESRIExtentSpatialReference', 'wkid latestWkid')
    # Canned MapServer 'contents' payload mimicking a real ArcGIS response.
    mock_arcgis_service_contents = {
        'copyrightText': '',
        'description': '',
        'documentInfo': {
            'Author': 'Administrator',
            'Category': '',
            'Comments': '',
            'Keywords': '',
            'Subject': '',
            'Title': 'basemap_ortofoto_AGEA2011'
        },
        'fullExtent': {
            'xmax': 579764.2319999984,
            'xmin': 386130.6820000001,
            'ymax': 4608909.064,
            'ymin': 4418016.7140000025
        },
        'initialExtent': {
            'xmax': 605420.5635976626,
            'xmin': 349091.7176066373,
            'ymax': 4608197.140968505,
            'ymin': 4418728.637031497
        },
        'layers': [
            {
                'copyrightText': '',
                'definitionExpression': '',
                'description': '',
                'displayField': '',
                # NOTE(review): positional order is (spatialReference, xmin,
                # ymin, ymax, xmax); the ymax/xmax values below look swapped
                # relative to fullExtent — confirm intended fixture data.
                'extent': LayerESRIExtent(
                    LayerESRIExtentSpatialReference(None, None),
                    570962.7069999985,
                    4600232.139,
                    394932.207,
                    4426693.639000002),
                'fields': [],
                'geometryType': '',
                'id': 1,
                'maxScale': 0.0,
                'minScale': 0.0,
                'name': 'Regione_Campania.ecw',
                'title': 'Regione_Campania.ecw',
                'parentLayer': {
                    'id': -1,
                    'name': '-1'
                },
                'subLayers': [],
                'type': 'Raster Dataset'
            }
        ],
        'mapName': 'Layers',
        'serviceDescription': '',
        'singleFusedMapCache': True,
        'spatialReference': None,
        'tileInfo': {
            'cols': 512,
            'compressionQuality': 0,
            'dpi': 96,
            'format': 'PNG8',
            'lods': [
                {'level': 0, 'resolution': 185.20870375074085, 'scale': 700000.0},
                {'level': 1, 'resolution': 66.1459656252646, 'scale': 250000.0},
                {'level': 2, 'resolution': 26.458386250105836, 'scale': 100000.0},
                {'level': 3, 'resolution': 19.843789687579378, 'scale': 75000.0},
                {'level': 4, 'resolution': 13.229193125052918, 'scale': 50000.0},
                {'level': 5, 'resolution': 6.614596562526459, 'scale': 25000.0},
                {'level': 6, 'resolution': 2.6458386250105836, 'scale': 10000.0},
                {'level': 7, 'resolution': 1.3229193125052918, 'scale': 5000.0},
                {'level': 8, 'resolution': 0.5291677250021167, 'scale': 2000.0}
            ],
            'origin': {
                'x': 289313.907000001,
                'y': 4704355.239
            },
            'rows': 512,
            'spatialReference': None
        },
        'units': 'esriMeters'
    }
    phony_url = "http://sit.cittametropolitana.na.it/arcgis/rest/services/basemap_ortofoto_AGEA2011/MapServer"
    # Build a MagicMock standing in for a parsed ArcMapService and attach
    # the canned contents to it.
    mock_parsed_arcgis = mock.MagicMock(ArcMapService).return_value
    (url, mock_parsed_arcgis) = mock.MagicMock(ArcMapService, return_value=(phony_url, mock_parsed_arcgis)).return_value
    mock_parsed_arcgis.url = phony_url
    mock_parsed_arcgis.layers = mock_arcgis_service_contents['layers']
    mock_parsed_arcgis._contents = mock_arcgis_service_contents
    mock_parsed_arcgis._json_struct = mock_arcgis_service_contents
    mock_map_service.return_value = (phony_url, mock_parsed_arcgis)

    handler = arcgis.ArcImageServiceHandler(phony_url)
    self.assertEqual(handler.url, phony_url)

    # Metadata extracted from the single mocked layer.
    dataset_meta = handler._dataset_meta(mock_parsed_arcgis.layers[0])
    self.assertIsNotNone(dataset_meta)
    self.assertEqual(dataset_meta.id, 1)
    resource_fields = handler._get_indexed_dataset_fields(dataset_meta)
    self.assertEqual(resource_fields['alternate'], f'{slugify(phony_url)}:{dataset_meta.id}')

    test_user, created = get_user_model().objects.get_or_create(username="******")
    if created:
        test_user.set_password("somepassword")
        test_user.save()
    try:
        result = handler.create_geonode_service(test_user)
        geonode_service, created = Service.objects.get_or_create(
            base_url=result.base_url, owner=test_user)
        # Start from a clean slate: drop any datasets/jobs left over from a
        # previous run against the same service.
        for _d in Dataset.objects.filter(remote_service=geonode_service):
            resource_manager.delete(_d.uuid, instance=_d)
        HarvestJob.objects.filter(service=geonode_service).delete()

        handler._harvest_resource(dataset_meta, geonode_service)
        geonode_dataset = Dataset.objects.filter(remote_service=geonode_service).get()
        self.assertIsNotNone(geonode_dataset)
        self.assertNotEqual(geonode_dataset.srid, "EPSG:4326")
        self.assertEqual(geonode_dataset.sourcetype, base_enumerations.SOURCE_TYPE_REMOTE)

        harvest_job, created = HarvestJob.objects.get_or_create(
            service=geonode_service,
            resource_id=geonode_dataset.alternate
        )
        self.assertIsNotNone(harvest_job)
        # Deleting the datasets is expected to remove the matching harvest
        # jobs as well.
        for _d in Dataset.objects.filter(remote_service=geonode_service):
            resource_manager.delete(_d.uuid, instance=_d)
        self.assertEqual(HarvestJob.objects.filter(service=geonode_service, resource_id=geonode_dataset.alternate).count(), 0)
    except (Service.DoesNotExist, HTTPError) as e:
        # In the case the Service URL becomes inaccessible for some reason
        logger.error(e)