Example #1
File: tasks.py Project: umeier/geonode
def geoserver_create_thumbnail(self,
                               instance_id,
                               overwrite=True,
                               check_bbox=True):
    """
    Runs create_gs_thumbnail.
    """
    instance = None
    try:
        instance = ResourceBase.objects.get(id=instance_id).get_real_instance()
    except Exception:
        logger.error(f"Resource id {instance_id} does not exist yet!")
        raise

    lock_id = f'{self.request.id}'
    with AcquireLock(lock_id) as lock:
        if lock.acquire() is True:
            try:
                create_gs_thumbnail(instance,
                                    overwrite=overwrite,
                                    check_bbox=check_bbox)
                logger.debug(
                    f"... Created Thumbnail for Layer {instance.title}")
            except Exception as e:
                geoserver_create_thumbnail.retry(exc=e)
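All of these examples share the same non-blocking lock idiom: build a lock id (usually from the Celery request id, or from a constant task name for periodic tasks), enter AcquireLock as a context manager, and only do the work when acquire() returns True, so concurrent invocations of the same task become no-ops instead of racing each other. GeoNode ships its own AcquireLock helper; the sketch below is only an illustrative stand-in built on Django's cache, not the project's actual implementation.

from django.core.cache import cache


class AcquireLock:
    """Illustrative cache-based, non-blocking lock (hypothetical sketch)."""

    def __init__(self, lock_id, timeout=600):
        self.lock_id = f"lock_{lock_id}"
        self.timeout = timeout
        self._acquired = False

    def acquire(self):
        # cache.add only sets the key if it does not already exist (atomic on
        # backends such as memcached), so the first caller wins the lock.
        self._acquired = cache.add(self.lock_id, "locked", self.timeout)
        return self._acquired

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self._acquired:
            cache.delete(self.lock_id)
        return False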
Example #2
def geoserver_set_style(
        self,
        instance_id,
        base_file):
    """
    Sets styles from SLD file.
    """
    instance = None
    try:
        instance = Dataset.objects.get(id=instance_id)
    except Dataset.DoesNotExist:
        logger.debug(f"Dataset id {instance_id} does not exist yet!")
        raise

    lock_id = f'{self.request.id}'
    with AcquireLock(lock_id) as lock:
        if lock.acquire() is True:
            try:
                sld = open(base_file, "rb").read()
                set_dataset_style(
                    instance,
                    instance.alternate,
                    sld,
                    base_file=base_file)
            except Exception as e:
                logger.exception(e)
Example #3
def geoserver_create_thumbnail(self,
                               instance_id,
                               overwrite=True,
                               check_bbox=True):
    """
    Runs create_gs_thumbnail.
    """
    instance = None
    try:
        instance = ResourceBase.objects.get(id=instance_id).get_real_instance()
    except Exception:
        logger.error(f"Resource id {instance_id} does not exist yet!")
        raise

    lock_id = f'{self.request.id}'
    with AcquireLock(lock_id) as lock:
        if lock.acquire() is True:
            instance.set_processing_state(enumerations.STATE_RUNNING)
            try:
                instance.set_dirty_state()
                create_gs_thumbnail(instance,
                                    overwrite=overwrite,
                                    check_bbox=check_bbox)
                logger.debug(
                    f"... Created Thumbnail for Dataset {instance.title}")
            except Exception as e:
                geoserver_create_thumbnail.retry(exc=e)
            finally:
                instance.set_processing_state(enumerations.STATE_PROCESSED)
Example #4
def _update_upload_session_state(self, upload_session_id: int):
    """Task invoked by 'upload_workflow.chord' in order to process all the 'PENDING' Upload tasks."""

    lock_id = f'{self.request.id}'
    with AcquireLock(lock_id) as lock:
        if lock.acquire() is True:
            _upload = Upload.objects.get(id=upload_session_id)
            session = _upload.get_session.import_session
            if not session or session.state != enumerations.STATE_COMPLETE:
                session = gs_uploader.get_session(_upload.import_id)

            if session:
                try:
                    content = next_step_response(None, _upload.get_session).content
                    if isinstance(content, bytes):
                        content = content.decode('UTF-8')
                    response_json = json.loads(content)
                    _success = response_json.get('success', False)
                    _redirect_to = response_json.get('redirect_to', '')
                    if _success:
                        if 'upload/final' not in _redirect_to and 'upload/check' not in _redirect_to:
                            _upload.set_resume_url(_redirect_to)
                            _upload.set_processing_state(enumerations.STATE_WAITING)
                        else:
                            if session.state == enumerations.STATE_COMPLETE and _upload.state == enumerations.STATE_PENDING:
                                if not _upload.resource or not _upload.resource.processed:
                                    final_step_view(None, _upload.get_session)
                                _upload.set_processing_state(enumerations.STATE_RUNNING)
                except (NotFound, Exception) as e:
                    logger.exception(e)
                    if _upload.state not in (enumerations.STATE_COMPLETE, enumerations.STATE_PROCESSED):
                        _upload.set_processing_state(enumerations.STATE_INVALID)
                        if _upload.resource:
                            resource_manager.delete(_upload.resource.uuid)
Example #5
def geoserver_cascading_delete(self, *args, **kwargs):
    """
    Runs cascading_delete.
    """
    lock_id = f'{self.request.id}'
    with AcquireLock(lock_id) as lock:
        if lock.acquire() is True:
            return cascading_delete(*args, **kwargs)
Example #6
def geoserver_update_datasets(self, *args, **kwargs):
    """
    Runs update layers.
    """
    lock_id = f'{self.request.id}'
    with AcquireLock(lock_id) as lock:
        if lock.acquire() is True:
            return gs_slurp(*args, **kwargs)
Example #7
def geoserver_create_style(
        self,
        instance_id,
        name,
        sld_file,
        tempdir):
    """
    Sets or creates styles from the Upload Session.
    """
    instance = None
    try:
        instance = Dataset.objects.get(id=instance_id)
    except Dataset.DoesNotExist:
        logger.debug(f"Dataset id {instance_id} does not exist yet!")
        raise

    lock_id = f'{self.request.id}'
    with AcquireLock(lock_id) as lock:
        if lock.acquire() is True and instance:
            if sld_file and os.path.exists(sld_file) and os.access(sld_file, os.R_OK):
                f = None
                if os.path.isfile(sld_file):
                    try:
                        f = open(sld_file)
                    except Exception:
                        pass
                elif tempdir and os.path.exists(tempdir):
                    if os.path.isfile(os.path.join(tempdir, sld_file)):
                        try:
                            f = open(os.path.join(tempdir, sld_file))
                        except Exception:
                            pass
                if f:
                    sld = f.read()
                    f.close()
                    if not gs_catalog.get_style(name=name, workspace=settings.DEFAULT_WORKSPACE):
                        style = gs_catalog.create_style(
                            name,
                            sld,
                            raw=True,
                            workspace=settings.DEFAULT_WORKSPACE)
                        gs_dataset = gs_catalog.get_layer(name)
                        _default_style = gs_dataset.default_style
                        gs_dataset.default_style = style
                        gs_catalog.save(gs_dataset)
                        set_styles(instance, gs_catalog)
                        try:
                            gs_catalog.delete(_default_style)
                            Link.objects.filter(
                                resource=instance.resourcebase_ptr,
                                name='Legend',
                                url__contains=f'STYLE={_default_style.name}').delete()
                        except Exception as e:
                            logger.exception(e)
                else:
                    get_sld_for(gs_catalog, instance)
            else:
                get_sld_for(gs_catalog, instance)
Example #8
 def get_resume_url(self):
     if self.state == Upload.STATE_WAITING and self.import_id:
         return f"{reverse('data_upload')}?id={self.import_id}"
     else:
         session = None
         try:
             if not self.import_id:
                 raise NotFound
             session = self.get_session.import_session
             if not session or session.state != Upload.STATE_COMPLETE:
                 session = gs_uploader.get_session(self.import_id)
         except (NotFound, Exception):
             if self.state not in (Upload.STATE_COMPLETE,
                                   Upload.STATE_PROCESSED):
                 self.state = Upload.STATE_INVALID
                 Upload.objects.filter(id=self.id).update(
                     state=Upload.STATE_INVALID)
         if session:
             lock_id = f'{self.import_id}'
             with AcquireLock(lock_id) as lock:
                 if lock.acquire() is True:
                     try:
                         content = next_step_response(
                             None, self.get_session).content
                         if isinstance(content, bytes):
                             content = content.decode('UTF-8')
                         response_json = json.loads(content)
                         if response_json[
                                 'success'] and 'redirect_to' in response_json:
                             if 'upload/final' not in response_json[
                                     'redirect_to'] and 'upload/check' not in response_json[
                                         'redirect_to']:
                                 self.state = Upload.STATE_WAITING
                                 Upload.objects.filter(id=self.id).update(
                                     state=Upload.STATE_WAITING)
                                 return f"{reverse('data_upload')}?id={self.import_id}"
                             else:
                                 next = get_next_step(self.get_session)
                                 if next == 'final' and session.state == Upload.STATE_COMPLETE and self.state == Upload.STATE_PENDING:
                                     if not self.layer or not self.layer.processed:
                                         from .views import final_step_view
                                         final_step_view(
                                             None, self.get_session)
                                     self.state = Upload.STATE_RUNNING
                                     Upload.objects.filter(
                                         id=self.id).update(
                                             state=Upload.STATE_RUNNING)
                     except (NotFound, Exception) as e:
                         logger.exception(e)
                         if self.state not in (Upload.STATE_COMPLETE,
                                               Upload.STATE_PROCESSED):
                             self.state = Upload.STATE_INVALID
                             Upload.objects.filter(id=self.id).update(
                                 state=Upload.STATE_INVALID)
     return None
Example #9
def geoserver_post_save_datasets(self, instance_id, *args, **kwargs):
    """
    Runs update layers.
    """
    lock_id = f'{self.request.id}'
    with AcquireLock(lock_id) as lock:
        if lock.acquire() is True:
            sync_instance_with_geoserver(instance_id, *args, **kwargs)

            # Updating HAYSTACK Indexes if needed
            if settings.HAYSTACK_SEARCH:
                call_command('update_index')
Example #10
def _upload_session_cleanup(self, upload_session_id: int):
    """Task invoked by 'upload_workflow.chord' in order to remove and cleanup all the 'INVALID' stale Upload tasks."""

    lock_id = f'{self.request.id}'
    with AcquireLock(lock_id) as lock:
        if lock.acquire() is True:
            try:
                _upload = Upload.objects.get(id=upload_session_id)
                if _upload.resource:
                    resource_manager.delete(_upload.resource.uuid)
                _upload.delete()
                logger.debug(f"Upload {upload_session_id} deleted with state {_upload.state}.")
            except Exception as e:
                logger.error(f"Upload {upload_session_id} errored with exception {e}.")
Example #11
def probe_services(self):
    # The cache key consists of the task name and the MD5 digest
    # of the name.
    name = b'probe_services'
    hexdigest = md5(name).hexdigest()
    lock_id = f'{name.decode()}-lock-{hexdigest}'
    with AcquireLock(lock_id) as lock:
        if lock.acquire() is True:
            for service in models.Service.objects.all():
                try:
                    service.probe = service.probe_service()
                    service.save()
                except Exception as e:
                    logger.error(e)
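Unlike the per-invocation lock ids built from f'{self.request.id}', periodic tasks such as probe_services derive a fixed lock id from the task name, so overlapping runs of the same periodic task exclude each other. A minimal sketch of that key construction, assuming hashlib's md5 (GeoNode may use its own md5 wrapper):

from hashlib import md5

name = b'probe_services'
lock_id = f'{name.decode()}-lock-{md5(name).hexdigest()}'
print(lock_id)  # probe_services-lock-<32 hex characters>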
Example #12
def geoserver_delete_map(self, object_id):
    """
    Deletes a map and the associated map layers.
    """
    from geonode.maps.models import Map
    lock_id = f'{self.request.id}'
    with AcquireLock(lock_id) as lock:
        if lock.acquire() is True:
            try:
                map_obj = Map.objects.get(id=object_id)
            except Map.DoesNotExist:
                return

            map_obj.dataset_set.all().delete()
            map_obj.delete()
Example #13
File: tasks.py Project: umeier/geonode
def geoserver_finalize_upload(self, import_id, instance_id, permissions,
                              created, xml_file, sld_file, sld_uploaded,
                              tempdir):
    """
    Finalize Layer and GeoServer configuration:
     - Sets Layer Metadata from XML and updates GeoServer Layer accordingly.
     - Sets Default Permissions
    """
    instance = None
    try:
        instance = Layer.objects.get(id=instance_id)
    except Layer.DoesNotExist:
        logger.debug(f"Layer id {instance_id} does not exist yet!")
        raise

    lock_id = f'{self.request.id}'
    with AcquireLock(lock_id) as lock:
        if lock.acquire() is True:
            from geonode.upload.models import Upload
            upload = Upload.objects.get(import_id=import_id)
            upload.layer = instance
            upload.save()

            # Sanity checks
            if isinstance(xml_file, list):
                if len(xml_file) > 0:
                    xml_file = xml_file[0]
                else:
                    xml_file = None
            elif not isinstance(xml_file, six.string_types):
                xml_file = None

            if xml_file and os.path.exists(xml_file) and os.access(
                    xml_file, os.R_OK):
                instance.metadata_uploaded = True
                identifier, vals, regions, keywords = set_metadata(
                    open(xml_file).read())

                try:
                    gs_resource = gs_catalog.get_resource(
                        name=instance.name,
                        store=instance.store,
                        workspace=instance.workspace)
                except Exception:
                    try:
                        gs_resource = gs_catalog.get_resource(
                            name=instance.alternate,
                            store=instance.store,
                            workspace=instance.workspace)
                    except Exception:
                        try:
                            gs_resource = gs_catalog.get_resource(
                                name=instance.alternate or instance.typename)
                        except Exception:
                            gs_resource = None

                if vals:
                    title = vals.get('title', '')
                    abstract = vals.get('abstract', '')

                    # Updating GeoServer resource
                    gs_resource.title = title
                    gs_resource.abstract = abstract
                    gs_catalog.save(gs_resource)
                else:
                    vals = {}

                vals.update(
                    dict(uuid=instance.uuid,
                         name=instance.name,
                         owner=instance.owner,
                         store=gs_resource.store.name,
                         storeType=gs_resource.store.resource_type,
                         alternate=gs_resource.store.workspace.name + ':' +
                         gs_resource.name,
                         title=gs_resource.title or gs_resource.store.name,
                         abstract=gs_resource.abstract or ''))

                instance.metadata_xml = xml_file
                regions_resolved, regions_unresolved = resolve_regions(regions)
                keywords.extend(regions_unresolved)

                # Assign the regions (needs to be done after saving)
                regions_resolved = list(set(regions_resolved))
                if regions_resolved:
                    if len(regions_resolved) > 0:
                        if not instance.regions:
                            instance.regions = regions_resolved
                        else:
                            instance.regions.clear()
                            instance.regions.add(*regions_resolved)

                # Assign the keywords (needs to be done after saving)
                keywords = list(set(keywords))
                if keywords:
                    if len(keywords) > 0:
                        if not instance.keywords:
                            instance.keywords = keywords
                        else:
                            instance.keywords.add(*keywords)

                # set model properties
                defaults = {}
                for key, value in vals.items():
                    if key == 'spatial_representation_type':
                        value = SpatialRepresentationType(identifier=value)
                    elif key == 'topic_category':
                        value, created = TopicCategory.objects.get_or_create(
                            identifier=value.lower(),
                            defaults={
                                'description': '',
                                'gn_description': value
                            })
                        key = 'category'
                        defaults[key] = value
                    else:
                        defaults[key] = value

                # Save all the modified information in the instance without triggering signals.
                try:
                    if not defaults.get('title', title):
                        defaults['title'] = instance.title or instance.name
                    if not defaults.get('abstract', abstract):
                        defaults['abstract'] = instance.abstract or ''

                    to_update = {}
                    to_update['charset'] = defaults.pop(
                        'charset', instance.charset)
                    to_update['storeType'] = defaults.pop(
                        'storeType', instance.storeType)
                    for _key in ('name', 'workspace', 'store', 'storeType',
                                 'alternate', 'typename'):
                        if _key in defaults:
                            to_update[_key] = defaults.pop(_key)
                        else:
                            to_update[_key] = getattr(instance, _key)
                    to_update.update(defaults)

                    with transaction.atomic():
                        ResourceBase.objects.filter(
                            id=instance.resourcebase_ptr.id).update(**defaults)
                        Layer.objects.filter(id=instance.id).update(
                            **to_update)

                        # Refresh from DB
                        instance.refresh_from_db()
                except IntegrityError:
                    raise

            if sld_uploaded:
                geoserver_set_style(instance.id, sld_file)
            else:
                geoserver_create_style(instance.id, instance.name, sld_file,
                                       tempdir)

            logger.debug(
                'Finalizing (permissions and notifications) Layer {0}'.format(
                    instance))
            instance.handle_moderated_uploads()

            if permissions is not None:
                logger.debug(
                    f'Setting permissions {permissions} for {instance.name}')
                instance.set_permissions(permissions, created=created)
            elif created:
                logger.debug(
                    f'Setting default permissions for {instance.name}')
                instance.set_default_permissions()
            try:
                # Update the upload sessions
                geonode_upload_sessions = UploadSession.objects.filter(
                    resource=instance)
                geonode_upload_sessions.update(processed=False)
                instance.upload_session = geonode_upload_sessions.first()
            except Exception as e:
                logger.exception(e)

            instance.save(notify=not created)

            try:
                logger.debug(
                    f"... Cleaning up the temporary folders {tempdir}")
                if tempdir and os.path.exists(tempdir):
                    shutil.rmtree(tempdir)
            finally:
                upload.complete = True
                upload.save()

            signals.upload_complete.send(sender=geoserver_finalize_upload,
                                         layer=instance)
Example #14
def collect_metric(**options):
    # Exit early if MONITORING_ENABLED=False
    if not settings.MONITORING_ENABLED:
        return

    # Avoid possible module circular dependency issues
    from geonode.monitoring.models import Service
    from geonode.monitoring.collector import CollectorAPI

    _start_time = None
    _end_time = None
    # The cache key consists of the task name and the MD5 digest
    # of the name.
    name = b'collect_metric'
    hexdigest = md5(name).hexdigest()
    lock_id = f'{name.decode()}-lock-{hexdigest}'
    _start_time = _end_time = datetime.utcnow().isoformat()
    log.info(f'[{lock_id}] Collecting Metrics - started @ {_start_time}')
    with AcquireLock(lock_id) as lock:
        if lock.acquire() is True:
            log.info(
                f'[{lock_id}] Collecting Metrics - [...acquired lock] @ {_start_time}'
            )
            try:
                oservice = options['service']
                if not oservice:
                    services = Service.objects.all()
                else:
                    services = [oservice]
                if options['list_services']:
                    print('available services')
                    for s in services:
                        print('  ', s.name, '(', s.url, ')')
                        print('   type', s.service_type.name)
                        print('   running on', s.host.name, s.host.ip)
                        print('   active:', s.active)
                        if s.last_check:
                            print('    last check:', s.last_check)
                        else:
                            print('    not checked yet')
                        print(' ')
                    return
                c = CollectorAPI()
                for s in services:
                    try:
                        run_check(s,
                                  collector=c,
                                  since=options['since'],
                                  until=options['until'],
                                  force_check=options['force_check'],
                                  format=options['format'])
                    except Exception as e:
                        log.warning(e)
                if not options['do_not_clear']:
                    log.info("Clearing old data")
                    c.clear_old_data()
                if options['emit_notifications']:
                    log.info("Processing notifications for %s",
                             options['until'])
                    # s = Service.objects.first()
                    # interval = s.check_interval
                    # now = datetime.utcnow().replace(tzinfo=pytz.utc)
                    # notifications_check = now - interval
                    c.emit_notifications()  # notifications_check))
                _end_time = datetime.utcnow().isoformat()
                log.info(
                    f'[{lock_id}] Collecting Metrics - finished @ {_end_time}')
            except Exception as e:
                log.info(
                    f'[{lock_id}] Collecting Metrics - errored @ {_end_time}')
                log.exception(e)
    log.info(f'[{lock_id}] Collecting Metrics - exit @ {_end_time}')
    return (_start_time, _end_time)
Example #15
def resouce_service_dispatcher(self, execution_id: str):
    """Performs a Resource Service request asynchronously.

    This is the main Resource Service API dispatcher.
    The method looks for available `ExecutionRequests` with status `READY` and triggers the
    `func_name` method of the `resource_manager` with the `input_params`.
    It finally updates the `status` of the request.

    A client can query the `status_url` endpoint to get the current `status` as well as
    the `output_params`.
    """
    with AcquireLock(execution_id) as lock:
        if lock.acquire() is True:
            _exec_request = ExecutionRequest.objects.filter(
                exec_id=execution_id)
            if _exec_request.exists():
                _request = _exec_request.get()
                if _request.status == ExecutionRequest.STATUS_READY:
                    _exec_request.update(
                        status=ExecutionRequest.STATUS_RUNNING)
                    _request.refresh_from_db()
                    if hasattr(resource_manager, _request.func_name):
                        try:
                            _signature = signature(
                                getattr(resource_manager, _request.func_name))
                            _args = []
                            _kwargs = {}
                            for _param_name in _signature.parameters:
                                if _request.input_params and _request.input_params.get(
                                        _param_name, None):
                                    _param = _signature.parameters.get(
                                        _param_name)
                                    _param_value = _get_param_value(
                                        _param,
                                        _request.input_params.get(_param_name))
                                    if _param.kind == Parameter.POSITIONAL_ONLY:
                                        _args.append(_param_value)
                                    else:
                                        _kwargs[_param_name] = _param_value

                            _bindings = _signature.bind(*_args, **_kwargs)
                            _bindings.apply_defaults()

                            _output = getattr(resource_manager,
                                              _request.func_name)(
                                                  *_bindings.args,
                                                  **_bindings.kwargs)
                            _output_params = {}
                            if _output is not None and _signature.return_annotation != Signature.empty:
                                if _signature.return_annotation.__module__ == 'builtins':
                                    _output_params = {"output": _output}
                                elif _signature.return_annotation == ResourceBase or isinstance(
                                        _output, ResourceBase):
                                    _output_params = {
                                        "output": {
                                            "uuid": _output.uuid
                                        }
                                    }
                            else:
                                _output_params = {"output": None}
                            _exec_request.update(
                                status=ExecutionRequest.STATUS_FINISHED,
                                finished=datetime.now(),
                                output_params=_output_params)
                            _request.refresh_from_db()
                        except Exception as e:
                            logger.exception(e)
                            _exec_request.update(
                                status=ExecutionRequest.STATUS_FAILED,
                                finished=datetime.now(),
                                output_params={
                                    "error":
                                    _(f"Error occurred while executin the operation: '{_request.func_name}'"
                                      ),
                                    "exception":
                                    str(e)
                                })
                            _request.refresh_from_db()
                    else:
                        _exec_request.update(
                            status=ExecutionRequest.STATUS_FAILED,
                            finished=datetime.now(),
                            output_params={
                                "error":
                                _(f"Could not find the operation name: '{_request.func_name}'"
                                  )
                            })
                        _request.refresh_from_db()

            logger.error(
                f"WARNING: The requested ExecutionRequest with 'exec_id'={execution_id} was not found!"
            )
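The dispatcher's core mechanism is mapping the request's input_params onto the target callable via inspect.signature, binding them, and applying defaults before the call. A self-contained sketch of that mechanism (demo_func, its parameters and the sample input_params are hypothetical, for illustration only):

from inspect import Parameter, signature


def demo_func(uuid: str, vals: dict = None, notify: bool = True) -> str:
    return f"updated {uuid} (notify={notify})"


def dispatch(func, input_params: dict):
    sig = signature(func)
    args, kwargs = [], {}
    for name, param in sig.parameters.items():
        if name in input_params:
            if param.kind == Parameter.POSITIONAL_ONLY:
                args.append(input_params[name])
            else:
                kwargs[name] = input_params[name]
    bound = sig.bind(*args, **kwargs)
    bound.apply_defaults()  # fill in declared defaults for missing parameters
    return func(*bound.args, **bound.kwargs)


print(dispatch(demo_func, {"uuid": "abc-123", "notify": False}))
# -> updated abc-123 (notify=False)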
Example #16
def finalize_incomplete_session_uploads(self, *args, **kwargs):
    """The task periodically checks for pending and stale Upload sessions.
    It runs every 600 seconds (see the PeriodTask on geonode.upload._init_),
    checks first for expired stale Upload sessions and schedule them for cleanup.
    We have to make sure To NOT Delete those Unprocessed Ones,
    which are in live sessions.
    After removing the stale ones, it collects all the unprocessed and runs them
    in parallel."""

    lock_id = f'{self.request.id}'
    with AcquireLock(lock_id) as lock:
        if lock.acquire() is True:
            _upload_ids = []
            _upload_tasks = []

            # Check first if we need to delete stale sessions
            expiry_time = now() - timedelta(hours=UPLOAD_SESSION_EXPIRY_HOURS)
            for _upload in Upload.objects.exclude(
                    state=enumerations.STATE_PROCESSED).exclude(
                        date__gt=expiry_time):
                _upload.set_processing_state(enumerations.STATE_INVALID)
                _upload_ids.append(_upload.id)
                _upload_tasks.append(
                    _upload_session_cleanup.signature(args=(_upload.id, )))

            upload_workflow_finalizer = _upload_workflow_finalizer.signature(
                args=(
                    '_upload_session_cleanup',
                    _upload_ids,
                ),
                immutable=True).on_error(
                    _upload_workflow_error.signature(args=(
                        '_upload_session_cleanup',
                        _upload_ids,
                    ),
                                                     immutable=True))
            upload_workflow = chord(_upload_tasks,
                                    body=upload_workflow_finalizer)
            upload_workflow.apply_async()

            # Let's finish the valid ones
            _processing_states = (enumerations.STATE_RUNNING,
                                  enumerations.STATE_INVALID,
                                  enumerations.STATE_PROCESSED)
            for _upload in Upload.objects.exclude(
                    state__in=_processing_states):
                session = None
                try:
                    if not _upload.import_id:
                        raise NotFound
                    session = _upload.get_session.import_session
                    if not session or session.state != enumerations.STATE_COMPLETE:
                        session = gs_uploader.get_session(_upload.import_id)
                except (NotFound, Exception) as e:
                    logger.exception(e)
                    session = None
                    if _upload.state not in (enumerations.STATE_COMPLETE,
                                             enumerations.STATE_PROCESSED):
                        _upload.set_processing_state(
                            enumerations.STATE_INVALID)
                        if _upload.resource:
                            resource_manager.delete(_upload.resource.uuid)

                if session:
                    _upload_ids.append(_upload.id)
                    _upload_tasks.append(
                        _update_upload_session_state.signature(
                            args=(_upload.id, )))

            upload_workflow_finalizer = _upload_workflow_finalizer.signature(
                args=(
                    '_update_upload_session_state',
                    _upload_ids,
                ),
                immutable=True).on_error(
                    _upload_workflow_error.signature(args=(
                        '_update_upload_session_state',
                        _upload_ids,
                    ),
                                                     immutable=True))
            upload_workflow = chord(_upload_tasks,
                                    body=upload_workflow_finalizer)
            upload_workflow.apply_async()
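The workflow above is a Celery chord: a header of per-upload tasks that run in parallel plus a single finalizer body with an error callback attached. A condensed sketch of that pattern, assuming an existing Celery app with tasks named cleanup, finalize and on_error_task (hypothetical stand-ins for _upload_session_cleanup, _upload_workflow_finalizer and _upload_workflow_error):

from celery import chord

upload_ids = [1, 2, 3]
# header: one cleanup signature per upload, executed in parallel
header = [cleanup.signature(args=(_id,)) for _id in upload_ids]
# body: a single finalizer, made immutable so it ignores the header results,
# with an error callback wired in via on_error (link_error under the hood)
finalizer = finalize.signature(
    args=('_upload_session_cleanup', upload_ids),
    immutable=True).on_error(
        on_error_task.signature(
            args=('_upload_session_cleanup', upload_ids), immutable=True))
chord(header, body=finalizer).apply_async()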
Example #17
File: tasks.py Project: umeier/geonode
def geoserver_post_save_layers(self, instance_id, *args, **kwargs):
    """
    Runs update layers.
    """
    instance = None
    try:
        instance = Layer.objects.get(id=instance_id)
    except Layer.DoesNotExist:
        logger.debug(f"Layer id {instance_id} does not exist yet!")
        raise

    lock_id = f'{self.request.id}'
    with AcquireLock(lock_id) as lock:
        if lock.acquire() is True:
            # Don't run this signal if it is a Layer from a remote service
            if getattr(instance, "remote_service", None) is not None:
                return

            if instance.storeType == "remoteStore":
                return

            # Don't run this signal handler if it is a tile layer or a remote store (Service)
            #    Currently only gpkg files containing tiles will have this type & will be served via MapProxy.
            if hasattr(instance, 'storeType') and getattr(
                    instance, 'storeType') in ['tileStore', 'remoteStore']:
                return instance

            if isinstance(instance, ResourceBase):
                if hasattr(instance, 'layer'):
                    instance = instance.layer
                else:
                    return

            geonode_upload_sessions = UploadSession.objects.filter(
                resource=instance)
            geonode_upload_sessions.update(processed=False)

            gs_resource = None
            values = None
            _tries = 0
            _max_tries = getattr(ogc_server_settings, "MAX_RETRIES", 2)

            # If the store is None then it's a new instance from an upload,
            # only in this case run the geoserver_upload method
            if not instance.store or getattr(instance, 'overwrite', False):
                base_file, info = instance.get_base_file()

                # There is no need to process it if there is no file.
                if base_file is None:
                    return
                gs_name, workspace, values, gs_resource = geoserver_upload(
                    instance,
                    base_file.file.path,
                    instance.owner,
                    instance.name,
                    overwrite=True,
                    title=instance.title,
                    abstract=instance.abstract,
                    charset=instance.charset)

            values, gs_resource = fetch_gs_resource(instance, values, _tries)
            while not gs_resource and _tries < _max_tries:
                values, gs_resource = fetch_gs_resource(
                    instance, values, _tries)
                _tries += 1

            # Get metadata links
            metadata_links = []
            for link in instance.link_set.metadata():
                metadata_links.append((link.mime, link.name, link.url))

            if gs_resource:
                logger.debug("Found geoserver resource for this layer: %s" %
                             instance.name)
                gs_resource.metadata_links = metadata_links
                instance.gs_resource = gs_resource

                # Update Attribution link
                if instance.poc:
                    # gsconfig now utilizes an attribution dictionary
                    gs_resource.attribution = {
                        'title': str(instance.poc),
                        'width': None,
                        'height': None,
                        'href': None,
                        'url': None,
                        'type': None
                    }
                    profile = get_user_model().objects.get(
                        username=instance.poc.username)
                    site_url = settings.SITEURL.rstrip(
                        '/') if settings.SITEURL.startswith(
                            'http') else settings.SITEURL
                    gs_resource.attribution_link = site_url + profile.get_absolute_url(
                    )
                """Get information from geoserver.

                The attributes retrieved include:

                * Bounding Box
                * SRID
                * Download links (WMS, WCS or WFS and KML)
                * Styles (SLD)
                """
                try:
                    # This is usually done in Layer.pre_save, however if the hooks
                    # are bypassed by custom create/updates we need to ensure the
                    # bbox is calculated properly.
                    bbox = gs_resource.native_bbox
                    instance.set_bbox_polygon(
                        [bbox[0], bbox[2], bbox[1], bbox[3]],
                        gs_resource.projection)
                except Exception as e:
                    logger.exception(e)

                if instance.srid:
                    instance.srid_url = "http://www.spatialreference.org/ref/" + \
                        instance.srid.replace(':', '/').lower() + "/"
                elif instance.bbox_polygon is not None:
                    # Guessing 'EPSG:4326' by default
                    instance.srid = 'EPSG:4326'
                else:
                    raise GeoNodeException(
                        "Invalid Projection. Layer is missing CRS!")

                # Iterate over values from geoserver.
                for key in ['alternate', 'store', 'storeType']:
                    # attr_name = key if 'typename' not in key else 'alternate'
                    # print attr_name
                    setattr(instance, key, values[key])

                try:
                    if settings.RESOURCE_PUBLISHING:
                        if instance.is_published != gs_resource.advertised:
                            gs_resource.advertised = 'true'

                    if not settings.FREETEXT_KEYWORDS_READONLY:
                        # AF: Warning - this won't allow people to have empty keywords on GeoNode
                        if len(instance.keyword_list()
                               ) == 0 and gs_resource.keywords:
                            for keyword in gs_resource.keywords:
                                if keyword not in instance.keyword_list():
                                    instance.keywords.add(keyword)

                    if any(instance.keyword_list()):
                        keywords = instance.keyword_list()
                        gs_resource.keywords = [
                            kw for kw in list(set(keywords))
                        ]

                    # gs_resource should only be called if
                    # ogc_server_settings.BACKEND_WRITE_ENABLED == True
                    if getattr(ogc_server_settings, "BACKEND_WRITE_ENABLED",
                               True):
                        gs_catalog.save(gs_resource)
                except Exception as e:
                    msg = (
                        'Error while trying to save resource named %s in GeoServer, '
                        'try to use: "%s"' % (gs_resource, str(e)))
                    e.args = (msg, )
                    logger.exception(e)

                # store the resource to avoid another geoserver call in the post_save
                to_update = {
                    'title': instance.title or instance.name,
                    'abstract': instance.abstract or "",
                    'alternate': instance.alternate,
                    'bbox_polygon': instance.bbox_polygon,
                    'srid': 'EPSG:4326'
                }

                if is_monochromatic_image(instance.thumbnail_url):
                    to_update['thumbnail_url'] = staticfiles.static(
                        settings.MISSING_THUMBNAIL)

                # Save all the modified information in the instance without triggering signals.
                try:
                    with transaction.atomic():
                        ResourceBase.objects.filter(
                            id=instance.resourcebase_ptr.id).update(
                                **to_update)

                        # to_update['name'] = instance.name,
                        to_update[
                            'workspace'] = gs_resource.store.workspace.name
                        to_update['store'] = gs_resource.store.name
                        to_update['storeType'] = instance.storeType
                        to_update['typename'] = instance.alternate

                        Layer.objects.filter(id=instance.id).update(
                            **to_update)

                        # Refresh from DB
                        instance.refresh_from_db()
                except Exception as e:
                    logger.exception(e)

                # Refreshing CSW records
                logger.debug(
                    f"... Updating the Catalogue entries for Layer {instance.title}"
                )
                try:
                    catalogue_post_save(instance=instance,
                                        sender=instance.__class__)
                except Exception as e:
                    logger.exception(e)

                # Refreshing layer links
                logger.debug(
                    f"... Creating Default Resource Links for Layer {instance.title}"
                )
                try:
                    set_resource_default_links(instance, instance, prune=True)
                except Exception as e:
                    logger.exception(e)

                # Save layer attributes
                logger.debug(
                    f"... Refresh GeoServer attributes list for Layer {instance.title}"
                )
                try:
                    set_attributes_from_geoserver(instance)
                except Exception as e:
                    logger.exception(e)

                # Save layer styles
                logger.debug(
                    f"... Refresh Legend links for Layer {instance.title}")
                try:
                    set_styles(instance, gs_catalog)
                except Exception as e:
                    logger.exception(e)

                # Invalidate GeoWebCache for the updated resource
                try:
                    _stylefilterparams_geowebcache_layer(instance.alternate)
                    _invalidate_geowebcache_layer(instance.alternate)
                except Exception:
                    pass

                # Creating Layer Thumbnail by sending a signal
                from geonode.geoserver.signals import geoserver_post_save_complete
                geoserver_post_save_complete.send(sender=instance.__class__,
                                                  instance=instance)
            try:
                geonode_upload_sessions = UploadSession.objects.filter(
                    resource=instance)
                geonode_upload_sessions.update(processed=True)
            except Exception as e:
                logger.exception(e)

            # Updating HAYSTACK Indexes if needed
            if settings.HAYSTACK_SEARCH:
                call_command('update_index')
Example #18
def geoserver_finalize_upload(self, import_id, instance_id, permissions,
                              created, xml_file, sld_file, sld_uploaded,
                              tempdir):
    """
    Finalize Layer and GeoServer configuration:
     - Sets Layer Metadata from XML and updates GeoServer Layer accordingly.
     - Sets Default Permissions
    """
    instance = None
    try:
        instance = Layer.objects.get(id=instance_id)
    except Layer.DoesNotExist:
        logger.debug(f"Layer id {instance_id} does not exist yet!")
        raise

    lock_id = f'{self.request.id}'
    with AcquireLock(lock_id) as lock:
        if lock.acquire() is True:
            from geonode.upload.models import Upload
            upload = Upload.objects.get(import_id=import_id)
            upload.layer = instance
            upload.save()

            try:
                # Update the upload sessions
                geonode_upload_sessions = UploadSession.objects.filter(
                    resource=instance)
                geonode_upload_sessions.update(processed=False)
                instance.upload_session = geonode_upload_sessions.first()
            except Exception as e:
                logger.exception(e)

            # Sanity checks
            if isinstance(xml_file, list):
                if len(xml_file) > 0:
                    xml_file = xml_file[0]
                else:
                    xml_file = None
            elif not isinstance(xml_file, str):
                xml_file = None

            if xml_file and os.path.exists(xml_file) and os.access(
                    xml_file, os.R_OK):
                instance.metadata_uploaded = True

            try:
                gs_resource = gs_catalog.get_resource(
                    name=instance.name,
                    store=instance.store,
                    workspace=instance.workspace)
            except Exception:
                try:
                    gs_resource = gs_catalog.get_resource(
                        name=instance.alternate,
                        store=instance.store,
                        workspace=instance.workspace)
                except Exception:
                    try:
                        gs_resource = gs_catalog.get_resource(
                            name=instance.alternate or instance.typename)
                    except Exception:
                        gs_resource = None

            if gs_resource:
                # Updating GeoServer resource
                gs_resource.title = instance.title
                gs_resource.abstract = instance.abstract
                gs_catalog.save(gs_resource)
                if gs_resource.store:
                    instance.storeType = gs_resource.store.resource_type
                    if not instance.alternate:
                        instance.alternate = f"{gs_resource.store.workspace.name}:{gs_resource.name}"

            if sld_uploaded:
                geoserver_set_style(instance.id, sld_file)
            else:
                geoserver_create_style(instance.id, instance.name, sld_file,
                                       tempdir)

            logger.debug(
                f'Finalizing (permissions and notifications) Layer {instance}')
            instance.handle_moderated_uploads()

            if permissions is not None:
                logger.debug(
                    f'Setting permissions {permissions} for {instance.name}')
                instance.set_permissions(permissions, created=created)

            instance.save(notify=not created)

            try:
                logger.debug(
                    f"... Cleaning up the temporary folders {tempdir}")
                if tempdir and os.path.exists(tempdir):
                    shutil.rmtree(tempdir)
            except Exception as e:
                logger.warning(e)
            finally:
                upload.complete = True
                upload.save()

            signals.upload_complete.send(sender=geoserver_finalize_upload,
                                         layer=instance)
Example #19
File: tasks.py Project: umeier/geonode
def geoserver_create_style(self, instance_id, name, sld_file, tempdir):
    """
    Sets or creates styles from the Upload Session.
    """
    instance = None
    try:
        instance = Layer.objects.get(id=instance_id)
    except Layer.DoesNotExist:
        logger.debug(f"Layer id {instance_id} does not exist yet!")
        raise

    lock_id = f'{self.request.id}'
    with AcquireLock(lock_id) as lock:
        if lock.acquire() is True and instance:
            publishing = gs_catalog.get_layer(name)
            if sld_file and os.path.exists(sld_file) and os.access(
                    sld_file, os.R_OK):
                f = None
                if os.path.isfile(sld_file):
                    try:
                        f = open(sld_file, 'r')
                    except Exception:
                        pass
                elif tempdir and os.path.exists(tempdir):
                    if os.path.isfile(os.path.join(tempdir, sld_file)):
                        try:
                            f = open(os.path.join(tempdir, sld_file), 'r')
                        except Exception:
                            pass

                if f:
                    sld = f.read()
                    f.close()
                else:
                    sld = get_sld_for(gs_catalog, publishing)
            else:
                sld = get_sld_for(gs_catalog, publishing)

            style = None
            if sld is not None:
                try:
                    gs_catalog.create_style(
                        name,
                        sld,
                        raw=True,
                        workspace=settings.DEFAULT_WORKSPACE)
                    gs_catalog.reset()
                except geoserver.catalog.ConflictingDataError:
                    try:
                        gs_catalog.create_style(
                            name + '_layer',
                            sld,
                            raw=True,
                            workspace=settings.DEFAULT_WORKSPACE)
                        gs_catalog.reset()
                    except geoserver.catalog.ConflictingDataError as e:
                        msg = 'There was already a style named %s in GeoServer, cannot overwrite: "%s"' % (
                            name, str(e))
                        logger.error(msg)
                        e.args = (msg, )

                if style is None:
                    try:
                        style = gs_catalog.get_style(
                            name, workspace=settings.DEFAULT_WORKSPACE
                        ) or gs_catalog.get_style(name)
                    except Exception:
                        logger.warn(
                            'Could not retrieve the Layer default Style name')
                        try:
                            style = gs_catalog.get_style(name + '_layer', workspace=settings.DEFAULT_WORKSPACE) or \
                                gs_catalog.get_style(name + '_layer')
                            logger.warn(
                                'No style could be created for the layer, falling back to POINT default one'
                            )
                        except Exception as e:
                            style = gs_catalog.get_style('point')
                            logger.warn(str(e))
                if style:
                    publishing.default_style = style
                    logger.debug('default style set to %s', name)
                    gs_catalog.save(publishing)