Code Example #1
def delete(self, *args, **kwargs):
    importer_locations = []
    upload_files = [_file.file for _file in UploadFile.objects.filter(upload=self)]
    super(Upload, self).delete(*args, **kwargs)
    try:
        session = gs_uploader.get_session(self.import_id)
    except Exception:  # gsimporter NotFound or any other failure
        session = None
    if session:
        for task in session.tasks:
            # a default of None avoids an AttributeError on tasks without data
            if getattr(task, 'data', None):
                importer_locations.append(task.data.location)
        try:
            session.delete()
        except Exception:
            logger.warning('error deleting upload session')
    for _file in upload_files:
        try:
            if os.path.isfile(_file.path):
                os.remove(_file.path)
        except Exception as e:
            logger.warning(e)
    for _location in importer_locations:
        try:
            shutil.rmtree(_location)
        except Exception as e:
            logger.warning(e)
    if self.upload_dir and os.path.exists(self.upload_dir):
        try:
            shutil.rmtree(self.upload_dir)
        except Exception as e:
            logger.warning(e)
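
Aside: every variant in this collection wraps gs_uploader.get_session in the same fetch-or-None guard. A minimal standalone sketch of that recurring pattern (_safe_get_session is a hypothetical name; the NotFound import path is an assumption based on the gsimporter client these snippets use):

from gsimporter.api import NotFound  # assumed import path

def _safe_get_session(gs_uploader, import_id):
    """Fetch an importer session by id, returning None instead of raising."""
    if not import_id:
        return None
    try:
        return gs_uploader.get_session(import_id)
    except NotFound:
        # the session no longer exists on the GeoServer importer side
        return None
    except Exception:
        # any other failure (network, auth, ...) is treated the same way
        return None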
Code Example #2
    def delete(self, *args, **kwargs):
        importer_locations = []
        super().delete(*args, **kwargs)
        try:
            session = gs_uploader.get_session(self.import_id)
        except Exception:  # gsimporter NotFound or any other failure
            session = None
        if session:
            for task in session.tasks:
                if getattr(task, 'data', None):
                    importer_locations.append(task.data.location)
            try:
                session.delete()
            except Exception:
                logger.warning('error deleting upload session')

        for _location in importer_locations:
            try:
                shutil.rmtree(_location)
            except Exception as e:
                logger.warning(e)

        # finally, remove the local upload directory
        if self.upload_dir and os.path.exists(self.upload_dir):
            try:
                shutil.rmtree(self.upload_dir)
            except Exception as e:
                logger.warning(e)
Code Example #3
def _update_upload_session_state(self, upload_session_id: int):
    """Task invoked by 'upload_workflow.chord' in order to process all the 'PENDING' Upload tasks."""

    lock_id = f'{self.request.id}'
    with AcquireLock(lock_id) as lock:
        if lock.acquire() is True:
            _upload = Upload.objects.get(id=upload_session_id)
            session = _upload.get_session.import_session
            if not session or session.state != enumerations.STATE_COMPLETE:
                session = gs_uploader.get_session(_upload.import_id)

            if session:
                try:
                    content = next_step_response(None, _upload.get_session).content
                    if isinstance(content, bytes):
                        content = content.decode('UTF-8')
                    response_json = json.loads(content)
                    _success = response_json.get('success', False)
                    _redirect_to = response_json.get('redirect_to', '')
                    if _success:
                        if 'upload/final' not in _redirect_to and 'upload/check' not in _redirect_to:
                            _upload.set_resume_url(_redirect_to)
                            _upload.set_processing_state(enumerations.STATE_WAITING)
                        else:
                            if session.state == enumerations.STATE_COMPLETE and _upload.state == enumerations.STATE_PENDING:
                                if not _upload.resource or not _upload.resource.processed:
                                    final_step_view(None, _upload.get_session)
                                _upload.set_processing_state(enumerations.STATE_RUNNING)
                except Exception as e:  # gsimporter NotFound included
                    logger.exception(e)
                    if _upload.state not in (enumerations.STATE_COMPLETE, enumerations.STATE_PROCESSED):
                        _upload.set_processing_state(enumerations.STATE_INVALID)
                        if _upload.resource:
                            resource_manager.delete(_upload.resource.uuid)
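
The lock guard above (also used in Examples #7 and #17 below) relies only on AcquireLock being a context manager whose acquire() returns True on success. A minimal sketch of a compatible implementation backed by Django's cache; the class body is hypothetical, only the with/acquire() usage shape is taken from these examples:

from django.core.cache import cache

class AcquireLock:
    """Hypothetical cache-backed lock matching the usage seen above."""

    def __init__(self, lock_id, timeout=60):
        self.lock_id = lock_id
        self.timeout = timeout
        self._acquired = False

    def acquire(self):
        # cache.add only succeeds if the key is absent (atomic on memcached/redis)
        self._acquired = cache.add(self.lock_id, 'locked', self.timeout)
        return self._acquired

    def __enter__(self):
        return self

    def __exit__(self, *exc):
        if self._acquired:
            cache.delete(self.lock_id)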
Code Example #4
File: upload.py Project: Seraf69/geonode
def run_import(upload_session, async_upload):
    """Run the import, possibly asynchronously.

    Returns the target datastore.
    """
    import_session = upload_session.import_session
    import_session = gs_uploader.get_session(import_session.id)
    task = import_session.tasks[0]
    import_execution_requested = False
    if import_session.state == 'INCOMPLETE':
        if task.state != 'ERROR':
            raise Exception('unknown item state: %s' % task.state)
    elif import_session.state == 'PENDING' and task.target.store_type == 'coverageStore':
        if task.state == 'READY':
            import_session.commit(async_upload)
            import_execution_requested = True

    # if a target datastore is configured, ensure the datastore exists
    # in geoserver and set the uploader target appropriately

    if ogc_server_settings.GEOGIG_ENABLED and upload_session.geogig is True \
            and task.target.store_type != 'coverageStore':
        target = create_geoserver_db_featurestore(
            store_type='geogig',
            store_name=upload_session.geogig_store,
            author_name=upload_session.user.username,
            author_email=upload_session.user.email)
        _log(
            'setting target datastore %s %s',
            target.name,
            target.workspace.name)
        task.set_target(target.name, target.workspace.name)

    elif ogc_server_settings.datastore_db and task.target.store_type != 'coverageStore':
        target = create_geoserver_db_featurestore()
        _log(
            'setting target datastore %s %s',
            target.name,
            target.workspace.name)
        task.set_target(target.name, target.workspace.name)
    else:
        target = task.target

    if upload_session.update_mode:
        _log('setting updateMode to %s', upload_session.update_mode)
        task.set_update_mode(upload_session.update_mode)

    _log('running import session')
    # run async if using a database; skip when already committed above
    if not import_execution_requested:
        import_session.commit(async_upload)

    # @todo check status of import session - it may fail, but due to protocol,
    # this will not be reported during the commit
    return target
Code Example #5
def run_import(upload_session, async_upload=_ASYNC_UPLOAD):
    """Run the import, possibly asynchronously.

    Returns the target datastore.
    """
    # run_import can raise an exception which callers should handle
    import_session = upload_session.import_session
    import_session = gs_uploader.get_session(import_session.id)
    task = import_session.tasks[0]
    import_execution_requested = False
    if import_session.state == 'INCOMPLETE':
        if task.state != 'ERROR':
            raise Exception(_('unknown item state: %s' % task.state))
    elif import_session.state == 'PENDING' and task.target.store_type == 'coverageStore':
        if task.state == 'READY':
            import_session.commit(async_upload)
            import_execution_requested = True
        if task.state == 'ERROR':
            progress = task.get_progress()
            raise Exception(
                _('error during import: %s' % progress.get('message')))

    # if a target datastore is configured, ensure the datastore exists
    # in geoserver and set the uploader target appropriately
    if ogc_server_settings.GEOGIG_ENABLED and upload_session.geogig is True \
            and task.target.store_type != 'coverageStore':
        target = create_geoserver_db_featurestore(
            store_type='geogig',
            store_name=upload_session.geogig_store,
            author_name=upload_session.user.username,
            author_email=upload_session.user.email)
        _log('setting target datastore %s %s', target.name,
             target.workspace.name)
        task.set_target(target.name, target.workspace.name)
    elif ogc_server_settings.datastore_db and task.target.store_type != 'coverageStore':
        target = create_geoserver_db_featurestore(
            # store_name=ogc_server_settings.DATASTORE,
            store_name=ogc_server_settings.datastore_db['NAME'])
        _log('setting target datastore %s %s', target.name,
             target.workspace.name)
        task.set_target(target.name, target.workspace.name)
    else:
        target = task.target

    if upload_session.update_mode:
        _log('setting updateMode to %s', upload_session.update_mode)
        task.set_update_mode(upload_session.update_mode)

    _log('running import session')
    # run async if using a database
    if not import_execution_requested:
        import_session.commit(async_upload)

    # @todo check status of import session - it may fail, but due to protocol,
    # this will not be reported during the commit
    return target
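
The comment at the top of this variant notes that run_import can raise an exception which callers should handle; a sketch of such a caller (commit_upload is a hypothetical name, logger is assumed from the surrounding module):

def commit_upload(upload_session, async_upload=True):
    """Hypothetical caller: run the import and surface failures."""
    try:
        # run_import raises on unknown or errored task states
        target = run_import(upload_session, async_upload=async_upload)
    except Exception as e:
        logger.exception(e)
        raise
    # the returned target is the datastore the importer writes into
    return target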
Code Example #6
File: upload.py Project: vesnikos/geonode
def run_import(upload_session, async_upload):
    """Run the import, possibly asynchronously.

    Returns the target datastore.
    """
    import_session = upload_session.import_session
    import_session = gs_uploader.get_session(import_session.id)
    task = import_session.tasks[0]
    import_execution_requested = False
    if import_session.state == 'INCOMPLETE':
        if task.state != 'ERROR':
            raise Exception('unknown item state: %s' % task.state)
    elif import_session.state == 'PENDING' and task.target.store_type == 'coverageStore':
        if task.state == 'READY':
            import_session.commit(async_upload)
            import_execution_requested = True

    # if a target datastore is configured, ensure the datastore exists
    # in geoserver and set the uploader target appropriately

    if ogc_server_settings.GEOGIG_ENABLED and upload_session.geogig is True \
            and task.target.store_type != 'coverageStore':

        target = create_geoserver_db_featurestore(
            store_type='geogig',
            store_name=upload_session.geogig_store)
        _log(
            'setting target datastore %s %s',
            target.name,
            target.workspace.name)
        task.set_target(target.name, target.workspace.name)

    elif ogc_server_settings.datastore_db and task.target.store_type != 'coverageStore':
        target = create_geoserver_db_featurestore()
        _log(
            'setting target datastore %s %s',
            target.name,
            target.workspace.name)
        task.set_target(target.name, target.workspace.name)
    else:
        target = task.target

    if upload_session.update_mode:
        _log('setting updateMode to %s', upload_session.update_mode)
        task.set_update_mode(upload_session.update_mode)

    _log('running import session')
    # run async if using a database; skip when already committed above
    if not import_execution_requested:
        import_session.commit(async_upload)

    # @todo check status of import session - it may fail, but due to protocol,
    # this will not be reported during the commit
    return target
Code Example #7
def get_resume_url(self):
    if self.state == Upload.STATE_WAITING and self.import_id:
        return f"{reverse('data_upload')}?id={self.import_id}"
    else:
        session = None
        try:
            if not self.import_id:
                raise NotFound
            session = self.get_session.import_session
            if not session or session.state != Upload.STATE_COMPLETE:
                session = gs_uploader.get_session(self.import_id)
        except Exception:  # gsimporter NotFound or any other failure
            if self.state not in (Upload.STATE_COMPLETE, Upload.STATE_PROCESSED):
                self.state = Upload.STATE_INVALID
                Upload.objects.filter(id=self.id).update(state=Upload.STATE_INVALID)
        if session:
            lock_id = f'{self.import_id}'
            with AcquireLock(lock_id) as lock:
                if lock.acquire() is True:
                    try:
                        content = next_step_response(None, self.get_session).content
                        if isinstance(content, bytes):
                            content = content.decode('UTF-8')
                        response_json = json.loads(content)
                        if response_json['success'] and 'redirect_to' in response_json:
                            _redirect_to = response_json['redirect_to']
                            if 'upload/final' not in _redirect_to and 'upload/check' not in _redirect_to:
                                self.state = Upload.STATE_WAITING
                                Upload.objects.filter(id=self.id).update(state=Upload.STATE_WAITING)
                                return f"{reverse('data_upload')}?id={self.import_id}"
                            else:
                                # '_next' avoids shadowing the builtin 'next'
                                _next = get_next_step(self.get_session)
                                if _next == 'final' and session.state == Upload.STATE_COMPLETE \
                                        and self.state == Upload.STATE_PENDING:
                                    if not self.layer or not self.layer.processed:
                                        from .views import final_step_view
                                        final_step_view(None, self.get_session)
                                    self.state = Upload.STATE_RUNNING
                                    Upload.objects.filter(id=self.id).update(state=Upload.STATE_RUNNING)
                    except Exception as e:
                        logger.exception(e)
                        if self.state not in (Upload.STATE_COMPLETE, Upload.STATE_PROCESSED):
                            self.state = Upload.STATE_INVALID
                            Upload.objects.filter(id=self.id).update(state=Upload.STATE_INVALID)
    return None
Code Example #8
def _update_upload_session_state(self, upload_session_id: int):
    """Task invoked by 'upload_workflow.chord' in order to process all the 'PENDING' Upload tasks."""
    _upload = Upload.objects.get(id=upload_session_id)
    session = _upload.get_session.import_session
    if not session or session.state != enumerations.STATE_COMPLETE:
        session = gs_uploader.get_session(_upload.import_id)

    if session:
        try:
            _response = next_step_response(None, _upload.get_session)
            _upload.refresh_from_db()
            _content = _response.content
            if isinstance(_content, bytes):
                _content = _content.decode('UTF-8')
            _response_json = json.loads(_content)
            _success = _response_json.get('success', False)
            _redirect_to = _response_json.get('redirect_to', '')
            if _success:
                if 'upload/final' not in _redirect_to and 'upload/check' not in _redirect_to:
                    _upload.set_resume_url(_redirect_to)
                    _upload.set_processing_state(enumerations.STATE_WAITING)
                else:
                    if session.state == enumerations.STATE_COMPLETE and _upload.state == enumerations.STATE_PENDING:
                        if not _upload.resource or not _upload.resource.processed:
                            _response = final_step_view(None, _upload.get_session)
                            _upload.refresh_from_db()
                            _content = _response.content
                            if isinstance(_content, bytes):
                                _content = _content.decode('UTF-8')
                            _response_json = json.loads(_content)
                            _success = _response_json.get('success', False)
                            _status = _response_json.get('status', 'error')
                            if _status == 'error':
                                # GeoNode Layer creation errored!
                                _upload.set_processing_state(enumerations.STATE_INVALID)
                            elif _status == 'pending':
                                # GeoNode Layer not ready yet...
                                _upload.set_processing_state(enumerations.STATE_PENDING)
                            elif _upload.state != enumerations.STATE_PROCESSED:
                                if _upload.resource and _upload.resource.processed:
                                    # GeoNode Layer successfully processed...
                                    _upload.set_processing_state(enumerations.STATE_PROCESSED)
                                else:
                                    # GeoNode Layer updating...
                                    _upload.set_processing_state(enumerations.STATE_RUNNING)
                logger.debug(f"Upload {upload_session_id} updated with state {_upload.state}.")
        except Exception as e:  # gsimporter NotFound included
            logger.exception(e)
            if _upload.state not in (enumerations.STATE_COMPLETE, enumerations.STATE_PROCESSED):
                _upload.set_processing_state(enumerations.STATE_INVALID)
                logger.error(f"Upload {upload_session_id} deleted with state {_upload.state}.")
Code Example #9
File: models.py Project: vesnikos/geonode
def delete(self, cascade=True):
    models.Model.delete(self)
    if cascade:
        try:
            session = gs_uploader.get_session(self.import_id)
        except NotFound:
            session = None
        if session:
            try:
                session.delete()
            except Exception:  # a bare 'except:' would also trap SystemExit
                logging.exception('error deleting upload session')
        if self.upload_dir and path.exists(self.upload_dir):
            shutil.rmtree(self.upload_dir)
Code Example #10
File: models.py Project: ermis-f/geonode
def delete(self, cascade=True):
    models.Model.delete(self)
    if cascade:
        try:
            session = gs_uploader.get_session(self.import_id)
        except NotFound:
            session = None
        if session:
            try:
                session.delete()
            except Exception:  # Exception, not BaseException: keep Ctrl-C working
                logging.exception('error deleting upload session')
        if self.upload_dir and path.exists(self.upload_dir):
            shutil.rmtree(self.upload_dir)
Code Example #11
File: utils.py Project: karakostis/geonode
def run_import(upload_session, async_upload=_ASYNC_UPLOAD):
    """Run the import, possibly asynchronously.

    Returns the target datastore.
    """
    # run_import can raise an exception which callers should handle
    import_session = upload_session.import_session
    import_session = gs_uploader.get_session(import_session.id)
    task = import_session.tasks[0]
    import_execution_requested = False
    if import_session.state == 'INCOMPLETE':
        if task.state != 'ERROR':
            raise Exception(_('unknown item state: %s' % task.state))
    elif import_session.state == 'PENDING' and task.target.store_type == 'coverageStore':
        if task.state == 'READY':
            import_session.commit(async_upload)
            import_execution_requested = True
        if task.state == 'ERROR':
            progress = task.get_progress()
            raise Exception(_(
                'error during import: %s' %
                progress.get('message')))

    # if a target datastore is configured, ensure the datastore exists
    # in geoserver and set the uploader target appropriately
    if ogc_server_settings.datastore_db and task.target.store_type != 'coverageStore':
        target = create_geoserver_db_featurestore(
            # store_name=ogc_server_settings.DATASTORE,
            store_name=ogc_server_settings.datastore_db['NAME']
        )
        _log(
            'setting target datastore %s %s',
            target.name,
            target.workspace.name)
        task.set_target(target.name, target.workspace.name)
    else:
        target = task.target

    if upload_session.update_mode:
        _log('setting updateMode to %s', upload_session.update_mode)
        task.set_update_mode(upload_session.update_mode)

    _log('running import session')
    # run async if using a database
    if not import_execution_requested:
        import_session.commit(async_upload)

    # @todo check status of import session - it may fail, but due to protocol,
    # this will not be reported during the commit
    return target
Code Example #12
def get_import_url(self):
    session = None
    try:
        if not self.import_id:
            raise NotFound
        session = self.get_session.import_session
        if not session or session.state != enumerations.STATE_COMPLETE:
            session = gs_uploader.get_session(self.import_id)
    except Exception:  # gsimporter NotFound or any other failure
        if self.state not in (enumerations.STATE_COMPLETE, enumerations.STATE_PROCESSED):
            self.set_processing_state(enumerations.STATE_INVALID)
    if session and self.state != enumerations.STATE_INVALID:
        return f"{ogc_server_settings.LOCATION}rest/imports/{session.id}"
    return None
Code Example #13
def get_import_url(self):
    session = None
    try:
        if not self.import_id:
            raise NotFound
        session = self.get_session.import_session
        if not session or session.state != Upload.STATE_COMPLETE:
            session = gs_uploader.get_session(self.import_id)
    except Exception:  # gsimporter NotFound or any other failure
        if self.state not in (Upload.STATE_COMPLETE, Upload.STATE_PROCESSED):
            self.state = Upload.STATE_INVALID
            Upload.objects.filter(id=self.id).update(state=Upload.STATE_INVALID)
    if session and self.state != Upload.STATE_INVALID:
        return f"{ogc_server_settings.LOCATION}rest/imports/{session.id}"
    return None
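
Example #12 is the same lookup as Example #13 with the two-step state bookkeeping factored into set_processing_state. A sketch of what that helper plausibly does, inferred from the inline version in Example #13 (the body is an assumption, not the verbatim GeoNode implementation):

def set_processing_state(self, state):
    # keep the in-memory instance and the DB row in sync,
    # mirroring the inline two-step update in Example #13
    self.state = state
    Upload.objects.filter(id=self.id).update(state=state)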
Code Example #14
    def delete(self, *args, **kwargs):
        importer_locations = []
        super().delete(*args, **kwargs)
        try:
            session = gs_uploader.get_session(self.import_id)
        except Exception:  # gsimporter NotFound or any other failure
            session = None
        if session:
            for task in session.tasks:
                if getattr(task, 'data', None):
                    importer_locations.append(task.data.location)
            try:
                session.delete()
            except Exception:
                logger.warning('error deleting upload session')

        # delete the resource's underlying files directly from storage
        if self.resource:
            for _file in self.resource.files:
                try:
                    if storage_manager.exists(_file):
                        storage_manager.delete(_file)
                except Exception as e:
                    logger.warning(e)

            # Do we want to delete the files also from the resource?
            ResourceBase.objects.filter(id=self.resource.id).update(files={})

        for _location in importer_locations:
            try:
                shutil.rmtree(_location)
            except Exception as e:
                logger.warning(e)

        # finally, remove the local upload directory
        if self.upload_dir and os.path.exists(self.upload_dir):
            try:
                shutil.rmtree(self.upload_dir)
            except Exception as e:
                logger.warning(e)
Code Example #15
File: utils.py Project: atenekom/geonode
    layer_values = values or _get_layer_values(layer, upload_session, expand=0)
    att_list = _get_time_dimensions(layer, upload_session)
    _is_eligible = att_list or False
    if upload_session and _is_eligible:
        upload_session.time = True
    return (_is_eligible, layer_values)


def run_import(upload_session, async_upload=_ASYNC_UPLOAD):
    """Run the import, possibly asynchronously.

    Returns the target datastore.
    """
    # run_import can raise an exception which callers should handle
    import_session = upload_session.import_session
    import_session = gs_uploader.get_session(import_session.id)
    task = import_session.tasks[0]
    import_execution_requested = False
    if import_session.state == 'INCOMPLETE':
        if task.state != 'ERROR':
            raise Exception(_('unknown item state: %s' % task.state))
    elif import_session.state == 'PENDING' and task.target.store_type == 'coverageStore':
        if task.state == 'READY':
            import_session.commit(async_upload)
            import_execution_requested = True
        if task.state == 'ERROR':
            progress = task.get_progress()
            raise Exception(_(
                'error during import: %s' %
                progress.get('message')))
Code Example #16
    layer_values = values or _get_layer_values(layer, upload_session, expand=0)
    att_list = _get_time_dimensions(layer, upload_session)
    _is_eligible = att_list or False
    if upload_session and _is_eligible:
        upload_session.time = True
    return (_is_eligible, layer_values)


def run_import(upload_session, async_upload=_ASYNC_UPLOAD):
    """Run the import, possibly asynchronously.

    Returns the target datastore.
    """
    # run_import can raise an exception which callers should handle
    import_session = upload_session.import_session
    import_session = gs_uploader.get_session(import_session.id)
    task = import_session.tasks[0]
    import_execution_requested = False
    if import_session.state == 'INCOMPLETE':
        if task.state != 'ERROR':
            raise Exception('unknown item state: %s' % task.state)
    elif import_session.state == 'PENDING' and task.target.store_type == 'coverageStore':
        if task.state == 'READY':
            import_session.commit(async_upload)
            import_execution_requested = True
        if task.state == 'ERROR':
            progress = task.get_progress()
            raise Exception('error during import: %s' %
                            progress.get('message'))

    # if a target datastore is configured, ensure the datastore exists
Code Example #17
def finalize_incomplete_session_uploads(self, *args, **kwargs):
    """The task periodically checks for pending and stale Upload sessions.
    It runs every 600 seconds (see the PeriodTask on geonode.upload._init_),
    checks first for expired stale Upload sessions and schedule them for cleanup.
    We have to make sure To NOT Delete those Unprocessed Ones,
    which are in live sessions.
    After removing the stale ones, it collects all the unprocessed and runs them
    in parallel."""

    lock_id = f'{self.request.id}'
    with AcquireLock(lock_id) as lock:
        if lock.acquire() is True:
            _upload_ids = []
            _upload_tasks = []

            # Check first if we need to delete stale sessions
            expiry_time = now() - timedelta(hours=UPLOAD_SESSION_EXPIRY_HOURS)
            for _upload in Upload.objects.exclude(
                    state=enumerations.STATE_PROCESSED).exclude(
                        date__gt=expiry_time):
                _upload.set_processing_state(enumerations.STATE_INVALID)
                _upload_ids.append(_upload.id)
                _upload_tasks.append(
                    _upload_session_cleanup.signature(args=(_upload.id, )))

            upload_workflow_finalizer = _upload_workflow_finalizer.signature(
                args=('_upload_session_cleanup', _upload_ids),
                immutable=True
            ).on_error(
                _upload_workflow_error.signature(
                    args=('_upload_session_cleanup', _upload_ids),
                    immutable=True)
            )
            upload_workflow = chord(_upload_tasks, body=upload_workflow_finalizer)
            upload_workflow.apply_async()

            # Let's finish the valid ones, starting from a fresh batch
            # (without this reset, the second chord would re-enqueue the
            # cleanup tasks collected above)
            _upload_ids = []
            _upload_tasks = []
            _processing_states = (
                enumerations.STATE_RUNNING,
                enumerations.STATE_INVALID,
                enumerations.STATE_PROCESSED)
            for _upload in Upload.objects.exclude(state__in=_processing_states):
                session = None
                try:
                    if not _upload.import_id:
                        raise NotFound
                    session = _upload.get_session.import_session
                    if not session or session.state != enumerations.STATE_COMPLETE:
                        session = gs_uploader.get_session(_upload.import_id)
                except Exception as e:  # gsimporter NotFound included
                    logger.exception(e)
                    session = None
                    if _upload.state not in (enumerations.STATE_COMPLETE,
                                             enumerations.STATE_PROCESSED):
                        _upload.set_processing_state(
                            enumerations.STATE_INVALID)
                        if _upload.resource:
                            resource_manager.delete(_upload.resource.uuid)

                if session:
                    _upload_ids.append(_upload.id)
                    _upload_tasks.append(
                        _update_upload_session_state.signature(
                            args=(_upload.id, )))

            upload_workflow_finalizer = _upload_workflow_finalizer.signature(
                args=('_update_upload_session_state', _upload_ids),
                immutable=True
            ).on_error(
                _upload_workflow_error.signature(
                    args=('_update_upload_session_state', _upload_ids),
                    immutable=True)
            )
            upload_workflow = chord(_upload_tasks, body=upload_workflow_finalizer)
            upload_workflow.apply_async()
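
Example #17 leans on Celery's chord primitive: a header of parallel tasks plus a body task that runs once they all finish. A minimal self-contained sketch of that pattern outside GeoNode (task names and the broker/backend URLs are placeholders; note that chords need a result backend to run):

from celery import Celery, chord

app = Celery('sketch', broker='memory://', backend='cache+memory://')

@app.task
def cleanup(upload_id):
    # stand-in for _upload_session_cleanup
    return upload_id

@app.task
def finalize(label, upload_ids):
    # stand-in for _upload_workflow_finalizer; built with .si(...) so it is
    # immutable and ignores the header results, as in the example above
    return (label, upload_ids)

# header tasks run in parallel; the immutable body runs after all of them
workflow = chord(
    [cleanup.s(_id) for _id in (1, 2, 3)],
    body=finalize.si('_upload_session_cleanup', [1, 2, 3]),
)
workflow.apply_async()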