Example #1
    def save(self, document, validate=True, triggerEvents=True):
        """
        Create or update a document in the collection. This triggers two
        events: one prior to validation, and one prior to saving. Either of
        these events may have their default action prevented.

        :param document: The document to save.
        :type document: dict
        :param validate: Whether to call the model's validate() before saving.
        :type validate: bool
        :param triggerEvents: Whether to trigger events for validate and
            pre- and post-save hooks.
        """
        if validate and triggerEvents:
            event = events.trigger('.'.join(('model', self.name, 'validate')),
                                   document)
            if event.defaultPrevented:
                validate = False

        if validate:
            document = self.validate(document)

        if triggerEvents:
            event = events.trigger('model.{}.save'.format(self.name), document)
            if event.defaultPrevented:
                return document

        document['_id'] = self.collection.save(document)

        if triggerEvents:
            events.trigger('model.{}.save.after'.format(self.name), document)

        return document
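Both hookable events in save() follow the events module's preventDefault contract: a listener that prevents the default on the validate event makes save() skip validation, and on the save event makes it return without persisting. A minimal listener sketch, assuming a model named 'user' and an illustrative 'trusted' flag on the document:

from girder import events

def skipValidation(event):
    # event.info is the document that save() passed to events.trigger().
    if event.info.get('trusted'):  # illustrative condition
        # save() checks event.defaultPrevented and sets validate = False.
        event.preventDefault()

events.bind('model.user.validate', 'myPlugin', skipValidation)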
Example #2
    def mongoSearch(self, params):
        self.requireParams(('type', 'q'), params)
        allowed = {
            'collection': ['_id', 'name', 'description'],
            'folder': ['_id', 'name', 'description'],
            'item': ['_id', 'name', 'description', 'folderId'],
            'user': ['_id', 'firstName', 'lastName', 'login']
        }
        limit, offset, sort = self.getPagingParameters(params, 'name')
        coll = params['type']

        events.trigger('mongo_search.allowed_collections', info=allowed)

        if coll not in allowed:
            raise RestException('Invalid resource type: {}'.format(coll))

        try:
            query = bson.json_util.loads(params['q'])
        except ValueError:
            raise RestException('The query parameter must be a JSON object.')

        model = ModelImporter().model(coll)
        if hasattr(model, 'filterResultsByPermission'):
            cursor = model.find(
                query, fields=allowed[coll] + ['public', 'access'], limit=0)
            return [r for r in model.filterResultsByPermission(
                cursor, user=self.getCurrentUser(), level=AccessType.READ,
                limit=limit, offset=offset, removeKeys=('public', 'access'))]
        else:
            return [r for r in model.find(query, fields=allowed[coll],
                                          limit=limit, offset=offset)]
Example #3
def unmountServerFuse(name):
    """
    Unmount a mounted FUSE mount.  This may fail if there are open files on the
    mount.

    :param name: a key within the list of known mounts.
    """
    with _fuseMountsLock:
        entry = _fuseMounts.pop(name, None)
        if entry:
            events.trigger('server_fuse.unmount', {'name': name})
            path = entry['path']
            # Girder uses shutilwhich on Python < 3
            if shutil.which('fusermount'):
                subprocess.call(['fusermount', '-u', os.path.realpath(path)])
            else:
                subprocess.call(['umount', os.path.realpath(path)])
            if entry['thread']:
                entry['thread'].join(10)
            # clean up previous processes so there aren't any zombies
            try:
                os.waitpid(-1, os.WNOHANG)
            except OSError:
                # Don't throw an error; sometimes we get an
                # errno 10: no child processes
                pass
Example #4
    def createUploadToFile(self, file, user, size):
        """
        Creates a new upload record into a file that already exists. This
        should be used when updating the contents of a file. Deletes any
        previous file content from the assetstore it was in. This will upload
        into the current assetstore rather than the assetstore the file was
        previously contained in.

        :param file: The file record to update.
        :param user: The user performing this upload.
        :param size: The size of the new file contents.
        """
        eventParams = {'model': 'file', 'resource': file}
        events.trigger('model.upload.assetstore', eventParams)
        assetstore = eventParams.get('assetstore',
                                     self.model('assetstore').getCurrent())

        adapter = assetstore_utilities.getAssetstoreAdapter(assetstore)
        now = datetime.datetime.utcnow()

        upload = {
            'created': now,
            'updated': now,
            'userId': user['_id'],
            'fileId': file['_id'],
            'assetstoreId': assetstore['_id'],
            'size': size,
            'name': file['name'],
            'mimeType': file['mimeType'],
            'received': 0
        }
        upload = adapter.initUpload(upload)
        return self.save(upload)
Example #5
    def createUploadToFile(self, file, user, size):
        """
        Creates a new upload record into a file that already exists. This
        should be used when updating the contents of a file. Deletes any
        previous file content from the assetstore it was in. This will upload
        into the current assetstore rather than the assetstore the file was
        previously contained in.

        :param file: The file record to update.
        :param user: The user performing this upload.
        :param size: The size of the new file contents.
        """
        eventParams = {"model": "file", "resource": file}
        events.trigger("model.upload.assetstore", eventParams)
        assetstore = eventParams.get("assetstore", self.model("assetstore").getCurrent())

        adapter = assetstore_utilities.getAssetstoreAdapter(assetstore)
        now = datetime.datetime.utcnow()

        upload = {
            "created": now,
            "updated": now,
            "userId": user["_id"],
            "fileId": file["_id"],
            "assetstoreId": assetstore["_id"],
            "size": size,
            "name": file["name"],
            "mimeType": file["mimeType"],
            "received": 0,
        }
        upload = adapter.initUpload(upload)
        return self.save(upload)
Example #6
def load(info):
    ext = ResourceExt(info)
    events.bind("model.setting.save.after", "provenanceMain", ext.bindModels)
    events.bind("provenance.initialize", "provenanceMain", ext.bindModels)
    events.trigger("provenance.initialize", info={})
    events.bind("model.file.save", "provenanceMain", ext.fileSaveHandler)
    events.bind("model.file.save.created", "provenanceMain", ext.fileSaveCreatedHandler)
    events.bind("model.file.remove", "provenance", ext.fileRemoveHandler)
Example #7
    def destroy(self, path):
        """
        Handle shutdown of the FUSE.

        :param path: always '/'.
        """
        events.trigger('server_fuse.destroy')
        return super(ServerFuse, self).destroy(path)
Example #8
 def stream():
     yield file['linkUrl'][offset:endByte]
     if endByte >= len(file['linkUrl']):
         events.trigger('model.file.download.complete', info={
             'file': file,
             'startByte': offset,
             'endByte': endByte,
             'redirect': False})
Example #9
def load(info):
    ext = ResourceExt(info)
    events.bind('model.setting.save.after', 'provenanceMain', ext.bindModels)
    events.bind('provenance.initialize', 'provenanceMain', ext.bindModels)
    events.trigger('provenance.initialize', info={})
    events.bind('model.file.save', 'provenanceMain', ext.fileSaveHandler)
    events.bind('model.file.save.created', 'provenanceMain', ext.fileSaveCreatedHandler)
    events.bind('model.file.remove', 'provenance', ext.fileRemoveHandler)
Example #10
def solr_documents_from_field(field, values, classifications=None):
    """Given a field, and a list of values, return list of relevant solr documents.

    This performs several requests, each of size CHUNK_SIZE to avoid sending
    too much data (HTTP 413).

    Additionally it can take an iterable of classifications which will be
    searched for through Solr.

    :param paths: List of solr paths corresponding to the Solr id attribute
    :param classifications: List of classifications to search by
    :returns: List of solr documents
    """
    CHUNK_SIZE = 20
    documents = []

    event = events.trigger('imagespace.solr_documents_from_field', info={
        'field': field,
        'values': values
    })
    for response in event.responses:
        field = response['field']
        values = response['values']

    for i in range(0, len(values), CHUNK_SIZE):
        values_chunk = values[i:i + CHUNK_SIZE]

        if classifications:
            q = ' OR '.join(['%s:[.7 TO *]' % key
                             for key in classifications])
        else:
            q = '*:*'

        qparams = {
            'wt': 'json',
            'q': q,
            'rows': str(CHUNK_SIZE)
        }

        # Give plugins a chance to adjust the Solr query parameters
        event = events.trigger('imagespace.imagesearch.qparams', qparams)
        for response in event.responses:
            qparams = response

        # Filter by field
        qparams['fq'] = qparams['fq'] if 'fq' in qparams else []
        qparams['fq'].append('%(field)s:(%(value)s)' % {
            'field': field,
            'value': ' '.join(values_chunk)
        })

        r = requests.get(imagespaceSetting.get('IMAGE_SPACE_SOLR') + '/select',
                         params=qparams,
                         verify=False)

        documents += r.json()['response']['docs']

    return documents
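The 'imagespace.imagesearch.qparams' trigger gives plugins a chance to rewrite the Solr parameters: each handler response replaces qparams wholesale, so the last bound handler wins. A minimal handler sketch, with the plugin name and the row cap as illustrative assumptions:

from girder import events

def capRows(event):
    qparams = dict(event.info)  # copy the proposed Solr parameters
    qparams['rows'] = '10'      # illustrative: cap the page size
    event.addResponse(qparams)  # the loop over event.responses adopts this

events.bind('imagespace.imagesearch.qparams', 'myPlugin', capRows)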
Example #11
    def destroy(self, path):
        """
        Handle shutdown of the FUSE.

        :param path: always '/'.
        """
        Setting().unset(SettingKey.GIRDER_MOUNT_INFORMATION)
        events.trigger('server_fuse.destroy')
        return super(ServerFuse, self).destroy(path)
Example #12
 def downloadGenerator():
     for data in fileDownload():
         yield data
     if endByte is None or endByte >= file['size']:
         events.trigger('model.file.download.complete', info={
             'file': file,
             'startByte': offset,
             'endByte': endByte,
             'redirect': False})
Example #13
 def scheduleJob(self, job):
     """
     Trigger the event to schedule this job. Other plugins are in charge of
     actually scheduling and/or executing the job, except in the case when
     the handler is 'local'.
     """
     if job.get('async') is True:
         events.daemon.trigger('jobs.schedule', info=job)
     else:
         events.trigger('jobs.schedule', info=job)
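The 'async' flag decides whether 'jobs.schedule' fires synchronously or on the events daemon's background thread; either way, a runner plugin binds the same event name. A sketch of such a runner, where the 'my_runner' handler value and runJob() are assumptions:

from girder import events

def scheduleHandler(event):
    job = event.info
    if job.get('handler') == 'my_runner':  # only claim jobs addressed to us
        runJob(job)  # hypothetical plugin-specific execution function

events.bind('jobs.schedule', 'myRunnerPlugin', scheduleHandler)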
Example #14
    def points(self, params):
        self.requireParams(('q',), params)
        limit, offset, sort = self.getPagingParameters(params, 'name')
        latitude = params.get('latitude', 'meta.latitude')
        longitude = params.get('longitude', 'meta.longitude')

        spec = {
            'type': 'point',
            'latitude': latitude,
            'longitude': longitude,
            'keys': ['meta', 'name', 'description', '_id'],
            'flatten': ['meta']
        }

        try:
            query = bson.json_util.loads(params['q'])
        except ValueError:  # pragma: no cover
            raise RestException('The query parameter must be a JSON object.')

        events.trigger('geojson.points', info={
            'spec': spec,
            'query': query
        })

        # make sure the lat/lon are whitelisted keys to prevent private
        # data leaking
        if spec['latitude'].split('.')[0] not in spec['keys'] or \
                spec['longitude'].split('.')[0] not in spec['keys']:
            raise RestException('Invalid latitude/longitude key.', code=402)

        coll = features.FeatureCollection(points=spec)

        item = ModelImporter().model('item')
        cursor = item.find(
            query,
            limit=0
        )

        cursor = item.filterResultsByPermission(
            cursor,
            user=self.getCurrentUser(),
            level=AccessType.READ,
            limit=limit,
            offset=offset
        )

        try:
            obj = coll(points=cursor)
        except features.GeoJSONException:
            raise RestException(
                'Could not assemble a GeoJSON object from spec.',
                code=401
            )

        return obj
Example #15
    def finalizeUpload(self, upload, assetstore=None):
        """
        This should only be called manually in the case of creating an
        empty file, i.e. one that has no chunks.
        """
        events.trigger('model.upload.finalize', upload)
        if assetstore is None:
            assetstore = self.model('assetstore').load(upload['assetstoreId'])

        if 'fileId' in upload:  # Updating an existing file's contents
            file = self.model('file').load(upload['fileId'], force=True)

            # Delete the previous file contents from the containing assetstore
            assetstore_utilities.getAssetstoreAdapter(
                self.model('assetstore').load(
                    file['assetstoreId'])).deleteFile(file)

            item = self.model('item').load(file['itemId'], force=True)
            self.model('file').propagateSizeChange(
                item, upload['size'] - file['size'])

            # Update file info
            file['creatorId'] = upload['userId']
            file['created'] = datetime.datetime.utcnow()
            file['assetstoreId'] = assetstore['_id']
            file['size'] = upload['size']
        else:  # Creating a new file record
            if upload['parentType'] == 'folder':
                # Create a new item with the name of the file.
                item = self.model('item').createItem(
                    name=upload['name'], creator={'_id': upload['userId']},
                    folder={'_id': upload['parentId']})
            elif upload['parentType'] == 'item':
                item = self.model('item').load(
                    id=upload['parentId'], force=True)
            else:
                item = None

            file = self.model('file').createFile(
                item=item, name=upload['name'], size=upload['size'],
                creator={'_id': upload['userId']}, assetstore=assetstore,
                mimeType=upload['mimeType'], saveFile=False)

        adapter = assetstore_utilities.getAssetstoreAdapter(assetstore)
        file = adapter.finalizeUpload(upload, file)
        self.model('file').save(file)
        self.remove(upload)

        # Add an async event for handlers that wish to process this file.
        events.daemon.trigger('data.process', {
            'file': file,
            'assetstore': assetstore
        })

        return file
Example #16
    def copyFolderComponents(self, srcFolder, newFolder, creator, progress,
                             firstFolder=None):
        """
        Copy the items, subfolders, and extended data of a folder that was just
        copied.

        :param srcFolder: the original folder.
        :type srcFolder: dict
        :param newFolder: the new folder.
        :type newFolder: dict
        :param creator: user representing the creator of the new folder.
        :type creator: dict
        :param progress: a progress context to record progress on.
        :type progress: girder.utility.progress.ProgressContext or None.
        :param firstFolder: if not None, the first folder copied in a tree of
                            folders.
        :returns: the new folder document.
        """
        from .item import Item

        # copy metadata and other extension values
        updated = False
        if srcFolder['meta']:
            newFolder['meta'] = copy.deepcopy(srcFolder['meta'])
            updated = True

        filteredFolder = self.filter(newFolder, creator)
        for key in srcFolder:
            if key not in filteredFolder and key not in newFolder:
                newFolder[key] = copy.deepcopy(srcFolder[key])
                updated = True
        if updated:
            newFolder = self.save(newFolder, triggerEvents=False)
        # Give listeners a chance to change things
        events.trigger('model.folder.copy.prepare', (srcFolder, newFolder))
        # copy items
        itemModel = Item()
        for item in self.childItems(folder=srcFolder):
            setResponseTimeLimit()
            itemModel.copyItem(item, creator, folder=newFolder)
            if progress:
                progress.update(increment=1, message='Copied item ' + item['name'])
        # copy subfolders
        for sub in self.childFolders(parentType='folder', parent=srcFolder, user=creator):
            if firstFolder and firstFolder['_id'] == sub['_id']:
                continue
            self.copyFolder(sub, parent=newFolder, parentType='folder',
                            creator=creator, progress=progress)
        events.trigger('model.folder.copy.after', newFolder)
        if progress:
            progress.update(increment=1, message='Copied folder ' + newFolder['name'])

        # Reload to get updated size value
        return self.load(newFolder['_id'], force=True)
Example #17
 def _importDataAsItem(self, name, user, folder, path, files, reuseExisting=True, params=None):
     params = params or {}
     item = Item().createItem(
         name=name, creator=user, folder=folder, reuseExisting=reuseExisting)
     events.trigger('filesystem_assetstore_imported',
                    {'id': item['_id'], 'type': 'item',
                     'importPath': path})
     for fname in files:
         fpath = os.path.join(path, fname)
         if self.shouldImportFile(fpath, params):
             self.importFile(item, fpath, user, name=fname)
Example #18
    def createUpload(self, user, name, parentType, parent, size, mimeType=None):
        """
        Creates a new upload record, and creates its temporary file
        that the chunks will be written into. Chunks should then be sent
        in order using the _id of the upload document generated by this method.

        :param user: The user performing the upload.
        :type user: dict
        :param name: The name of the file being uploaded.
        :type name: str
        :param parentType: The type of the parent being uploaded into.
        :type parentType: str ('folder' or 'item')
        :param parent: The document representing the parent.
        :type parent: dict
        :param size: Total size in bytes of the whole file.
        :type size: int
        :param mimeType: The mimeType of the file.
        :type mimeType: str
        :returns: The upload document that was created.
        """
        eventParams = {'model': parentType, 'resource': parent}
        events.trigger('model.upload.assetstore', eventParams)
        assetstore = eventParams.get('assetstore',
                                     self.model('assetstore').getCurrent())

        adapter = assetstore_utilities.getAssetstoreAdapter(assetstore)
        now = datetime.datetime.utcnow()

        if not mimeType:
            mimeType = 'application/octet-stream'
        upload = {
            'created': now,
            'updated': now,
            'assetstoreId': assetstore['_id'],
            'size': size,
            'name': name,
            'mimeType': mimeType,
            'received': 0
        }

        if parentType and parent:
            upload['parentType'] = parentType.lower()
            upload['parentId'] = ObjectId(parent['_id'])
        else:
            upload['parentType'] = None
            upload['parentId'] = None

        if user:
            upload['userId'] = user['_id']
        else:
            upload['userId'] = None

        upload = adapter.initUpload(upload)
        return self.save(upload)
Example #19
    def _importFileToFolder(self, name, user, parent, parentType, path):
        if parentType != 'folder':
            raise ValidationException(
                'Files cannot be imported directly underneath a %s.' % parentType)

        item = Item().createItem(name=name, creator=user, folder=parent, reuseExisting=True)
        events.trigger('filesystem_assetstore_imported', {
            'id': item['_id'],
            'type': 'item',
            'importPath': path
        })
        self.importFile(item, path, user, name=name)
Example #20
    def finalizeUpload(self, upload, assetstore=None):
        """
        This should only be called manually in the case of creating an
        empty file, i.e. one that has no chunks.
        """
        events.trigger("model.upload.finalize", upload)
        if assetstore is None:
            assetstore = self.model("assetstore").load(upload["assetstoreId"])

        if "fileId" in upload:  # Updating an existing file's contents
            file = self.model("file").load(upload["fileId"])

            # Delete the previous file contents from the containing assetstore
            assetstore_utilities.getAssetstoreAdapter(self.model("assetstore").load(file["assetstoreId"])).deleteFile(
                file
            )

            item = self.model("item").load(file["itemId"], force=True)
            self.model("file").propagateSizeChange(item, upload["size"] - file["size"])

            # Update file info
            file["creatorId"] = upload["userId"]
            file["created"] = datetime.datetime.utcnow()
            file["assetstoreId"] = assetstore["_id"]
            file["size"] = upload["size"]
        else:  # Creating a new file record
            if upload["parentType"] == "folder":
                # Create a new item with the name of the file.
                item = self.model("item").createItem(
                    name=upload["name"], creator={"_id": upload["userId"]}, folder={"_id": upload["parentId"]}
                )
            else:
                item = self.model("item").load(id=upload["parentId"], force=True)

            file = self.model("file").createFile(
                item=item,
                name=upload["name"],
                size=upload["size"],
                creator={"_id": upload["userId"]},
                assetstore=assetstore,
                mimeType=upload["mimeType"],
                saveFile=False,
            )

        adapter = assetstore_utilities.getAssetstoreAdapter(assetstore)
        file = adapter.finalizeUpload(upload, file)
        self.model("file").save(file)
        self.remove(upload)

        # Add an async event for handlers that wish to process this file.
        events.daemon.trigger("data.process", {"file": file, "assetstore": assetstore})

        return file
Example #21
    def _imageSearch(self, params):
        limit = params['limit'] if 'limit' in params else '100'
        query = params['query'] if 'query' in params else '*:*'
        offset = params['offset'] if 'offset' in params else '0'
        classifications = json.loads(params['classifications']) if 'classifications' in params else []
        base = setting.get('IMAGE_SPACE_SOLR') + '/select'

        if classifications:
            query += ' AND (%s)' % ' OR '.join(['%s:[.7 TO *]' % key
                                                for key in classifications])

        qparams = {
            'wt': 'json',
            'hl': 'true',
            'hl.fl': '*',
            'q': query,
            'start': offset,
            'rows': limit
        }

        # Give plugins a chance to adjust the Solr query parameters
        event = events.trigger('imagespace.imagesearch.qparams', qparams)
        for response in event.responses:
            qparams = response

        try:
            result = requests.get(base, params=qparams, verify=False).json()
        except ValueError:
            return []

        try:
            for image in result['response']['docs']:
                image['highlight'] = result['highlighting'][image['id']]
        except KeyError:
            return {
                'numFound': 0,
                'docs': []
            }

        response = {
            'numFound': result['response']['numFound'],
            'docs': result['response']['docs']
        }

        # Give plugins a chance to adjust the end response of the imagesearch
        event = events.trigger('imagespace.imagesearch.results', response)
        for eventResponse in event.responses:
            response = eventResponse

        return response
Example #22
def load(info):
    urls = ['/static/built/plugins/bsve/extra/BSVE.API.js']
    events.trigger('minerva.additional_js_urls', urls)

    info['apiRoot'].bsve = Authentication()

    # Add an endpoint for bsve wms dataset
    info['apiRoot'].bsve_datasets_wms = bsve_wms.BsveWmsDataset()

    # Add test endpoints
    info['apiRoot'].test = TestEndpoint()

    events.bind('minerva.get_layer_info', 'bsve', get_layer_info)
Example #23
    def importData(self, parent, parentType, params, progress, user, leafFoldersAsItems):
        importPath = params['importPath']

        if not os.path.exists(importPath):
            raise ValidationException('Not found: %s.' % importPath)
        if not os.path.isdir(importPath):
            name = os.path.basename(importPath)
            progress.update(message=name)
            self._importFileToFolder(name, user, parent, parentType, importPath)
            return

        listDir = os.listdir(importPath)

        if parentType != 'folder' and any(
                os.path.isfile(os.path.join(importPath, val)) for val in listDir):
            raise ValidationException(
                'Files cannot be imported directly underneath a %s.' % parentType)

        if leafFoldersAsItems and self._hasOnlyFiles(importPath, listDir):
            self._importDataAsItem(
                os.path.basename(importPath.rstrip(os.sep)), user, parent, importPath,
                listDir, params=params)
            return

        for name in listDir:
            progress.update(message=name)
            path = os.path.join(importPath, name)

            if os.path.isdir(path):
                localListDir = os.listdir(path)
                if leafFoldersAsItems and self._hasOnlyFiles(path, localListDir):
                    self._importDataAsItem(name, user, parent, path, localListDir, params=params)
                else:
                    folder = Folder().createFolder(
                        parent=parent, name=name, parentType=parentType,
                        creator=user, reuseExisting=True)
                    events.trigger(
                        'filesystem_assetstore_imported', {
                            'id': folder['_id'],
                            'type': 'folder',
                            'importPath': path
                        })
                    nextPath = os.path.join(importPath, name)
                    self.importData(
                        folder, 'folder', params=dict(params, importPath=nextPath),
                        progress=progress, user=user, leafFoldersAsItems=leafFoldersAsItems)
            else:
                if self.shouldImportFile(path, params):
                    self._importFileToFolder(name, user, parent, parentType, path)
Example #24
def getCurrentUser(returnToken=False):
    """
    Returns the currently authenticated user based on the token header or
    parameter.

    :param returnToken: Whether we should return a tuple that also contains the
                        token.
    :type returnToken: bool
    :returns: the user document from the database, or None if the user is not
              logged in or the token is invalid or expired.  If
              returnToken=True, returns a tuple of (user, token).
    """
    event = events.trigger('auth.user.get')
    if event.defaultPrevented and len(event.responses) > 0:
        return event.responses[0]

    token = getCurrentToken()

    def retVal(user, token):
        if returnToken:
            return (user, token)
        else:
            return user

    if (token is None or token['expires'] < datetime.datetime.utcnow() or
            'userId' not in token):
        return retVal(None, token)
    else:
        try:
            ensureTokenScopes(token, TokenScope.USER_AUTH)
        except Exception:
            return retVal(None, token)
        user = ModelImporter.model('user').load(token['userId'], force=True)
        return retVal(user, token)
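Because getCurrentUser() returns the first response of a default-prevented 'auth.user.get' event, a plugin can install an alternative authentication scheme without touching the token logic. A sketch, where lookupUserFromHeader() is a hypothetical helper:

from girder import events

def userFromCustomHeader(event):
    user = lookupUserFromHeader()  # hypothetical request inspection
    if user is not None:
        event.addResponse(user)
        event.preventDefault()  # getCurrentUser() returns responses[0]

events.bind('auth.user.get', 'myAuthPlugin', userFromCustomHeader)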
Example #25
    def isValid(status):
        event = events.trigger('notebook.status.validate', info=status)

        if event.defaultPrevented and len(event.responses):
            return event.responses[-1]

        return status in (NotebookStatus.RUNNING, NotebookStatus.ERROR)
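A plugin can declare additional valid statuses by preventing the default check and answering the event itself. A sketch, with the status value purely illustrative:

from girder import events

CUSTOM_STATUS = 99  # illustrative plugin-defined state

def validateStatus(event):
    if event.info == CUSTOM_STATUS:
        event.preventDefault()
        event.addResponse(True)  # isValid() returns responses[-1]

events.bind('notebook.status.validate', 'myPlugin', validateStatus)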
Example #26
    def authenticate(self, user, password):
        """
        Authenticate a user.

        :param user: The user document.
        :type user: dict
        :param password: The attempted password.
        :type password: str
        :returns: Whether authentication succeeded (bool).
        """
        if not self.hasPassword(user):
            e = events.trigger('no_password_login_attempt', {
                'user': user,
                'password': password
            })

            if len(e.responses):
                return e.responses[-1]

            raise ValidationException(
                'This user does not have a password. You must log in with an '
                'external service, or reset your password.')

        hash = self._digest(salt=user['salt'], alg=user['hashAlg'],
                            password=password)

        if user['hashAlg'] == 'bcrypt':
            if isinstance(user['salt'], six.text_type):
                user['salt'] = user['salt'].encode('utf8')
            return hash == user['salt']
        else:
            return self.load(hash, False) is not None
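When the user record has no stored password, authentication is delegated to 'no_password_login_attempt' listeners, and the last response decides the outcome. This is how an external-credential plugin could take over the check; ldapVerify() is a hypothetical helper:

from girder import events

def externalAuth(event):
    user = event.info['user']
    password = event.info['password']
    # Hypothetical external credential check; the boolean result becomes
    # the return value of Password.authenticate().
    event.addResponse(ldapVerify(user['login'], password))

events.bind('no_password_login_attempt', 'myLdapPlugin', externalAuth)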
Example #27
    def updateAssetstore(self, assetstore, params):
        self.requireParams(('name', 'current'), params)

        assetstore['name'] = params['name'].strip()
        assetstore['current'] = params['current'].lower() == 'true'

        if assetstore['type'] == AssetstoreType.FILESYSTEM:
            self.requireParams('root', params)
            assetstore['root'] = params['root']
        elif assetstore['type'] == AssetstoreType.GRIDFS:
            self.requireParams('db', params)
            assetstore['db'] = params['db']
            if 'mongohost' in params:
                assetstore['mongohost'] = params['mongohost']
            if 'replicaset' in params:
                assetstore['replicaset'] = params['replicaset']
        elif assetstore['type'] == AssetstoreType.S3:
            self.requireParams(('bucket', 'accessKeyId', 'secret'), params)
            assetstore['bucket'] = params['bucket']
            assetstore['prefix'] = params.get('prefix', '')
            assetstore['accessKeyId'] = params['accessKeyId']
            assetstore['secret'] = params['secret']
            assetstore['service'] = params.get('service', '')
            assetstore['readOnly'] = self.boolParam(
                'readOnly', params, default=assetstore.get('readOnly'))
        else:
            event = events.trigger('assetstore.update', info={
                'assetstore': assetstore,
                'params': params
            })
            if event.defaultPrevented:
                return
        return self.model('assetstore').save(assetstore)
Example #28
    def getFeatureInfo(self, params):

        activeLayers = params['activeLayers[]']

        # Return a list for all cases
        if isinstance(activeLayers, six.string_types):
            activeLayers = [activeLayers]

        layerSource = []

        for i in activeLayers:
            item = self._getMinervaItem(i)
            url = item['meta']['minerva'].get('base_url')
            layerSource.append((url, item['meta']['minerva']['type_name']))

        layerUrlMap = defaultdict(list)
        for k, v in layerSource:
            layerUrlMap[k].append(v)

        grandResponse = []
        for baseUrl, layers in layerUrlMap.items():
            event = events.trigger('minerva.get_layer_info', {
                'baseUrl': baseUrl,
                'params': params,
                'layers': layers
            })
            response = event.responses
            if not event.defaultPrevented:
                response = self.callFeatureInfo(baseUrl, params, layers)

            grandResponse.append(response)
        return grandResponse
Example #29
def getAssetstoreAdapter(assetstore, instance=True):
    """
    This is a factory method that will return the appropriate assetstore adapter
    for the specified assetstore. The returned object will conform to
    the interface of the AbstractAssetstoreAdapter.

    :param assetstore: The assetstore document used to instantiate the adapter.
    :type assetstore: dict
    :param instance: Whether to return an instance of the adapter or the class.
        If you are performing validation, set this to False to avoid throwing
        unwanted exceptions during instantiation.
    :type instance: bool
    :returns: An adapter descending from AbstractAssetstoreAdapter
    """
    storeType = assetstore['type']

    cls = _assetstoreTable.get(storeType)
    if cls is None:
        e = events.trigger('assetstore.adapter.get', assetstore)
        if len(e.responses) > 0:
            cls = e.responses[-1]
        else:
            raise Exception('No AssetstoreAdapter for type: %s.' % storeType)

    if instance:
        return cls(assetstore)
    else:
        return cls
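Since unknown assetstore types fall through to the 'assetstore.adapter.get' event, a plugin can supply an adapter class by answering it; the factory takes the last response as cls. A sketch under assumed names (the type code, the plugin name, and the base-class import path are assumptions about a typical Girder layout):

from girder import events
from girder.utility.abstract_assetstore_adapter import AbstractAssetstoreAdapter

CUSTOM_TYPE = 1001  # illustrative type code outside _assetstoreTable

class MyAdapter(AbstractAssetstoreAdapter):
    pass  # a real adapter implements the upload/download interface

def provideAdapter(event):
    # event.info is the assetstore document passed to events.trigger().
    if event.info['type'] == CUSTOM_TYPE:
        event.addResponse(MyAdapter)  # becomes cls in the factory

events.bind('assetstore.adapter.get', 'myStorePlugin', provideAdapter)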
Example #30
    def filter(self, job, user):
        # Allow downstreams to filter job info as they see fit
        event = events.trigger('jobs.filter', info={
            'job': job,
            'user': user
        })

        keys = ['title', 'type', 'created', 'interval', 'when', 'status',
                'progress', 'log', 'meta', '_id', 'public', 'async']

        if user and user['admin'] is True:
            keys.extend(('args', 'kwargs'))

        for resp in event.responses:
            if 'exposeFields' in resp:
                keys.extend(resp['exposeFields'])
            if 'removeFields' in resp:
                keys = [k for k in keys if k not in resp['removeFields']]

        doc = self.filterDocument(job, allow=keys)

        if 'kwargs' in doc and isinstance(doc['kwargs'], six.string_types):
            doc['kwargs'] = json_util.loads(doc['kwargs'])

        return doc
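Downstream plugins shape the filtered job document through 'jobs.filter' by responding with exposeFields and/or removeFields. A sketch, with the field names as illustrative assumptions:

from girder import events

def adjustJobFields(event):
    event.addResponse({
        'exposeFields': ['myPluginData'],  # illustrative extra key to keep
        'removeFields': ['log'],           # illustrative key to hide
    })

events.bind('jobs.filter', 'myPlugin', adjustJobFields)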
Example #31
    def authenticate(self, login, password, otpToken=None):
        """
        Validate a user login via username and password. If authentication fails,
        an ``AccessException`` is raised.

        :param login: The user's login or email.
        :type login: str
        :param password: The user's password.
        :type password: str
        :param otpToken: A one-time password for the user. If "True", then the one-time password
                         (if required) is assumed to be concatenated to the password.
        :type otpToken: str or bool or None
        :returns: The corresponding user if the login was successful.
        :rtype: dict
        """
        from .password import Password

        event = events.trigger('model.user.authenticate', {
            'login': login,
            'password': password
        })

        if event.defaultPrevented and len(event.responses):
            return event.responses[-1]

        login = login.lower().strip()
        loginField = 'email' if '@' in login else 'login'

        user = self.findOne({loginField: login})
        if user is None:
            raise AccessException('Login failed.')

        # Handle OTP token concatenation
        if otpToken is True and self.hasOtpEnabled(user):
            # Assume the last (typically 6) characters are the OTP, so split at that point
            otpTokenLength = self._TotpFactory.digits
            otpToken = password[-otpTokenLength:]
            password = password[:-otpTokenLength]

        # Verify password
        if not Password().authenticate(user, password):
            raise AccessException('Login failed.')

        # Verify OTP
        if self.hasOtpEnabled(user):
            if otpToken is None:
                raise AccessException(
                    'User authentication must include a one-time password '
                    '(typically in the "Girder-OTP" header).')
            self.verifyOtp(user, otpToken)
        elif isinstance(otpToken, six.string_types):
            raise AccessException(
                'The user has not enabled one-time passwords.')

        # This has the same behavior as User.canLogin, but returns more
        # detailed error messages
        if user.get('status', 'enabled') == 'disabled':
            raise AccessException('Account is disabled.', extra='disabled')

        if self.emailVerificationRequired(user):
            raise AccessException('Email verification required.',
                                  extra='emailVerification')

        if self.adminApprovalRequired(user):
            raise AccessException('Account approval required.',
                                  extra='accountApproval')

        return user
Example #32
    def updateJob(self, job, log=None, overwrite=False, status=None,
                  progressTotal=None, progressCurrent=None, notify=True,
                  progressMessage=None, otherFields=None):
        """
        Update an existing job. Any of the updateable fields that are set to None in the kwargs of
        this method will not be modified. If you set progress information on the job for the first
        time and set notify=True, a new notification record for the job progress will be created.
        If notify=True, job status changes will also create a notification with type="job_status",
        and log changes will create a notification with type="job_log".

        :param job: The job document to update.
        :param log: Message to append to the job log. If you wish to overwrite
            instead of append, pass overwrite=True.
        :type log: str
        :param overwrite: Whether to overwrite the log (default is append).
        :type overwrite: bool
        :param status: New status for the job.
        :type status: JobStatus
        :param progressTotal: Max progress value for this job.
        :param otherFields: Any additional fields to set on the job.
        :type otherFields: dict
        """
        event = events.trigger('jobs.job.update', {
            'job': job,
            'params': {
                'log': log,
                'overwrite': overwrite,
                'status': status,
                'progressTotal': progressTotal,
                'progressMessage': progressMessage,
                'otherFields': otherFields
            }
        })

        if event.defaultPrevented:
            return job

        now = datetime.datetime.utcnow()
        user = None
        otherFields = otherFields or {}
        if job['userId']:
            user = User().load(job['userId'], force=True)

        query = {
            '_id': job['_id']
        }

        updates = {
            '$push': {},
            '$set': {}
        }

        statusChanged = False
        if log is not None:
            self._updateLog(job, log, overwrite, now, notify, user, updates)
        if status is not None:
            try:
                status = int(status)
            except ValueError:
                # Allow non int states
                pass
            statusChanged = status != job['status']
            self._updateStatus(job, status, now, query, updates)
        if progressMessage is not None or progressCurrent is not None or progressTotal is not None:
            self._updateProgress(
                job, progressTotal, progressCurrent, progressMessage, notify, user, updates)

        for k, v in otherFields.items():
            job[k] = v
            updates['$set'][k] = v

        if updates['$set'] or updates['$push']:
            if not updates['$push']:
                del updates['$push']
            job['updated'] = now
            updates['$set']['updated'] = now

            updateResult = self.update(query, update=updates, multi=False)
            # If our query didn't match anything then our state transition
            # was not valid. So raise an exception
            if updateResult.matched_count != 1:
                job = self.load(job['_id'], force=True)
                msg = "Invalid state transition to '%s', Current state is '%s'." % (
                    status, job['status'])
                raise ValidationException(msg, field='status')

            events.trigger('jobs.job.update.after', {
                'job': job
            })

        # We don't want to do this until we know the update was successful
        if statusChanged and user is not None and notify:
            self._createUpdateStatusNotification(now, user, job)

        return job
Example #33
    def updateJob(self, job, log=None, overwrite=False, status=None,
                  progressTotal=None, progressCurrent=None, notify=True,
                  progressMessage=None, otherFields=None):
        """
        Update an existing job. Any of the updateable fields that are set to None in the kwargs of
        this method will not be modified. If you set progress information on the job for the first
        time and set notify=True, a new notification record for the job progress will be created.
        If notify=True, job status changes will also create a notification with type="job_status",
        and log changes will create a notification with type="job_log".

        :param job: The job document to update.
        :param log: Message to append to the job log. If you wish to overwrite
            instead of append, pass overwrite=True.
        :type log: str
        :param overwrite: Whether to overwrite the log (default is append).
        :type overwrite: bool
        :param status: New status for the job.
        :type status: JobStatus
        :param progressTotal: Max progress value for this job.
        :param otherFields: Any additional fields to set on the job.
        :type otherFields: dict
        """
        event = events.trigger('jobs.job.update', {
            'job': job,
            'params': {
                'log': log,
                'overwrite': overwrite,
                'status': status,
                'progressTotal': progressTotal,
                'progressMessage': progressMessage,
                'otherFields': otherFields
            }
        })

        if event.defaultPrevented:
            return job

        now = datetime.datetime.utcnow()
        user = None
        otherFields = otherFields or {}

        if job['userId']:
            user = self.model('user').load(job['userId'], force=True)

        updates = {
            '$push': {},
            '$set': {}
        }

        if log is not None:
            self._updateLog(job, log, overwrite, now, notify, user, updates)
        if status is not None:
            self._updateStatus(job, status, now, notify, user, updates)
        if progressMessage is not None or progressCurrent is not None or progressTotal is not None:
            self._updateProgress(
                job, progressTotal, progressCurrent, progressMessage, notify, user, updates)

        for k, v in six.viewitems(otherFields):
            job[k] = v
            updates['$set'][k] = v

        if updates['$set'] or updates['$push']:
            if not updates['$push']:
                del updates['$push']
            job['updated'] = now
            updates['$set']['updated'] = now

            self.update({'_id': job['_id']}, update=updates, multi=False)

            events.trigger('jobs.job.update.after', {
                'job': job
            })

        return job
Example #34
def run(job):
    jobModel = Job()
    jobModel.updateJob(job, status=JobStatus.RUNNING)

    tale_dir, manifest_file = job["args"]
    user = User().load(job["userId"], force=True)
    tale = Tale().load(job["kwargs"]["taleId"], user=user)
    token = Token().createToken(user=user,
                                days=0.5,
                                scope=(TokenScope.USER_AUTH,
                                       REST_CREATE_JOB_TOKEN_SCOPE))

    progressTotal = 3
    progressCurrent = 0

    try:
        os.chdir(tale_dir)
        with open(manifest_file, "r") as manifest_fp:
            manifest = json.load(manifest_fp)

        # 1. Register data
        progressCurrent += 1
        jobModel.updateJob(
            job,
            status=JobStatus.RUNNING,
            progressTotal=progressTotal,
            progressCurrent=progressCurrent,
            progressMessage="Registering external data",
        )
        dataIds = [obj["identifier"] for obj in manifest["Datasets"]]
        dataIds += [
            obj["uri"] for obj in manifest["aggregates"]
            if obj["uri"].startswith("http")
        ]
        if dataIds:
            dataMap = pids_to_entities(
                dataIds,
                user=user,
                base_url=DataONELocations.prod_cn,
                lookup=True)  # DataONE shouldn't be here
            register_dataMap(
                dataMap,
                getOrCreateRootFolder(CATALOG_NAME),
                "folder",
                user=user,
                base_url=DataONELocations.prod_cn,
            )

        # 2. Construct the dataSet
        dataSet = []
        for obj in manifest["aggregates"]:
            if "bundledAs" not in obj:
                continue
            uri = obj["uri"]
            fobj = File().findOne(
                {"linkUrl": uri})  # TODO: That's expensive, use something else
            if fobj:
                dataSet.append({
                    "itemId": fobj["itemId"],
                    "_modelType": "item",
                    "mountPath": obj["bundledAs"]["filename"],
                })
            # TODO: handle folders

        # 3. Update Tale's dataSet
        update_citations = (
            {_["itemId"] for _ in tale["dataSet"]}
            ^ {_["itemId"] for _ in dataSet}
        )
        tale["dataSet"] = dataSet
        tale = Tale().updateTale(tale)

        if update_citations:
            eventParams = {"tale": tale, "user": user}
            event = events.trigger("tale.update_citation", eventParams)
            if len(event.responses):
                tale = Tale().updateTale(event.responses[-1])

        # 4. Copy data to the workspace using WebDAVFS (if it exists)
        progressCurrent += 1
        jobModel.updateJob(
            job,
            status=JobStatus.RUNNING,
            progressTotal=progressTotal,
            progressCurrent=progressCurrent,
            progressMessage="Copying files to workspace",
        )
        orig_tale_id = pathlib.Path(manifest_file).parts[0]
        for workdir in ("workspace", "data/workspace", None):
            if workdir:
                workdir = os.path.join(orig_tale_id, workdir)
                if os.path.isdir(workdir):
                    break

        if workdir:
            password = "******".format(**token)
            root = "/tales/{_id}".format(**tale)
            url = "http://localhost:{}".format(
                config.getConfig()["server.socket_port"])
            with WebDAVFS(url,
                          login=user["login"],
                          password=password,
                          root=root) as webdav_handle:
                copy_fs(OSFS(workdir), webdav_handle)

        # Tale is ready to be built
        tale = Tale().load(tale["_id"], user=user)  # Refresh state
        tale["status"] = TaleStatus.READY
        tale = Tale().updateTale(tale)

        progressCurrent += 1
        jobModel.updateJob(
            job,
            status=JobStatus.SUCCESS,
            log="Tale created",
            progressTotal=progressTotal,
            progressCurrent=progressCurrent,
            progressMessage="Tale created",
        )
    except Exception:
        tale = Tale().load(tale["_id"], user=user)  # Refresh state
        tale["status"] = TaleStatus.ERROR
        tale = Tale().updateTale(tale)
        t, val, tb = sys.exc_info()
        log = "%s: %s\n%s" % (t.__name__, repr(val), traceback.extract_tb(tb))
        jobModel.updateJob(job, status=JobStatus.ERROR, log=log)
        raise
Example #35
    def finalizeUpload(self, upload, assetstore=None):
        """
        This should only be called manually in the case of creating an
        empty file, i.e. one that has no chunks.

        :param upload: The upload document.
        :type upload: dict
        :param assetstore: If known, the containing assetstore for the upload.
        :type assetstore: dict
        :returns: The file object that was created.
        """
        from .assetstore import Assetstore
        from .file import File
        from .item import Item
        from girder.utility import assetstore_utilities

        events.trigger('model.upload.finalize', upload)
        if assetstore is None:
            assetstore = Assetstore().load(upload['assetstoreId'])
            logger.info('(#####)girder/girder/model/upload.py:finalizeUpload:assetstore='+str(assetstore))

        if 'fileId' in upload:  # Updating an existing file's contents
            file = File().load(upload['fileId'], force=True)

            # Delete the previous file contents from the containing assetstore
            assetstore_utilities.getAssetstoreAdapter(
                Assetstore().load(file['assetstoreId'])).deleteFile(file)

            item = Item().load(file['itemId'], force=True)
            File().propagateSizeChange(item, upload['size'] - file['size'])

            # Update file info
            file['creatorId'] = upload['userId']
            file['created'] = datetime.datetime.utcnow()
            file['assetstoreId'] = assetstore['_id']
            file['size'] = upload['size']
            # If the file was previously imported, it is no longer.
            if file.get('imported'):
                file['imported'] = False
            logger.info('(#####)girder/girder/model/upload.py:finalizeUpload:Updating+'+str(file))

        else:  # Creating a new file record
            if upload.get('attachParent'):
                item = None
            elif upload['parentType'] == 'folder':
                # Create a new item with the name of the file.
                item = Item().createItem(
                    name=upload['name'], creator={'_id': upload['userId']},
                    folder={'_id': upload['parentId']})

                #####################################################
                ################# Insert annotation start ###########
                #####################################################
                import pymongo
                mongoClient = pymongo.MongoClient('mongodb://localhost:27017/')
                db_girder = mongoClient['girder']
                coll_annotation = db_girder["annotation"]

                now = datetime.datetime.utcnow()
                annotation_id = ObjectId()
                annotation={
                            "_id" : annotation_id,
                            ### "itemId" need to be update
                            "itemId" : item['_id'],
                            #################################################
                            # "updated" : ISODate("2018-12-13T08:59:09.307Z"),
                            "groups" : [],
                            # "created" : ISODate("2018-12-13T08:56:23.083Z"),
                            "_version" : 1,
                            "annotation" : {
                                "name" : '标注1'
                            },
                            "access" : {
                                "users" : [
                                    {
                                        "flags" : [],
                                        "id" : ObjectId("5b9b7d25d4a48a28c5f8ef84"),
                                        "level" : 2
                                    }
                                ],
                                "groups" : []
                            },
                            'created': now,
                            'updated': now,
                            "creatorId" : ObjectId(upload['userId']),
                            "public" : False,
                            "updatedId" : ObjectId(upload['userId'])
                        }
                result = coll_annotation.insert_one(annotation)

                #####################################################
                ################# Insert annotation END ###########
                #####################################################

            elif upload['parentType'] == 'item':
                item = Item().load(id=upload['parentId'], force=True)
            else:
                item = None
            logger.info('(#####)girder/girder/model/upload.py:finalizeUpload:item+'+str(item))

            file = File().createFile(
                item=item, name=upload['name'], size=upload['size'],
                creator={'_id': upload['userId']}, assetstore=assetstore,
                mimeType=upload['mimeType'], saveFile=False)
            if upload.get('attachParent'):
                if upload['parentType'] and upload['parentId']:
                    file['attachedToType'] = upload['parentType']
                    file['attachedToId'] = upload['parentId']
            logger.info('(#####)girder/girder/model/upload.py:finalizeUpload:Creating='+str(file))


        adapter = assetstore_utilities.getAssetstoreAdapter(assetstore)
        file = adapter.finalizeUpload(upload, file)

        event_document = {'file': file, 'upload': upload}
        events.trigger('model.file.finalizeUpload.before', event_document)
        file = File().save(file)
        events.trigger('model.file.finalizeUpload.after', event_document)
        if '_id' in upload:
            self.remove(upload)

        logger.info('Upload complete. Upload=%s File=%s User=%s' % (
            upload['_id'], file['_id'], upload['userId']))

        # Add an async event for handlers that wish to process this file.
        eventParams = {
            'file': file,
            'assetstore': assetstore,
            'currentToken': rest.getCurrentToken(),
            'currentUser': rest.getCurrentUser()
        }
        if 'reference' in upload:
            eventParams['reference'] = upload['reference']
        events.daemon.trigger('data.process', eventParams)
        logger.info('(#####)girder/girder/model/upload.py:finalizeUpload:file3='+str(file))

        return file
Example #36
    def releaseLock(self, user, lock):
        itemId = lock['itemId']
        self.removeLock(lock)

        if self.unlock(itemId):
            events.trigger('dm.itemUnlocked', info=itemId)
Example #37
def createThumbnail(width, height, crop, fileId, attachToType, attachToId):
    """
    Creates the thumbnail. Validation and access control must be done prior
    to the invocation of this method.
    """
    fileModel = File()
    file = fileModel.load(fileId, force=True)
    streamFn = functools.partial(fileModel.download, file, headers=False)

    event = events.trigger('thumbnails.create', info={
        'file': file,
        'width': width,
        'height': height,
        'crop': crop,
        'attachToType': attachToType,
        'attachToId': attachToId,
        'streamFn': streamFn
    })

    if len(event.responses):
        resp = event.responses[-1]
        newFile = resp['file']

        if event.defaultPrevented:
            if resp.get('attach', True):
                newFile = attachThumbnail(file, newFile, attachToType, attachToId, width, height)
            return newFile
        else:
            file = newFile
            streamFn = functools.partial(
                fileModel.download, file, headers=False)

    if 'assetstoreId' not in file:
        # TODO we could thumbnail link files if we really wanted.
        raise Exception('File %s has no assetstore.' % fileId)

    stream = streamFn()
    data = b''.join(stream())

    image = _getImage(file['mimeType'], file['exts'], data)

    if not width:
        width = int(height * image.size[0] / image.size[1])
    elif not height:
        height = int(width * image.size[1] / image.size[0])
    elif crop:
        x1 = y1 = 0
        x2, y2 = image.size
        wr = float(image.size[0]) / width
        hr = float(image.size[1]) / height

        if hr > wr:
            y1 = int(y2 / 2 - height * wr / 2)
            y2 = int(y2 / 2 + height * wr / 2)
        else:
            x1 = int(x2 / 2 - width * hr / 2)
            x2 = int(x2 / 2 + width * hr / 2)
        image = image.crop((x1, y1, x2, y2))

    image.thumbnail((width, height), Image.ANTIALIAS)

    out = six.BytesIO()
    image.convert('RGB').save(out, 'JPEG', quality=85)
    size = out.tell()
    out.seek(0)

    thumbnail = Upload().uploadFromFile(
        out, size=size, name='_thumb.jpg', parentType=attachToType,
        parent={'_id': ObjectId(attachToId)}, user=None, mimeType='image/jpeg',
        attachParent=True)

    return attachThumbnail(file, thumbnail, attachToType, attachToId, width, height)
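
The 'thumbnails.create' event above lets a plugin take over thumbnail generation entirely. A hedged sketch of such a handler (makeThumbnailFile is a hypothetical helper):

from girder import events

def _customThumbnailer(event):
    info = event.info
    # Responding with {'file': ...} and preventing the default action makes
    # createThumbnail() return that file; 'attach' controls attachThumbnail().
    newFile = makeThumbnailFile(info['file'], info['width'], info['height'])
    event.addResponse({'file': newFile, 'attach': True})
    event.preventDefault()

events.bind('thumbnails.create', 'my_plugin', _customThumbnailer)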
Exemple #38
0
    def handleRoute(self, method, path, params):
        """
        Match the requested path to its corresponding route, and call the
        handler for that route with the appropriate kwargs. If no route
        matches the requested path, a RestException is raised.

        This method fires two events for each request if a matching route is
        found. The names of these events are derived from the route matched by
        the request. As an example, if the user calls GET /api/v1/item/123,
        the following two events would be fired:

            ``rest.get.item/:id.before``

        would be fired prior to calling the default API function, and

            ``rest.get.item/:id.after``

        would be fired after the route handler returns. The query params are
        passed in the info of the before and after event handlers as
        event.info['params'], and the matched route tokens are passed in
        as dict items of event.info, so in the previous example event.info would
        also contain an 'id' key with the value of 123. For endpoints with empty
        sub-routes, the trailing slash is omitted from the event name, e.g.:

            ``rest.post.group.before``

        .. note:: You will normally not need to call this method directly, as it
           is called by the internals of this class during the routing process.

        :param method: The HTTP method of the current request.
        :type method: str
        :param path: The path params of the request.
        :type path: tuple[str]
        """
        method = method.lower()

        route, handler, kwargs = self._matchRoute(method, path)

        cherrypy.request.requiredScopes = getattr(
            handler, 'requiredScopes', None) or TokenScope.USER_AUTH

        if hasattr(handler, 'cookieAuth'):
            if isinstance(handler.cookieAuth, tuple):
                cookieAuth, forceCookie = handler.cookieAuth
            else:
                # previously, cookieAuth was not set by a decorator, so the
                # legacy way must be supported too
                cookieAuth = handler.cookieAuth
                forceCookie = False
            if cookieAuth:
                if forceCookie or method in ('head', 'get'):
                    # getCurrentToken will cache its output, so calling it
                    # once with allowCookie will make the parameter
                    # effectively permanent (for the request)
                    getCurrentToken(allowCookie=True)

        kwargs['params'] = params
        # Add before call for the API method. Listeners can return
        # their own responses by calling preventDefault() and
        # adding a response on the event.

        if hasattr(self, 'resourceName'):
            resource = self.resourceName
        else:
            resource = handler.__module__.rsplit('.', 1)[-1]

        routeStr = '/'.join((resource, '/'.join(route))).rstrip('/')
        eventPrefix = '.'.join(('rest', method, routeStr))

        event = events.trigger('.'.join((eventPrefix, 'before')),
                               kwargs, pre=self._defaultAccess)
        if event.defaultPrevented and len(event.responses) > 0:
            val = event.responses[0]
        else:
            self._defaultAccess(handler)
            val = handler(**kwargs)

        # Fire the after-call event that has a chance to augment the
        # return value of the API method that was called. You can
        # reassign the return value completely by adding a response to
        # the event and calling preventDefault() on it.
        kwargs['returnVal'] = val
        event = events.trigger('.'.join((eventPrefix, 'after')), kwargs)
        if event.defaultPrevented and len(event.responses) > 0:
            val = event.responses[0]

        return val
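
Given the event naming described in the docstring, a plugin can short-circuit a route before its handler runs. A sketch under the assumption that lookupCache is some plugin-provided cache (hypothetical):

from girder import events

def _cachedItemGet(event):
    # event.info holds the route kwargs: query params under 'params' and
    # matched tokens such as 'id' as top-level keys.
    cached = lookupCache(event.info.get('id'))
    if cached is not None:
        event.addResponse(cached)
        event.preventDefault()

events.bind('rest.get.item/:id.before', 'my_plugin', _cachedItemGet)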
Exemple #39
0
    def download(self, file, offset=0, headers=True, endByte=None,
                 contentDisposition=None, extraParameters=None):
        """
        Use the appropriate assetstore adapter for whatever assetstore the
        file is stored in, and call downloadFile on it. If the file is a link
        file rather than a file in an assetstore, we redirect to it.

        :param file: The file to download.
        :param offset: The start byte within the file.
        :type offset: int
        :param headers: Whether to set headers (i.e. is this an HTTP request
            for a single file, or something else).
        :type headers: bool
        :param endByte: Final byte to download. If ``None``, downloads to the
            end of the file.
        :type endByte: int or None
        :param contentDisposition: Content-Disposition response header
            disposition-type value.
        :type contentDisposition: str or None
        :param extraParameters: Arbitrary data to pass through to the
            assetstore adapter's downloadFile method.
        :type extraParameters: str or None
        """
        events.trigger('model.file.download.request', info={
            'file': file,
            'startByte': offset,
            'endByte': endByte})

        auditLogger.info('file.download', extra={
            'details': {
                'fileId': file['_id'],
                'startByte': offset,
                'endByte': endByte,
                'extraParameters': extraParameters
            }
        })

        if file.get('assetstoreId'):
            try:
                fileDownload = self.getAssetstoreAdapter(file).downloadFile(
                    file, offset=offset, headers=headers, endByte=endByte,
                    contentDisposition=contentDisposition,
                    extraParameters=extraParameters)

                def downloadGenerator():
                    for data in fileDownload():
                        yield data
                    if endByte is None or endByte >= file['size']:
                        events.trigger('model.file.download.complete', info={
                            'file': file,
                            'startByte': offset,
                            'endByte': endByte,
                            'redirect': False})
                return downloadGenerator
            except cherrypy.HTTPRedirect:
                events.trigger('model.file.download.complete', info={
                    'file': file,
                    'startByte': offset,
                    'endByte': endByte,
                    'redirect': True})
                raise
        elif file.get('linkUrl'):
            if headers:
                events.trigger('model.file.download.complete', info={
                    'file': file,
                    'startByte': offset,
                    'endByte': endByte,
                    'redirect': True})
                raise cherrypy.HTTPRedirect(file['linkUrl'])
            else:
                endByte = endByte or len(file['linkUrl'])

                def stream():
                    yield file['linkUrl'][offset:endByte]
                    if endByte >= len(file['linkUrl']):
                        events.trigger('model.file.download.complete', info={
                            'file': file,
                            'startByte': offset,
                            'endByte': endByte,
                            'redirect': False})
                return stream
        else:
            raise Exception('File has no known download mechanism.')
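
The 'model.file.download.request' and 'model.file.download.complete' events bracket each download, which makes download accounting straightforward. A sketch (incrementDownloadCount is hypothetical):

from girder import events

def _recordDownload(event):
    info = event.info
    # Only count downloads that start at the beginning of the file, so that
    # ranged requests for later chunks aren't double-counted.
    if info['startByte'] == 0:
        incrementDownloadCount(info['file'])

events.bind('model.file.download.request', 'my_plugin', _recordDownload)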
Exemple #40
0
    def _transformInputs(self, inputs, token):
        """
        Validates and sanitizes the input bindings. If they are Girder inputs, adds
        the necessary token info. If the token does not allow DATA_READ, or if the user
        does not have read access to the resource, raises an AccessException.
        """
        transformed = {}
        for k, v in six.viewitems(inputs):
            if v['mode'] == 'girder':
                ensureTokenScopes(token, TokenScope.DATA_READ)
                rtype = v.get('resource_type', 'file')
                if rtype not in {'file', 'item', 'folder'}:
                    raise ValidationException(
                        'Invalid input resource_type: %s.' % rtype)

                try:
                    resource = self.model(rtype).load(
                        v['id'],
                        level=AccessType.READ,
                        user=self.getCurrentUser(),
                        exc=True)
                except ValidationException:
                    # If we asked for a file, we may have been given an item,
                    # in which case get the first file within it.
                    if rtype != 'file':
                        raise
                    item = Item().load(v['id'],
                                       level=AccessType.READ,
                                       user=self.getCurrentUser(),
                                       exc=True)
                    # Event handlers can add a response which contains
                    # {'file': <file document>}.  Otherwise, the first file is
                    # used.
                    event = events.trigger('item_tasks.transforms.file', {
                        'item': item,
                        'input': v,
                        'input_key': k
                    })
                    if (len(event.responses)
                            and isinstance(event.responses[-1], dict)
                            and 'file' in event.responses[-1]):
                        resource = event.responses[-1]['file']
                    else:
                        childFiles = list(Item().childFiles(
                            item, limit=1, sort=[('_id', SortDir.ASCENDING)]))
                        if not len(childFiles):
                            raise ValidationException('Item %s has no files' %
                                                      v['id'])
                        resource = childFiles[0]

                transformed[k] = utils.girderInputSpec(resource,
                                                       resourceType=rtype,
                                                       token=token,
                                                       dataFormat='none')
            elif v['mode'] == 'inline':
                transformed[k] = {'mode': 'inline', 'data': v['data']}
            else:
                raise ValidationException('Invalid input mode: %s.' %
                                          v['mode'])

        return transformed
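
As the comment above notes, a handler for 'item_tasks.transforms.file' can choose which of an item's files is used as the input. A sketch that prefers CSV files (the selection rule is illustrative):

from girder import events
from girder.models.item import Item

def _chooseTaskFile(event):
    # Respond with {'file': <file doc>} to override the default of taking
    # the item's first file.
    for file in Item().childFiles(event.info['item']):
        if file['name'].endswith('.csv'):
            event.addResponse({'file': file})
            break

events.bind('item_tasks.transforms.file', 'my_plugin', _chooseTaskFile)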
Exemple #41
0
    def deleteSession(self, user, session):
        self.checkOwnership(user, session)
        self.remove(session)
        events.trigger('dm.sessionDeleted', info=session)
Exemple #42
0
    def copyFolderComponents(self,
                             srcFolder,
                             newFolder,
                             creator,
                             progress,
                             firstFolder=None):
        """
        Copy the items, subfolders, and extended data of a folder that was just
        copied.

        :param srcFolder: the original folder.
        :type srcFolder: dict
        :param newFolder: the new folder.
        :type newFolder: dict
        :param creator: user representing the creator of the new folder.
        :type creator: dict
        :param progress: a progress context to record progress on.
        :type progress: girder.utility.progress.ProgressContext or None.
        :param firstFolder: if not None, the first folder copied in a tree of
                            folders.
        :returns: the new folder document.
        """
        from .item import Item

        # copy metadata and other extension values
        filteredFolder = self.filter(newFolder, creator)
        updated = False
        for key in srcFolder:
            if key not in filteredFolder and key not in newFolder:
                newFolder[key] = copy.deepcopy(srcFolder[key])
                updated = True
        if updated:
            newFolder = self.save(newFolder, triggerEvents=False)
        # Give listeners a chance to change things
        events.trigger('model.folder.copy.prepare', (srcFolder, newFolder))
        # copy items
        itemModel = Item()
        for item in self.childItems(folder=srcFolder):
            setResponseTimeLimit()
            itemModel.copyItem(item, creator, folder=newFolder)
            if progress:
                progress.update(increment=1,
                                message='Copied item ' + item['name'])
        # copy subfolders
        for sub in self.childFolders(parentType='folder',
                                     parent=srcFolder,
                                     user=creator):
            if firstFolder and firstFolder['_id'] == sub['_id']:
                continue
            self.copyFolder(sub,
                            parent=newFolder,
                            parentType='folder',
                            creator=creator,
                            progress=progress)
        events.trigger('model.folder.copy.after', newFolder)
        if progress:
            progress.update(increment=1,
                            message='Copied folder ' + newFolder['name'])

        # Reload to get updated size value
        return self.load(newFolder['_id'], force=True)
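
Listeners bound to 'model.folder.copy.prepare' receive the (srcFolder, newFolder) tuple and may adjust the new folder before its contents are copied. A minimal sketch:

from girder import events

def _onFolderCopyPrepare(event):
    srcFolder, newFolder = event.info
    # Changes made here are visible to the rest of the copy logic; persisting
    # them would require saving the folder in the handler.
    newFolder.setdefault('meta', {})['copiedFrom'] = str(srcFolder['_id'])

events.bind('model.folder.copy.prepare', 'my_plugin', _onFolderCopyPrepare)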
Exemple #43
0
def createThumbnail(width, height, crop, fileId, attachToType, attachToId):
    """
    Creates the thumbnail. Validation and access control must be done prior
    to the invocation of this method.
    """
    fileModel = ModelImporter.model('file')
    file = fileModel.load(fileId, force=True)
    streamFn = functools.partial(fileModel.download, file, headers=False)

    event = events.trigger('thumbnails.create', info={
        'file': file,
        'width': width,
        'height': height,
        'crop': crop,
        'attachToType': attachToType,
        'attachToId': attachToId,
        'streamFn': streamFn
    })

    if len(event.responses):
        resp = event.responses[-1]
        newFile = resp['file']

        if event.defaultPrevented:
            if resp.get('attach', True):
                newFile = attachThumbnail(
                    file, newFile, attachToType, attachToId, width, height)
            return newFile
        else:
            file = newFile
            streamFn = functools.partial(
                fileModel.download, file, headers=False)

    if 'assetstoreId' not in file:
        # TODO(zachmullen) we could thumbnail link files if we really wanted.
        raise Exception('File %s has no assetstore.' % fileId)

    stream = streamFn()
    data = b''.join(stream())

    image = Image.open(six.BytesIO(data))

    if not width:
        width = int(height * image.size[0] / image.size[1])
    elif not height:
        height = int(width * image.size[1] / image.size[0])
    elif crop:
        x1 = y1 = 0
        x2, y2 = image.size
        wr = float(image.size[0]) / width
        hr = float(image.size[1]) / height

        if hr > wr:
            y1 = int(y2 / 2 - height * wr / 2)
            y2 = int(y2 / 2 + height * wr / 2)
        else:
            x1 = int(x2 / 2 - width * hr / 2)
            x2 = int(x2 / 2 + width * hr / 2)
        image = image.crop((x1, y1, x2, y2))

    image.thumbnail((width, height), Image.ANTIALIAS)

    uploadModel = ModelImporter.model('upload')

    out = six.BytesIO()
    image.save(out, 'JPEG', quality=85)
    contents = out.getvalue()

    upload = uploadModel.createUpload(
        user=None, name='_thumb.jpg', parentType=None, parent=None,
        size=len(contents), mimeType='image/jpeg')

    thumbnail = uploadModel.handleChunk(upload, six.BytesIO(contents))

    return attachThumbnail(
        file, thumbnail, attachToType, attachToId, width, height)
Exemple #44
0
    def handleRoute(self, method, path, params):
        """
        Match the requested path to its corresponding route, and call the
        handler for that route with the appropriate kwargs. If no route
        matches the requested path, a RestException is raised.

        This method fires two events for each request if a matching route is
        found. The names of these events are derived from the route matched by
        the request. As an example, if the user calls GET /api/v1/item/123,
        the following two events would be fired:

            ``rest.get.item/:id.before``

        would be fired prior to calling the default API function, and

            ``rest.get.item/:id.after``

        would be fired after the route handler returns. The query params are
        passed in the info of the before and after event handlers as
        event.info['params'], and the matched route tokens are passed in
        as dict items of event.info, so in the previous example event.info would
        also contain an 'id' key with the value of 123. For endpoints with empty
        sub-routes, the trailing slash is omitted from the event name, e.g.:

            ``rest.post.group.before``

        Note: You will normally not need to call this method directly, as it
        is called by the internals of this class during the routing process.

        :param method: The HTTP method of the current request.
        :type method: str
        :param path: The path params of the request.
        :type path: list
        """
        if not self._routes:
            raise Exception('No routes defined for resource')

        method = method.lower()

        for route, handler in self._routes[method][len(path)]:
            kwargs = self._matchRoute(path, route)
            if kwargs is not False:
                if hasattr(handler, 'cookieAuth') and handler.cookieAuth:
                    getCurrentToken(allowCookie=True)

                kwargs['params'] = params
                # Add before call for the API method. Listeners can return
                # their own responses by calling preventDefault() and
                # adding a response on the event.

                if hasattr(self, 'resourceName'):
                    resource = self.resourceName
                else:
                    resource = handler.__module__.rsplit('.', 1)[-1]

                routeStr = '/'.join((resource, '/'.join(route))).rstrip('/')
                eventPrefix = '.'.join(('rest', method, routeStr))

                event = events.trigger('.'.join((eventPrefix, 'before')),
                                       kwargs,
                                       pre=self._defaultAccess)
                if event.defaultPrevented and len(event.responses) > 0:
                    val = event.responses[0]
                else:
                    self._defaultAccess(handler)
                    val = handler(**kwargs)

                # Fire the after-call event that has a chance to augment the
                # return value of the API method that was called. You can
                # reassign the return value completely by adding a response to
                # the event and calling preventDefault() on it.
                kwargs['returnVal'] = val
                event = events.trigger('.'.join((eventPrefix, 'after')),
                                       kwargs)
                if event.defaultPrevented and len(event.responses) > 0:
                    val = event.responses[0]

                return val

        raise RestException('No matching route for "{} {}"'.format(
            method.upper(), '/'.join(path)))
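
The '.after' event gives plugins a chance to rewrite the response, as described above. A hedged sketch that decorates item GET responses (the added key is illustrative):

from girder import events

def _augmentItemResponse(event):
    # The route kwargs, including 'returnVal', are passed as event.info.
    doc = event.info['returnVal']
    if isinstance(doc, dict):
        doc = dict(doc, myPluginFlag=True)  # hypothetical extra field
        event.addResponse(doc)
        event.preventDefault()

events.bind('rest.get.item/:id.after', 'my_plugin', _augmentItemResponse)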
Exemple #45
0
    def finalizeUpload(self, upload, assetstore=None):
        """
        This should only be called manually in the case of creating an
        empty file, i.e. one that has no chunks.

        :param upload: The upload document.
        :type upload: dict
        :param assetstore: If known, the containing assetstore for the upload.
        :type assetstore: dict
        :returns: The file object that was created.
        """
        events.trigger('model.upload.finalize', upload)
        if assetstore is None:
            assetstore = self.model('assetstore').load(upload['assetstoreId'])

        if 'fileId' in upload:  # Updating an existing file's contents
            file = self.model('file').load(upload['fileId'], force=True)

            # Delete the previous file contents from the containing assetstore
            assetstore_utilities.getAssetstoreAdapter(
                self.model('assetstore').load(
                    file['assetstoreId'])).deleteFile(file)

            item = self.model('item').load(file['itemId'], force=True)
            self.model('file').propagateSizeChange(
                item, upload['size'] - file['size'])

            # Update file info
            file['creatorId'] = upload['userId']
            file['created'] = datetime.datetime.utcnow()
            file['assetstoreId'] = assetstore['_id']
            file['size'] = upload['size']
        else:  # Creating a new file record
            if upload['parentType'] == 'folder':
                # Create a new item with the name of the file.
                item = self.model('item').createItem(
                    name=upload['name'], creator={'_id': upload['userId']},
                    folder={'_id': upload['parentId']})
            elif upload['parentType'] == 'item':
                item = self.model('item').load(
                    id=upload['parentId'], force=True)
            else:
                item = None

            file = self.model('file').createFile(
                item=item, name=upload['name'], size=upload['size'],
                creator={'_id': upload['userId']}, assetstore=assetstore,
                mimeType=upload['mimeType'], saveFile=False)

        adapter = assetstore_utilities.getAssetstoreAdapter(assetstore)
        file = adapter.finalizeUpload(upload, file)
        self.model('file').save(file)
        self.remove(upload)

        # Add an async event for handlers that wish to process this file.
        events.daemon.trigger('data.process', {
            'file': file,
            'assetstore': assetstore
        })

        return file
Exemple #46
0
def solr_documents_from_field(field, values, classifications=None):
    """Given a field and a list of values, return a list of relevant Solr documents.

    Additionally, it can take an iterable of classifications which will be
    searched for through Solr.

    :param field: The Solr field to match the values against
    :param values: List of values corresponding to the Solr id attribute
    :param classifications: List of classifications to search by
    :returns: List of Solr documents
    """
    def paged_request(params):
        """
        Takes a params dictionary and manages paging.

        Uses POST so very large request bodies can be sent to Solr.

        Returns a list of all documents.
        """
        documents = []

        # Adjust paging params
        params['start'] = 0
        params['rows'] = 1000

        numFound = None
        numRetrieved = None
        while numRetrieved is None or numRetrieved < numFound:
            r = requests.post(imagespaceSetting.get('IMAGE_SPACE_SOLR') +
                              '/select',
                              data=params,
                              verify=False).json()

            numFound = r['response']['numFound']
            numRetrieved = len(r['response']['docs']) if numRetrieved is None \
                           else numRetrieved + len(r['response']['docs'])
            documents += r['response']['docs']

            # Setup offset for next request
            params['start'] = numRetrieved

        return documents

    event = events.trigger('imagespace.solr_documents_from_field',
                           info={
                               'field': field,
                               'values': values
                           })
    for response in event.responses:
        field = response['field']
        values = response['values']

    if classifications:
        q = ' OR '.join(['%s:[.7 TO *]' % key for key in classifications])
    else:
        q = '*:*'

    qparams = {'wt': 'json', 'q': q}

    # Give plugins a chance to adjust the Solr query parameters
    event = events.trigger('imagespace.imagesearch.qparams', qparams)
    for response in event.responses:
        qparams = response

    # Filter by field
    qparams['fq'] = qparams['fq'] if 'fq' in qparams else []
    qparams['fq'].append('%(field)s:(%(value)s)' % {
        'field': field,
        'value': ' '.join(values)
    })

    return paged_request(qparams)
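
Since the last response to 'imagespace.imagesearch.qparams' replaces qparams wholesale, a listener can constrain every image search. A sketch (the 'private:false' filter is illustrative):

from girder import events

def _restrictSearch(event):
    qparams = dict(event.info)
    # 'fq' may not exist yet at this point; the caller appends its own
    # field filter afterwards.
    qparams.setdefault('fq', []).append('private:false')
    event.addResponse(qparams)

events.bind('imagespace.imagesearch.qparams', 'my_plugin', _restrictSearch)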
Exemple #47
0
    def save(self, annotation, *args, **kwargs):
        """
        When saving an annotation, override the collection insert_one and
        replace_one methods so that we don't save the elements with the main
        annotation.  Still use the super class's save method, so that all of
        the triggers are fired as expected and cancelling and modifications can
        be done as needed.

        Because Mongo doesn't support transactions, a version number is stored
        with the annotation and with the associated elements.  This is used to
        add the new elements first, then update the annotation, and delete the
        old elements.  This allows version integrity if another thread queries
        the annotation at the same time.

        :param annotation: the annotation document to save.
        :returns: the saved document.  If it is a new document, the _id has
                  been added.
        """
        starttime = time.time()
        with self._writeLock:
            replace_one = self.collection.replace_one
            insert_one = self.collection.insert_one
        version = Annotationelement().getNextVersionValue()
        if '_id' not in annotation:
            oldversion = None
        else:
            if '_annotationId' in annotation:
                annotation['_id'] = annotation['_annotationId']
            # We read the old version from the existing record, because we
            # don't want to trust that the input _version has not been altered
            # or is present.
            oldversion = self.collection.find_one({
                '_id': annotation['_id']
            }).get('_version')
        annotation['_version'] = version
        _elementQuery = annotation.pop('_elementQuery', None)
        annotation.pop('_active', None)
        annotation.pop('_annotationId', None)

        def replaceElements(query, doc, *args, **kwargs):
            Annotationelement().updateElements(doc)
            elements = doc['annotation'].pop('elements', None)
            if self._historyEnabled:
                oldAnnotation = self.collection.find_one(query)
                if oldAnnotation:
                    oldAnnotation['_annotationId'] = oldAnnotation.pop('_id')
                    oldAnnotation['_active'] = False
                    insert_one(oldAnnotation)
            ret = replace_one(query, doc, *args, **kwargs)
            if elements:
                doc['annotation']['elements'] = elements
            if not self._historyEnabled:
                Annotationelement().removeOldElements(doc, oldversion)
            return ret

        def insertElements(doc, *args, **kwargs):
            # When creating an annotation, store the elements first, then store
            # the annotation without elements, then restore the elements.
            doc.setdefault('_id', ObjectId())
            if doc['annotation'].get('elements') is not None:
                Annotationelement().updateElements(doc)
            # If we are inserting, we shouldn't have any old elements, so don't
            # bother removing them.
            elements = doc['annotation'].pop('elements', None)
            ret = insert_one(doc, *args, **kwargs)
            if elements is not None:
                doc['annotation']['elements'] = elements
            return ret

        with self._writeLock:
            self.collection.replace_one = replaceElements
            self.collection.insert_one = insertElements
            try:
                result = super().save(annotation, *args, **kwargs)
            finally:
                self.collection.replace_one = replace_one
                self.collection.insert_one = insert_one
        if _elementQuery:
            result['_elementQuery'] = _elementQuery

        annotation.pop('groups', None)
        self.injectAnnotationGroupSet(annotation)

        logger.debug('Saved annotation in %5.3fs' % (time.time() - starttime))
        events.trigger('large_image.annotations.save_history',
                       {'annotation': annotation},
                       asynchronous=True)
        return result
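
Because the 'large_image.annotations.save_history' trigger above is asynchronous, handlers run on the event daemon thread and cannot affect the save. A sketch of a listener (recordAuditEntry is hypothetical):

from girder import events

def _onAnnotationSaved(event):
    annotation = event.info['annotation']
    recordAuditEntry(annotation)  # e.g. write an audit record

events.bind('large_image.annotations.save_history', 'my_plugin', _onAnnotationSaved)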
Exemple #48
0
    def start(self, cluster, params):
        body = self._get_body()
        adapter = get_cluster_adapter(cluster)
        adapter.start(body)
        events.trigger('cumulus.cluster.started', info=cluster)
Exemple #49
0
    def testHandleAnnotation(self):
        admin = self.model('user').findOne({'login': '******'})
        item = self.model('item').findOne({'name': 'Item 1'})
        file1 = self.model('file').findOne({'name': 'File 1'})
        file2 = self.model('file').findOne({'name': 'File 2'})
        file3 = self.model('file').findOne({'name': 'File 3'})
        file4 = self.model('file').findOne({'name': 'File 4'})
        assetstore = self.model('assetstore').load(id=file1['assetstoreId'])
        annot = list(
            self.model('annotation',
                       'large_image').find({'itemId': item['_id']}))
        self.assertEqual(len(annot), 0)

        # Process a list of annotations
        events.trigger('data.process', {
            'file': file1,
            'assetstore': assetstore,
            'reference': json.dumps({
                'identifier': 'sampleAnnotationFile',
                'itemId': str(file1['_id']),
                'userId': str(admin['_id']),
            })
        })
        annot = list(
            self.model('annotation',
                       'large_image').find({'itemId': item['_id']}))
        self.assertEqual(len(annot), 2)

        # If the reference doesn't contain userId or itemId, we won't add any
        # annotations
        events.trigger('data.process', {
            'file': file1,
            'assetstore': assetstore,
            'reference': json.dumps({
                'identifier': 'sampleAnnotationFile',
                'userId': str(admin['_id']),
            })
        })
        annot = list(
            self.model('annotation',
                       'large_image').find({'itemId': item['_id']}))
        self.assertEqual(len(annot), 2)
        events.trigger('data.process', {
            'file': file1,
            'assetstore': assetstore,
            'reference': json.dumps({
                'identifier': 'sampleAnnotationFile',
                'itemId': str(file1['_id']),
            })
        })
        annot = list(
            self.model('annotation',
                       'large_image').find({'itemId': item['_id']}))
        self.assertEqual(len(annot), 2)

        # If the user id isn't valid, we won't add an annotation
        events.trigger('data.process', {
            'file': file1,
            'assetstore': assetstore,
            'reference': json.dumps({
                'identifier': 'sampleAnnotationFile',
                'itemId': str(file1['_id']),
                'userId': str(item['_id']),  # this is not a user
            })
        })
        annot = list(
            self.model('annotation',
                       'large_image').find({'itemId': item['_id']}))
        self.assertEqual(len(annot), 2)

        # Process a single annotation
        events.trigger('data.process', {
            'file': file2,
            'assetstore': assetstore,
            'reference': json.dumps({
                'identifier': 'sampleAnnotationFile',
                'itemId': str(file2['_id']),
                'userId': str(admin['_id']),
            })
        })
        annot = list(
            self.model('annotation',
                       'large_image').find({'itemId': item['_id']}))
        self.assertEqual(len(annot), 3)

        # A file that isn't json raises an error and doesn't add anything
        with self.assertRaises(ValueError):
            events.trigger('data.process', {
                'file': file3,
                'assetstore': assetstore,
                'reference': json.dumps({
                    'identifier': 'sampleAnnotationFile',
                    'itemId': str(file3['_id']),
                    'userId': str(admin['_id']),
                })
            })
        annot = list(
            self.model('annotation',
                       'large_image').find({'itemId': item['_id']}))
        self.assertEqual(len(annot), 3)

        # A json file that isn't an annotation raises an error and doesn't
        # add anything either
        with self.assertRaises(AttributeError):
            events.trigger('data.process', {
                'file': file4,
                'assetstore': assetstore,
                'reference': json.dumps({
                    'identifier': 'sampleAnnotationFile',
                    'itemId': str(file4['_id']),
                    'userId': str(admin['_id']),
                })
            })
        annot = list(
            self.model('annotation',
                       'large_image').find({'itemId': item['_id']}))
        self.assertEqual(len(annot), 3)
Exemple #50
0
    def authenticate(self, login, password, otpToken=None):
        """
        Validate a user login via username and password. If authentication
        fails, an ``AccessException`` is raised.

        :param login: The user's login or email.
        :type login: str
        :param password: The user's password.
        :type password: str
        :param otpToken: A one-time password for the user. If "True", then the one-time password
                         (if required) is assumed to be concatenated to the password.
        :type otpToken: str or bool or None
        :returns: The corresponding user if the login was successful.
        :rtype: dict
        """
        event = events.trigger('model.user.authenticate', {
            'login': login,
            'password': password
        })

        if event.defaultPrevented and len(event.responses):
            return event.responses[-1]

        login = login.lower().strip()
        loginField = 'email' if '@' in login else 'login'

        user = self.findOne({loginField: login})
        if user is None:
            raise AccessException('Login failed.')

        # Handle users with no password
        if not self.hasPassword(user):
            e = events.trigger('no_password_login_attempt', {
                'user': user,
                'password': password
            })

            if len(e.responses):
                return e.responses[-1]

            raise ValidationException(
                'This user does not have a password. You must log in with an '
                'external service, or reset your password.')

        # Handle OTP token concatenation
        if otpToken is True and self.hasOtpEnabled(user):
            # Assume the last (typically 6) characters are the OTP, so split at that point
            otpTokenLength = self._TotpFactory.digits
            otpToken = password[-otpTokenLength:]
            password = password[:-otpTokenLength]

        # Verify password
        if not self._cryptContext.verify(password, user['salt']):
            raise AccessException('Login failed.')

        # Verify OTP
        if self.hasOtpEnabled(user):
            if otpToken is None:
                raise AccessException(
                    'User authentication must include a one-time password '
                    '(typically in the "Girder-OTP" header).')
            self.verifyOtp(user, otpToken)
        elif isinstance(otpToken, str):
            raise AccessException(
                'The user has not enabled one-time passwords.')

        # Verify the user account is enabled and auth policies are fulfilled
        self.verifyLogin(user)

        return user
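
The 'model.user.authenticate' event lets a plugin replace core password checking, as the early return above shows. A hedged sketch of an external-directory handler (lookupExternalDirectory is hypothetical):

from girder import events

def _externalAuth(event):
    user = lookupExternalDirectory(event.info['login'], event.info['password'])
    if user is not None:
        # Returning a user and preventing the default skips the core checks.
        event.addResponse(user)
        event.preventDefault()
    # Otherwise fall through to Girder's own password verification.

events.bind('model.user.authenticate', 'my_plugin', _externalAuth)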
Exemple #51
0
    def scheduleJob(self, job):
        """
        Trigger the event to schedule this job. Other plugins are in charge of
        actually scheduling and/or executing the job.
        """
        events.trigger('jobs.schedule', info=job)
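
A scheduler plugin consumes the 'jobs.schedule' event to actually run the job. A minimal sketch, assuming the legacy plugin import paths used elsewhere in these examples:

from girder import events
from girder.plugins.jobs.constants import JobStatus
from girder.plugins.jobs.models.job import Job

def _scheduleLocal(event):
    job = event.info
    if job.get('handler') == 'my_handler':  # hypothetical handler name
        Job().updateJob(job, status=JobStatus.QUEUED)

events.bind('jobs.schedule', 'my_plugin', _scheduleLocal)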
Exemple #52
0
    def finalizeUpload(self, upload, assetstore=None):
        """
        This should only be called manually in the case of creating an
        empty file, i.e. one that has no chunks.

        :param upload: The upload document.
        :type upload: dict
        :param assetstore: If known, the containing assetstore for the upload.
        :type assetstore: dict
        :returns: The file object that was created.
        """
        from .assetstore import Assetstore
        from .file import File
        from .item import Item
        from girder.utility import assetstore_utilities

        events.trigger('model.upload.finalize', upload)
        if assetstore is None:
            assetstore = Assetstore().load(upload['assetstoreId'])

        if 'fileId' in upload:  # Updating an existing file's contents
            file = File().load(upload['fileId'], force=True)

            # Delete the previous file contents from the containing assetstore
            assetstore_utilities.getAssetstoreAdapter(Assetstore().load(
                file['assetstoreId'])).deleteFile(file)

            item = Item().load(file['itemId'], force=True)
            File().propagateSizeChange(item, upload['size'] - file['size'])

            # Update file info
            file['creatorId'] = upload['userId']
            file['created'] = datetime.datetime.utcnow()
            file['assetstoreId'] = assetstore['_id']
            file['size'] = upload['size']
            # If the file was previously imported, it is no longer.
            if file.get('imported'):
                file['imported'] = False

        else:  # Creating a new file record
            if upload.get('attachParent'):
                item = None
            elif upload['parentType'] == 'folder':
                # Create a new item with the name of the file.
                item = Item().createItem(name=upload['name'],
                                         creator={'_id': upload['userId']},
                                         folder={'_id': upload['parentId']})
            elif upload['parentType'] == 'item':
                item = Item().load(id=upload['parentId'], force=True)
            else:
                item = None

            file = File().createFile(item=item,
                                     name=upload['name'],
                                     size=upload['size'],
                                     creator={'_id': upload['userId']},
                                     assetstore=assetstore,
                                     mimeType=upload['mimeType'],
                                     saveFile=False)
            if upload.get('attachParent'):
                if upload['parentType'] and upload['parentId']:
                    file['attachedToType'] = upload['parentType']
                    file['attachedToId'] = upload['parentId']

        adapter = assetstore_utilities.getAssetstoreAdapter(assetstore)
        file = adapter.finalizeUpload(upload, file)

        event_document = {'file': file, 'upload': upload}
        events.trigger('model.file.finalizeUpload.before', event_document)
        file = File().save(file)
        events.trigger('model.file.finalizeUpload.after', event_document)
        if '_id' in upload:
            self.remove(upload)

        logger.info('Upload complete. Upload=%s File=%s User=%s' %
                    (upload['_id'], file['_id'], upload['userId']))

        # Add an async event for handlers that wish to process this file.
        eventParams = {
            'file': file,
            'assetstore': assetstore,
            'currentToken': rest.getCurrentToken(),
            'currentUser': rest.getCurrentUser()
        }
        if 'reference' in upload:
            eventParams['reference'] = upload['reference']
        events.daemon.trigger('data.process', eventParams)

        return file
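
The paired finalizeUpload events around File().save() above allow a plugin to adjust the file document before it is persisted. A sketch (the flag is illustrative):

from girder import events

def _beforeFileSave(event):
    file = event.info['file']
    upload = event.info['upload']
    # Mutations here are persisted, since the trigger fires just before save.
    if upload.get('mimeType') == 'image/tiff':
        file['needsTiling'] = True  # hypothetical flag

events.bind('model.file.finalizeUpload.before', 'my_plugin', _beforeFileSave)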
Exemple #53
0
    def testSynchronousEvents(self):
        name, failname = '_test.event', '_test.failure'
        handlerName = '_test.handler'
        with events.bound(name, handlerName, self._increment), \
                events.bound(failname, handlerName, self._raiseException):
            # Make sure our exception propagates out of the handler
            try:
                events.trigger(failname)
                self.assertTrue(False)
            except Exception as e:
                self.assertEqual(e.args[0], 'Failure condition')

            # Bind an event to increment the counter
            self.assertEqual(self.ctr, 0)
            event = events.trigger(name, {'amount': 2})
            self.assertEqual(self.ctr, 2)
            self.assertTrue(event.propagate)
            self.assertFalse(event.defaultPrevented)
            self.assertEqual(event.responses, [])

            # The event should still be bound here if another handler unbinds
            events.unbind(name, 'not the handler name')
            events.trigger(name, {'amount': 2})
            self.assertEqual(self.ctr, 4)

        # Actually unbind the event, by going out of scope of "bound"
        events.trigger(name, {'amount': 2})
        self.assertEqual(self.ctr, 4)

        # Bind an event that prevents the default action and passes a response
        with events.bound(name, handlerName, self._eatEvent), \
                events.bound(name, 'other handler name',
                             self._shouldNotBeCalled):
            event = events.trigger(name)
            self.assertTrue(event.defaultPrevented)
            self.assertFalse(event.propagate)
            self.assertEqual(event.responses, [{'foo': 'bar'}])

        # Test that the context manager unbinds after an unhandled exception
        try:
            with events.bound(failname, handlerName, self._raiseException):
                events.trigger(failname)
        except Exception:
            # The event should be unbound at this point
            events.trigger(failname)
Exemple #54
0
def testSynchronousEvents(eventsHelper):
    name, failname = '_test.event', '_test.failure'
    handlerName = '_test.handler'
    with events.bound(name, handlerName, eventsHelper._increment), \
            events.bound(failname, handlerName, eventsHelper._raiseException):
        # Make sure our exception propagates out of the handler
        with pytest.raises(Exception, match='^Failure condition$'):
            events.trigger(failname)

        # Bind an event to increment the counter
        assert eventsHelper.ctr == 0
        event = events.trigger(name, {'amount': 2})
        assert eventsHelper.ctr == 2
        assert event.propagate
        assert not event.defaultPrevented
        assert event.responses == []

        # The event should still be bound here if another handler unbinds
        events.unbind(name, 'not the handler name')
        events.trigger(name, {'amount': 2})
        assert eventsHelper.ctr == 4

    # Actually unbind the event, by going out of scope of "bound"
    events.trigger(name, {'amount': 2})
    assert eventsHelper.ctr == 4

    # Bind an event that prevents the default action and passes a response
    with events.bound(name, handlerName, eventsHelper._eatEvent), \
            events.bound(name, 'other handler name',
                         eventsHelper._shouldNotBeCalled):
        event = events.trigger(name)
        assert event.defaultPrevented
        assert not event.propagate
        assert event.responses == [{'foo': 'bar'}]

    # Test that the context manager unbinds after an unhandled exception
    try:
        with events.bound(failname, handlerName, eventsHelper._raiseException):
            events.trigger(failname)
    except Exception:
        # The event should be unbound at this point
        events.trigger(failname)
Exemple #55
0
def mountServerFuse(name,
                    path,
                    level=AccessType.ADMIN,
                    user=None,
                    force=False):
    """
    Mount a FUSE at a specific path with authorization for a given user.

    :param name: a key for this mount.  Each mount point must have a
        distinct key.
    :param path: the location where this mount will be in the local filesystem.
        This should be an empty directory.
    :param level: access level used when checking which resources are available
        within the FUSE.  This is ignored currently, but could be used if
        non-readonly access is ever implemented.
    :param user: the user used for authorizing resource access.
    :param force: if True, all resources are available without checking the
        user or level.
    :returns: True if successful.  'present' if the mount is already present.
        None on failure.
    """
    with _fuseMountsLock:
        if name in _fuseMounts:
            if (_fuseMounts[name]['level'] == level
                    and _fuseMounts[name]['user'] == user
                    and _fuseMounts[name]['force'] == force):
                return 'present'
            unmountServerFuse(name)
        entry = {
            'level': level,
            'user': user,
            'force': force,
            'path': path,
            'stat': dict((key, getattr(os.stat(path), key))
                         for key in ('st_atime', 'st_ctime', 'st_gid', 'st_mode',
                                     'st_mtime', 'st_nlink', 'st_size', 'st_uid')),
            'thread': None
        }
        try:
            # We run the file system in a thread, but as a foreground process.
            # This allows multiple mounted fuses to play well together and stop
            # when the program is stopped.
            opClass = ServerFuse(level=level,
                                 user=user,
                                 force=force,
                                 stat=os.stat(path))
            options = {
                # Running in a thread in the foreground makes it easier to
                # clean up the process when we need to shut it down.
                'foreground': True,
                # Automatically unmount when we try to mount again
                'auto_unmount': True,
                # Cache files if their size and timestamp haven't changed.
                # This lets the OS buffer files efficiently.
                'auto_cache': True,
                # We aren't specifying our own inos
                'use_ino': False,
                # read-only file system
                'ro': True,
            }
            if sys.platform == 'darwin':
                del options['auto_unmount']
            fuseThread = threading.Thread(target=FUSELogError,
                                          args=(name, handleFuseMountFailure,
                                                opClass, path),
                                          kwargs=options)
            entry['thread'] = fuseThread
            _fuseMounts[name] = entry
            fuseThread.daemon = True
            fuseThread.start()
            logprint.info('Mounted %s at %s' % (name, path))
            events.trigger('server_fuse.mount', {'name': name})
            return True
        except Exception:
            logger.exception('Failed to mount %s at %s' % (name, path))
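
A hedged usage sketch for the mount helper above; the path and user are placeholders, and the return values follow the docstring:

result = mountServerFuse('userMount', '/mnt/girder', user=someUser)
if result is True:
    pass  # mounted; the 'server_fuse.mount' event has fired
elif result == 'present':
    pass  # an identical mount already existed
else:
    pass  # mount failed; details are in the logs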
Exemple #56
0
def run(job):
    jobModel = Job()
    jobModel.updateJob(job, status=JobStatus.RUNNING)

    lookup_kwargs, = job["args"]
    user = User().load(job["userId"], force=True)
    tale = Tale().load(job["kwargs"]["taleId"], user=user)
    spawn = job["kwargs"]["spawn"]
    asTale = job["kwargs"]["asTale"]
    token = Token().createToken(user=user, days=0.5)

    progressTotal = 3 + int(spawn)
    progressCurrent = 0

    try:
        # 0. Spawn instance in the background
        if spawn:
            instance = Instance().createInstance(tale, user, token, spawn=spawn)

        # 1. Register data using url
        progressCurrent += 1
        jobModel.updateJob(
            job,
            status=JobStatus.RUNNING,
            progressTotal=progressTotal,
            progressCurrent=progressCurrent,
            progressMessage="Registering external data",
        )
        dataIds = lookup_kwargs.pop("dataId")
        base_url = lookup_kwargs.get("base_url", DataONELocations.prod_cn)
        dataMap = pids_to_entities(
            dataIds, user=user, base_url=base_url, lookup=True
        )  # DataONE shouldn't be here
        imported_data = register_dataMap(
            dataMap,
            getOrCreateRootFolder(CATALOG_NAME),
            "folder",
            user=user,
            base_url=base_url,
        )

        if dataMap[0]["repository"].lower().startswith("http"):
            resource = Item().load(imported_data[0], user=user, level=AccessType.READ)
            resourceType = "item"
        else:
            resource = Folder().load(imported_data[0], user=user, level=AccessType.READ)
            resourceType = "folder"

        data_set = [
            {
                "itemId": imported_data[0],
                "mountPath": resource["name"],
                "_modelType": resourceType,
            }
        ]

        if asTale:
            if resourceType == "folder":
                # Create a dataset with the content of root ds folder, so that
                # it looks nice and is easy to copy to the workspace later on
                workspace_data_set = [
                    {
                        "itemId": folder["_id"],
                        "mountPath": folder["name"],
                        "_modelType": "folder ",
                    }
                    for folder in Folder().childFolders(
                        parentType="folder", parent=resource, user=user
                    )
                ]
                workspace_data_set += [
                    {
                        "itemId": item["_id"],
                        "mountPath": item["name"],
                        "_modelType": "item",
                    }
                    for item in Folder().childItems(resource)
                ]
            else:
                workspace_data_set = data_set

            # 2. Create a session
            # TODO: yay circular dependencies! IMHO we really should merge
            # wholetale and wt_data_manager plugins...
            from girder.plugins.wt_data_manager.models.session import Session

            # Session is created so that we can easily copy files to workspace,
            # without worrying about how to handle transfers. DMS will do that for us <3
            session = Session().createSession(user, dataSet=workspace_data_set)

            # 3. Copy data to the workspace using WebDAVFS
            progressCurrent += 1
            jobModel.updateJob(
                job,
                status=JobStatus.RUNNING,
                log="Copying files to workspace",
                progressTotal=progressTotal,
                progressCurrent=progressCurrent,
                progressMessage="Copying files to workspace",
            )
            girder_root = "http://localhost:{}".format(
                config.getConfig()["server.socket_port"]
            )
            with WebDAVFS(
                girder_root,
                login=user["login"],
                password="******".format(**token),
                root="/tales/{_id}".format(**tale),
            ) as destination_fs, DMSFS(
                str(session["_id"]), girder_root + "/api/v1", str(token["_id"])
            ) as source_fs:
                copy_fs(source_fs, destination_fs)
                sanitize_binder(destination_fs)

            Session().deleteSession(user, session)
        else:
            # 3. Update Tale's dataSet
            update_citations = {_["itemId"] for _ in tale["dataSet"]} ^ {
                _["itemId"] for _ in data_set
            }
            tale["dataSet"] = data_set
            tale = Tale().updateTale(tale)

            if update_citations:
                eventParams = {"tale": tale, "user": user}
                event = events.trigger("tale.update_citation", eventParams)
                if len(event.responses):
                    tale = Tale().updateTale(event.responses[-1])

        # Tale is ready to be built
        tale = Tale().load(tale["_id"], user=user)  # Refresh state
        tale["status"] = TaleStatus.READY
        tale = Tale().updateTale(tale)

        # 4. Wait for container to show up
        if spawn:
            progressCurrent += 1
            jobModel.updateJob(
                job,
                status=JobStatus.RUNNING,
                log="Waiting for a Tale container",
                progressTotal=progressTotal,
                progressCurrent=progressCurrent,
                progressMessage="Waiting for a Tale container",
            )

            sleep_step = 10
            timeout = 15 * 60
            while instance["status"] == InstanceStatus.LAUNCHING and timeout > 0:
                time.sleep(sleep_step)
                instance = Instance().load(instance["_id"], user=user)
                timeout -= sleep_step
            if timeout <= 0:
                raise RuntimeError(
                    "Failed to launch instance {}".format(instance["_id"])
                )
        else:
            instance = None

    except Exception:
        tale = Tale().load(tale["_id"], user=user)  # Refresh state
        tale["status"] = TaleStatus.ERROR
        tale = Tale().updateTale(tale)
        t, val, tb = sys.exc_info()
        log = "%s: %s\n%s" % (t.__name__, repr(val), traceback.extract_tb(tb))
        jobModel.updateJob(
            job,
            progressTotal=progressTotal,
            progressCurrent=progressTotal,
            progressMessage="Task failed",
            status=JobStatus.ERROR,
            log=log,
        )
        raise

    # To get rid of ObjectId's, dates etc.
    tale = json.loads(
        json.dumps(tale, sort_keys=True, allow_nan=False, cls=JsonEncoder)
    )
    instance = json.loads(
        json.dumps(instance, sort_keys=True, allow_nan=False, cls=JsonEncoder)
    )

    jobModel.updateJob(
        job,
        status=JobStatus.SUCCESS,
        log="Tale created",
        progressTotal=progressTotal,
        progressCurrent=progressTotal,
        progressMessage="Tale created",
        otherFields={"result": {"tale": tale, "instance": instance}},
    )
Exemple #57
0
    def importData(self, parent, parentType, params, progress, user,
                   leafFoldersAsItems):
        importPath = params['importPath']

        if not os.path.exists(importPath):
            raise ValidationException('Not found: %s.' % importPath)
        if not os.path.isdir(importPath):
            name = os.path.basename(importPath)
            progress.update(message=name)
            self._importFileToFolder(name, user, parent, parentType,
                                     importPath)
            return

        listDir = os.listdir(importPath)

        if parentType != 'folder' and any(
                os.path.isfile(os.path.join(importPath, val))
                for val in listDir):
            raise ValidationException(
                'Files cannot be imported directly underneath a %s.' %
                parentType)

        if leafFoldersAsItems and self._hasOnlyFiles(importPath, listDir):
            self._importDataAsItem(os.path.basename(importPath.rstrip(os.sep)),
                                   user,
                                   parent,
                                   importPath,
                                   listDir,
                                   params=params)
            return

        for name in listDir:
            progress.update(message=name)
            path = os.path.join(importPath, name)

            if os.path.isdir(path):
                localListDir = os.listdir(path)
                if leafFoldersAsItems and self._hasOnlyFiles(
                        path, localListDir):
                    self._importDataAsItem(name,
                                           user,
                                           parent,
                                           path,
                                           localListDir,
                                           params=params)
                else:
                    folder = Folder().createFolder(parent=parent,
                                                   name=name,
                                                   parentType=parentType,
                                                   creator=user,
                                                   reuseExisting=True)
                    events.trigger('filesystem_assetstore_imported', {
                        'id': folder['_id'],
                        'type': 'folder',
                        'importPath': path
                    })
                    self.importData(folder,
                                    'folder',
                                    params=dict(params, importPath=path),
                                    progress=progress,
                                    user=user,
                                    leafFoldersAsItems=leafFoldersAsItems)
            else:
                if self.shouldImportFile(path, params):
                    self._importFileToFolder(name, user, parent, parentType,
                                             path)
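
_hasOnlyFiles is called above but not shown in this excerpt. A plausible reconstruction, assuming it only inspects the directory listing; the signature is inferred from the call sites:

import os


def _hasOnlyFiles(path, listDir):
    # True if every entry in the listing is a regular file, i.e. the
    # directory is a "leaf" that may be imported as a single item.
    return all(os.path.isfile(os.path.join(path, name)) for name in listDir)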
Exemple #58
0
    def createTale(self,
                   image,
                   data,
                   creator=None,
                   save=True,
                   title=None,
                   description=None,
                   public=None,
                   config=None,
                   authors=None,
                   icon=None,
                   category=None,
                   illustration=None,
                   narrative=None,
                   licenseSPDX=WholeTaleLicense.default_spdx(),
                   status=TaleStatus.READY,
                   publishInfo=None,
                   relatedIdentifiers=None):

        if creator is None:
            creatorId = None
        else:
            creatorId = creator.get('_id', None)

        if title is None:
            title = '{} with {}'.format(image['name'], DATADIRS_NAME)
        # TODO: if illustration is None, fetch a default image from SILS

        now = datetime.datetime.utcnow()
        tale = {
            'authors': authors,
            'category': category,
            'config': config or {},
            'copyOfTale': None,
            'creatorId': creatorId,
            'dataSet': data or [],
            'description': description,
            'format': _currentTaleFormat,
            'created': now,
            'icon': icon,
            'iframe': image.get('iframe', False),
            'imageId': ObjectId(image['_id']),
            'illustration': illustration,
            'narrative': narrative or [],
            'title': title,
            'public': public,
            'publishInfo': publishInfo or [],
            'relatedIdentifiers': relatedIdentifiers or [],
            'updated': now,
            'licenseSPDX': licenseSPDX,
            'status': status,
        }
        # Only honor an explicit boolean; otherwise default the Tale to
        # private rather than leaving the access flag unset.
        if isinstance(public, bool):
            self.setPublic(tale, public, save=False)
        else:
            self.setPublic(tale, False, save=False)

        if creator is not None:
            self.setUserAccess(tale,
                               user=creator,
                               level=AccessType.ADMIN,
                               save=False)
        if tale['dataSet']:
            eventParams = {'tale': tale, 'user': creator}
            event = events.trigger('tale.update_citation', eventParams)
            if len(event.responses):
                tale = event.responses[-1]

        if save:
            tale = self.save(tale)
            workspace = self.createWorkspace(tale, creator=creator)
            data_folder = self.createDataMountpoint(tale, creator=creator)
            tale['folderId'] = data_folder['_id']
            tale['workspaceId'] = workspace['_id']
            narrative_folder = self.createNarrativeFolder(
                tale, creator=creator, default=not bool(tale['narrative']))
            for obj_id in tale['narrative']:
                item = Item().load(obj_id, user=creator)
                Item().copyItem(item, creator, folder=narrative_folder)
            tale['narrativeId'] = narrative_folder['_id']
            tale = self.save(tale)

        return tale
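
A hedged usage sketch for the method above; image and user stand in for real Girder documents loaded elsewhere and are assumptions, not values from this excerpt:

# Hypothetical call: `image` must carry '_id' and 'name', `user` is the
# creator's user document.
tale = Tale().createTale(
    image=image,
    data=[],                  # start without any mounted datasets
    creator=user,
    title="My first Tale",
    description="A reproducible analysis environment",
    public=False,
)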