Example #1
    def _getAssetstoreModel(self, file):
        from .assetstore import Assetstore

        if file.get('assetstoreType'):
            try:
                if isinstance(file['assetstoreType'], six.string_types):
                    return ModelImporter.model(file['assetstoreType'])
                else:
                    return ModelImporter.model(*file['assetstoreType'])
            except Exception:
                raise ValidationException('Invalid assetstore type: %s.' %
                                          (file['assetstoreType'], ))
        else:
            return Assetstore()
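As a brief illustration (the plugin name below is hypothetical, not from the source), the two shapes of `assetstoreType` that this method accepts look like this: a plain string names a core model, while a tuple is unpacked into `ModelImporter.model(modelName, pluginName)`.

# A core assetstore model, referenced by name:
file = {'assetstoreType': 'assetstore'}                  # -> ModelImporter.model('assetstore')
# A plugin-provided model, stored as a (modelName, pluginName) tuple:
file = {'assetstoreType': ('assetstore', 'my_plugin')}   # -> ModelImporter.model('assetstore', 'my_plugin')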
Example #2
    def download(self, image, params):
        contentDisp = params.get('contentDisposition', None)
        if contentDisp is not None and contentDisp not in {'inline', 'attachment'}:
            raise ValidationException(
                f'Unallowed contentDisposition type "{contentDisp}".',
                'contentDisposition')

        originalFile = Image().originalFile(image)
        fileStream = File().download(originalFile,
                                     headers=True,
                                     contentDisposition=contentDisp)
        return fileStream
Example #3
    def validate(self, doc):
        doc['name'] = doc['name'].strip()
        if doc['description']:
            doc['description'] = doc['description'].strip()

        if not doc['name']:
            raise ValidationException(
                'Collection name must not be empty.', 'name')

        # Ensure unique name for the collection
        q = {
            'name': doc['name']
        }
        if '_id' in doc:
            q['_id'] = {'$ne': doc['_id']}
        duplicate = self.findOne(q, fields=['_id'])
        if duplicate is not None:
            raise ValidationException('A collection with that name already '
                                      'exists.', 'name')

        doc['lowerName'] = doc['name'].lower()

        return doc
Example #4
    def _clientUploadChunk(self, upload, chunk):
        """
        Clients that support direct-to-S3 upload behavior will go through this
        method by sending a normally-encoded form string as the chunk parameter,
        containing the required JSON info for uploading. This generates the
        signed URL that the client should use to upload the chunk to S3.
        """
        info = json.loads(chunk)
        index = int(info['partNumber']) - 1
        length = min(self.CHUNK_LEN, upload['size'] - index * self.CHUNK_LEN)

        if 'contentLength' in info and int(info['contentLength']) != length:
            raise ValidationException('Expected chunk size %d, but got %d.' %
                                      (length, int(info['contentLength'])))

        if length <= 0:
            raise ValidationException('Invalid chunk length %d.' % length)

        url = self.client.generate_presigned_url(ClientMethod='upload_part', Params={
            'Bucket': self.assetstore['bucket'],
            'Key': upload['s3']['key'],
            'ContentLength': length,
            'UploadId': info['s3UploadId'],
            'PartNumber': info['partNumber']
        })

        upload['s3']['uploadId'] = info['s3UploadId']
        upload['s3']['partNumber'] = info['partNumber']
        upload['s3']['request'] = {'method': 'PUT', 'url': url}

        return upload
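A minimal client-side sketch of consuming the `upload['s3']['request']` descriptor returned above, assuming the `requests` library; `chunk_bytes` is a placeholder for the raw bytes of this part:

import requests

req = upload['s3']['request']              # {'method': 'PUT', 'url': <presigned URL>}
chunk_bytes = b'...'                       # placeholder: the raw bytes of this part
resp = requests.put(req['url'], data=chunk_bytes)
resp.raise_for_status()                    # S3 returns the part's ETag on success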
Example #5
    def importData(self, parent, parentType, params, progress, user, **kwargs):
        importPath = params.get('importPath', '').strip().lstrip('/')

        if importPath and not importPath.endswith('/'):
            importPath += '/'

        bucket = self.assetstore['bucket']
        paginator = self.client.get_paginator('list_objects')
        pageIterator = paginator.paginate(Bucket=bucket, Prefix=importPath, Delimiter='/')
        for resp in pageIterator:
            # Start with objects
            for obj in resp.get('Contents', []):
                if progress:
                    progress.update(message=obj['Key'])

                name = obj['Key'].rsplit('/', 1)[-1]
                if not name:
                    continue

                if parentType != 'folder':
                    raise ValidationException(
                        'Keys cannot be imported directly underneath a %s.' % parentType)

                if self.shouldImportFile(obj['Key'], params):
                    item = Item().createItem(
                        name=name, creator=user, folder=parent, reuseExisting=True)
                    # Create a file record; delay saving it until we have added
                    # the import information.
                    file = File().createFile(
                        name=name, creator=user, item=item, reuseExisting=True,
                        assetstore=self.assetstore, mimeType=None, size=obj['Size'],
                        saveFile=False)
                    file['s3Key'] = obj['Key']
                    file['imported'] = True
                    File().save(file)

            # Now recurse into subdirectories
            for obj in resp.get('CommonPrefixes', []):
                if progress:
                    progress.update(message=obj['Prefix'])

                name = obj['Prefix'].rstrip('/').rsplit('/', 1)[-1]

                folder = Folder().createFolder(
                    parent=parent, name=name, parentType=parentType, creator=user,
                    reuseExisting=True)
                self.importData(parent=folder, parentType='folder', params={
                    'importPath': obj['Prefix']
                }, progress=progress, user=user, **kwargs)
Example #6
    def _get_base(self, pathArray, test=False):
        model = pathArray[0]
        if model == "user":
            username = pathArray[1]
            parent = User().findOne({"login": username})
            if parent is None:
                self._lookup_err("User not found: %s" % username, test=test)
        elif model == "collection":
            collectionName = pathArray[1]
            parent = Collection().findOne({"name": collectionName})
            if parent is None:
                self._lookup_err("Collection not found: %s" % collectionName, test=test)
        else:
            raise ValidationException("Invalid path format")
        return parent, model
Example #7
def validateHistomicsTKAnalysisAccess(doc):
    value = doc['value']
    if not isinstance(value, dict):
        raise ValidationException('Analysis access policy must be a JSON object.')
    for i, groupId in enumerate(value.get('groups', ())):
        if isinstance(groupId, dict):
            groupId = groupId.get('_id', groupId.get('id'))
        group = Group().load(groupId, force=True, exc=True)
        value['groups'][i] = group['_id']
    for i, userId in enumerate(value.get('users', ())):
        if isinstance(userId, dict):
            userId = userId.get('_id', userId.get('id'))
        user = User().load(userId, force=True, exc=True)
        value['users'][i] = user['_id']
    value['public'] = bool(value.get('public'))
Example #8
    def _validateMask(self, mask, image):
        if len(mask.shape) != 2:
            raise ValidationException('Mask must be a single-channel image.')
        if mask.shape != (
                image['meta']['acquisition']['pixelsY'],
                image['meta']['acquisition']['pixelsX']):
            raise ValidationException(
                'Mask must have the same dimensions as the image.')
        if mask.dtype != numpy.uint8:
            raise ValidationException('Mask may only contain 8-bit values.')

        maskValues = frozenset(numpy.unique(mask))
        if maskValues <= {0, 255}:
            # Expected values
            pass
        elif len(maskValues) == 1:
            # Single value, non-0
            mask.fill(0)
        elif len(maskValues) == 2:
            # Binary image with high value other than 255 can be corrected
            lowValue = min(maskValues)
            if lowValue != 0:
                mask[mask == lowValue] = 0
            highValue = max(maskValues)
            if highValue != 255:
                mask[mask == highValue] = 255
        else:
            raise ValidationException(
                'Mask may only contain values of 0 and 255.')

        contours = OpenCVSegmentationHelper._maskToContours(mask)
        if len(contours) > 1:
            raise ValidationException(
                'Mask may not contain multiple disconnected components.')

        return mask
Example #9
    def mask(self, segmentation, params):
        contentDisp = params.get('contentDisposition', None)
        if contentDisp is not None and contentDisp not in {'inline', 'attachment'}:
            raise ValidationException(f'Unallowed contentDisposition type "{contentDisp}".',
                                      'contentDisposition')

        # TODO: convert this to make Segmentation use an AccessControlMixin
        Image().load(
            segmentation['imageId'], level=AccessType.READ, user=self.getCurrentUser(), exc=True)

        maskFile = Segmentation().maskFile(segmentation)
        if maskFile is None:
            raise RestException('This segmentation failed, and thus has no mask.', code=410)

        return File().download(maskFile, headers=True, contentDisposition=contentDisp)
Example #10
def validateListOrJSON(doc):
    val = doc['value']
    try:
        if isinstance(val, list):
            doc['value'] = json.dumps(val)
        elif val is None or val.strip() == '':
            doc['value'] = None
        else:
            parsed = json.loads(val)
            if not isinstance(parsed, list):
                raise ValueError
            doc['value'] = val.strip()
    except (ValueError, AttributeError):
        raise ValidationException('%s must be a JSON list.' % doc['key'],
                                  'value')
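To make the accepted shapes concrete, here is an illustrative walk-through (the setting key is hypothetical):

doc = {'key': 'my_plugin.items', 'value': ['a', 'b']}
validateListOrJSON(doc)     # list input is serialized: doc['value'] == '["a", "b"]'

doc = {'key': 'my_plugin.items', 'value': '  [1, 2, 3] '}
validateListOrJSON(doc)     # JSON-list string is kept, stripped: doc['value'] == '[1, 2, 3]'

doc = {'key': 'my_plugin.items', 'value': '{"not": "a list"}'}
# validateListOrJSON(doc)   # would raise ValidationException('my_plugin.items must be a JSON list.', 'value')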
Example #11
    def initiateZipUploadToS3(self, dataset, params):
        params = self._decodeParams(params)
        self.requireParams(['signature'], params)

        user = self.getCurrentUser()
        User().requireCreateDataset(user)

        signature = params['signature'].strip()
        if not signature:
            raise ValidationException('Signature must be specified.', 'signature')

        try:
            return Dataset().initiateZipUploadS3(dataset=dataset, signature=signature, user=user)
        except GirderException as e:
            raise RestException(e.message)
Example #12
File: setting.py Project: nicholsn/girder
    def validateCorePluginsEnabled(doc):
        """
        Ensures that the set of plugins passed in is a list of valid plugin
        names. Removes any invalid plugin names, removes duplicates, and adds
        all transitive dependencies to the enabled list.
        """
        from girder.utility import plugin_utilities

        if not isinstance(doc['value'], list):
            raise ValidationException(
                'Plugins enabled setting must be a list.', 'value')

        # Add all transitive dependencies and store in toposorted order
        doc['value'] = list(plugin_utilities.getToposortedPlugins(
            doc['value']))
Example #13
    def validateInfo(doc):
        """
        Validate the assetstore -- make sure we can connect to it and that the
        necessary indexes are set up.
        """
        if not doc.get('db', ''):
            raise ValidationException('Database name must not be empty.', 'db')
        if '.' in doc['db'] or ' ' in doc['db']:
            raise ValidationException(
                'Database name cannot contain spaces'
                ' or periods.', 'db')

        try:
            chunkColl = getDbConnection(
                doc.get('mongohost'),
                doc.get('replicaset'),
                autoRetry=False,
                serverSelectionTimeoutMS=10000)[doc['db']].chunk
            _ensureChunkIndices(chunkColl)
        except pymongo.errors.ServerSelectionTimeoutError as e:
            raise ValidationException('Could not connect to the database: %s' %
                                      str(e))

        return doc
Example #14
    def validate(self, doc):
        """
        This method is in charge of validating that the setting key is a valid
        key, and that for that key, the provided value is valid. It first
        allows plugins to validate the setting, but if none of them can, it
        assumes it is a core setting and does the validation here.
        """
        key = doc['key']
        validator = setting_utilities.getValidator(key)
        if validator:
            validator(doc)
        else:
            raise ValidationException('Invalid setting key "%s".' % key, 'key')

        return doc
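For reference, validators are looked up per setting key, so each key registers its own function. A minimal sketch of such a registration, assuming Girder's `setting_utilities.validator` decorator (the setting key and the rule enforced here are hypothetical):

from girder.exceptions import ValidationException   # import path varies by Girder version
from girder.utility import setting_utilities

@setting_utilities.validator('my_plugin.greeting')
def _validateGreeting(doc):
    # Reject anything that is not a non-empty string.
    if not isinstance(doc['value'], str) or not doc['value'].strip():
        raise ValidationException('Greeting must be a non-empty string.', 'value')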
Example #15
    def move(self, folder, parent, parentType):
        """
        Move the given folder from its current parent to another parent object.
        Raises an exception if folder is an ancestor of parent.

        :param folder: The folder to move.
        :type folder: dict
        :param parent: The new parent object.
        :param parentType: The type of the new parent object (user, collection,
                           or folder).
        :type parentType: str
        """
        if (parentType == 'folder' and (self._isAncestor(folder, parent)
                                        or folder['_id'] == parent['_id'])):
            raise ValidationException(
                'You may not move a folder underneath itself.')

        folder['parentId'] = parent['_id']
        folder['parentCollection'] = parentType

        if parentType == 'folder':
            rootType, rootId = parent['baseParentType'], parent['baseParentId']
        else:
            rootType, rootId = parentType, parent['_id']

        if (folder['baseParentType'], folder['baseParentId']) !=\
           (rootType, rootId):

            def propagateSizeChange(folder, inc):
                ModelImporter.model(folder['baseParentType']).increment(
                    query={'_id': folder['baseParentId']},
                    field='size',
                    amount=inc,
                    multi=False)

            totalSize = self.getSizeRecursive(folder)
            propagateSizeChange(folder, -totalSize)
            folder['baseParentType'] = rootType
            folder['baseParentId'] = rootId
            propagateSizeChange(folder, totalSize)
            self._updateDescendants(
                folder['_id'],
                {'$set': {
                    'baseParentType': rootType,
                    'baseParentId': rootId
                }})

        return self.save(folder)
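A hypothetical caller, showing how the validation error above would typically surface (both folder documents are assumed to be loaded already; ValidationException as imported elsewhere in these examples):

try:
    Folder().move(folder, targetFolder, 'folder')
except ValidationException as exc:
    print('Move rejected: %s' % exc)   # e.g. 'You may not move a folder underneath itself.'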
Example #16
    def createAuthorizedUpload(self, folder, params):
        try:
            if params.get('duration'):
                days = int(params.get('duration'))
            else:
                days = Setting().get(SettingKey.COOKIE_LIFETIME)
        except ValueError:
            raise ValidationException('Token duration must be an integer, or leave it empty.')

        token = Token().createToken(days=days, user=self.getCurrentUser(), scope=(
            TOKEN_SCOPE_AUTHORIZED_UPLOAD, 'authorized_upload_folder_%s' % folder['_id']))

        url = '%s#authorized_upload/%s/%s' % (
            mail_utils.getEmailUrlPrefix(), folder['_id'], token['_id'])

        return {'url': url}
Example #17
    def path(self, event, path, root, user=None):
        res_type = event.info["params"]["type"]
        try:
            if res_type == "folder":
                self.is_dir(path, root)
            elif res_type in ("item", "file"):
                self.is_file(path, root)
            else:
                raise ValidationException("Not a file, nor a folder")
        except ValidationException:
            raise RestException("Invalid resource id.")

        root_girder_path = pathlib.Path(getResourcePath("folder", root, user=user))
        remainder_path = path.relative_to(pathlib.PosixPath(root["fsPath"]))
        response = (root_girder_path / remainder_path).as_posix()
        event.preventDefault().addResponse(response)
Example #18
    def _get_vobject(self, document, path, i):
        pathArray = split(path)
        root = document
        n = 3 + i
        fspath = os.path.join(document["fsPath"], "/".join(pathArray[n:]))
        fspath = pathlib.Path(fspath)
        if not fspath.exists():
            raise ValidationException("Path not found: %s" % path)
        if fspath.is_dir():
            document = self.vFolder(fspath, root)
            model = "folder"
        elif fspath.is_file():
            document = self.vItem(fspath, root)
            model = "item"
        # TODO: add vLink here...
        return document, model
Example #19
    def crop_task(self, item, by_item, name, folder):
        target_file = [i for i in Item().childFiles(item, limit=1)][0]
        by_file = [i for i in Item().childFiles(by_item, limit=1)][0]
        output = Item().createItem(
            name, creator=self.getCurrentUser(), folder=folder)
        driver = item.get('geometa', {}).get('driver', None)
        if not driver:
            raise ValidationException('Unsupported target dataset')
        result = crop_task.delay(
            GirderFileId(str(target_file['_id'])),
            driver,
            GirderFileId(str(by_file['_id'])),
            name,
            girder_result_hooks=[GirderUploadToItem(str(output['_id']))])
        return result.job
Example #20
    def _importFileToFolder(self, name, user, parent, parentType, path):
        if parentType != 'folder':
            raise ValidationException(
                'Files cannot be imported directly underneath a %s.' %
                parentType)

        item = Item().createItem(name=name,
                                 creator=user,
                                 folder=parent,
                                 reuseExisting=True)
        events.trigger('filesystem_assetstore_imported', {
            'id': item['_id'],
            'type': 'item',
            'importPath': path
        })
        self.importFile(item, path, user, name=name)
Example #21
    def _validate_dataset(tale):
        try:
            jsonschema.validate(tale["dataSet"], dataSetSchema)
        except jsonschema.exceptions.ValidationError as exc:
            raise ValidationException(str(exc))

        creator = User().load(tale["creatorId"], force=True)
        for obj in tale["dataSet"]:
            if obj["_modelType"] == "folder":
                model = Folder()
            else:
                model = Item()
            model.load(obj["itemId"],
                       level=AccessType.READ,
                       user=creator,
                       fields={},
                       exc=True)
Example #22
    def validate(self, doc):
        from .folder import Folder

        doc['name'] = self._validateString(doc.get('name', ''))
        doc['description'] = self._validateString(doc.get('description', ''))

        if not doc['name']:
            raise ValidationException('Item name must not be empty.', 'name')

        # Ensure unique name among sibling items and folders. If the desired
        # name collides with an existing item or folder, we will append (n)
        # onto the end of the name, incrementing n until the name is unique.
        name = doc['name']
        # If the item already exists with the current name, don't check.
        # Although we don't want duplicate names, they can occur when there are
        # simultaneous uploads, and also because Mongo has no guaranteed
        # multi-collection uniqueness constraints.  If this occurs, and we are
        # changing a non-name property, don't validate the name (since that may
        # fail).  If the name is being changed, validate that it is probably
        # unique.
        checkName = '_id' not in doc or not self.findOne({
            '_id': doc['_id'],
            'name': name
        })
        n = 0
        while checkName:
            q = {'name': name, 'folderId': doc['folderId']}
            if '_id' in doc:
                q['_id'] = {'$ne': doc['_id']}
            dupItem = self.findOne(q, fields=['_id'])

            q = {
                'parentId': doc['folderId'],
                'name': name,
                'parentCollection': 'folder'
            }
            dupFolder = Folder().findOne(q, fields=['_id'])
            if dupItem is None and dupFolder is None:
                doc['name'] = name
                checkName = False
            else:
                n += 1
                name = '%s (%d)' % (doc['name'], n)

        doc['lowerName'] = doc['name'].lower()
        return doc
Example #23
def checkOauthUser(event):
    """
    If an OAuth2 user without a password tries to log in with a password, we
    want to give them a useful error message.
    """
    user = event.info['user']
    if user.get('oauth'):
        if isinstance(user['oauth'], dict):
            # Handle a legacy format where only 1 provider (Google) was stored
            prettyProviderNames = 'Google'
        else:
            prettyProviderNames = ', '.join(
                providers.idMap[val['provider']].getProviderName(external=True)
                for val in user['oauth'])
        raise ValidationException(
            'You don\'t have a password. Please log in with %s, or use the '
            'password reset link.' % prettyProviderNames)
Example #24
    def childFolders(self,
                     parent,
                     parentType,
                     user=None,
                     limit=0,
                     offset=0,
                     sort=None,
                     filters=None,
                     **kwargs):
        """
        This generator will yield child folders of a user, collection, or
        folder, with access policy filtering.  Passes any kwargs to the find
        function.

        :param parent: The parent object.
        :type parent: dict
        :param parentType: The parent type.
        :type parentType: 'user', 'folder', or 'collection'
        :param user: The user running the query. Only returns folders that this
                     user can see.
        :param limit: Result limit.
        :param offset: Result offset.
        :param sort: The sort structure to pass to pymongo.
        :param filters: Additional query operators.
        """
        if not filters:
            filters = {}

        parentType = parentType.lower()
        if parentType not in ('folder', 'user', 'collection'):
            raise ValidationException(
                'The parentType must be folder, collection, or user.')

        q = {'parentId': parent['_id'], 'parentCollection': parentType}
        q.update(filters)

        cursor = self.findWithPermissions(q,
                                          sort=sort,
                                          user=user,
                                          level=AccessType.READ,
                                          limit=limit,
                                          offset=offset,
                                          **kwargs)

        return iter(cursor)
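A short usage sketch (hypothetical; assumes `collection` and `user` documents are already loaded): iterate over the readable top-level folders of a collection, sorted by name.

for child in Folder().childFolders(parent=collection, parentType='collection',
                                   user=user, sort=[('name', 1)]):
    print(child['name'])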
Example #25
    def validateCoreCollectionCreatePolicy(doc):
        from .group import Group
        from .user import User

        value = doc['value']

        if not isinstance(value, dict):
            raise ValidationException('Collection creation policy must be a JSON object.')

        for i, groupId in enumerate(value.get('groups', ())):
            Group().load(groupId, force=True, exc=True)
            value['groups'][i] = ObjectId(value['groups'][i])

        for i, userId in enumerate(value.get('users', ())):
            User().load(userId, force=True, exc=True)
            value['users'][i] = ObjectId(value['users'][i])

        value['open'] = value.get('open', False)
Example #26
    def registerMetadata(self, dataset, params):
        params = self._decodeParams(params)
        self.requireParams(['filename'], params)
        user = self.getCurrentUser()

        filename = params['filename'].strip()
        if not filename:
            raise ValidationException('Filename must be specified.', 'filename')

        metadataDataStream = RequestBodyStream(cherrypy.request.body)
        if not len(metadataDataStream):
            raise RestException('No data provided in request body.')

        Dataset().registerMetadata(
            dataset=dataset, user=user, metadataDataStream=metadataDataStream, filename=filename,
            sendMail=True)
        # TODO: return value?
        return {'status': 'success'}
Example #27
    def createExecution(self, params, user):
        if len(params['fileId']) == 0:
            raise ValidationException("Parameter fileId should not be empty")

        execution = {
            'name': params['name'],
            'fileId': params['fileId'],
            'userId': user['_id'],
            'pipelineName': params['pipelineName'],
            'vipExecutionId': params['vipExecutionId'],
            'idFolderResult': params['idFolderResult'],
            'status': params['status'],
            'sendMail': params['sendMail'],
            'timestampCreation': time.time(),
            'timestampFin': params['timestampFin']
        }

        # Save in the db
        return self.save(execution)
Example #28
File: password.py Project: xinlaoda/girder
    def encryptAndStore(self, password):
        """
        Encrypt and store the given password. The exact internal details and
        mechanisms used for storage are abstracted away, but the guarantee is
        made that once this method is called on a password and the returned salt
        and algorithm are stored with the user document, calling
        Password.authenticate() with that user document and the same password
        will return True.

        :param password: The password to encrypt and store.
        :type password: str
        :returns: {tuple} (salt, hashAlg) The salt to store with the
                  user document and the algorithm used for secure
                  storage. Both should be stored in the corresponding
                  user document as 'salt' and 'hashAlg' respectively.
        """
        cur_config = config.getConfig()

        # Normally this would go in validate() but password is a special case.
        if not re.match(cur_config['users']['password_regex'], password):
            raise ValidationException(
                cur_config['users']['password_description'], 'password')

        alg = cherrypy.config['auth']['hash_alg']
        if alg == 'bcrypt':
            """
            With bcrypt, we actually need the one-to-one correspondence of
            hashed password to user, so we store the hash as the salt entry in
            the user table.
            """
            salt = self._digest(alg=alg, password=password)
        else:
            """
            With other hashing algorithms, we store the salt with the user
            and store the hashed value in a separate table with no
            correspondence to the user.
            """
            salt = genToken()
            hash = self._digest(salt=salt, alg=alg, password=password)
            self.save({'_id': hash})

        return salt, alg
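A hypothetical caller, following the docstring's contract of storing the returned values on the user document as 'salt' and 'hashAlg':

salt, hashAlg = Password().encryptAndStore('correct horse battery staple')
user['salt'] = salt
user['hashAlg'] = hashAlg
User().save(user)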
Example #29
    def deleteAnnotator(self, study, annotatorUser, params):
        currentUser = self.getCurrentUser()
        # For now, study admins will be the ones that can delete annotators
        User().requireAdminStudy(currentUser)

        if Study().childAnnotations(study=study,
                                    annotatorUser=annotatorUser,
                                    state=Study().State.COMPLETE).count():
            raise RestException('Annotator user has completed annotations.',
                                409)

        # Ensure the user is an annotator in the study
        if not Study().hasAnnotator(study, annotatorUser):
            raise ValidationException(
                f'User "{annotatorUser["_id"]}" is not part of the study.')

        Study().removeAnnotator(study, annotatorUser)

        # No Content
        cherrypy.response.status = 204
Example #30
    def _validateOutputParentType(self, outputId, parentType, outputSpec):
        """
        Checks if the output parent type is compatible with the output type.
        """

        # Find the corresponding output specification for the given outputID
        for output in outputSpec:
            if outputId == output['id']:
                # If a corresponding output is found, check if its parent type is valid
                if (output['type'] == 'new-file'
                        and parentType not in {'item', 'folder'}):
                    return False
                elif (output['type'] == 'new-folder'
                        and parentType not in {'folder', 'user', 'collection'}):
                    return False
                else:
                    return True
        else:
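            # The for-loop's `else` clause: runs only when no output spec entry
            # matched outputId.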
            raise ValidationException('Invalid output id: %s.' % outputId)
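Illustrative behavior, given this hypothetical output specification:

outputSpec = [
    {'id': 'outFile', 'type': 'new-file'},
    {'id': 'outFolder', 'type': 'new-folder'},
]
# _validateOutputParentType('outFile', 'folder', outputSpec)      -> True
# _validateOutputParentType('outFile', 'collection', outputSpec)  -> False
# _validateOutputParentType('missing', 'item', outputSpec)        -> raises ValidationException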