Example #1
0
    def remove(self, item, **kwargs):
        """
        Delete an item together with every file and pending upload that
        references it, then remove the item document itself.

        :param item: The item document to delete.
        :type item: dict
        """
        from girderformindlogger.models.file import File
        from girderformindlogger.models.upload import Upload

        # Remove every file stored under this item. Strip any caller-supplied
        # 'updateItemSize' so it cannot clash with the explicit keyword below.
        fileModel = File()
        for childFile in fileModel.find({'itemId': item['_id']}):
            extra = {k: v for k, v in kwargs.items() if k != 'updateItemSize'}
            fileModel.remove(childFile, updateItemSize=False, **extra)

        # Cancel any uploads that were targeting this item.
        uploadModel = Upload()
        pending = uploadModel.find({
            'parentId': item['_id'],
            'parentType': 'item'
        })
        for pendingUpload in pending:
            uploadModel.remove(pendingUpload, **kwargs)

        # Finally remove the item document itself.
        Model.remove(self, item)
Example #2
0
    def remove(self, folder, progress=None, **kwargs):
        """
        Delete a profile folder recursively, including any uploads that were
        pending into it.

        :param folder: The profile folder document to delete.
        :type folder: dict
        :param progress: A progress context to record progress on.
        :type progress: girderformindlogger.utility.progress.ProgressContext or
            None.
        """
        # Remove the contents underneath this folder recursively.
        from girderformindlogger.models.upload import Upload

        # self.clean(folder, progress, **kwargs)

        # Delete pending uploads into this folder
        uploadModel = Upload()
        uploads = uploadModel.find({
            'appletId': folder['_id'],
            'parentType': 'profile'
        })
        for upload in uploads:
            uploadModel.remove(upload, progress=progress, **kwargs)
        # Close the cursor explicitly rather than leaving it open while the
        # (potentially slow) folder removal below runs.
        uploads.close()

        # Delete this folder
        AccessControlledModel.remove(self, folder, progress=progress, **kwargs)
        if progress:
            progress.update(increment=1,
                            message='Deleted profile %s' % folder['name'])
        def uploadFile(self, params):
            """
            Upload a file into a folder from a path on the server filesystem.

            Providing this works around a limitation in phantom that makes us
            unable to upload binary files, or at least ones that contain
            certain byte values. The path parameter should be provided
            relative to the root directory of the repository, or as
            ``${plugin_name}/rel/path`` to resolve relative to a plugin.

            :param params: Request parameters; must contain 'folderId' and
                'path'.
            :returns: The uploaded file document.
            """
            self.requireParams(('folderId', 'path'), params)

            if params['path'].startswith(
                    '${'):  # relative to plugin e.g. ${my_plugin}/path
                end = params['path'].find('}')
                # Keep the requested name separate from the lookup result so
                # the error below can report it (the original code overwrote
                # the name with None and always raised 'Invalid plugin None.').
                pluginName = params['path'][2:end]
                plugin = getPlugin(pluginName)
                if plugin is None:
                    raise Exception('Invalid plugin %s.' % pluginName)
                root = os.path.dirname(inspect.getfile(plugin.__class__))
                path = root + params['path'][end + 1:]
            else:  # assume relative to core package
                path = os.path.join(ROOT_DIR, params['path'])
            name = os.path.basename(path)
            folder = Folder().load(params['folderId'], force=True)

            upload = Upload().createUpload(user=self.getCurrentUser(),
                                           name=name,
                                           parentType='folder',
                                           parent=folder,
                                           size=os.path.getsize(path))

            with open(path, 'rb') as fd:
                file = Upload().handleChunk(upload, fd)

            return file
Example #4
0
    def getPartialUploads(self, uploadId, userId, parentId, assetstoreId,
                          minimumAge, includeUntracked, limit, offset, sort):
        """
        List partial (unfinished) uploads matching the supplied filters,
        optionally appending untracked uploads up to the requested limit.
        """
        # Only include the filters the caller actually supplied.
        filters = {
            key: value
            for key, value in (('uploadId', uploadId),
                               ('userId', userId),
                               ('assetstoreId', assetstoreId),
                               ('parentId', parentId),
                               ('minimumAge', minimumAge))
            if value is not None
        }

        uploadList = list(Upload().list(filters=filters,
                                        limit=limit,
                                        offset=offset,
                                        sort=sort))
        if includeUntracked and (limit == 0 or len(uploadList) < limit):
            untracked = Upload().untrackedUploads('list', assetstoreId)
            if limit == 0:
                uploadList.extend(untracked)
            else:
                # Top up without exceeding the requested limit.
                uploadList.extend(untracked[:limit - len(uploadList)])
        return uploadList
Example #5
0
 def discardPartialUploads(self, uploadId, userId, parentId, assetstoreId,
                           minimumAge, includeUntracked):
     """
     Cancel (discard) partial uploads matching the supplied filters,
     optionally deleting untracked uploads as well.

     :returns: The list of upload documents that were acted on.
     """
     filters = {}
     for key, value in (('uploadId', uploadId),
                        ('userId', userId),
                        ('assetstoreId', assetstoreId),
                        ('parentId', parentId),
                        ('minimumAge', minimumAge)):
         if value is not None:
             filters[key] = value
     # Materialize the cursor up front so it isn't held open while each
     # upload is being cancelled.
     uploadList = list(Upload().list(filters=filters))
     for partial in uploadList:
         try:
             Upload().cancelUpload(partial)
         except OSError as exc:
             # Permission failures become a structured Girder error.
             if exc.errno == errno.EACCES:
                 raise GirderException(
                     'Failed to delete upload.',
                     'girderformindlogger.api.v1.system.delete-upload-failed'
                 )
             raise
     if includeUntracked:
         uploadList += Upload().untrackedUploads('delete', assetstoreId)
     return uploadList
Example #6
0
    def requestOffset(self, upload):
        """
        This should be called when resuming an interrupted upload. It will
        report the offset into the upload that should be used to resume.

        :param upload: The temp upload record being resumed.
        :returns: A dict with the offset in bytes the client should use, or
            the raw non-integer result from the upload model.
        """
        offset = Upload().requestOffset(upload)

        if not isinstance(offset, six.integer_types):
            return offset
        # Persist the verified offset so the upload record stays accurate.
        upload['received'] = offset
        Upload().save(upload)
        return {'offset': offset}
    def testAutoComputeHashes(self):
        """
        Verify that enabling the auto-compute setting causes the supported
        hashes to be computed asynchronously for a newly uploaded file.
        """
        with self.assertRaises(ValidationException):
            Setting().set(hashsum_download.PluginSettings.AUTO_COMPUTE, 'bad')

        old = hashsum_download.SUPPORTED_ALGORITHMS
        hashsum_download.SUPPORTED_ALGORITHMS = {'sha512', 'sha256'}
        # Restore the module-level algorithm set even if an assertion fails,
        # so this monkeypatch cannot leak into later tests (the original
        # only restored it on the success path).
        try:
            Setting().set(hashsum_download.PluginSettings.AUTO_COMPUTE, True)

            file = Upload().uploadFromFile(
                obj=six.BytesIO(self.userData), size=len(self.userData), name='Another file',
                parentType='folder', parent=self.privateFolder, user=self.user)

            # Poll (up to 15s) for the asynchronous hash computation to land.
            start = time.time()
            while time.time() < start + 15:
                file = File().load(file['_id'], force=True)
                if 'sha256' in file:
                    break
                time.sleep(0.2)

            expected = hashlib.sha256()
            expected.update(self.userData)
            self.assertIn('sha256', file)
            self.assertEqual(file['sha256'], expected.hexdigest())

            expected = hashlib.sha512()
            expected.update(self.userData)
            self.assertIn('sha512', file)
            self.assertEqual(file['sha512'], expected.hexdigest())
        finally:
            hashsum_download.SUPPORTED_ALGORITHMS = old
 def testDicomWithBinaryValues(self):
     """
     Import a pydicom sample file whose metadata parsing raises IOError and
     verify the upload handler still processes it into the item's metadata.
     """
     # One of the test files in the pydicom module will throw an IOError
     # when parsing metadata.  We should work around that and still be able
     # to import the file
     pydicomDir = os.path.dirname(os.path.abspath(pydicom.__file__))
     samplePath = os.path.join(pydicomDir, 'data', 'test_files', 'OBXXXX1A.dcm')
     admin, user = self.users

     # Create a collection, folder, and item
     collection = Collection().createCollection('collection5', admin, public=True)
     folder = Folder().createFolder(collection, 'folder5', parentType='collection', public=True)
     item = Item().createItem('item5', admin, folder)

     # Upload this dicom file
     with open(samplePath, 'rb') as fp, _EventHelper('dicom_viewer.upload.success') as helper:
         dcmFile = Upload().uploadFromFile(
             obj=fp,
             size=os.path.getsize(samplePath),
             name=os.path.basename(samplePath),
             parentType='item',
             parent=item,
             mimeType='application/dicom',
             user=user
         )
         self.assertIsNotNone(dcmFile)
         # Wait for handler success event
         self.assertTrue(helper.wait())

     # Check if the 'dicomItem' is well processed
     dicomItem = Item().load(item['_id'], force=True)
     self.assertIn('dicom', dicomItem)
     self.assertHasKeys(dicomItem['dicom'], ['meta', 'files'])
Example #9
0
    def cancelUpload(self, upload):
        """
        Cancel an in-progress upload. Only the user who initiated the upload
        or a site admin may do so.
        """
        user = self.getCurrentUser()

        # Admins may cancel anyone's upload; everyone else only their own.
        isOwner = upload['userId'] == user['_id']
        if not (isOwner or user['admin']):
            raise AccessException('You did not initiate this upload.')

        Upload().cancelUpload(upload)
        return {'message': 'Upload canceled.'}
Example #10
0
    def finalizeUpload(self, upload):
        """
        Complete an upload once all of its bytes have been received and
        return the filtered file document.
        """
        user = self.getCurrentUser()

        if upload['userId'] != user['_id']:
            raise AccessException('You did not initiate this upload.')

        # If we don't have as much data as we were told would be uploaded and
        # the upload hasn't specified it has an alternate behavior, refuse to
        # complete the upload.
        incomplete = upload['received'] != upload['size']
        if incomplete and 'behavior' not in upload:
            raise RestException(
                'Server has only received %s bytes, but the file should be %s bytes.'
                % (upload['received'], upload['size']))

        file = Upload().finalizeUpload(upload)
        extraKeys = file.get('additionalFinalizeKeys', ())
        return self._model.filter(file, user, additionalKeys=extraKeys)
Example #11
0
    def updateFileContents(self, file, size, reference, assetstoreId):
        """
        Begin replacing a file's contents with a new upload of the given
        size.

        :returns: The new upload record, or the finalized (filtered) file
            document when the new size is zero.
        """
        user = self.getCurrentUser()

        assetstore = None
        if assetstoreId:
            # Choosing a destination assetstore is an admin-only operation.
            self.requireAdmin(
                user,
                message='You must be an admin to select a destination assetstore.')
            assetstore = Assetstore().load(assetstoreId)

        # Create a new upload record into the existing file
        upload = Upload().createUploadToFile(
            file=file, user=user, size=size, reference=reference,
            assetstore=assetstore)

        # An empty upload has no chunks to receive, so finalize immediately.
        if upload['size'] <= 0:
            return self._model.filter(Upload().finalizeUpload(upload), user)
        return upload
Example #12
0
    def moveFileToAssetstore(self, file, assetstore, progress):
        """
        Move a file's data into a different assetstore, reporting byte-level
        progress over the whole file size.
        """
        user = self.getCurrentUser()
        title = 'Moving file "%s" to assetstore "%s"' % (file['name'],
                                                         assetstore['name'])

        with ProgressContext(
                progress, user=user, title=title, total=file['size']) as ctx:
            return Upload().moveFileToAssetstore(
                file=file, user=user, assetstore=assetstore, progress=ctx)
    def _uploadNonDicomFiles(self, item, user):
        """
        Upload a plain-text file to *item* to verify that non-DICOM uploads
        are left untouched by the DICOM handler.
        """
        # Upload a fake file to check that the item is not traited
        payload = b'hello world\n'

        ndcmFile = Upload().uploadFromFile(
            obj=six.BytesIO(payload),
            size=len(payload),
            name='nonDicom.txt',
            parentType='item',
            parent=item,
            mimeType='text/plain',
            user=user
        )
        self.assertIsNotNone(ndcmFile)
 def _uploadDicomFiles(self, item, user):
     """
     Upload the four sample DICOM files (deliberately out of order) and wait
     for the handler success event after each one.
     """
     # Upload the files in the reverse order to check if they're well sorted
     for idx in (1, 3, 0, 2):
         srcPath = os.path.join(self.dataDir, '00000%i.dcm' % idx)
         with open(srcPath, 'rb') as fp, _EventHelper('dicom_viewer.upload.success') as helper:
             dcmFile = Upload().uploadFromFile(
                 obj=fp,
                 size=os.path.getsize(srcPath),
                 name='dicomFile{}.dcm'.format(idx),
                 parentType='item',
                 parent=item,
                 mimeType='application/dicom',
                 user=user
             )
             self.assertIsNotNone(dcmFile)
             # Wait for handler success event
             self.assertTrue(helper.wait())
Example #15
0
    def finalizeUpload(self, upload, file):
        """
        Moves the file into its permanent content-addressed location within the
        assetstore. Directory hierarchy yields 256^2 buckets.

        :param upload: The upload document (may be unsaved, i.e. lack '_id').
        :param file: The file document being finalized.
        :returns: The file document with 'sha512' and 'path' set.
        """
        # Renamed from `hash`/`dir` to avoid shadowing the Python built-ins.
        digest = _hash_state.restoreHex(upload['sha512state'], 'sha512').hexdigest()
        subdir = os.path.join(digest[0:2], digest[2:4])
        absdir = os.path.join(self.assetstore['root'], subdir)

        path = os.path.join(subdir, digest)
        abspath = os.path.join(self.assetstore['root'], path)

        # Store the hash in the upload so that deleting a file won't delete
        # this file
        if '_id' in upload:
            upload['sha512'] = digest
            Upload().update({'_id': upload['_id']}, update={'$set': {'sha512': digest}})

        mkdir(absdir)

        # Only maintain the lock while checking if the file exists.  The only
        # other place the lock is used is checking if an upload task has
        # reserved the file, so this is sufficient.
        with filelock.FileLock(abspath + '.deleteLock'):
            pathExists = os.path.exists(abspath)
        if pathExists:
            # Already have this file stored, just delete temp file.
            os.unlink(upload['tempFile'])
        else:
            # Move the temp file to permanent location in the assetstore.
            # shutil.move works across filesystems
            shutil.move(upload['tempFile'], abspath)
            try:
                os.chmod(abspath, self.assetstore.get('perms', DEFAULT_PERMS))
            except OSError:
                # some filesystems may not support POSIX permissions
                pass

        file['sha512'] = digest
        file['path'] = path

        return file
Example #16
0
    def checkUploadFinalize(self, event):
        """
        Check if an upload will fit within a quota restriction before
        finalizing it.  If it doesn't, discard it.

        :param event: event record.
        """
        upload = event.info
        quotaInfo = self._checkUploadSize(upload)
        if not quotaInfo:
            return
        # Over quota: delete the upload and surface a validation error.
        Upload().cancelUpload(upload)
        raise ValidationException(
            'Upload exceeded file storage quota (need %s, only %s '
            'available - used %s out of %s)' %
            (formatSize(quotaInfo['sizeNeeded']),
             formatSize(quotaInfo['quotaLeft']),
             formatSize(quotaInfo['quotaUsed']),
             formatSize(quotaInfo['fileSizeQuota'])),
            field='size')
Example #17
0
    def readChunk(self, upload, offset, params):
        """
        After the temporary upload record has been created (see initUpload),
        the bytes themselves should be passed up in ordered chunks. The user
        must remain logged in when passing each chunk, to authenticate that
        the writer of the chunk is the same as the person who initiated the
        upload. The passed offset is a verification mechanism for ensuring the
        server and client agree on the number of bytes sent/received.
        """
        contentType = cherrypy.request.headers.get('Content-Type', '')
        if contentType.startswith('multipart/form-data'):
            raise RestException(
                'Multipart encoding is no longer supported. Send the chunk in '
                'the request body, and other parameters in the query string.')

        # The undocumented "chunk" query string parameter bypasses reading the
        # request body; it is used by the direct S3 upload process.
        if 'chunk' in params:
            chunk = params['chunk']
        else:
            chunk = RequestBodyStream(cherrypy.request.body)
        user = self.getCurrentUser()

        if upload['userId'] != user['_id']:
            raise AccessException('You did not initiate this upload.')

        if upload['received'] != offset:
            raise RestException(
                'Server has received %s bytes, but client sent offset %s.' %
                (upload['received'], offset))
        try:
            return Upload().handleChunk(upload, chunk, filter=True, user=user)
        except IOError as exc:
            # Assetstore permission failures surface as a generic error.
            if exc.errno == errno.EACCES:
                raise Exception('Failed to store upload.')
            raise
Example #18
0
    def deleteFile(self, file):
        """
        Deletes the file from disk if it is the only File in this assetstore
        with the given sha512. Imported files are not actually deleted.
        """
        from girderformindlogger.models.file import File

        # Imported files and files with no stored path are never removed.
        if file.get('imported') or 'path' not in file:
            return

        query = {
            'sha512': file['sha512'],
            'assetstoreId': self.assetstore['_id']
        }
        path = os.path.join(self.assetstore['root'], file['path'])
        if not os.path.isfile(path):
            return
        with filelock.FileLock(path + '.deleteLock'):
            # Only unlink when no other File document or pending upload still
            # references this content-addressed blob.
            matching = File().find(query, limit=2, fields=[])
            matchingUpload = Upload().findOne(query)
            if matching.count(True) == 1 and matchingUpload is None:
                try:
                    os.unlink(path)
                except Exception:
                    logger.exception('Failed to delete file %s' % path)
    def testDownload(self):
        """
        Exercise item, file, folder, and collection downloads and verify the
        recorded download counts afterward.
        """
        collection = Collection().createCollection('collection1', public=True)
        folder = Folder().createFolder(collection,
                                       'folder1',
                                       parentType='collection',
                                       public=True)
        item = Item().createItem('item1', self.admin, folder)

        # Paths to the fixture files to upload
        file1Path = os.path.join(self.filesDir, 'txt1.txt')
        file2Path = os.path.join(self.filesDir, 'txt2.txt')

        # Upload both fixture files into the item
        with open(file1Path, 'rb') as fp:
            file1 = Upload().uploadFromFile(fp,
                                            os.path.getsize(file1Path),
                                            'txt1.txt',
                                            parentType='item',
                                            parent=item,
                                            user=self.admin)

        with open(file2Path, 'rb') as fp:
            file2 = Upload().uploadFromFile(fp,
                                            os.path.getsize(file2Path),
                                            'txt2.txt',
                                            mimeType='image/jpeg',
                                            parentType='item',
                                            parent=item,
                                            user=self.admin)

        # Download item and its files several times and ensure downloads are
        # recorded; each file ends up downloaded 10 times here.
        for _ in range(5):
            self._downloadItem(item['_id'])
            self._downloadFile(file1['_id'])
            self._downloadFile(file2['_id'])

        # Download each file 1 time by downloading parent folder
        self._downloadFolder(folder['_id'])

        # Download each file over 2 requests
        self._downloadFileInTwoChunks(file1['_id'])
        self._downloadFileInTwoChunks(file2['_id'])

        # Download each file partially, adding 1 to start and 4 to requested
        self._downloadPartialFile(file1['_id'])
        self._downloadPartialFile(file2['_id'])

        # Download entire collection; each file is downloaded one more time
        path = '/collection/%s/download' % collection['_id']
        resp = self.request(path, user=self.admin, isJson=False)

        # Drain the response generator to trigger download events
        for _ in resp.body:
            pass

        # Download collection filtered by mime type; only file2 matches, so
        # it is downloaded one additional time
        resp = self.request(path,
                            user=self.admin,
                            isJson=False,
                            method='GET',
                            params={
                                'id': collection['_id'],
                                'mimeFilter': json.dumps(['image/jpeg'])
                            })
        for _ in resp.body:
            pass

        self._checkDownloadsCount(file1['_id'], 14, 18, 13)
        self._checkDownloadsCount(file2['_id'], 15, 19, 14)
Example #20
0
def createThumbnail(width, height, crop, fileId, attachToType, attachToId):
    """
    Creates the thumbnail. Validation and access control must be done prior
    to the invocation of this method.

    :param width: Target width in pixels, or falsy to derive it from height
        while preserving aspect ratio.
    :param height: Target height in pixels, or falsy to derive it from width.
    :param crop: When both dimensions are given and this is truthy,
        center-crop the source to the target aspect ratio before resizing.
    :param fileId: The _id of the source image file.
    :param attachToType: The resource type the thumbnail is attached to.
    :param attachToId: The _id of the resource the thumbnail is attached to.
    :returns: The attached thumbnail file document.
    """
    fileModel = File()
    file = fileModel.load(fileId, force=True)
    # Deferred download callable so the bytes are only streamed when needed.
    streamFn = functools.partial(fileModel.download, file, headers=False)

    # Give event handlers a chance to create or substitute the thumbnail.
    event = events.trigger('thumbnails.create',
                           info={
                               'file': file,
                               'width': width,
                               'height': height,
                               'crop': crop,
                               'attachToType': attachToType,
                               'attachToId': attachToId,
                               'streamFn': streamFn
                           })

    if len(event.responses):
        resp = event.responses[-1]
        newFile = resp['file']

        if event.defaultPrevented:
            # A handler produced the final thumbnail itself; attach it unless
            # the handler opted out, then return without local processing.
            if resp.get('attach', True):
                newFile = attachThumbnail(file, newFile, attachToType,
                                          attachToId, width, height)
            return newFile
        else:
            # A handler substituted a different source file; thumbnail that
            # one instead of the original.
            file = newFile
            streamFn = functools.partial(fileModel.download,
                                         file,
                                         headers=False)

    if 'assetstoreId' not in file:
        # TODO we could thumbnail link files if we really wanted.
        raise Exception('File %s has no assetstore.' % fileId)

    # streamFn() returns a callable which in turn yields the file's bytes,
    # hence the double call below.
    stream = streamFn()
    data = b''.join(stream())

    image = _getImage(file['mimeType'], file['exts'], data)

    if not width:
        # Derive the missing dimension, preserving aspect ratio.
        width = int(height * image.size[0] / image.size[1])
    elif not height:
        height = int(width * image.size[1] / image.size[0])
    elif crop:
        # Center-crop: trim whichever axis overflows relative to the
        # requested width/height ratio, keeping the image centered.
        x1 = y1 = 0
        x2, y2 = image.size
        wr = float(image.size[0]) / width
        hr = float(image.size[1]) / height

        if hr > wr:
            y1 = int(y2 / 2 - height * wr / 2)
            y2 = int(y2 / 2 + height * wr / 2)
        else:
            x1 = int(x2 / 2 - width * hr / 2)
            x2 = int(x2 / 2 + width * hr / 2)
        image = image.crop((x1, y1, x2, y2))

    image.thumbnail((width, height), Image.ANTIALIAS)

    # Encode as JPEG and attach the result to the target resource.
    out = six.BytesIO()
    image.convert('RGB').save(out, 'JPEG', quality=85)
    size = out.tell()
    out.seek(0)

    thumbnail = Upload().uploadFromFile(out,
                                        size=size,
                                        name='_thumb.jpg',
                                        parentType=attachToType,
                                        parent={'_id': ObjectId(attachToId)},
                                        user=None,
                                        mimeType='image/jpeg',
                                        attachParent=True)

    return attachThumbnail(file, thumbnail, attachToType, attachToId, width,
                           height)
Example #21
0
    def testAuthorizedUpload(self):
        """
        Exercise the authorized-upload flow end to end: creating a scoped
        upload URL, restricting its token to a single folder and a single
        upload, chunked uploading with the token, and destruction of the
        token on completion or misuse.
        """
        Setting().set(SettingKey.UPLOAD_MINIMUM_CHUNK_SIZE, 1)

        # Anon access should not work
        resp = self.request('/authorized_upload',
                            method='POST',
                            params={'folderId': self.privateFolder['_id']})
        self.assertStatus(resp, 401)

        # Create our secure URL
        resp = self.request('/authorized_upload',
                            method='POST',
                            user=self.admin,
                            params={'folderId': self.privateFolder['_id']})
        self.assertStatusOk(resp)
        # URL ends in .../<folderId>/<tokenId>
        parts = resp.json['url'].rsplit('/', 3)
        tokenId, folderId = parts[-1], parts[-2]

        token = Token().load(tokenId, force=True, objectId=False)

        self.assertIsNotNone(token)
        self.assertEqual(folderId, str(self.privateFolder['_id']))
        self.assertEqual(
            set(token['scope']), {
                TOKEN_SCOPE_AUTHORIZED_UPLOAD,
                'authorized_upload_folder_%s' % self.privateFolder['_id']
            })

        # Make sure this token doesn't let us upload into a different folder
        params = {
            'parentType': 'folder',
            'parentId': self.publicFolder['_id'],
            'name': 'hello.txt',
            'size': 11,
            'mimeType': 'text/plain'
        }

        resp = self.request(path='/file',
                            method='POST',
                            params=params,
                            token=tokenId)
        self.assertStatus(resp, 401)

        # Initialize upload into correct folder
        params['parentId'] = self.privateFolder['_id']
        resp = self.request(path='/file',
                            method='POST',
                            params=params,
                            token=tokenId)
        self.assertStatusOk(resp)

        # We should remove the scope that allows further uploads
        upload = Upload().load(resp.json['_id'])
        token = Token().load(tokenId, force=True, objectId=False)
        self.assertEqual(
            token['scope'],
            ['authorized_upload_folder_%s' % self.privateFolder['_id']])

        # Authorized upload ID should be present in the token
        self.assertEqual(token['authorizedUploadId'], upload['_id'])

        # Attempting to initialize new uploads using the token should fail
        resp = self.request(path='/file',
                            method='POST',
                            params=params,
                            token=tokenId)
        self.assertStatus(resp, 401)

        # Uploading a chunk should work with the token
        resp = self.request(path='/file/chunk',
                            method='POST',
                            token=tokenId,
                            body='hello ',
                            params={'uploadId': str(upload['_id'])},
                            type='text/plain')
        self.assertStatusOk(resp)

        # Requesting our offset should work with the token
        # The offset should not have changed
        resp = self.request(path='/file/offset',
                            method='GET',
                            token=tokenId,
                            params={'uploadId': upload['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['offset'], 6)

        # Upload the second chunk
        resp = self.request(path='/file/chunk',
                            method='POST',
                            token=tokenId,
                            body='world',
                            params={
                                'offset': 6,
                                'uploadId': str(upload['_id'])
                            },
                            type='text/plain')
        self.assertStatusOk(resp)

        # Trying to upload more chunks should fail
        resp = self.request(path='/file/chunk',
                            method='POST',
                            token=tokenId,
                            body='extra',
                            params={
                                'offset': 11,
                                'uploadId': str(upload['_id'])
                            },
                            type='text/plain')
        self.assertStatus(resp, 401)

        # The token should be destroyed
        self.assertIsNone(Token().load(tokenId, force=True, objectId=False))
Example #22
0
    def createResponseItem(
        self,
        applet,
        activity,
        metadata,
        subject_id,
        pending,
        params
    ):
        """
        Create a response item for *activity* in *applet*, uploading any
        binary blobs in *params* as files attached to the new item.

        :param applet: The applet document the response belongs to.
        :param activity: The activity document that was responded to.
        :param metadata: Response metadata; mutated to include applet,
            activity, and subject references plus file links.
        :param subject_id: Profile id of the subject, or falsy to default to
            the current (informant) user.
        :param pending: If falsy, aggregates are computed and saved and the
            returned item is marked read-only.
        :param params: Mapping of response keys to uploaded blob objects
            (each with a ``.file`` attribute).
        :returns: The new response item document, or the formatted traceback
            string if an error occurred.
        """
        from girderformindlogger.models.profile import Profile
        try:
            from girderformindlogger.utility.response import aggregateAndSave
            # TODO: pending
            metadata['applet'] = {
                "@id": applet.get('_id'),
                "name": AppletModel().preferredName(applet),
                "url": applet.get(
                    'url',
                    applet.get('meta', {}).get('applet', {}).get('url')
                )
            }
            metadata['activity'] = {
                "@id": activity.get('_id'),
                "name": ActivityModel().preferredName(activity),
                "url": activity.get(
                    'url',
                    activity.get('meta', {}).get('activity', {}).get('url')
                )
            }
            informant = self.getCurrentUser()
            # Default the subject to the informant when not supplied.
            subject_id = subject_id if subject_id else str(
                informant['_id']
            )

            subject_id = Profile().createProfile(
                applet,
                subject_id
            ).get('_id')

            if isinstance(metadata.get('subject'), dict):
                metadata['subject']['@id'] = subject_id
            else:
                metadata['subject'] = {'@id': subject_id}

            now = datetime.now(tzlocal.get_localzone())
            appletName = AppletModel().preferredName(applet)

            # Responses/<applet>/<subject> folder hierarchy for this user.
            UserResponsesFolder = ResponseFolderModel().load(
                user=informant,
                reviewer=informant,
                force=True
            )
            UserAppletResponsesFolder = Folder().createFolder(
                parent=UserResponsesFolder, parentType='folder',
                name=appletName, reuseExisting=True, public=False)
            AppletSubjectResponsesFolder = Folder().createFolder(
                parent=UserAppletResponsesFolder, parentType='folder',
                name=str(subject_id), reuseExisting=True, public=False)

            try:
                newItem = self._model.createResponseItem(
                    folder=AppletSubjectResponsesFolder,
                    name=now.strftime("%Y-%m-%d-%H-%M-%S-%Z"),
                    creator=informant,
                    description="{} response on {} at {}".format(
                        Folder().preferredName(activity),
                        now.strftime("%Y-%m-%d"),
                        now.strftime("%H:%M:%S %Z")
                    ), reuseExisting=False)
            except Exception:
                # Narrowed from a bare `except:` so KeyboardInterrupt and
                # SystemExit are no longer swallowed.
                raise ValidationException(
                    "Couldn't find activity name for this response"
                )

            # For each blob in the parameters, upload it to a File under the
            # item and replace the metadata value with a link to that file.
            for key, value in params.items():
                um = UploadModel()
                filename = "{}.{}".format(
                    key,
                    metadata['responses'][key]['type'].split('/')[-1]
                )
                newUpload = um.uploadFromFile(
                    value.file,
                    metadata['responses'][key]['size'],
                    filename,
                    'item',
                    newItem,
                    informant,
                    metadata['responses'][key]['type']
                )
                metadata['responses'][key] = "file::{}".format(newUpload['_id'])

            if metadata:
                newItem = self._model.setMetadata(newItem, metadata)

            if not pending:
                # Synchronously compute and save aggregates.
                # TODO: thread this as we scale, e.g.:
                #   threading.Thread(target=aggregateAndSave,
                #                    args=(newItem, informant)).start()
                aggregateAndSave(newItem, informant)
                newItem['readOnly'] = True
            return newItem
        except Exception:
            # The original returned str(traceback.print_tb(...)), which is
            # always the string 'None' because print_tb writes to stderr and
            # returns None; return the actual traceback text instead.
            traceback.print_exc()
            return traceback.format_exc()
    def setUp(self):
        """
        Build the two-user fixture shared by the file tests.

        Creates ``user``, who owns a public folder (two audio files) and a
        private folder (an audio file and an image), and ``otherUser``, who
        owns nothing — giving access-control tests both a privileged and an
        unprivileged principal.
        """
        base.TestCase.setUp(self, assetstoreType='filesystem')

        # Resulting hierarchy:
        #  - user:
        #       |- [Folder (public)] publicFolder:
        #           |- publicFile
        #           |- duplicatePublicFile
        #       |- [Folder (private)] private:
        #           |- privateFile
        #           |- privateOnlyFile
        #  - otherUser:
        #       |- (nothing)
        # In summary, user has access to all the files and otherUser to none.

        self.user = User().createUser(
            login='******',
            password='******',
            firstName='Leeloominai',
            lastName='Sebat',
            email='*****@*****.**'
        )

        # createUser provisions one public and one private child folder;
        # sort them into named attributes for the tests to reference.
        for child in Folder().childFolders(
                parent=self.user, parentType='user', user=self.user):
            if child['public'] is True:
                self.publicFolder = child
            else:
                self.privateFolder = child

        self.userData = u'\u266a Il dolce suono mi ' \
                        u'colp\u00ec di sua voce! \u266a'.encode('utf8')

        def _upload(name, data, parent, mimeType):
            # Every fixture file is uploaded the same way: from an
            # in-memory blob, into a folder, on behalf of self.user.
            return Upload().uploadFromFile(
                obj=six.BytesIO(data),
                size=len(data),
                name=name,
                parentType='folder',
                parent=parent,
                user=self.user,
                mimeType=mimeType
            )

        self.privateFile = _upload(
            'Il dolce suono - PRIVATE',
            self.userData, self.privateFolder, 'audio/mp4')
        self.publicFile = _upload(
            'Il dolce suono - PUBLIC',
            self.userData, self.publicFolder, 'audio/flac')
        self.duplicatePublicFile = _upload(
            'Il dolce suono - PUBLIC DUPLICATE',
            self.userData, self.publicFolder, 'audio/mp3')

        self.privateOnlyData =\
            u'\u2641 \u2600 \u2601 \u2614 \u2665'.encode('utf8')
        self.privateOnlyFile = _upload(
            'Powers combined',
            self.privateOnlyData, self.privateFolder, 'image/png')

        self.otherUser = User().createUser(
            login='******',
            password='******',
            firstName='Jean-Baptiste',
            lastName='Zorg',
            email='*****@*****.**'
        )
Exemple #24
0
    def initUpload(self, parentType, parentId, name, size, mimeType, linkUrl,
                   reference, assetstoreId):
        """
        Before any bytes of the actual file are sent, a request should be made
        to initialize the upload. This creates the temporary record of the
        forthcoming upload that will be passed in chunks to the readChunk
        method. If you pass a "linkUrl" parameter, it will make a link file
        in the designated parent.
        """
        user = self.getCurrentUser()
        parent = ModelImporter.model(parentType).load(
            id=parentId, user=user, level=AccessType.WRITE, exc=True)

        # A link URL short-circuits the upload flow entirely: no bytes are
        # transferred, we just record a pointer to the external resource.
        if linkUrl is not None:
            linkFile = self._model.createLinkFile(
                url=linkUrl, parent=parent, name=name, parentType=parentType,
                creator=user, size=size, mimeType=mimeType)
            return self._model.filter(linkFile, user)

        self.requireParams({'size': size})

        assetstore = None
        if assetstoreId:
            self.requireAdmin(
                user,
                message='You must be an admin to select a destination assetstore.')
            assetstore = Assetstore().load(assetstoreId)

        # If the client streamed the first chunk in this same request body,
        # capture it -- but only when the body is not already being consumed
        # by one of cherrypy's registered content-type processors.
        chunk = None
        if size > 0 and cherrypy.request.headers.get('Content-Length'):
            contentType = cherrypy.request.body.content_type.value
            processors = cherrypy.request.body.processors
            if (contentType not in processors
                    and contentType.split('/', 1)[0] not in processors):
                chunk = RequestBodyStream(cherrypy.request.body)
        if chunk is not None and chunk.getSize() <= 0:
            chunk = None

        try:
            # TODO: This can be made more efficient by adding
            #    save=chunk is None
            # to the createUpload call parameters.  However, since this is
            # a breaking change, that should be deferred until a major
            # version upgrade.
            upload = Upload().createUpload(
                user=user, name=name, parentType=parentType, parent=parent,
                size=size, mimeType=mimeType, reference=reference,
                assetstore=assetstore)
        except OSError as exc:
            if exc.errno != errno.EACCES:
                raise
            raise GirderException(
                'Failed to create upload.',
                'girderformindlogger.api.v1.file.create-upload-failed')

        if upload['size'] <= 0:
            # Zero-byte file: nothing more to receive, finalize immediately.
            return self._model.filter(Upload().finalizeUpload(upload), user)

        if chunk:
            return Upload().handleChunk(upload,
                                        chunk,
                                        filter=True,
                                        user=user)

        return upload