Example #1
0
def get_or_create_auxiliary_folder(folder, user):
    """Fetch (or create, if absent) the "auxiliary" subfolder of *folder*.

    The folder is created with *user* as its creator; an existing folder of
    the same name is reused rather than duplicated.
    """
    folder_model = Folder()
    return folder_model.createFolder(
        folder, "auxiliary", reuseExisting=True, creator=user)
 def getOwnerId(folder):
     """Return the id (as a string) of the first ADMIN-level user in the
     folder's full access list, or None when no such user exists.
     """
     users = Folder().getFullAccessList(folder)['users']
     return next(
         (str(entry['id']) for entry in users
          if entry['level'] == AccessType.ADMIN),
         None)
Example #3
0
    def testConsistencyCheck(self):
        """
        Exercise the PUT /system/check consistency endpoint: it should report
        nothing on a healthy tree, repair a broken baseParentId, recompute
        tampered cached sizes, and remove documents orphaned by a deleted
        ancestor folder.
        """
        user = self.users[0]
        # Build a small tree: collection c1 -> folders f1/f2, user -> folders
        # f3/f4, items i1-i3 under f1 and i4-i6 under f3.
        c1 = Collection().createCollection('c1', user)
        f1 = Folder().createFolder(c1, 'f1', parentType='collection')
        Folder().createFolder(c1, 'f2', parentType='collection')
        f3 = Folder().createFolder(user, 'f3', parentType='user')
        Folder().createFolder(user, 'f4', parentType='user')
        i1 = Item().createItem('i1', user, f1)
        i2 = Item().createItem('i2', user, f1)
        Item().createItem('i3', user, f1)
        i4 = Item().createItem('i4', user, f3)
        Item().createItem('i5', user, f3)
        Item().createItem('i6', user, f3)
        # Minimal stand-in assetstore document for file creation.
        assetstore = {'_id': 0}
        File().createFile(user, i1, 'foo', 7, assetstore)
        File().createFile(user, i1, 'foo', 13, assetstore)
        File().createFile(user, i2, 'foo', 19, assetstore)
        File().createFile(user, i4, 'foo', 23, assetstore)

        # Sizes propagate upward: f1 holds 7+13+19 = 39 bytes, f3 holds 23.
        self.assertEqual(39, Collection().load(c1['_id'], force=True)['size'])
        self.assertEqual(39, Folder().load(f1['_id'], force=True)['size'])
        self.assertEqual(23, Folder().load(f3['_id'], force=True)['size'])
        self.assertEqual(20, Item().load(i1['_id'], force=True)['size'])
        self.assertEqual(23, User().load(user['_id'], force=True)['size'])

        # Healthy tree: the check should change nothing.
        resp = self.request(path='/system/check', user=user, method='PUT')
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['baseParentsFixed'], 0)
        self.assertEqual(resp.json['orphansRemoved'], 0)
        self.assertEqual(resp.json['sizesChanged'], 0)

        # Corrupt i1's baseParentId; the check should repair exactly one.
        Item().update({'_id': i1['_id']},
                      update={'$set': {
                          'baseParentId': None
                      }})

        resp = self.request(path='/system/check', user=user, method='PUT')
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['baseParentsFixed'], 1)
        self.assertEqual(resp.json['orphansRemoved'], 0)
        self.assertEqual(resp.json['sizesChanged'], 0)

        # Zero out three cached sizes; the check should recompute all three.
        Collection().update({'_id': c1['_id']}, update={'$set': {'size': 0}})
        Folder().update({'_id': f1['_id']}, update={'$set': {'size': 0}})
        Item().update({'_id': i1['_id']}, update={'$set': {'size': 0}})

        resp = self.request(path='/system/check', user=user, method='PUT')
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['baseParentsFixed'], 0)
        self.assertEqual(resp.json['orphansRemoved'], 0)
        self.assertEqual(resp.json['sizesChanged'], 3)

        # Sizes are back to their correct values.
        self.assertEqual(39, Collection().load(c1['_id'], force=True)['size'])
        self.assertEqual(39, Folder().load(f1['_id'], force=True)['size'])
        self.assertEqual(23, Folder().load(f3['_id'], force=True)['size'])
        self.assertEqual(20, Item().load(i1['_id'], force=True)['size'])
        self.assertEqual(23, User().load(user['_id'], force=True)['size'])

        # Delete f3 directly in the database, bypassing the model layer, so
        # items i4-i6 become orphans.
        Folder().collection.delete_one({'_id': f3['_id']})

        resp = self.request(path='/system/check', user=user, method='PUT')
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['baseParentsFixed'], 0)
        self.assertEqual(resp.json['orphansRemoved'], 3)
        self.assertEqual(resp.json['sizesChanged'], 0)

        # f3's 23 bytes no longer count toward the user's total size.
        self.assertEqual(0, User().load(user['_id'], force=True)['size'])
    def importData(self,
                   parent,
                   parentType,
                   params,
                   progress,
                   user,
                   force_recursive=True,
                   **kwargs):
        """
        Import objects from this assetstore's S3 bucket: each key at this
        level becomes an item holding one imported file record, and each
        common prefix becomes a folder that is then recursed into.

        :param parent: The document to import into.
        :param parentType: Type of the parent; must be 'folder' at any level
            where keys (files) are present, otherwise a ValidationException
            is raised.
        :param params: Import options; 'importPath' selects the S3 prefix to
            list.
        :param progress: Progress context updated with each key/prefix, or a
            falsy value to disable progress reporting.
        :param user: The user performing the import; becomes the creator of
            every document created here.
        :param force_recursive: If True, recurse into every subprefix; if
            False, only recurse into folders newly created during this run.
        :raises ValidationException: If keys are found directly under a
            non-folder parent.
        """
        # S3 keys never begin with '/', so strip any leading slash.
        importPath = params.get('importPath', '').strip().lstrip('/')
        bucket = self.assetstore['bucket']
        # Timestamp of this run; used below to tell folders created during
        # this import apart from pre-existing ones.
        now = datetime.datetime.utcnow()
        paginator = self.client.get_paginator('list_objects')
        pageIterator = paginator.paginate(Bucket=bucket,
                                          Prefix=importPath,
                                          Delimiter='/')
        for resp in pageIterator:
            # Start with objects
            for obj in resp.get('Contents', []):
                if progress:
                    progress.update(message=obj['Key'])

                # Basename of the key; empty for 'directory' placeholder
                # keys that end with '/', which are skipped.
                name = obj['Key'].rsplit('/', 1)[-1]
                if not name:
                    continue

                if parentType != 'folder':
                    raise ValidationException(
                        'Keys cannot be imported directly underneath a %s.' %
                        parentType)

                if self.shouldImportFile(obj['Key'], params):
                    item = Item().createItem(name=name,
                                             creator=user,
                                             folder=parent,
                                             reuseExisting=True)
                    events.trigger(
                        's3_assetstore_imported', {
                            'id': item['_id'],
                            'type': 'item',
                            'importPath': obj['Key'],
                        })
                    # Create a file record; delay saving it until we have added
                    # the import information.
                    file = File().createFile(name=name,
                                             creator=user,
                                             item=item,
                                             reuseExisting=True,
                                             assetstore=self.assetstore,
                                             mimeType=None,
                                             size=obj['Size'],
                                             saveFile=False)
                    file['s3Key'] = obj['Key']
                    file['imported'] = True
                    File().save(file)

            # Each common prefix is a 'subdirectory' of the current prefix.
            for obj in resp.get('CommonPrefixes', []):
                if progress:
                    progress.update(message=obj['Prefix'])

                name = obj['Prefix'].rstrip('/').rsplit('/', 1)[-1]
                folder = Folder().createFolder(parent=parent,
                                               name=name,
                                               parentType=parentType,
                                               creator=user,
                                               reuseExisting=True)

                events.trigger(
                    's3_assetstore_imported', {
                        'id': folder['_id'],
                        'type': 'folder',
                        'importPath': obj['Prefix'],
                    })
                # recurse into subdirectories if force_recursive is true
                # or the folder was newly created.
                if force_recursive or folder['created'] >= now:
                    self.importData(parent=folder,
                                    parentType='folder',
                                    params={
                                        **params, 'importPath': obj['Prefix']
                                    },
                                    progress=progress,
                                    user=user,
                                    **kwargs)
Example #5
0
    def testCollectionAccess(self):
        """
        Exercise the collection access endpoints: rejecting invalid access
        lists, per-user folder visibility counts, recursive ACL/public-flag
        propagation to child folders, retaining the public flag when the
        'public' param is omitted, and ACL inheritance for newly created
        top-level folders.
        """
        # Asking to change to an invalid access list should fail
        resp = self.request(path='/collection/%s/access' %
                            self.collection['_id'], method='PUT', params={
                                'access': 'not an access list',
                                'public': False
                            }, user=self.admin)
        self.assertStatus(resp, 400)

        # Create some folders underneath the collection
        folder1 = Folder().createFolder(
            parentType='collection', parent=self.collection, creator=self.admin,
            public=False, name='top level')
        folder2 = Folder().createFolder(
            parentType='folder', parent=folder1, creator=self.admin,
            public=False, name='subfolder')
        Folder().createFolder(
            parentType='collection', parent=self.collection, creator=self.admin,
            public=False, name='another top level folder')

        # Admin should see two top level folders
        resp = self.request(path='/collection/%s/details' % self.collection['_id'], user=self.admin)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['nFolders'], 2)
        self.assertNotIn('nItems', resp.json)

        # Normal user should see 0 folders
        resp = self.request(path='/collection/%s/details' % self.collection['_id'], user=self.user)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['nFolders'], 0)

        # Add read access on one of the folders
        Folder().setUserAccess(folder1, self.user, AccessType.READ, save=True)

        # Normal user should see one folder now
        resp = self.request(path='/collection/%s/details' % self.collection['_id'], user=self.user)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['nFolders'], 1)

        # Change the access to allow just the user
        obj = {'users': [{'id': str(self.user['_id']),
                          'level': AccessType.WRITE}]}
        resp = self.request(path='/collection/%s/access' %
                            self.collection['_id'], method='PUT', params={
                                'access': json.dumps(obj),
                                'public': True
                            }, user=self.admin)
        self.assertStatusOk(resp)

        # Request the collection access
        resp = self.request(path='/collection/%s/access' % self.collection['_id'], user=self.admin)
        self.assertStatusOk(resp)
        access = resp.json
        self.assertEqual(access['users'][0]['id'], str(self.user['_id']))
        self.assertEqual(access['users'][0]['level'], AccessType.WRITE)
        coll = Collection().load(self.collection['_id'], force=True)
        folder1 = Folder().load(folder1['_id'], force=True)
        folder2 = Folder().load(folder2['_id'], force=True)
        # Non-recursive update: the collection is public but the folder's
        # own public flag is untouched.
        self.assertEqual(coll['public'], True)
        self.assertEqual(folder1['public'], False)

        # Update the collection recursively to public
        resp = self.request(
            path='/collection/%s/access' % coll['_id'], method='PUT', params={
                'access': json.dumps(obj),
                'public': True,
                'recurse': True,
                'progress': True
            }, user=self.admin)
        self.assertStatusOk(resp)
        coll = Collection().load(coll['_id'], force=True)
        folder1 = Folder().load(folder1['_id'], force=True)
        folder2 = Folder().load(folder2['_id'], force=True)
        # Recursive update: both descendant folders now share the
        # collection's public flag and full access list.
        self.assertEqual(coll['public'], True)
        self.assertEqual(folder1['public'], True)
        self.assertEqual(folder2['public'], True)
        self.assertEqual(folder1['access'], coll['access'])
        self.assertEqual(folder1['access'], folder2['access'])
        self.assertEqual(folder2['access'], {
            'users': [{
                'id': self.user['_id'],
                'level': AccessType.WRITE,
                'flags': []
            }],
            'groups': []
        })

        # Recursively drop the user's access level to READ
        obj['users'][0]['level'] = AccessType.READ
        resp = self.request(
            path='/collection/%s/access' % coll['_id'], method='PUT', params={
                'access': json.dumps(obj),
                'public': True,
                'recurse': True,
                'progress': True
            }, user=self.admin)
        self.assertStatusOk(resp)
        coll = Collection().load(coll['_id'], force=True)
        folder1 = Folder().load(folder1['_id'], force=True)
        folder2 = Folder().load(folder2['_id'], force=True)
        self.assertEqual(coll['public'], True)
        self.assertEqual(folder1['public'], True)
        self.assertEqual(folder2['public'], True)
        self.assertEqual(folder1['access'], coll['access'])
        self.assertEqual(folder1['access'], folder2['access'])
        self.assertEqual(folder2['access'], {
            'users': [{
                'id': self.user['_id'],
                'level': AccessType.READ,
                'flags': []
            }],
            'groups': []
        })

        # Recursively remove the user's access altogether, also make sure that
        # passing no "public" param just retains the current flag state
        obj['users'] = ()
        resp = self.request(
            path='/collection/%s/access' % coll['_id'], method='PUT', params={
                'access': json.dumps(obj),
                'recurse': True
            }, user=self.admin)
        self.assertStatusOk(resp)
        coll = Collection().load(coll['_id'], force=True)
        folder1 = Folder().load(folder1['_id'], force=True)
        folder2 = Folder().load(folder2['_id'], force=True)
        self.assertEqual(coll['public'], True)
        self.assertEqual(folder1['public'], True)
        self.assertEqual(folder2['public'], True)
        self.assertEqual(folder1['access'], coll['access'])
        self.assertEqual(folder1['access'], folder2['access'])
        self.assertEqual(folder2['access'], {
            'users': [],
            'groups': []
        })

        # Add group access to the collection
        group = Group().createGroup('test', self.admin)
        obj = {
            'groups': [{
                'id': str(group['_id']),
                'level': AccessType.WRITE
            }]
        }

        resp = self.request(
            path='/collection/%s/access' % coll['_id'], method='PUT', params={
                'access': json.dumps(obj),
                'recurse': False
            }, user=self.admin)
        self.assertStatusOk(resp)

        # Create a new top-level folder, it should inherit the collection ACL.
        resp = self.request(path='/folder', method='POST', params={
            'name': 'top level 2',
            'parentId': coll['_id'],
            'parentType': 'collection'
        }, user=self.admin)
        self.assertStatusOk(resp)
        folder = Folder().load(resp.json['_id'], force=True)
        coll = Collection().load(coll['_id'], force=True)
        # The new folder inherits the collection's group ACL, plus an ADMIN
        # entry for its creator.
        self.assertEqual(coll['access']['users'], [])
        self.assertEqual(folder['access']['users'], [{
            'id': self.admin['_id'],
            'level': AccessType.ADMIN,
            'flags': []
        }])
        self.assertEqual(folder['access']['groups'], [{
            'id': group['_id'],
            'level': AccessType.WRITE,
            'flags': []
        }])
        self.assertEqual(folder['access']['groups'], coll['access']['groups'])
Example #6
0
def namedFolder(user, folderName='Public'):
    """Return the first folder named *folderName* directly under *user*.

    Raises IndexError if no matching folder exists.
    """
    query = {
        'parentId': user['_id'],
        'name': folderName,
    }
    return Folder().find(query)[0]
Example #7
0
def allChildItems(parent,
                  parentType,
                  user,
                  limit=0,
                  offset=0,
                  sort=None,
                  _internal=None,
                  **kwargs):
    """
    This generator will yield all items that are children of the resource
    or recursively children of child folders of the resource, with access
    policy filtering.  Passes any kwargs to the find function.

    :param parent: The parent object.
    :type parent: dict
    :param parentType: The parent type.
    :type parentType: 'user', 'folder', or 'collection'
    :param user: The user running the query. Only returns items that this
                 user can see.
    :param limit: Result limit.
    :param offset: Result offset.
    :param sort: The sort structure to pass to pymongo.  Child folders are
        served depth first, and this sort is applied within the resource
        and then within each child folder.  Child items are processed
        before child folders.
    :param _internal: For internal use only.  Mutable state dict that
        carries the remaining limit/offset (and a done flag) across the
        recursive calls so pagination spans the whole traversal.
    """
    # Top-level call: seed the shared pagination counters.
    if _internal is None:
        _internal = {'limit': limit, 'offset': offset, 'done': False}
    model = ModelImporter.model(parentType)
    if hasattr(model, 'childItems'):
        if parentType == 'folder':
            # Copy before mutating so the caller's kwargs are untouched.
            # NOTE(review): presumably includeVirtual makes virtual folders
            # contribute their items too -- confirm against Folder.childItems.
            kwargs = kwargs.copy()
            kwargs['includeVirtual'] = True
        # Fetch enough rows to cover both the remaining offset and limit,
        # then apply the offset/limit manually so they span the recursion.
        for item in model.childItems(parent,
                                     user=user,
                                     limit=_internal['limit'] +
                                     _internal['offset'],
                                     offset=0,
                                     sort=sort,
                                     **kwargs):
            if _internal['offset']:
                _internal['offset'] -= 1
            else:
                yield item
                if _internal['limit']:
                    _internal['limit'] -= 1
                    if not _internal['limit']:
                        # Global limit exhausted; stop the whole traversal.
                        _internal['done'] = True
                        return
    # Depth-first recursion into child folders (after this level's items).
    for folder in Folder().childFolders(parentType=parentType,
                                        parent=parent,
                                        user=user,
                                        limit=0,
                                        offset=0,
                                        sort=sort,
                                        **kwargs):
        if _internal['done']:
            return
        for item in allChildItems(folder,
                                  'folder',
                                  user,
                                  sort=sort,
                                  _internal=_internal,
                                  **kwargs):
            yield item
    def testAddItemTasksToFolderFromJson(self):
        """
        Test adding item tasks to a folder from a JSON spec.

        Covers: scheduling the container-introspection job, the callback that
        posts the JSON specs back (creating one item per spec), the metadata
        written onto those items, read-access filtering of the task listing,
        and posting a single (non-list) spec with a scoped token.
        """
        # Create a new folder that will contain the tasks
        folder = Folder().createFolder(name='placeholder',
                                       creator=self.admin,
                                       parent=self.admin,
                                       parentType='user')

        # Create task to introspect container
        with mock.patch(
                'girder_jobs.models.job.Job.scheduleJob') as scheduleMock:
            resp = self.request('/folder/%s/item_task_json_description' %
                                folder['_id'],
                                method='POST',
                                params={'image': 'johndoe/foo:v5'},
                                user=self.admin)
            self.assertStatusOk(resp)
            self.assertEqual(resp.json['_modelType'], 'job')
            self.assertEqual(len(scheduleMock.mock_calls), 1)
            # Inspect the job document passed to the mocked scheduleJob.
            job = scheduleMock.mock_calls[0][1][0]
            self.assertEqual(job['handler'], 'worker_handler')
            self.assertEqual(job['itemTaskId'], folder['_id'])
            self.assertEqual(job['kwargs']['outputs']['_stdout']['method'],
                             'POST')
            self.assertTrue(
                job['kwargs']['outputs']['_stdout']['url'].endswith(
                    'folder/%s/item_task_json_specs' % folder['_id']))
            params = job['kwargs']['outputs']['_stdout']['params']
            self.assertEqual(params['image'], 'johndoe/foo:v5')
            self.assertEqual(params['pullImage'], True)
            # Token the job would use to authenticate its callback.
            token = job['kwargs']['outputs']['_stdout']['headers'][
                'Girder-Token']

        # Task should not be registered until we get the callback
        resp = self.request('/item_task', user=self.admin)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json, [])

        # Simulate callback from introspection job
        with open(os.path.join(os.path.dirname(__file__), 'specs.json')) as f:
            specs = f.read()

        parsedSpecs = json.loads(specs)

        resp = self.request('/folder/%s/item_task_json_specs' % folder['_id'],
                            method='POST',
                            params={
                                'image': 'johndoe/foo:v5',
                                'pullImage': False
                            },
                            token=token,
                            body=specs,
                            type='application/json')

        self.assertStatusOk(resp)

        # One item is created per spec in the posted list.
        items = list(Folder().childItems(folder, user=self.admin))
        self.assertEqual(len(items), 2)

        # Image name and item task flag should be stored in the item metadata
        for itemIndex, item in enumerate(items):
            item = Item().load(item['_id'], force=True)
            self.assertEqual(item['name'],
                             'johndoe/foo:v5 %s' % (str(itemIndex)))
            self.assertEqual(item['description'],
                             parsedSpecs[itemIndex]['description'])
            self.assertTrue(item['meta']['isItemTask'])
            # The stored spec equals the parsed one plus these two fields.
            parsedSpecs[itemIndex]['pull_image'] = False
            parsedSpecs[itemIndex]['docker_image'] = 'johndoe/foo:v5'
            self.assertEqual(item['meta']['itemTaskSpec'],
                             parsedSpecs[itemIndex])
            self.assertEqual(item['meta']['itemTaskName'], '')

        # We should only be able to see tasks we have read access on
        resp = self.request('/item_task')
        self.assertStatusOk(resp)
        self.assertEqual(resp.json, [])

        resp = self.request('/item_task', user=self.admin)
        self.assertStatusOk(resp)
        self.assertEqual(len(resp.json), 2)

        # Test adding single task spec
        folder2 = Folder().createFolder(name='placeholder2',
                                        creator=self.admin,
                                        parent=self.admin,
                                        parentType='user')
        with open(os.path.join(os.path.dirname(__file__), 'spec.json')) as f:
            spec = f.read()
        parsedSpec = json.loads(spec)

        # This time mint the scoped callback token ourselves.
        token = Token().createToken(user=self.admin,
                                    scope='item_task.set_task_spec.%s' %
                                    folder2['_id'])
        resp = self.request('/folder/%s/item_task_json_specs' % folder2['_id'],
                            method='POST',
                            params={
                                'image': 'johndoe/foo:v5',
                                'pullImage': False
                            },
                            token=token,
                            body=spec,
                            type='application/json')
        self.assertStatusOk(resp)
        items = list(Folder().childItems(folder2, user=self.admin))
        self.assertEqual(len(items), 1)

        # Check that the single item has the correct metadata
        item = Item().load(items[0]['_id'], force=True)
        self.assertEqual(item['name'], 'johndoe/foo:v5')
        self.assertEqual(item['description'], parsedSpec['description'])
        self.assertTrue(item['meta']['isItemTask'])
        parsedSpec['pull_image'] = False
        parsedSpec['docker_image'] = 'johndoe/foo:v5'
        self.assertEqual(item['meta']['itemTaskSpec'], parsedSpec)
        self.assertEqual(item['meta']['itemTaskName'], '')
Example #9
0
    def testCuration(self):
        """
        Walk the folder curation workflow over REST: the initial disabled
        state, permission checks for enabling/disabling, a writer requesting
        approval, an admin approving, and the recorded timeline of each
        transition.
        """
        admin, user = self.users

        # create a collection and a folder
        c1 = Collection().createCollection('c1', admin, public=True)
        f1 = Folder().createFolder(c1,
                                   'f1',
                                   parentType='collection',
                                   public=False)
        f2 = Folder().createFolder(c1,
                                   'f2',
                                   parentType='collection',
                                   public=False)
        # Give the non-admin user WRITE on f2 only; f1 stays private to admin.
        Folder().setUserAccess(f2, user, AccessType.WRITE, True)

        # test initial curation values
        path = '/folder/%s/curation' % f1.get('_id')
        resp = self.request(path=path, user=admin)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['enabled'], False)
        self.assertEqual(resp.json['timeline'], [])

        # test non-admin access to private folder
        path = '/folder/%s/curation' % f1.get('_id')
        resp = self.request(path=path, user=user)
        self.assertStatus(resp, 403)

        # test non-admin access to folder with permissions
        path = '/folder/%s/curation' % f2.get('_id')
        resp = self.request(path=path, user=user)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['enabled'], False)
        self.assertEqual(resp.json['timeline'], [])

        # test non-admin unable to enable curation
        path = '/folder/%s/curation' % f2.get('_id')
        params = dict(enabled='true')
        resp = self.request(path=path, user=user, method='PUT', params=params)
        self.assertStatus(resp, 403)

        # test admin able to enable curation
        path = '/folder/%s/curation' % f2.get('_id')
        params = dict(enabled='true')
        resp = self.request(path=path, user=admin, method='PUT', params=params)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['enabled'], True)
        # Enabling starts the folder in the 'construction' status.
        self.assertEqual(resp.json['status'], 'construction')

        # test non-admin unable to disable curation
        path = '/folder/%s/curation' % f2.get('_id')
        params = dict(enabled='false')
        resp = self.request(path=path, user=user, method='PUT', params=params)
        self.assertStatus(resp, 403)

        # test non-admin able to request approval
        path = '/folder/%s/curation' % f2.get('_id')
        params = dict(status='requested')
        resp = self.request(path=path, user=user, method='PUT', params=params)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['enabled'], True)
        self.assertEqual(resp.json['status'], 'requested')

        # test non-admin unable to change status
        path = '/folder/%s/curation' % f2.get('_id')
        params = dict(status='approved')
        resp = self.request(path=path, user=user, method='PUT', params=params)
        self.assertStatus(resp, 403)

        # Non-admin also may not move the status back to 'construction'.
        path = '/folder/%s/curation' % f2.get('_id')
        params = dict(status='construction')
        resp = self.request(path=path, user=user, method='PUT', params=params)
        self.assertStatus(resp, 403)

        # test admin able to approve
        path = '/folder/%s/curation' % f2.get('_id')
        params = dict(status='approved')
        resp = self.request(path=path, user=admin, method='PUT', params=params)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['enabled'], True)
        self.assertEqual(resp.json['status'], 'approved')

        # test timeline is correct
        # Three successful transitions were made: enable, request, approve.
        path = '/folder/%s/curation' % f2.get('_id')
        resp = self.request(path=path, user=user)
        self.assertStatusOk(resp)
        self.assertEqual(len(resp.json['timeline']), 3)
        self.assertEqual(resp.json['timeline'][0]['oldEnabled'], False)
        self.assertEqual(resp.json['timeline'][0]['enabled'], True)
        self.assertEqual(resp.json['timeline'][0]['oldStatus'], 'construction')
        self.assertEqual(resp.json['timeline'][0]['status'], 'construction')
        self.assertEqual(resp.json['timeline'][1]['oldEnabled'], True)
        self.assertEqual(resp.json['timeline'][1]['enabled'], True)
        self.assertEqual(resp.json['timeline'][1]['oldStatus'], 'construction')
        self.assertEqual(resp.json['timeline'][1]['status'], 'requested')
        self.assertEqual(resp.json['timeline'][2]['oldEnabled'], True)
        self.assertEqual(resp.json['timeline'][2]['enabled'], True)
        self.assertEqual(resp.json['timeline'][2]['oldStatus'], 'requested')
        self.assertEqual(resp.json['timeline'][2]['status'], 'approved')
    def testAddItemTasksToFolderFromSlicerCli(self):
        """
        Test adding item tasks to a folder from Slicer CLI XML.

        Covers: scheduling the container-introspection job, the XML callback
        that creates the task item, read-access filtering of the task
        listing, and the metadata (description, spec, CLI args) written onto
        the created item.
        """
        # Create a new folder that will contain the tasks
        folder = Folder().createFolder(name='placeholder',
                                       creator=self.admin,
                                       parent=self.admin,
                                       parentType='user')

        # Create task to introspect container
        with mock.patch(
                'girder_jobs.models.job.Job.scheduleJob') as scheduleMock:
            resp = self.request('/folder/%s/item_task_slicer_cli_description' %
                                folder['_id'],
                                method='POST',
                                params={
                                    'image': 'johndoe/foo:v5',
                                    'args': json.dumps(['--foo', 'bar'])
                                },
                                user=self.admin)
            self.assertStatusOk(resp)
            self.assertEqual(resp.json['_modelType'], 'job')
            self.assertEqual(len(scheduleMock.mock_calls), 1)
            # Inspect the job document passed to the mocked scheduleJob.
            job = scheduleMock.mock_calls[0][1][0]
            self.assertEqual(job['handler'], 'worker_handler')
            self.assertEqual(job['itemTaskId'], folder['_id'])
            self.assertEqual(job['kwargs']['outputs']['_stdout']['method'],
                             'POST')
            self.assertTrue(
                job['kwargs']['outputs']['_stdout']['url'].endswith(
                    'folder/%s/item_task_slicer_cli_xml' % folder['_id']))
            params = job['kwargs']['outputs']['_stdout']['params']
            self.assertEqual(params['image'], 'johndoe/foo:v5')
            self.assertEqual(params['args'], '["--foo", "bar"]')
            self.assertEqual(params['pullImage'], True)
            # Token the job would use to authenticate its callback.
            token = job['kwargs']['outputs']['_stdout']['headers'][
                'Girder-Token']

        # Task should not be registered until we get the callback
        resp = self.request('/item_task', user=self.admin)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json, [])

        # Simulate callback from introspection job
        with open(os.path.join(os.path.dirname(__file__),
                               'slicer_cli.xml')) as f:
            xml = f.read()

        resp = self.request('/folder/%s/item_task_slicer_cli_xml' %
                            folder['_id'],
                            method='POST',
                            params={
                                'image': 'johndoe/foo:v5',
                                'args': json.dumps(['--foo', 'bar']),
                                'pullImage': False
                            },
                            token=token,
                            body=xml,
                            type='application/xml')
        self.assertStatusOk(resp)

        # We should only be able to see tasks we have read access on
        resp = self.request('/item_task')
        self.assertStatusOk(resp)
        self.assertEqual(resp.json, [])

        resp = self.request('/item_task', user=self.admin)
        self.assertStatusOk(resp)
        self.assertEqual(len(resp.json), 1)

        # A single Slicer CLI XML produces exactly one task item.
        items = list(Folder().childItems(folder, user=self.admin))
        self.assertEqual(len(items), 1)
        item = items[0]

        # Image name and item task flag should be stored in the item metadata
        self.assertEqual(item['name'], 'PET phantom detector CLI')
        # Description is auto-generated from fields of the CLI XML.
        self.assertEqual(
            item['description'],
            u'**Description**: Detects positions of PET/CT pocket phantoms in PET image.\n\n'
            u'**Author(s)**: Girder Developers\n\n**Version**: 1.0\n\n'
            u'**License**: Apache 2.0\n\n**Acknowledgements**: *none*\n\n'
            u'*This description was auto-generated from the Slicer CLI XML specification.*'
        )
        self.assertTrue(item['meta']['isItemTask'])
        self.assertHasKeys(
            item['meta']['itemTaskSpec'],
            ('mode', 'docker_image', 'container_args', 'inputs', 'outputs'))
        self.assertEqual(item['meta']['itemTaskSpec']['mode'], 'docker')
        self.assertEqual(item['meta']['itemTaskSpec']['docker_image'],
                         'johndoe/foo:v5')
        self.assertEqual(item['meta']['itemTaskSlicerCliArgs'],
                         ['--foo', 'bar'])
    def testConfigureItemTaskFromSlicerCli(self):
        """
        End-to-end test of configuring an existing item as a Slicer CLI task.

        Flow: create a placeholder item, schedule a (mocked) container
        introspection job, simulate the worker's XML callback, verify the
        parsed task spec stored in the item metadata, then execute the task
        as a non-admin user and verify permission enforcement, output file
        upload, temp-token cleanup on job completion, and asynchronous
        output provenance binding.
        """
        # Create a new item that will become a task
        item = Item().createItem(name='placeholder',
                                 creator=self.admin,
                                 folder=self.privateFolder)

        # Create task to introspect container
        with mock.patch(
                'girder_jobs.models.job.Job.scheduleJob') as scheduleMock:
            resp = self.request('/item/%s/item_task_slicer_cli_description' %
                                item['_id'],
                                method='POST',
                                params={
                                    'image': 'johndoe/foo:v5',
                                    'args': json.dumps(['--foo', 'bar'])
                                },
                                user=self.admin)
            self.assertStatusOk(resp)
            self.assertEqual(resp.json['_modelType'], 'job')
            self.assertEqual(len(scheduleMock.mock_calls), 1)
            # scheduleJob was mocked, so inspect the job document it received
            job = scheduleMock.mock_calls[0][1][0]
            self.assertEqual(job['handler'], 'worker_handler')
            self.assertEqual(job['itemTaskId'], item['_id'])
            self.assertEqual(job['kwargs']['outputs']['_stdout']['method'],
                             'PUT')
            self.assertTrue(
                job['kwargs']['outputs']['_stdout']['url'].endswith(
                    'item/%s/item_task_slicer_cli_xml' % item['_id']))
            # Scoped token the worker would use to PUT the CLI XML back
            token = job['kwargs']['outputs']['_stdout']['headers'][
                'Girder-Token']

        # Task should not be registered until we get the callback
        resp = self.request('/item_task', user=self.admin)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json, [])

        # Image and args should be stored in the item metadata
        item = Item().load(item['_id'], force=True)
        self.assertEqual(item['meta']['itemTaskSpec']['docker_image'],
                         'johndoe/foo:v5')
        self.assertEqual(item['meta']['itemTaskSlicerCliArgs'],
                         ['--foo', 'bar'])

        # Simulate callback from introspection job
        with open(os.path.join(os.path.dirname(__file__),
                               'slicer_cli.xml')) as f:
            xml = f.read()

        resp = self.request('/item/%s/item_task_slicer_cli_xml' % item['_id'],
                            method='PUT',
                            params={
                                'setName': True,
                                'setDescription': True
                            },
                            token=token,
                            body=xml,
                            type='application/xml')
        self.assertStatusOk(resp)

        # We should only be able to see tasks we have read access on
        resp = self.request('/item_task')
        self.assertStatusOk(resp)
        self.assertEqual(resp.json, [])

        resp = self.request('/item_task', user=self.admin)
        self.assertStatusOk(resp)
        self.assertEqual(len(resp.json), 1)
        self.assertEqual(resp.json[0]['_id'], str(item['_id']))

        # setName/setDescription should have renamed the item from the XML
        item = Item().load(item['_id'], force=True)
        self.assertEqual(item['name'], 'PET phantom detector CLI')
        self.assertEqual(
            item['description'],
            u'**Description**: Detects positions of PET/CT pocket phantoms in PET image.\n\n'
            u'**Author(s)**: Girder Developers\n\n**Version**: 1.0\n\n'
            u'**License**: Apache 2.0\n\n**Acknowledgements**: *none*\n\n'
            u'*This description was auto-generated from the Slicer CLI XML specification.*'
        )
        self.assertTrue(item['meta']['isItemTask'])
        # Full task spec parsed from slicer_cli.xml, stored verbatim in metadata
        self.assertEqual(
            item['meta']['itemTaskSpec'], {
                'mode':
                'docker',
                'docker_image':
                'johndoe/foo:v5',
                'container_args': [
                    '--foo', 'bar', '--InputImage=$input{--InputImage}',
                    '--MaximumLineStraightnessDeviation=$input{--MaximumLineStraightnessDeviation}',
                    '--MaximumRadius=$input{--MaximumRadius}',
                    '--MaximumSphereDistance=$input{--MaximumSphereDistance}',
                    '--MinimumRadius=$input{--MinimumRadius}',
                    '--MinimumSphereActivity=$input{--MinimumSphereActivity}',
                    '--MinimumSphereDistance=$input{--MinimumSphereDistance}',
                    '--SpheresPerPhantom=$input{--SpheresPerPhantom}',
                    '$flag{--StrictSorting}',
                    '--DetectedPoints=$output{--DetectedPoints}'
                ],
                'inputs': [{
                    'description': 'Input image to be analysed.',
                    'format': 'image',
                    'name': 'InputImage',
                    'type': 'image',
                    'id': '--InputImage',
                    'target': 'filepath'
                }, {
                    'description':
                    'Used for eliminating detections which are not in a straight line. '
                    'Unit: multiples of geometric average of voxel spacing',
                    'format':
                    'number',
                    'default': {
                        'data': 1.0
                    },
                    'type':
                    'number',
                    'id':
                    '--MaximumLineStraightnessDeviation',
                    'name':
                    'MaximumLineStraightnessDeviation'
                }, {
                    'description':
                    'Used for eliminating too big blobs. Unit: millimeter [mm]',
                    'format': 'number',
                    'default': {
                        'data': 20.0
                    },
                    'type': 'number',
                    'id': '--MaximumRadius',
                    'name': 'MaximumRadius'
                }, {
                    'description':
                    'Signifies maximum distance between adjacent sphere centers [mm]. '
                    'Used to separate phantoms from tumors.',
                    'format':
                    'number',
                    'default': {
                        'data': 40.0
                    },
                    'type':
                    'number',
                    'id':
                    '--MaximumSphereDistance',
                    'name':
                    'MaximumSphereDistance'
                }, {
                    'description':
                    'Used for eliminating too small blobs. Unit: millimeter [mm]',
                    'format': 'number',
                    'default': {
                        'data': 3.0
                    },
                    'type': 'number',
                    'id': '--MinimumRadius',
                    'name': 'MinimumRadius'
                }, {
                    'description':
                    'Used for thresholding in blob detection. '
                    'Unit: becquerels per milliliter [Bq/ml]',
                    'format':
                    'number',
                    'default': {
                        'data': 5000.0
                    },
                    'type':
                    'number',
                    'id':
                    '--MinimumSphereActivity',
                    'name':
                    'MinimumSphereActivity'
                }, {
                    'description':
                    'Signifies minimum distance between adjacent sphere centers [mm]. '
                    'Used to separate phantoms from tumors.',
                    'format':
                    'number',
                    'default': {
                        'data': 30.0
                    },
                    'type':
                    'number',
                    'id':
                    '--MinimumSphereDistance',
                    'name':
                    'MinimumSphereDistance'
                }, {
                    'description':
                    'What kind of phantom are we working with here?',
                    'format': 'number-enumeration',
                    'default': {
                        'data': 3
                    },
                    'type': 'number-enumeration',
                    'id': '--SpheresPerPhantom',
                    'name': 'SpheresPerPhantom',
                    'values': [2, 3]
                }, {
                    'description':
                    'Controls whether spheres within a phantom must have descending '
                    'activities. If OFF, they can have approximately same activities '
                    '(within 15%).',
                    'format':
                    'boolean',
                    'default': {
                        'data': False
                    },
                    'type':
                    'boolean',
                    'id':
                    '--StrictSorting',
                    'name':
                    'StrictSorting'
                }],
                'outputs': [{
                    'description':
                    'Fiducial points, one for each detected sphere. '
                    'Will be multiple of 3.',
                    'format':
                    'new-file',
                    'name':
                    'DetectedPoints',
                    'type':
                    'new-file',
                    'id':
                    '--DetectedPoints',
                    'target':
                    'filepath'
                }]
            })

        # Shouldn't be able to run the task if we don't have execute permission flag
        Folder().setUserAccess(self.privateFolder,
                               user=self.user,
                               level=AccessType.READ,
                               save=True)
        resp = self.request('/item_task/%s/execution' % item['_id'],
                            method='POST',
                            user=self.user)
        self.assertStatus(resp, 403)

        # Grant the user permission, and run the task
        Folder().setUserAccess(self.privateFolder,
                               user=self.user,
                               level=AccessType.WRITE,
                               flags=ACCESS_FLAG_EXECUTE_TASK,
                               currentUser=self.admin,
                               save=True)

        # Bindings for every input declared by the CLI spec above
        inputs = {
            '--InputImage': {
                'mode': 'girder',
                'resource_type': 'item',
                'id': str(item['_id'])
            },
            '--MaximumLineStraightnessDeviation': {
                'mode': 'inline',
                'data': 1
            },
            '--MaximumRadius': {
                'mode': 'inline',
                'data': 20
            },
            '--MaximumSphereDistance': {
                'mode': 'inline',
                'data': 40
            },
            '--MinimumRadius': {
                'mode': 'inline',
                'data': 3
            },
            '--MinimumSphereActivity': {
                'mode': 'inline',
                'data': 5000
            },
            '--MinimumSphereDistance': {
                'mode': 'inline',
                'data': 30
            },
            '--SpheresPerPhantom': {
                'mode': 'inline',
                'data': 3
            },
            '--StrictSorting': {
                'mode': 'inline',
                'data': False
            }
        }

        outputs = {
            '--DetectedPoints': {
                'mode': 'girder',
                'parent_id': str(self.privateFolder['_id']),
                'parent_type': 'folder',
                'name': 'test.txt'
            }
        }

        # Ensure task was scheduled
        with mock.patch(
                'girder_jobs.models.job.Job.scheduleJob') as scheduleMock:
            resp = self.request('/item_task/%s/execution' % item['_id'],
                                method='POST',
                                user=self.user,
                                params={
                                    'inputs': json.dumps(inputs),
                                    'outputs': json.dumps(outputs)
                                })
            self.assertEqual(len(scheduleMock.mock_calls), 1)
        self.assertStatusOk(resp)
        job = resp.json
        self.assertEqual(job['_modelType'], 'job')
        self.assertNotIn('kwargs', job)  # ordinary user can't see kwargs

        jobModel = Job()
        job = jobModel.load(job['_id'], force=True)
        output = job['kwargs']['outputs']['--DetectedPoints']

        # Simulate output from the worker
        contents = b'Hello world'
        resp = self.request(path='/file',
                            method='POST',
                            token=output['token'],
                            params={
                                'parentType': output['parent_type'],
                                'parentId': output['parent_id'],
                                'name': output['name'],
                                'size': len(contents),
                                'mimeType': 'text/plain',
                                'reference': output['reference']
                            })
        self.assertStatusOk(resp)

        # Complete the upload by sending the single data chunk
        uploadId = resp.json['_id']
        fields = [('offset', 0), ('uploadId', uploadId)]
        files = [('chunk', output['name'], contents)]
        resp = self.multipartRequest(path='/file/chunk',
                                     fields=fields,
                                     files=files,
                                     token=output['token'])
        self.assertStatusOk(resp)
        file = resp.json
        self.assertEqual(file['_modelType'], 'file')
        self.assertEqual(file['size'], 11)
        self.assertEqual(file['mimeType'], 'text/plain')
        file = File().load(file['_id'], force=True)

        # Make sure temp token is removed once we change job status to final state
        job = jobModel.load(job['_id'], force=True)
        self.assertIn('itemTaskTempToken', job)

        # Transition through states to SUCCESS
        job = jobModel.updateJob(job, status=JobStatus.QUEUED)
        job = jobModel.updateJob(job, status=JobStatus.RUNNING)
        job = jobModel.updateJob(job, status=JobStatus.SUCCESS)

        self.assertNotIn('itemTaskTempToken', job)
        self.assertIn('itemTaskBindings', job)

        # Wait for async data.process event to bind output provenance
        # (poll up to 15s; the binding happens on a background event thread)
        start = time.time()
        while time.time() - start < 15:
            job = jobModel.load(job['_id'], force=True)

            if 'itemId' in job['itemTaskBindings']['outputs'][
                    '--DetectedPoints']:
                break
            else:
                time.sleep(0.2)
        else:
            raise Exception('Output binding did not occur in time')

        self.assertEqual(
            job['itemTaskBindings']['outputs']['--DetectedPoints']['itemId'],
            file['itemId'])
Exemple #12
0
    def testDownload(self):
        """
        Verify that item, file, folder, and collection downloads are all
        recorded by the download statistics, including chunked and partial
        file downloads and mime-filtered collection downloads.
        """
        collection = Collection().createCollection('collection1', public=True)
        folder = Folder().createFolder(collection, 'folder1', parentType='collection', public=True)
        item = Item().createItem('item1', self.admin, folder)

        # Path to test files
        file1Path = os.path.join(self.filesDir, 'txt1.txt')
        file2Path = os.path.join(self.filesDir, 'txt2.txt')

        # Upload files to item
        with open(file1Path, 'rb') as fp:
            file1 = Upload().uploadFromFile(
                fp, os.path.getsize(file1Path), 'txt1.txt', parentType='item',
                parent=item, user=self.admin)

        # file2 gets a distinct mime type so the mimeFilter test below
        # only matches this file
        with open(file2Path, 'rb') as fp:
            file2 = Upload().uploadFromFile(
                fp, os.path.getsize(file2Path), 'txt2.txt', mimeType='image/jpeg',
                parentType='item', parent=item, user=self.admin)

        # Download item and its files several times and ensure downloads are recorded
        # Each file is downloaded 10 times (5 item downloads + 5 direct downloads each)
        for n in range(0, 5):
            self._downloadItem(item['_id'])
            self._downloadFile(file1['_id'])
            self._downloadFile(file2['_id'])

        # Download each file 1 time by downloading parent folder
        self._downloadFolder(folder['_id'])

        # Download each file over 2 requests
        self._downloadFileInTwoChunks(file1['_id'])
        self._downloadFileInTwoChunks(file2['_id'])

        # Download each file partially, adding 1 to start and 4 to requested
        self._downloadPartialFile(file1['_id'])
        self._downloadPartialFile(file2['_id'])

        # Download entire collection
        # Each file is downloaded 1 additional time
        path = '/collection/%s/download' % collection['_id']
        resp = self.request(path, user=self.admin, isJson=False)

        # Iterate through generator to trigger download events
        for data in resp.body:
            data

        # Download collection filtered by mime type
        # file2 is downloaded one additional time
        path = '/collection/%s/download' % collection['_id']
        resp = self.request(path, user=self.admin, isJson=False, method='GET',
                            params={
                                'id': collection['_id'],
                                'mimeFilter': json.dumps(['image/jpeg'])
                            })

        # iterate through generator to trigger download events
        for data in resp.body:
            data

        # NOTE(review): the three numbers are the expected per-counter totals
        # checked by _checkDownloadsCount — confirm their order at the helper
        self._checkDownloadsCount(file1['_id'], 14, 18, 13)
        self._checkDownloadsCount(file2['_id'], 15, 19, 14)
Exemple #13
0
    def testDeleteUser(self):
        """
        Test the behavior of deleting users.

        Verifies that deleting a user removes the user document, their
        tokens, pending group membership requests, access-control entries
        referencing them on other resources, and their own folders.
        """
        # Create a couple of users.
        # FIX: the email argument was a broken format string
        # ('*****@*****.**' % num raises TypeError — no conversion
        # specifier); use a valid per-user address instead.
        users = [User().createUser(
            'usr%s' % num, 'passwd', 'tst', 'usr', 'usr%s@example.com' % num)
            for num in [0, 1]]

        # Create a folder and give both users some access on it
        folder = Folder().createFolder(
            parent=users[0], name='x', parentType='user', public=False,
            creator=users[0])
        Folder().setUserAccess(folder, users[0], AccessType.WRITE)
        Folder().setUserAccess(folder, users[1], AccessType.READ)
        folder = Folder().save(folder)

        self.assertEqual(len(folder['access']['users']), 2)

        # Create a token for user 1
        token = Token().createToken(users[1])

        # Create a group, and have user 1 request to join it
        group = Group().createGroup('test', users[0], public=True)
        resp = self.request(path='/group/%s/member' % group['_id'],
                            method='POST', user=users[1])
        self.assertStatusOk(resp)

        # Make sure non-admin users can't delete other users
        resp = self.request(path='/user/%s' % users[0]['_id'], method='DELETE',
                            user=users[1])
        self.assertStatus(resp, 403)

        # Delete user 1 as admin, should work
        resp = self.request(path='/user/%s' % users[1]['_id'], method='DELETE',
                            user=users[0])
        self.assertStatusOk(resp)
        self.assertEqual(
            resp.json['message'], 'Deleted user %s.' % users[1]['login'])

        # Reload everything that referenced the deleted user
        users[1] = User().load(users[1]['_id'], force=True)
        folder = Folder().load(folder['_id'], force=True)
        token = Token().load(token['_id'], force=True, objectId=False)
        group = Group().load(group['_id'], force=True)

        # Make sure user and token were deleted
        self.assertEqual(users[1], None)
        self.assertEqual(token, None)

        # Make sure pending invite to group was deleted
        self.assertEqual(len(list(Group().getFullRequestList(group))), 0)

        # Make sure access control references for the user were deleted
        self.assertEqual(len(folder['access']['users']), 1)

        # Delete user 0
        resp = self.request(path='/user/%s' % users[0]['_id'], method='DELETE',
                            user=users[0])
        self.assertStatusOk(resp)

        # Make sure the user's folder was deleted
        folder = Folder().load(folder['_id'], force=True)
        self.assertEqual(folder, None)
Exemple #14
0
def training_output_folder(folder, user):
    """Return the "Training Output" subfolder of `folder`, creating it on demand.

    :param folder: the parent folder document.
    :param user: the user who becomes the creator of a newly made folder.
    :return: the existing or newly created "Training Output" folder document.
    """
    folder_model = Folder()
    return folder_model.createFolder(
        folder,
        TrainingOutputFolderName,
        reuseExisting=True,
        creator=user,
    )
Exemple #15
0
    def stream(self):
        """
        Generate a BagIt-style bag of the tale as a streamed zip archive.

        Yields zip data chunks in this order: workspace files (checksummed
        as they stream), extra files (license), then the bag tag files
        (README, run-local.sh, bagit.txt, bag-info.txt, fetch.txt, payload
        manifests, environment/manifest JSON), the tag manifests, and
        finally the zip footer.
        """
        token = 'wholetale'
        # Render the container launch command and URL from the image config
        container_config = self.image['config']
        rendered_command = container_config.get('command', '').format(
            base_path='',
            port=container_config['port'],
            ip='0.0.0.0',
            token=token)
        urlPath = container_config['urlPath'].format(token=token)
        # Shell script allowing the bag to be re-run locally via repo2docker
        run_file = run_tpl.format(
            repo2docker=container_config.get('repo2docker_version',
                                             REPO2DOCKER_VERSION),
            user=container_config['user'],
            port=container_config['port'],
            taleId=self.tale['_id'],
            command=rendered_command,
            urlPath=urlPath,
        )
        top_readme = readme_tpl.format(
            title=self.tale['title'],
            description=self.tale['description'],
            port=container_config['port'],
            urlPath=urlPath,
        )
        extra_files = {
            'data/LICENSE': self.tale_license['text'],
        }
        # BagIt Payload-Oxum: total byte size and file count of the payload
        oxum = dict(size=0, num=0)

        # Add files from the workspace computing their checksum
        for path, file_stream in Folder().fileList(self.workspace,
                                                   user=self.user,
                                                   subpath=False):
            yield from self.dump_and_checksum(file_stream,
                                              'data/workspace/' + path)

        # Iterate again to get file sizes this time
        for path, fobj in Folder().fileList(self.workspace,
                                            user=self.user,
                                            subpath=False,
                                            data=False):
            oxum['num'] += 1
            oxum['size'] += fobj['size']

        # Compute checksums for the extrafiles
        for path, content in extra_files.items():
            oxum['num'] += 1
            oxum['size'] += len(content)
            payload = self.stream_string(content)
            yield from self.dump_and_checksum(payload, path)

        # In Bag there's an additional 'data' folder where everything lives
        for i in range(len(self.manifest['aggregates'])):
            uri = self.manifest['aggregates'][i]['uri']
            # Don't touch any of the extra files
            if len([key for key in extra_files.keys() if '../' + key in uri]):
                continue
            if uri.startswith('../'):
                self.manifest['aggregates'][i]['uri'] = uri.replace(
                    '..', '../data')
            if 'bundledAs' in self.manifest['aggregates'][i]:
                folder = self.manifest['aggregates'][i]['bundledAs']['folder']
                self.manifest['aggregates'][i]['bundledAs'][
                    'folder'] = folder.replace('..', '../data')
        # Update manifest with hashes
        # NOTE(review): 'aggergate' is misspelled — presumably it matches the
        # method's definition elsewhere; confirm before renaming.
        self.append_aggergate_checksums()

        # Update manifest with filesizes and mimeTypes for workspace items
        self.append_aggregate_filesize_mimetypes('../data/workspace/')

        # Update manifest with filesizes and mimeTypes for extra items
        self.append_extras_filesize_mimetypes(extra_files)

        # Create the fetch file
        fetch_file = ""
        for bundle in self.manifest['aggregates']:
            if 'bundledAs' not in bundle:
                continue
            folder = bundle['bundledAs']['folder']
            fetch_file += "{uri} {size} {folder}".format(
                uri=bundle['uri'],
                size=bundle['size'],
                folder=folder.replace('../', '')
            )  # fetch.txt is located in the root level, need to adjust paths
            # NOTE(review): assumes 'folder' ends with a path separator so the
            # filename concatenates cleanly — confirm against manifest builder.
            fetch_file += bundle['bundledAs'].get('filename', '')
            fetch_file += '\n'

        now = datetime.now(timezone.utc)
        bag_info = bag_info_tpl.format(
            bag_profile=bag_profile,
            date=now.strftime('%Y-%m-%d'),
            time=now.strftime('%H:%M:%S %Z'),
            oxum="{size}.{num}".format(**oxum),
        )

        def dump_checksums(alg):
            # Render "checksum path" lines for a payload manifest file
            dump = ""
            for path, chksum in self.state[alg]:
                dump += "{} {}\n".format(chksum, path)
            return dump

        # Tag files are supplied as thunks: each is rendered once per digest
        # below and once more when written into the archive.
        tagmanifest = dict(md5="", sha256="")
        for payload, fname in (
            (lambda: top_readme, 'README.md'),
            (lambda: run_file, 'run-local.sh'),
            (lambda: self.default_bagit, 'bagit.txt'),
            (lambda: bag_info, 'bag-info.txt'),
            (lambda: fetch_file, 'fetch.txt'),
            (lambda: dump_checksums('md5'), 'manifest-md5.txt'),
            (lambda: dump_checksums('sha256'), 'manifest-sha256.txt'),
            (
                lambda: json.dumps(
                    self.get_environment(),
                    indent=4,
                    cls=JsonEncoder,
                    sort_keys=True,
                    allow_nan=False,
                ),
                'metadata/environment.json',
            ),
            (lambda: json.dumps(self.manifest, indent=4),
             'metadata/manifest.json'),
        ):
            tagmanifest['md5'] += "{} {}\n".format(
                md5(payload().encode()).hexdigest(), fname)
            tagmanifest['sha256'] += "{} {}\n".format(
                sha256(payload().encode()).hexdigest(), fname)
            yield from self.zip_generator.addFile(payload, fname)

        # The tag manifests themselves are written last (they cover the
        # tag files emitted above).
        for payload, fname in (
            (lambda: tagmanifest['md5'], 'tagmanifest-md5.txt'),
            (lambda: tagmanifest['sha256'], 'tagmanifest-sha256.txt'),
        ):
            yield from self.zip_generator.addFile(payload, fname)

        yield self.zip_generator.footer()
Exemple #16
0
    def importData(self, parent, parentType, params, progress, user, **kwargs):
        """
        Import a list of tables, each to a file within a distinct item.  Each
        table specification in the list is an object which must have a 'table'
        key.  It may optionally have other connection information such as
        'database' and 'schema'.  If there is a 'name' key, the name is used
        for the item and file.  If there is a 'database' key, a subfolder is
        created within the specified parent with that name.  If a user or
        collection is specified for the top level and no database key is
        specified, the default database name (from the assetstore) is used.
        If the specific item and file already exists and is from the same
        assetstore, it is updated.  If the specific item already exists and is
        not from the same assetstore (or not marked that it was imported), an
        error is given.

        :param parent: The parent object to import into.  Must be a folder,
            user, collection, item, or file.
        :param parentType: The model type of the parent object.
        :param params: Additional parameters required for the import process:
            tables: a list of tables to add.  If there is already an item with
                    an exact table name, it is updated.
            sort: default sort parameter.  Used in plain downloads.
            fields: default fields parameter.  Used in plain downloads.
            filters: default filters parameter.  Used in plain downloads.
            group: default group parameter.  Used in plain downloads.
            format: default format parameter.  Used in plain downloads.
            replace: if False, don't replace an existing file/item with the
                name, but always create new entries.  A parentType of file
                will always replace the existing data of a file
        :type params: dict
        :param progress: Object on which to record progress if possible.
        :type progress: :py:class:`girder.utility.progress.ProgressContext`
        :param user: The Girder user performing the import.
        :type user: dict or None
        :return: a list of objects, each of which has an item and file entry
            with the items and files that were imported.
        """
        # Prefer the assetstore's fixed URI; fall back to a caller-supplied one
        uri = (self.assetstore['database'].get('uri')
               if self.assetstore['database'].get('uri') else params['uri'])
        defaultDatabase = dbs.databaseFromUri(uri)
        response = []
        # Track which resources we created so a failed import only cleans up
        # what this call made, never pre-existing resources.
        # NOTE(review): the flags are not reset per table, so a failure on a
        # later table may also clean up resources created for earlier ones —
        # confirm whether that is the intended rollback semantics.
        createdFolder = createdItem = createdFile = False
        for table in params['tables']:
            if isinstance(table, six.string_types):
                dbinfo = {'table': table}
            else:
                dbinfo = table.copy()
            if not self.assetstore['database'].get('uri'):
                dbinfo['uri'] = uri
            name = dbinfo.pop('name', dbinfo['table'])
            progress.update(message='Importing %s' % name)
            # Find or create a folder if needed
            if 'database' not in dbinfo and parentType == 'folder':
                folder = parent
            elif parentType not in ('file', 'item'):
                folderName = dbinfo.get('database', defaultDatabase)
                folder = Folder().findOne({
                    'parentId': parent['_id'],
                    'name': folderName,
                    'parentCollection': parentType
                })
                if folder is None:
                    folder = Folder().createFolder(parent,
                                                   folderName,
                                                   parentType=parentType,
                                                   creator=user)
                    createdFolder = True
            if parentType == 'file':
                # for files, we'll create a provisional file below, then
                # delete the original assetstore entry and modify the
                # existing file entry with the updated values before saving.
                item = Item().load(parent['itemId'], force=True)
            elif parentType == 'item':
                item = parent
            else:
                # Create an item if needed
                item = Item().findOne({
                    'folderId': folder['_id'],
                    'name': name
                })
                if item is None or params.get('replace') is False:
                    item = Item().createItem(name=name,
                                             creator=user,
                                             folder=folder)
                    # FIX: only flag the item as created when we actually
                    # created it (previously set unconditionally, which made
                    # the error cleanup below delete pre-existing items).
                    createdItem = True
            # Create a file if needed
            file = File().findOne({'name': name, 'itemId': item['_id']})
            if file is None or params.get(
                    'replace') is False or parentType == 'file':
                file = File().createFile(creator=user,
                                         item=item,
                                         name=name,
                                         size=0,
                                         assetstore=self.assetstore,
                                         mimeType=dbFormatList.get(
                                             preferredFormat(
                                                 params.get('format'))),
                                         saveFile=False)
                createdFile = True
            if file.get(DB_INFO_KEY) and not file[DB_INFO_KEY].get('imported'):
                raise GirderException(
                    'A file for table %s is present but cannot be updated '
                    'because it wasn\'t imported.' % name)
            try:
                file = self._importDataFile(file, parent, parentType, dbinfo,
                                            params)
            except GirderException as exc:
                # Roll back only the resources this import created
                self._importDataCleanup(file if createdFile else None,
                                        item if createdItem else None,
                                        folder if createdFolder else None)
                raise exc
            response.append({'item': item, 'file': file})
        return response
Exemple #17
0
    def setUp(self):
        """Build the two-user file hierarchy shared by this test case."""
        base.TestCase.setUp(self, assetstoreType='filesystem')

        # Hierarchy created below:
        #  - user (has access to everything):
        #       |- [Folder public]  publicFolder:  publicFile, duplicatePublicFile
        #       |- [Folder private] privateFolder: privateFile, privateOnlyFile
        #  - otherUser: owns nothing, has access to none of user's files.

        self.user = User().createUser(
            login='******',
            password='******',
            firstName='Leeloominai',
            lastName='Sebat',
            email='*****@*****.**'
        )

        # The default public/private folders are created along with the user;
        # locate each one by its 'public' flag.
        for child in Folder().childFolders(parent=self.user, parentType='user', user=self.user):
            if child['public'] is True:
                self.publicFolder = child
            else:
                self.privateFolder = child

        self.userData = u'\u266a Il dolce suono mi ' \
                        u'colp\u00ec di sua voce! \u266a'.encode('utf8')

        def _upload(data, name, folder, mimeType):
            # Local shorthand: upload a bytes payload into a folder as user.
            return Upload().uploadFromFile(
                obj=six.BytesIO(data),
                size=len(data),
                name=name,
                parentType='folder',
                parent=folder,
                user=self.user,
                mimeType=mimeType
            )

        self.privateFile = _upload(
            self.userData, 'Il dolce suono - PRIVATE',
            self.privateFolder, 'audio/mp4')
        self.publicFile = _upload(
            self.userData, 'Il dolce suono - PUBLIC',
            self.publicFolder, 'audio/flac')
        self.duplicatePublicFile = _upload(
            self.userData, 'Il dolce suono - PUBLIC DUPLICATE',
            self.publicFolder, 'audio/mp3')

        self.privateOnlyData =\
            u'\u2641 \u2600 \u2601 \u2614 \u2665'.encode('utf8')
        self.privateOnlyFile = _upload(
            self.privateOnlyData, 'Powers combined',
            self.privateFolder, 'image/png')

        self.otherUser = User().createUser(
            login='******',
            password='******',
            firstName='Jean-Baptiste',
            lastName='Zorg',
            email='*****@*****.**'
        )
Exemple #18
0
    def _createFiles(self, user=None):
        """
        Create the items, folders, files, metadata, and collection used by
        the download tests, recording the expected zip layout as we go.

        :param user: the user who should own these items (default self.admin).
        """
        user = self.admin if user is None else user
        self.expectedZip = {}

        # A public collection containing one private folder.
        self.collection = Collection().createCollection(
            name='Test Collection',
            description='The description',
            public=True,
            creator=user)
        self.collectionPrivateFolder = Folder().createFolder(
            parent=self.collection,
            parentType='collection',
            name='Private',
            creator=user,
            public=False)

        # Fetch the admin user's default folders, sorted by name so that
        # index 0 is Private and index 1 is Public.
        resp = self.request(path='/folder',
                            method='GET',
                            user=user,
                            params={
                                'parentType': 'user',
                                'parentId': user['_id'],
                                'sort': 'name',
                                'sortdir': 1
                            })
        self.adminPrivateFolder = Folder().load(resp.json[0]['_id'], user=user)
        self.adminPublicFolder = Folder().load(resp.json[1]['_id'], user=user)

        # A child folder inside the admin public folder, created via the API.
        resp = self.request(path='/folder',
                            method='POST',
                            user=user,
                            params={
                                'name': 'Folder 1',
                                'parentId': self.adminPublicFolder['_id']
                            })
        self.adminSubFolder = resp.json

        # Items spread across the folders above ('It\em/3' deliberately
        # contains characters that need escaping in zip paths).
        self.items = [
            Item().createItem(itemName, self.admin, itemParent)
            for itemName, itemParent in (
                ('Item 1', self.adminPublicFolder),
                ('Item 2', self.adminPublicFolder),
                ('It\\em/3', self.adminSubFolder),
                ('Item 4', self.collectionPrivateFolder),
                ('Item 5', self.collectionPrivateFolder),
            )
        ]

        # Upload files into the items; remember each expected zip entry.
        file, path, contents = self._uploadFile('File 1', self.items[0])
        self.file1 = file
        self.expectedZip[path] = contents
        for fileName, itemIndex in (('File 2', 0), ('File 3', 1),
                                    ('File 4', 2), ('File 5', 3)):
            file, path, contents = self._uploadFile(fileName, self.items[itemIndex])
            self.expectedZip[path] = contents

        def _itemMetaPath(item):
            # Zip path of the metadata sidecar file for an item.
            parents = Item().parentsToRoot(item, self.admin)
            return os.path.join(*([
                part['object'].get('name', part['object'].get('login', ''))
                for part in parents
            ] + [item['name'], 'girder-item-metadata.json']))

        # Place metadata on two of the items and one of the folders.
        meta = {'key': 'value'}
        Item().setMetadata(self.items[2], meta)
        self.expectedZip[_itemMetaPath(self.items[2])] = meta

        meta = {'x': 'y'}
        Item().setMetadata(self.items[4], meta)
        self.expectedZip[_itemMetaPath(self.items[4])] = meta

        meta = {'key2': 'value2', 'date': datetime.datetime.utcnow()}
        # mongo rounds to millisecond, so adjust our expectations
        meta['date'] -= datetime.timedelta(
            microseconds=meta['date'].microsecond % 1000)
        Folder().setMetadata(self.adminPublicFolder, meta)
        parents = Folder().parentsToRoot(self.adminPublicFolder, user=user)
        path = os.path.join(*([
            part['object'].get('name', part['object'].get('login', ''))
            for part in parents
        ] + [self.adminPublicFolder['name'], 'girder-folder-metadata.json']))
        self.expectedZip[path] = meta
Exemple #19
0
    def testTaleFlow(self):
        """
        Exercise the Tale REST lifecycle end to end: schema validation,
        creation, update, listing with filters, deletion, and export.
        """
        # A tale without 'involatileData' must be rejected by the schema.
        resp = self.request(
            path='/tale', method='POST', user=self.user,
            type='application/json',
            body=json.dumps({'imageId': str(self.image['_id'])})
        )
        self.assertStatus(resp, 400)
        self.assertEqual(resp.json, {
            'message': ("Invalid JSON object for parameter tale: "
                        "'involatileData' "
                        "is a required property"),
            'type': 'rest'
        })

        # Grab the default user folders.
        # NOTE(review): the request sorts by 'title', but girder folders are
        # normally sorted by 'name' -- confirm indices 0/1 really map to
        # private/public here.
        resp = self.request(
            path='/folder', method='GET', user=self.user, params={
                'parentType': 'user',
                'parentId': self.user['_id'],
                'sort': 'title',
                'sortdir': 1
            })
        privateFolder = resp.json[0]
        publicFolder = resp.json[1]

        resp = self.request(
            path='/folder', method='GET', user=self.admin, params={
                'parentType': 'user',
                'parentId': self.admin['_id'],
                'sort': 'title',
                'sortdir': 1
            })
        # adminPrivateFolder = resp.json[0]
        adminPublicFolder = resp.json[1]

        # A well-formed request creates the first tale.
        resp = self.request(
            path='/tale', method='POST', user=self.user,
            type='application/json',
            body=json.dumps({
                'imageId': str(self.image['_id']),
                'involatileData': [
                    {'type': 'folder', 'id': publicFolder['_id']}
                ]
            })
        )
        self.assertStatusOk(resp)
        tale = resp.json

        # Check that workspace was created

        # Check that the data folder was created at the expected path.
        from girder.plugins.wholetale.constants import DATADIRS_NAME
        from girder.utility.path import getResourcePath
        from girder.models.folder import Folder
        sc = {
            '_id': tale['_id'],
            'cname': DATADIRS_NAME,
            'fname': DATADIRS_NAME
        }
        self.assertEqual(
            getResourcePath(
                'folder',
                Folder().load(tale['folderId'], user=self.user),
                user=self.admin),
            '/collection/{cname}/{fname}/{_id}'.format(**sc)
        )

        # Updating via PUT should change the mutable fields.
        resp = self.request(
            path='/tale/{_id}'.format(**tale), method='PUT',
            type='application/json',
            user=self.user, body=json.dumps({
                'folderId': tale['folderId'],
                'involatileData': tale['involatileData'],
                'imageId': tale['imageId'],
                'title': 'new name',
                'description': 'new description',
                'config': {'memLimit': '2g'},
                'public': True,
                'published': False
            })
        )
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['title'], 'new name')
        tale = resp.json

        # Second tale owned by the same user, based on the private folder.
        resp = self.request(
            path='/tale', method='POST', user=self.user,
            type='application/json',
            body=json.dumps({
                'imageId': str(self.image['_id']),
                'involatileData': [
                    {'type': 'folder', 'id': privateFolder['_id']}
                ]
            })
        )
        self.assertStatusOk(resp)
        new_tale = resp.json

        # Third tale, owned by the admin and marked private.
        resp = self.request(
            path='/tale', method='POST', user=self.admin,
            type='application/json',
            body=json.dumps({
                'imageId': str(self.image['_id']),
                'involatileData': [
                    {'type': 'folder', 'id': adminPublicFolder['_id']}
                ],
                'public': False
            })
        )
        self.assertStatusOk(resp)
        # admin_tale = resp.json

        # An unfiltered listing as admin sees all three tales.
        resp = self.request(
            path='/tale', method='GET', user=self.admin,

            params={}
        )
        self.assertStatusOk(resp)
        self.assertEqual(len(resp.json), 3)

        # Filtering by imageId: the user sees only their own two tales.
        resp = self.request(
            path='/tale', method='GET', user=self.user,
            params={'imageId': str(self.image['_id'])}
        )
        self.assertStatusOk(resp)
        self.assertEqual(len(resp.json), 2)
        self.assertEqual(set([_['_id'] for _ in resp.json]),
                         {tale['_id'], new_tale['_id']})

        # Filtering by userId yields the same two tales.
        resp = self.request(
            path='/tale', method='GET', user=self.user,
            params={'userId': str(self.user['_id'])}
        )
        self.assertStatusOk(resp)
        self.assertEqual(len(resp.json), 2)
        self.assertEqual(set([_['_id'] for _ in resp.json]),
                         {tale['_id'], new_tale['_id']})

        # Text search matches only the tale renamed to 'new name'.
        resp = self.request(
            path='/tale', method='GET', user=self.user,
            params={'text': 'new'}
        )
        self.assertStatusOk(resp)
        self.assertEqual(len(resp.json), 1)
        self.assertEqual(set([_['_id'] for _ in resp.json]),
                         {tale['_id']})

        # An admin can delete another user's tale.
        resp = self.request(
            path='/tale/{_id}'.format(**new_tale), method='DELETE',
            user=self.admin)
        self.assertStatusOk(resp)

        # The deleted tale is no longer retrievable.
        resp = self.request(
            path='/tale/{_id}'.format(**new_tale), method='GET',
            user=self.user)
        self.assertStatus(resp, 400)

        # GET on the surviving tale returns the stored document.
        resp = self.request(
            path='/tale/{_id}'.format(**tale), method='GET',
            user=self.user)
        self.assertStatusOk(resp)
        for key in tale.keys():
            # Bookkeeping fields may legitimately differ between requests.
            if key in ('access', 'updated', 'created'):
                continue
            self.assertEqual(resp.json[key], tale[key])

        # Exporting the tale streams back a binary archive.
        resp = self.request(
            path='/tale/{_id}/export'.format(**tale),
            method='GET',
            user=self.user,
            type='application/octet-stream',
            isJson=False)

        self.assertStatus(resp, 200)
Exemple #20
0
    def testUploadCallbacks(self):
        """Exercise folder/item upload callbacks and stream-based uploads."""
        callbackUser = User().createUser(
            firstName='Callback', lastName='Last', login='******',
            password='******', email='*****@*****.**')
        callbackPublicFolder = next(Folder().childFolders(
            parentType='user', parent=callbackUser, user=None, limit=1))

        # Map every expected folder/item path to "callback seen" flags.
        expectedFolders = {self.libTestDir: False}
        expectedItems = {}
        numFolders = 1     # 1 for self.libTestDir itself
        numItems = 0
        for root, dirs, files in os.walk(self.libTestDir):
            for name in files:
                expectedItems[os.path.join(root, name)] = False
                numItems += 1
            for name in dirs:
                expectedFolders[os.path.join(root, name)] = False
                numFolders += 1

        counts = {'folder': 0, 'item': 0}

        def folderCallback(folder, filepath):
            self.assertIn(filepath, six.viewkeys(expectedFolders))
            expectedFolders[filepath] = True
            counts['folder'] += 1

        def itemCallback(item, filepath):
            self.assertIn(filepath, six.viewkeys(expectedItems))
            expectedItems[filepath] = True
            counts['item'] += 1

        self.client.addFolderUploadCallback(folderCallback)
        self.client.addItemUploadCallback(itemCallback)
        self.client.upload(self.libTestDir, callbackPublicFolder['_id'])

        # Every folder and file got exactly one callback invocation.
        self.assertEqual(numFolders, counts['folder'])
        self.assertEqual(numItems, counts['item'])
        self.assertTrue(all(six.viewvalues(expectedItems)))
        self.assertTrue(all(six.viewvalues(expectedFolders)))

        # Re-uploading with reuseExisting must not create new folders.
        before = list(Folder().childFolders(
            parentType='folder', parent=callbackPublicFolder,
            user=callbackUser, limit=0))
        self.client.upload(self.libTestDir, callbackPublicFolder['_id'],
                           reuseExisting=True)
        after = list(Folder().childFolders(
            parentType='folder', parent=callbackPublicFolder,
            user=callbackUser, limit=0))
        self.assertEqual(before, after)
        self.assertEqual(len(after), 1)
        self.assertEqual([f['name'] for f in Folder().childFolders(
            parentType='folder', parent=after[0],
            user=callbackUser, limit=0)], ['sub0', 'sub1', 'sub2'])

        # Upload via a file-like object into a folder.
        progressEvents = []
        path = os.path.join(self.libTestDir, 'sub0', 'f')
        size = os.path.getsize(path)

        def progressCallback(info):
            progressEvents.append(info)

        # A declared size larger than the stream must fail and leave no
        # dangling upload record behind.
        with open(path) as f:
            with self.assertRaises(girder_client.IncorrectUploadLengthError):
                try:
                    self.client.uploadFile(
                        callbackPublicFolder['_id'], stream=f, name='test',
                        size=size + 1, parentType='folder')
                except girder_client.IncorrectUploadLengthError as exc:
                    self.assertEqual(
                        exc.upload['received'], exc.upload['size'] - 1)
                    upload = Upload().load(exc.upload['_id'])
                    self.assertEqual(upload, None)
                    raise

        with open(path) as f:
            file = self.client.uploadFile(
                callbackPublicFolder['_id'], stream=f, name='test',
                size=size, parentType='folder',
                progressCallback=progressCallback)

        self.assertEqual(len(progressEvents), 1)
        self.assertEqual(progressEvents[0]['current'], size)
        self.assertEqual(progressEvents[0]['total'], size)
        self.assertEqual(file['name'], 'test')
        self.assertEqual(file['size'], size)
        # Files with no extension should fallback to the default MIME type
        self.assertEqual(file['mimeType'], 'application/octet-stream')

        childItems = list(
            Folder().childItems(folder=callbackPublicFolder))
        self.assertEqual(len(childItems), 1)
        self.assertEqual(childItems[0]['name'], 'test')

        childFiles = list(Item().childFiles(childItems[0]))
        self.assertEqual(len(childFiles), 1)

        # Make sure MIME type propagates correctly when explicitly passed
        with open(path) as f:
            file = self.client.uploadFile(
                callbackPublicFolder['_id'], stream=f, name='test',
                size=size, parentType='folder', mimeType='image/jpeg')
            self.assertEqual(file['mimeType'], 'image/jpeg')

        # Make sure MIME type is guessed based on file name if not passed
        with open(path) as f:
            file = self.client.uploadFile(
                callbackPublicFolder['_id'], stream=f, name='test.txt',
                size=size, parentType='folder')
            self.assertEqual(file['mimeType'], 'text/plain')
Exemple #21
0
    def importData(self, parent, parentType, params, progress, user, **kwargs):
        """
        Recursively import S3 keys under ``params['importPath']`` into Girder,
        creating items/files for objects and folders for common prefixes.

        :param parent: the folder (or other resource) to import into.
        :param parentType: type of ``parent``; keys require 'folder'.
        :param params: dict with 'importPath' plus filtering options consumed
            by ``shouldImportFile``.
        :param progress: optional progress context updated per key/prefix.
        :param user: the user performing the import (becomes creator).
        :raises ValidationException: if keys appear directly under a
            non-folder parent.
        """
        prefix = params.get('importPath', '').strip().lstrip('/')
        # Normalize to a directory-style prefix so the delimiter listing works.
        if prefix and not prefix.endswith('/'):
            prefix += '/'

        bucket = self.assetstore['bucket']
        pages = self.client.get_paginator('list_objects').paginate(
            Bucket=bucket, Prefix=prefix, Delimiter='/')
        for page in pages:
            # Objects at this level become items containing a single file.
            for entry in page.get('Contents', []):
                if progress:
                    progress.update(message=entry['Key'])

                name = entry['Key'].rsplit('/', 1)[-1]
                if not name:
                    # Placeholder object for the "directory" itself; skip it.
                    continue

                if parentType != 'folder':
                    raise ValidationException(
                        'Keys cannot be imported directly underneath a %s.' %
                        parentType)

                if self.shouldImportFile(entry['Key'], params):
                    item = Item().createItem(name=name,
                                             creator=user,
                                             folder=parent,
                                             reuseExisting=True)
                    # Create the file record without saving so the import
                    # bookkeeping can be attached before the first save.
                    file = File().createFile(name=name,
                                             creator=user,
                                             item=item,
                                             reuseExisting=True,
                                             assetstore=self.assetstore,
                                             mimeType=None,
                                             size=entry['Size'],
                                             saveFile=False)
                    file['s3Key'] = entry['Key']
                    file['imported'] = True
                    File().save(file)

            # Common prefixes are "subdirectories": mirror them as folders
            # and recurse.
            for entry in page.get('CommonPrefixes', []):
                if progress:
                    progress.update(message=entry['Prefix'])

                name = entry['Prefix'].rstrip('/').rsplit('/', 1)[-1]

                subFolder = Folder().createFolder(parent=parent,
                                                  name=name,
                                                  parentType=parentType,
                                                  creator=user,
                                                  reuseExisting=True)
                self.importData(parent=subFolder,
                                parentType='folder',
                                params={'importPath': entry['Prefix']},
                                progress=progress,
                                user=user,
                                **kwargs)
Exemple #22
0
def list_datasets(
    user: types.GirderUserModel,
    published: bool,
    shared: bool,
    limit: int,
    offset: int,
    sortParams: Tuple[Tuple[str, int]],
):
    """Enumerate all public and private data the user can access.

    :param user: the requesting user's document.
    :param published: restrict results to published datasets.
    :param shared: restrict results to datasets shared with the user.
    :param limit: maximum number of documents to return (page size).
    :param offset: number of matching documents to skip.
    :param sortParams: sequence of ``(field, direction)`` pairs; only the
        first pair is honored, defaulting to ``('created', 1)``.
    :returns: filtered folder documents augmented with ``ownerLogin``; the
        total match count is exposed via the ``Girder-Total-Count`` header.
    """
    sort, sortDir = (sortParams or [['created', 1]])[0]
    # A single aggregation round trip via $facet: one branch sorts/pages the
    # results and joins the owner document, the other counts all matches.
    # based on https://stackoverflow.com/a/49483919
    # NOTE(review): '$first' inside $set requires MongoDB >= 4.4 -- confirm
    # the deployment's minimum server version.
    pipeline = [
        {
            '$match': get_dataset_query(user, published, shared)
        },
        {
            '$facet': {
                'results': [
                    {
                        '$sort': {
                            sort: sortDir
                        }
                    },
                    {
                        '$skip': offset
                    },
                    {
                        '$limit': limit
                    },
                    {
                        '$lookup': {
                            'from': 'user',
                            'localField': 'creatorId',
                            'foreignField': '_id',
                            'as': 'ownerLogin',
                        },
                    },
                    {
                        '$set': {
                            'ownerLogin': {
                                '$first': '$ownerLogin'
                            }
                        }
                    },
                    {
                        '$set': {
                            'ownerLogin': '******'
                        }
                    },
                ],
                'totalCount': [{
                    '$count': 'count'
                }],
            },
        },
    ]
    response = next(Folder().collection.aggregate(pipeline))
    # BUG FIX: the guard previously tested 'results', so a page past the end
    # (offset >= total matches) reported a total of 0 even though matches
    # exist. Guard on the list that is actually indexed.
    totalCount = response['totalCount']
    total = totalCount[0]['count'] if totalCount else 0
    cherrypy.response.headers['Girder-Total-Count'] = total
    return [
        Folder().filter(doc, additionalKeys=['ownerLogin'])
        for doc in response['results']
    ]
Exemple #23
0
    def testFullCopy(self):
        """
        Copying a public tale as another user must carry over its versions
        and runs, preserving each run's name and final status.
        """
        tale = self._create_example_tale(self.get_dataset([0]))
        workspace = Folder().load(tale["workspaceId"], force=True)

        # Put something in the workspace so the version has content.
        with open(os.path.join(workspace["fsPath"], "entrypoint.sh"), "wb") as fp:
            fp.write(b"echo 'Performed a run!'")

        resp = self.request(
            path="/version",
            method="POST",
            user=self.user_one,
            params={"name": "First Version", "taleId": tale["_id"]},
        )
        self.assertStatusOk(resp)
        version = resp.json

        # One failed run (status 4)...
        resp = self.request(
            path="/run",
            method="POST",
            user=self.user_one,
            params={"versionId": version["_id"], "name": "test run (failed)"},
        )
        self.assertStatusOk(resp)
        run = resp.json

        resp = self.request(
            path=f"/run/{run['_id']}/status",
            method="PATCH",
            user=self.user_one,
            params={"status": 4},
        )
        # FIX: the PATCH responses were previously unchecked.
        self.assertStatusOk(resp)

        # ...and one successful run (status 3).
        resp = self.request(
            path="/run",
            method="POST",
            user=self.user_one,
            params={"versionId": version["_id"], "name": "test run (success)"},
        )
        self.assertStatusOk(resp)
        run = resp.json

        resp = self.request(
            path=f"/run/{run['_id']}/status",
            method="PATCH",
            user=self.user_one,
            params={"status": 3},
        )
        self.assertStatusOk(resp)

        # 1. Make it public so user_two can copy it.
        resp = self.request(
            path=f"/tale/{tale['_id']}/access", method="GET", user=self.user_one
        )
        self.assertStatusOk(resp)
        tale_access = resp.json

        resp = self.request(
            path=f"/tale/{tale['_id']}/access",
            method="PUT",
            user=self.user_one,
            params={"access": json.dumps(tale_access), "public": True},
        )
        self.assertStatusOk(resp)

        # 2. Perform copy as user2
        resp = self.request(
            path=f"/tale/{tale['_id']}/copy", method="POST", user=self.user_two
        )
        self.assertStatusOk(resp)
        copied_tale = resp.json

        # Poll until the copy is ready, for at most ~5 seconds.
        # BUG FIX: the condition used 'or', which kept polling for the whole
        # retry budget even after the copy was already READY.
        retries = 10
        while copied_tale["status"] < TaleStatus.READY and retries > 0:
            time.sleep(0.5)
            resp = self.request(
                path=f"/tale/{copied_tale['_id']}", method="GET", user=self.user_two
            )
            self.assertStatusOk(resp)
            copied_tale = resp.json
            retries -= 1
        self.assertEqual(copied_tale["status"], TaleStatus.READY)

        resp = self.request(
            path="/version",
            method="GET",
            user=self.user_two,
            params={"taleId": copied_tale["_id"]},
        )
        self.assertStatusOk(resp)
        # BUG FIX: assertTrue(len(x), n) treats n as the failure message and
        # never fails; assertEqual performs the intended comparison.
        self.assertEqual(len(resp.json), 1)
        copied_version = resp.json[0]
        self.assertEqual(copied_version["name"], version["name"])

        resp = self.request(
            path="/run",
            method="GET",
            user=self.user_two,
            params={"taleId": copied_tale["_id"]},
        )
        self.assertStatusOk(resp)
        self.assertEqual(len(resp.json), 2)
        copied_runs = resp.json

        # Both runs point at the copied version and kept name/status.
        self.assertEqual(
            {_["runVersionId"] for _ in copied_runs}, {copied_version["_id"]}
        )
        self.assertEqual(
            {_["name"] for _ in copied_runs},
            {"test run (success)", "test run (failed)"},
        )
        self.assertEqual({_["runStatus"] for _ in copied_runs}, {3, 4})

        # Clean up
        resp = self.request(
            path=f"/tale/{copied_tale['_id']}",
            method="DELETE",
            user=self.user_two,
        )
        self.assertStatusOk(resp)
        self._remove_example_tale(tale)
Exemple #24
0
    def testFilesystemAssetstoreImport(self):
        """
        Exercise importing files from the local filesystem into a filesystem
        assetstore: auth failures, bad paths, single-file import,
        include/exclude regex filtering, download, and delete semantics.
        """
        folder = six.next(Folder().childFolders(self.admin,
                                                parentType='user',
                                                force=True,
                                                filters={'name': 'Public'}))

        params = {
            'importPath': '/nonexistent/dir',
            'destinationType': 'folder',
            'destinationId': folder['_id']
        }
        path = '/assetstore/%s/import' % str(self.assetstore['_id'])

        # Import requires authentication.
        resp = self.request(path, method='POST', params=params)
        self.assertStatus(resp, 401)

        # A nonexistent import path must be rejected.
        resp = self.request(path,
                            method='POST',
                            params=params,
                            user=self.admin)
        self.assertStatus(resp, 400)
        self.assertEqual(resp.json['message'], 'Not found: /nonexistent/dir.')

        # Test importing a single file
        params['importPath'] = os.path.join(ROOT_DIR, 'tests', 'cases',
                                            'py_client', 'testdata',
                                            'world.txt')
        resp = self.request(path,
                            method='POST',
                            params=params,
                            user=self.admin)
        self.assertStatusOk(resp)
        # A single file import creates an item named after the file that
        # contains the file.
        resp = self.request(
            '/resource/lookup',
            user=self.admin,
            params={'path': '/user/admin/Public/world.txt/world.txt'})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['_modelType'], 'file')
        file = File().load(resp.json['_id'], force=True, exc=True)
        self.assertTrue(os.path.isfile(file['path']))

        File().remove(file)

        # Test importing directory with include & exclude regexes; file should be excluded
        params['importPath'] = os.path.join(ROOT_DIR, 'tests', 'cases',
                                            'py_client')
        resp = self.request(path,
                            method='POST',
                            user=self.admin,
                            params=dict(params,
                                        fileIncludeRegex='world.*',
                                        fileExcludeRegex='world.*'))
        self.assertStatusOk(resp)

        # Exclusion wins over inclusion, so world.txt must not exist.
        resp = self.request(
            '/resource/lookup',
            user=self.admin,
            params={'path': '/user/admin/Public/world.txt/world.txt'})
        self.assertStatus(resp, 400)

        # Do the import with the include regex on
        resp = self.request(path,
                            method='POST',
                            params=dict(params, fileIncludeRegex='hello.*'),
                            user=self.admin)
        self.assertStatusOk(resp)

        # hello.txt should have been imported
        resp = self.request(
            '/resource/lookup',
            user=self.admin,
            params={'path': '/user/admin/Public/testdata/hello.txt/hello.txt'})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['_modelType'], 'file')
        file = File().load(resp.json['_id'], force=True, exc=True)

        self.assertTrue(os.path.isfile(file['path']))

        # world.txt should not
        resp = self.request(
            '/resource/lookup',
            user=self.admin,
            params={'path': '/user/admin/Public/world.txt/world.txt'})
        self.assertStatus(resp, 400)

        # Run import without any regexes specified, all files should be imported
        resp = self.request(path,
                            method='POST',
                            params=params,
                            user=self.admin)
        self.assertStatusOk(resp)

        resp = self.request(
            '/resource/lookup',
            user=self.admin,
            params={'path': '/user/admin/Public/testdata/world.txt/world.txt'})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['_modelType'], 'file')

        # Make sure downloading the file works
        resp = self.request('/file/%s/download' % str(file['_id']),
                            isJson=False)
        self.assertStatusOk(resp)
        self.assertEqual(self.getBody(resp), 'hello\n')

        # Deleting the file should not actually remove the file on disk
        # (imported files are not owned by the assetstore).
        resp = self.request('/file/' + str(file['_id']),
                            method='DELETE',
                            user=self.admin)
        self.assertStatusOk(resp)

        self.assertIsNone(File().load(file['_id'], force=True))
        self.assertTrue(os.path.isfile(file['path']))

        # Attempt to import a folder with an item directly into user; should fail
        resp = self.request('/assetstore/%s/import' % self.assetstore['_id'],
                            method='POST',
                            params={
                                'importPath':
                                os.path.join(ROOT_DIR, 'tests', 'cases',
                                             'py_client', 'testdata'),
                                'destinationType':
                                'user',
                                'destinationId':
                                self.admin['_id']
                            },
                            user=self.admin)
        self.assertStatus(resp, 400)
        self.assertEqual(
            resp.json['message'],
            'Files cannot be imported directly underneath a user.')
Exemple #25
0
def testParentsToRootAdmin(parentChain, admin):
    """Verify an admin sees every ancestor of a deeply nested folder.

    ``parentsToRoot`` returns the chain root-first; entries 1-3 are the
    intermediate folders F1, F2 and F3 created by the fixture.
    """
    chain = Folder().parentsToRoot(parentChain['folder4'], user=admin)
    for position, expectedName in ((1, 'F1'), (2, 'F2'), (3, 'F3')):
        assert chain[position]['object']['name'] == expectedName
Exemple #26
0
    def testS3AssetstoreAdapter(self):
        """End-to-end exercise of the S3 assetstore adapter.

        Covers: assetstore creation/validation, single- and multi-chunk
        direct-to-S3 uploads, empty files, downloads (direct, redirect and
        streaming-zip), importing existing S3 keys, and file deletion
        semantics for imported vs. native files.  S3 itself is mocked via
        moto, so no network access occurs.
        """
        # Delete the default assetstore
        Assetstore().remove(self.assetstore)
        # Matches both path-style and virtual-hosted-style S3 URLs, with or
        # without an explicit :443 port.
        s3Regex = (r'^(https://s3.amazonaws.com(:443)?/bucketname/foo/bar|'
                   'https://bucketname.s3.amazonaws.com(:443)?/foo/bar)')

        params = {
            'name': 'S3 Assetstore',
            'type': AssetstoreType.S3,
            'bucket': '',
            'accessKeyId': 'someKey',
            'secret': 'someSecret',
            'prefix': '/foo/bar/'
        }

        # Validation should fail with empty bucket name
        resp = self.request(path='/assetstore',
                            method='POST',
                            user=self.admin,
                            params=params)
        self.assertStatus(resp, 400)
        self.assertEqual(
            resp.json, {
                'type': 'validation',
                'field': 'bucket',
                'message': 'Bucket must not be empty.'
            })

        params['bucket'] = 'bucketname'
        # Validation should fail with a missing bucket
        resp = self.request(path='/assetstore',
                            method='POST',
                            user=self.admin,
                            params=params)
        self.assertStatus(resp, 400)
        self.assertEqual(
            resp.json, {
                'type': 'validation',
                'field': 'bucket',
                'message': 'Unable to write into bucket "bucketname".'
            })

        # Validation should fail with a bogus service name
        params['service'] = 'ftp://nowhere'
        resp = self.request(path='/assetstore',
                            method='POST',
                            user=self.admin,
                            params=params)
        self.assertStatus(resp, 400)
        del params['service']

        # Create a bucket (mocked using moto), so that we can create an assetstore in it
        botoParams = makeBotoConnectParams(params['accessKeyId'],
                                           params['secret'])
        client = mock_s3.createBucket(botoParams, 'bucketname')

        # Create an assetstore
        resp = self.request(path='/assetstore',
                            method='POST',
                            user=self.admin,
                            params=params)
        self.assertStatusOk(resp)
        assetstore = Assetstore().load(resp.json['_id'])

        # Set the assetstore to current.  This is really to test the edit assetstore code.
        params['current'] = True
        resp = self.request(path='/assetstore/%s' % assetstore['_id'],
                            method='PUT',
                            user=self.admin,
                            params=params)
        self.assertStatusOk(resp)

        # Test init for a single-chunk upload
        folders = Folder().childFolders(self.admin, 'user')
        parentFolder = six.next(folders)
        params = {
            'parentType': 'folder',
            'parentId': parentFolder['_id'],
            'name': 'My File.txt',
            'size': 1024,
            'mimeType': 'text/plain'
        }
        resp = self.request(path='/file',
                            method='POST',
                            user=self.admin,
                            params=params)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['received'], 0)
        self.assertEqual(resp.json['size'], 1024)
        self.assertEqual(resp.json['behavior'], 's3')

        # For S3, the init response carries the presigned request the client
        # should issue directly against S3.
        singleChunkUpload = resp.json
        s3Info = singleChunkUpload['s3']
        self.assertEqual(s3Info['chunked'], False)
        self.assertIsInstance(s3Info['chunkLength'], int)
        self.assertEqual(s3Info['request']['method'], 'PUT')
        six.assertRegex(self, s3Info['request']['url'], s3Regex)
        self.assertEqual(s3Info['request']['headers']['x-amz-acl'], 'private')

        # Test resume of a single-chunk upload
        resp = self.request(path='/file/offset',
                            method='GET',
                            user=self.admin,
                            params={'uploadId': resp.json['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['method'], 'PUT')
        self.assertTrue('headers' in resp.json)
        six.assertRegex(self, resp.json['url'], s3Regex)

        # Test finalize for a single-chunk upload
        resp = self.request(path='/file/completion',
                            method='POST',
                            user=self.admin,
                            params={'uploadId': singleChunkUpload['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['size'], 1024)
        self.assertEqual(resp.json['assetstoreId'], str(assetstore['_id']))
        # s3Key/relpath are internal fields and must not leak through REST.
        self.assertFalse('s3Key' in resp.json)
        self.assertFalse('relpath' in resp.json)

        file = File().load(resp.json['_id'], force=True)
        self.assertTrue('s3Key' in file)
        six.assertRegex(self, file['relpath'], '^/bucketname/foo/bar/')

        # Test init for a multi-chunk upload (5 GiB forces chunking)
        params['size'] = 1024 * 1024 * 1024 * 5
        resp = self.request(path='/file',
                            method='POST',
                            user=self.admin,
                            params=params)
        self.assertStatusOk(resp)

        multiChunkUpload = resp.json
        s3Info = multiChunkUpload['s3']
        self.assertEqual(s3Info['chunked'], True)
        self.assertIsInstance(s3Info['chunkLength'], int)
        self.assertEqual(s3Info['request']['method'], 'POST')
        six.assertRegex(self, s3Info['request']['url'], s3Regex)

        # Test uploading a chunk
        resp = self.request(path='/file/chunk',
                            method='POST',
                            user=self.admin,
                            params={
                                'uploadId':
                                multiChunkUpload['_id'],
                                'offset':
                                0,
                                'chunk':
                                json.dumps({
                                    'partNumber': 1,
                                    's3UploadId': 'abcd'
                                })
                            })
        self.assertStatusOk(resp)
        six.assertRegex(self, resp.json['s3']['request']['url'], s3Regex)
        self.assertEqual(resp.json['s3']['request']['method'], 'PUT')

        # We should not be able to call file/offset with multi-chunk upload
        resp = self.request(path='/file/offset',
                            method='GET',
                            user=self.admin,
                            params={'uploadId': multiChunkUpload['_id']})
        self.assertStatus(resp, 400)
        self.assertEqual(
            resp.json, {
                'type':
                'validation',
                'message':
                'You should not call requestOffset on a chunked '
                'direct-to-S3 upload.'
            })

        # Test finalize for a multi-chunk upload
        resp = self.request(path='/file/completion',
                            method='POST',
                            user=self.admin,
                            params={'uploadId': multiChunkUpload['_id']})
        largeFile = resp.json
        self.assertStatusOk(resp)
        six.assertRegex(self, resp.json['s3FinalizeRequest']['url'], s3Regex)
        self.assertEqual(resp.json['s3FinalizeRequest']['method'], 'POST')

        # Test init for an empty file (should be no-op)
        params['size'] = 0
        resp = self.request(path='/file',
                            method='POST',
                            user=self.admin,
                            params=params)
        emptyFile = resp.json
        self.assertStatusOk(resp)
        self.assertFalse('behavior' in resp.json)
        self.assertFalse('s3' in resp.json)

        # Test download for an empty file
        resp = self.request(path='/file/%s/download' % emptyFile['_id'],
                            user=self.admin,
                            method='GET',
                            isJson=False)
        self.assertStatusOk(resp)
        self.assertEqual(self.getBody(resp), '')
        self.assertEqual(resp.headers['Content-Length'], 0)
        self.assertEqual(resp.headers['Content-Disposition'],
                         'attachment; filename="My File.txt"')

        # Test download of a non-empty file: should redirect to S3
        resp = self.request(path='/file/%s/download' % largeFile['_id'],
                            user=self.admin,
                            method='GET',
                            isJson=False)
        self.assertStatus(resp, 303)
        six.assertRegex(self, resp.headers['Location'], s3Regex)

        # Test download of a non-empty file, with Content-Disposition=inline.
        # Expect the special S3 header response-content-disposition.
        params = {'contentDisposition': 'inline'}
        inlineRegex = r'response-content-disposition=inline%3B%20filename%3D%22My%20File.txt%22'
        resp = self.request(path='/file/%s/download' % largeFile['_id'],
                            user=self.admin,
                            method='GET',
                            isJson=False,
                            params=params)
        self.assertStatus(resp, 303)
        six.assertRegex(self, resp.headers['Location'], s3Regex)
        six.assertRegex(self, resp.headers['Location'], inlineRegex)

        # Test download as part of a streaming zip; intercept the HTTPS fetch
        # the server makes to S3 and return fixed contents.
        @httmock.all_requests
        def s3_pipe_mock(url, request):
            if 's3.amazonaws.com' in url.netloc and url.scheme == 'https':
                return 'dummy file contents'
            else:
                raise Exception('Unexpected url %s' % url)

        with httmock.HTTMock(s3_pipe_mock):
            resp = self.request('/folder/%s/download' % parentFolder['_id'],
                                method='GET',
                                user=self.admin,
                                isJson=False)
            self.assertStatusOk(resp)
            zip = zipfile.ZipFile(io.BytesIO(self.getBody(resp, text=False)),
                                  'r')
            self.assertTrue(zip.testzip() is None)

            extracted = zip.read('Public/My File.txt')
            self.assertEqual(extracted, b'dummy file contents')

        # Create a "test" key for importing
        client.put_object(Bucket='bucketname', Key='foo/bar/test', Body=b'')

        # Attempt to import item directly into user; should fail
        resp = self.request('/assetstore/%s/import' % assetstore['_id'],
                            method='POST',
                            params={
                                'importPath': '/foo/bar',
                                'destinationType': 'user',
                                'destinationId': self.admin['_id']
                            },
                            user=self.admin)
        self.assertStatus(resp, 400)
        self.assertEqual(
            resp.json['message'],
            'Keys cannot be imported directly underneath a user.')

        # Import existing data from S3
        resp = self.request('/folder',
                            method='POST',
                            params={
                                'parentType': 'folder',
                                'parentId': parentFolder['_id'],
                                'name': 'import destinaton'
                            },
                            user=self.admin)
        self.assertStatusOk(resp)
        importFolder = resp.json

        resp = self.request('/assetstore/%s/import' % assetstore['_id'],
                            method='POST',
                            params={
                                'importPath': '',
                                'destinationType': 'folder',
                                'destinationId': importFolder['_id'],
                            },
                            user=self.admin)
        self.assertStatusOk(resp)

        # Data should now appear in the tree: foo/bar/test becomes nested
        # folders "foo" > "bar" containing item "test".
        resp = self.request('/folder',
                            user=self.admin,
                            params={
                                'parentId': importFolder['_id'],
                                'parentType': 'folder'
                            })
        self.assertStatusOk(resp)
        children = resp.json
        self.assertEqual(len(children), 1)
        self.assertEqual(children[0]['name'], 'foo')

        resp = self.request('/folder',
                            user=self.admin,
                            params={
                                'parentId': children[0]['_id'],
                                'parentType': 'folder'
                            })
        self.assertStatusOk(resp)
        children = resp.json
        self.assertEqual(len(children), 1)
        self.assertEqual(children[0]['name'], 'bar')

        resp = self.request('/item',
                            user=self.admin,
                            params={'folderId': children[0]['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(len(resp.json), 1)
        item = resp.json[0]
        self.assertEqual(item['name'], 'test')
        self.assertEqual(item['size'], 0)

        resp = self.request('/item/%s/files' % item['_id'], user=self.admin)
        self.assertStatusOk(resp)
        self.assertEqual(len(resp.json), 1)
        self.assertFalse('imported' in resp.json[0])
        self.assertFalse('relpath' in resp.json[0])
        file = File().load(resp.json[0]['_id'], force=True)
        self.assertTrue(file['imported'])
        self.assertFalse('relpath' in file)
        self.assertEqual(file['size'], 0)
        self.assertEqual(file['assetstoreId'], assetstore['_id'])
        self.assertTrue(
            client.get_object(Bucket='bucketname', Key='foo/bar/test')
            is not None)

        # Deleting an imported file should not delete it from S3; no delete
        # event should be queued on the daemon.
        with mock.patch('girder.events.daemon.trigger') as daemon:
            resp = self.request('/item/%s' % str(item['_id']),
                                method='DELETE',
                                user=self.admin)
            self.assertStatusOk(resp)
            self.assertEqual(len(daemon.mock_calls), 0)

        # Create the file key in the moto s3 store so that we can test that it gets deleted.
        file = File().load(largeFile['_id'], user=self.admin)
        client.create_multipart_upload(Bucket='bucketname', Key=file['s3Key'])
        client.put_object(Bucket='bucketname', Key=file['s3Key'], Body=b'test')

        # Test delete for a non-empty file
        resp = self.request(path='/file/%s' % largeFile['_id'],
                            user=self.admin,
                            method='DELETE')
        self.assertStatusOk(resp)

        # The file should be gone now
        resp = self.request(path='/file/%s/download' % largeFile['_id'],
                            user=self.admin,
                            isJson=False)
        self.assertStatus(resp, 400)
        # The actual delete may still be in the event queue, so we want to
        # check the S3 bucket directly.
        startTime = time.time()
        while True:
            try:
                client.get_object(Bucket='bucketname', Key=file['s3Key'])
            except botocore.exceptions.ClientError:
                break
            if time.time() - startTime > 15:
                break  # give up and fail
            time.sleep(0.1)
        with self.assertRaises(botocore.exceptions.ClientError):
            client.get_object(Bucket='bucketname', Key=file['s3Key'])

        resp = self.request(path='/folder/%s' % parentFolder['_id'],
                            method='DELETE',
                            user=self.admin)
        self.assertStatusOk(resp)
# If there are no users, create an admin user
if User().findOne() is None:
    User().createUser('admin', 'password', 'Admin', 'Admin',
                      '*****@*****.**')
adminUser = User().findOne({'admin': True})
# Make sure we have an assetstore
if Assetstore().findOne() is None:
    Assetstore().createFilesystemAssetstore('Assetstore', '/assetstore')
# If we don't have a default task folder, make a task collection and folder
if not Setting().get('slicer_cli_web.task_folder'):
    # Make sure we have a Tasks collection with a Slicer CLI Web Tasks folder
    if Collection().findOne({'name': 'Tasks'}) is None:
        Collection().createCollection('Tasks', adminUser)
    tasksCollection = Collection().findOne({'name': 'Tasks'})
    taskFolderName = 'Slicer CLI Web Tasks'
    # Create the task folder under the collection only if it does not exist
    if Folder().findOne({
            'name': taskFolderName,
            'parentId': tasksCollection['_id']
    }) is None:
        Folder().createFolder(tasksCollection,
                              taskFolderName,
                              parentType='collection',
                              public=True,
                              creator=adminUser)
    # Re-query so we record the folder id whether it was just created or
    # already existed.
    taskFolder = Folder().findOne({
        'name': taskFolderName,
        'parentId': tasksCollection['_id']
    })
    Setting().set('slicer_cli_web.task_folder', str(taskFolder['_id']))
Exemple #28
0
    def testMoveBetweenAssetstores(self):
        """Exercise moving files between filesystem and GridFS assetstores.

        Covers: targeted uploads, replacing file contents into a different
        assetstore, moving files back and forth (including no-op moves,
        zero-length and multi-chunk files), vetoing a move via an event,
        progress notifications, and moving an imported file.
        """
        folder = six.next(Folder().childFolders(self.admin,
                                                parentType='user',
                                                force=True,
                                                filters={'name': 'Public'}))

        resp = self.request(path='/assetstore', method='GET', user=self.admin)
        self.assertStatusOk(resp)
        fs_assetstore = resp.json[0]

        # Clear any old DB data
        base.dropGridFSDatabase('girder_test_assetstore_move_assetstore')
        params = {
            'name': 'New Name',
            'type': AssetstoreType.GRIDFS,
            'db': 'girder_test_assetstore_move_assetstore'
        }
        resp = self.request(path='/assetstore',
                            method='POST',
                            user=self.admin,
                            params=params)
        self.assertStatusOk(resp)
        gridfs_assetstore = resp.json

        # Upload a file - it should go to the fs assetstore
        uploadData = 'helloworld'
        params = {
            'parentType': 'folder',
            'parentId': folder['_id'],
            'name': 'sample1',
            'size': len(uploadData),
            'mimeType': 'text/plain'
        }
        resp = self.request(path='/file',
                            method='POST',
                            user=self.admin,
                            params=params)
        self.assertStatusOk(resp)
        upload = resp.json
        resp = self.request(path='/file/chunk',
                            method='POST',
                            user=self.admin,
                            body=uploadData,
                            params={'uploadId': upload['_id']},
                            type='text/plain')
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
        uploadedFiles = [resp.json]

        # Upload it again targeting a different assetstore
        params['assetstoreId'] = gridfs_assetstore['_id']
        resp = self.request(path='/file',
                            method='POST',
                            user=self.admin,
                            params=params)
        self.assertStatusOk(resp)
        upload = resp.json
        resp = self.request(path='/file/chunk',
                            method='POST',
                            user=self.admin,
                            body=uploadData,
                            params={'uploadId': upload['_id']},
                            type='text/plain')
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
        uploadedFiles.append(resp.json)

        # Replace the first file, directing the replacement to a different
        # assetstore
        replaceParams = {
            'size': len(uploadData),
            'assetstoreId': gridfs_assetstore['_id'],
        }
        resp = self.request(path='/file/%s/contents' % uploadedFiles[0]['_id'],
                            method='PUT',
                            user=self.admin,
                            params=replaceParams)
        self.assertStatusOk(resp)
        upload = resp.json

        resp = self.request(path='/file/chunk',
                            method='POST',
                            user=self.admin,
                            body=uploadData,
                            params={'uploadId': upload['_id']},
                            type='text/plain')
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
        uploadedFiles[0] = resp.json

        # Move a file from the gridfs assetstore to the filesystem assetstore
        resp = self.request(path='/file/%s/move' % uploadedFiles[0]['_id'],
                            method='PUT',
                            user=self.admin,
                            params={'assetstoreId': fs_assetstore['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
        uploadedFiles[0] = resp.json

        # Doing it again shouldn't change it.
        resp = self.request(path='/file/%s/move' % uploadedFiles[0]['_id'],
                            method='PUT',
                            user=self.admin,
                            params={'assetstoreId': fs_assetstore['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
        uploadedFiles[0] = resp.json

        # We should be able to move it back
        resp = self.request(path='/file/%s/move' % uploadedFiles[0]['_id'],
                            method='PUT',
                            user=self.admin,
                            params={'assetstoreId': gridfs_assetstore['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
        uploadedFiles[0] = resp.json

        # Test moving a file of zero length
        params['size'] = 0
        resp = self.request(path='/file',
                            method='POST',
                            user=self.admin,
                            params=params)
        self.assertStatusOk(resp)
        uploadedFiles.append(resp.json)

        resp = self.request(path='/file/%s/move' % uploadedFiles[2]['_id'],
                            method='PUT',
                            user=self.admin,
                            params={'assetstoreId': fs_assetstore['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
        uploadedFiles[2] = resp.json

        # Test preventing the move via an event
        def stopMove(event):
            # Vetoing the default action makes the move endpoint fail.
            event.preventDefault()

        events.bind('model.upload.movefile', 'assetstore_test', stopMove)
        try:
            resp = self.request(path='/file/%s/move' % uploadedFiles[0]['_id'],
                                method='PUT',
                                user=self.admin,
                                params={'assetstoreId': fs_assetstore['_id']},
                                isJson=False)
            # Reaching here means the move was not vetoed; fail the test.
            self.assertFalse('Move should have been prevented')
        except AssertionError as exc:
            self.assertIn('could not be moved to assetstore', str(exc))
        events.unbind('model.upload.movefile', 'assetstore_test')

        # Test files big enough to be multi-chunk
        chunkSize = Upload()._getChunkSize()
        data = io.BytesIO(b' ' * chunkSize * 2)
        uploadedFiles.append(Upload().uploadFromFile(data,
                                                     chunkSize * 2,
                                                     'sample',
                                                     parentType='folder',
                                                     parent=folder,
                                                     assetstore=fs_assetstore))
        resp = self.request(path='/file/%s/move' % uploadedFiles[3]['_id'],
                            method='PUT',
                            user=self.admin,
                            params={'assetstoreId': gridfs_assetstore['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
        uploadedFiles[3] = resp.json

        # Test progress notifications emitted during a move
        size = chunkSize * 2
        data = io.BytesIO(b' ' * size)
        upload = Upload().uploadFromFile(data,
                                         size,
                                         'progress',
                                         parentType='folder',
                                         parent=folder,
                                         assetstore=fs_assetstore)
        params = {'assetstoreId': gridfs_assetstore['_id'], 'progress': True}
        resp = self.request(path='/file/%s/move' % upload['_id'],
                            method='PUT',
                            user=self.admin,
                            params=params)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])

        resp = self.request(path='/notification/stream',
                            method='GET',
                            user=self.admin,
                            isJson=False,
                            params={'timeout': 1})
        messages = self.getSseMessages(resp)
        self.assertEqual(len(messages), 1)
        self.assertEqual(messages[0]['type'], 'progress')
        self.assertEqual(messages[0]['data']['current'], size)

        # Test moving imported file

        # Create assetstore to import file into
        params = {
            'name': 'ImportTest',
            'type': AssetstoreType.FILESYSTEM,
            'root': os.path.join(fs_assetstore['root'], 'import')
        }
        resp = self.request(path='/assetstore',
                            method='POST',
                            user=self.admin,
                            params=params)
        self.assertStatusOk(resp)
        import_assetstore = resp.json

        # Import file
        params = {
            'importPath':
            os.path.join(ROOT_DIR, 'tests', 'cases', 'py_client', 'testdata',
                         'world.txt'),
            'destinationType':
            'folder',
        }

        Assetstore().importData(import_assetstore,
                                parent=folder,
                                parentType='folder',
                                params=params,
                                progress=ProgressContext(False),
                                user=self.admin,
                                leafFoldersAsItems=False)

        file = path_util.lookUpPath('/user/admin/Public/world.txt/world.txt',
                                    self.admin)['document']

        # Move file
        params = {
            'assetstoreId': fs_assetstore['_id'],
        }
        resp = self.request(path='/file/%s/move' % file['_id'],
                            method='PUT',
                            user=self.admin,
                            params=params)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])

        # Check that we can still download the file
        resp = self.request(path='/file/%s/download' % file['_id'],
                            user=self.admin,
                            isJson=False)
        self.assertStatusOk(resp)
Exemple #29
0
    def testAccessFlags(self):
        resp = self.request('/system/access_flag')
        self.assertStatusOk(resp)
        self.assertEqual(resp.json, {})

        registerAccessFlag('my_key', name='hello', description='a custom flag')

        resp = self.request('/system/access_flag')
        self.assertStatusOk(resp)
        self.assertEqual(
            resp.json, {
                'my_key': {
                    'name': 'hello',
                    'description': 'a custom flag',
                    'admin': False
                }
            })

        self.users[1] = User().load(self.users[1]['_id'], force=True)
        user = self.users[1]

        # Manage custom access flags on an access controlled resource
        self.assertFalse(User().hasAccessFlags(user, user, flags=['my_key']))

        # Admin should always have permission
        self.assertTrue(User().hasAccessFlags(user,
                                              self.users[0],
                                              flags=['my_key']))

        # Test the requireAccessFlags method
        with self.assertRaises(AccessException):
            User().requireAccessFlags(user, user=user, flags='my_key')

        User().requireAccessFlags(user, user=self.users[0], flags='my_key')

        acl = User().getFullAccessList(user)
        self.assertEqual(acl['users'][0]['flags'], [])

        # Test loadmodel requiredFlags argument via REST endpoint
        resp = self.request('/test_endpoints/loadmodel_with_flags/%s' %
                            user['_id'],
                            user=self.users[1])
        self.assertStatus(resp, 403)

        user = User().setAccessList(
            self.users[0],
            access={
                'users': [{
                    'id': self.users[1]['_id'],
                    'level': AccessType.ADMIN,
                    'flags': ['my_key', 'not a registered flag']
                }],
                'groups': [{
                    'id': self.group['_id'],
                    'level': AccessType.ADMIN,
                    'flags': ['my_key']
                }]
            },
            save=True)

        resp = self.request('/test_endpoints/loadmodel_with_flags/%s' %
                            user['_id'],
                            user=self.users[1])
        self.assertStatusOk(resp)
        self.assertEqual(resp.json, 'success')

        # Only registered flags should be stored
        acl = User().getFullAccessList(user)
        self.assertEqual(acl['users'][0]['flags'], ['my_key'])
        self.assertTrue(User().hasAccessFlags(user, user, flags=['my_key']))

        # Create an admin-only access flag
        registerAccessFlag('admin_flag', name='admin flag', admin=True)

        # Non-admin shouldn't be able to set it
        user = User().setAccessList(self.users[0],
                                    access={
                                        'users': [{
                                            'id': self.users[1]['_id'],
                                            'level': AccessType.ADMIN,
                                            'flags': ['admin_flag']
                                        }],
                                        'groups': []
                                    },
                                    save=True,
                                    user=self.users[1])

        acl = User().getFullAccessList(user)
        self.assertEqual(acl['users'][0]['flags'], [])

        # Admin user should be able to set it
        user = User().setAccessList(self.users[1],
                                    access={
                                        'users': [{
                                            'id': self.users[1]['_id'],
                                            'level': AccessType.ADMIN,
                                            'flags': ['admin_flag']
                                        }],
                                        'groups': [{
                                            'id': self.group['_id'],
                                            'level': AccessType.ADMIN,
                                            'flags': ['admin_flag']
                                        }]
                                    },
                                    save=True,
                                    user=self.users[0])

        acl = User().getFullAccessList(user)
        self.assertEqual(acl['users'][0]['flags'], ['admin_flag'])

        # An already-enabled admin-only flag should stay enabled for non-admin user
        user = User().setAccessList(self.users[1],
                                    access={
                                        'users': [{
                                            'id':
                                            self.users[1]['_id'],
                                            'level':
                                            AccessType.ADMIN,
                                            'flags': ['my_key', 'admin_flag']
                                        }],
                                        'groups': [{
                                            'id': self.group['_id'],
                                            'level': AccessType.ADMIN,
                                            'flags': ['admin_flag']
                                        }]
                                    },
                                    save=True,
                                    user=self.users[1])

        acl = User().getFullAccessList(user)
        self.assertEqual(set(acl['users'][0]['flags']),
                         {'my_key', 'admin_flag'})
        self.assertEqual(acl['groups'][0]['flags'], ['admin_flag'])

        # Test setting public flags on a collection and folder
        collectionModel = Collection()
        folderModel = Folder()
        itemModel = Item()
        collection = collectionModel.createCollection('coll',
                                                      creator=self.users[0],
                                                      public=True)
        folder = folderModel.createFolder(collection,
                                          'folder',
                                          parentType='collection',
                                          creator=self.users[0])

        # Add an item to the folder so we can test AclMixin flag behavior
        item = itemModel.createItem(folder=folder,
                                    name='test',
                                    creator=self.users[0])

        folder = folderModel.setUserAccess(folder,
                                           self.users[1],
                                           level=AccessType.ADMIN,
                                           save=True,
                                           currentUser=self.users[0])

        with self.assertRaises(AccessException):
            collectionModel.requireAccessFlags(collection,
                                               user=None,
                                               flags='my_key')

        # Test AclMixin flag behavior
        with self.assertRaises(AccessException):
            itemModel.requireAccessFlags(item, user=None, flags='my_key')

        self.assertFalse(
            itemModel.hasAccessFlags(item, user=None, flags='my_key'))

        collection = collectionModel.setAccessList(collection,
                                                   access=collection['access'],
                                                   save=True,
                                                   recurse=True,
                                                   user=self.users[0],
                                                   publicFlags=['my_key'])
        collectionModel.requireAccessFlags(collection,
                                           user=None,
                                           flags='my_key')

        # Make sure recursive setting of public flags worked
        folder = folderModel.load(folder['_id'], force=True)
        self.assertEqual(folder['publicFlags'], ['my_key'])

        itemModel.requireAccessFlags(item, user=None, flags='my_key')

        # Non-admin shouldn't be able to set admin-only public flags
        folder = folderModel.setPublicFlags(folder,
                                            flags=['admin_flag'],
                                            user=self.users[1],
                                            save=True)
        self.assertEqual(folder['publicFlags'], [])

        # Admin users should be able to set admin-only public flags
        folder = folderModel.setPublicFlags(folder,
                                            flags=['admin_flag'],
                                            user=self.users[0],
                                            save=True,
                                            append=True)
        self.assertEqual(folder['publicFlags'], ['admin_flag'])

        # Non-admin users can set admin-only public flags if they are already enabled
        folder = folderModel.setPublicFlags(folder,
                                            flags=['admin_flag', 'my_key'],
                                            user=self.users[1],
                                            save=True)
        self.assertEqual(set(folder['publicFlags']), {'admin_flag', 'my_key'})

        # Test "force" options
        folder = folderModel.setPublicFlags(folder,
                                            flags='admin_flag',
                                            force=True,
                                            save=True)
        self.assertEqual(folder['publicFlags'], ['admin_flag'])

        folder = folderModel.setAccessList(folder,
                                           access={
                                               'users': [{
                                                   'id':
                                                   self.users[1]['_id'],
                                                   'level':
                                                   AccessType.ADMIN,
                                                   'flags':
                                                   ['my_key', 'admin_flag']
                                               }],
                                               'groups': []
                                           },
                                           save=True,
                                           force=True)
        folderModel.requireAccessFlags(folder,
                                       user=self.users[1],
                                       flags='my_key')

        folder = folderModel.setUserAccess(folder,
                                           self.users[1],
                                           level=AccessType.READ,
                                           save=True,
                                           force=True,
                                           flags=[])
        self.assertFalse(
            folderModel.hasAccessFlags(folder, self.users[1], flags='my_key'))

        folder = folderModel.setGroupAccess(folder,
                                            self.group,
                                            level=AccessType.READ,
                                            save=True,
                                            force=True,
                                            flags='my_key')
        folderModel.requireAccessFlags(folder,
                                       user=self.users[1],
                                       flags='my_key')

        # Testing with flags=None should give sensible behavior
        folderModel.requireAccessFlags(folder, user=None, flags=None)

        # Test filtering results by access flags (both ACModel and AclMixin)
        for model, doc in ((folderModel, folder), (itemModel, item)):
            cursor = model.find({})
            self.assertGreater(len(list(cursor)), 0)

            cursor = model.find({})
            filtered = list(
                model.filterResultsByPermission(cursor,
                                                user=None,
                                                level=AccessType.READ,
                                                flags='my_key'))
            self.assertEqual(len(filtered), 0)

            cursor = model.find({})
            filtered = list(
                model.filterResultsByPermission(cursor,
                                                user=self.users[1],
                                                level=AccessType.READ,
                                                flags=('my_key',
                                                       'admin_flag')))
            self.assertEqual(len(filtered), 1)
            self.assertEqual(filtered[0]['_id'], doc['_id'])
Exemple #30
0
 def prereviewFolder(self, dataset):
     """Look up the 'Pre-review' folder directly under the dataset's folder.

     :param dataset: Dataset document; its ``folderId`` field identifies the
         parent folder to search under.
     :returns: The result of ``Folder().findOne`` for the matching folder.
     """
     query = {
         'name': 'Pre-review',
         'parentId': dataset['folderId']
     }
     return Folder().findOne(query)