Example #1
    def tearDown(self):
        if self.testForFinalizeUpload:
            self.assertTrue(self.finalizeUploadBeforeCalled)
            self.assertTrue(self.finalizeUploadAfterCalled)

        events.unbind('model.file.finalizeUpload.before', '_testFinalizeUploadBefore')
        events.unbind('model.file.finalizeUpload.after', '_testFinalizeUploadAfter')
Example #2
    def setUp(self):
        base.TestCase.setUp(self)

        # Create some test documents with an item
        admin = {
            'email': '*****@*****.**',
            'login': '******',
            'firstName': 'Admin',
            'lastName': 'Last',
            'password': '******',
            'admin': True
        }
        self.admin = User().createUser(**admin)

        user = {
            'email': '*****@*****.**',
            'login': '******',
            'firstName': 'First',
            'lastName': 'Last',
            'password': '******',
            'admin': False
        }
        self.user = User().createUser(**user)

        folders = Folder().childFolders(parent=self.admin, parentType='user', user=self.admin)
        for folder in folders:
            if folder['public'] is True:
                self.publicFolder = folder
            else:
                self.privateFolder = folder

        path = os.path.join(ROOT_DIR, 'clients', 'web', 'src', 'assets', 'Girder_Mark.png')
        with open(path, 'rb') as file:
            self.image = file.read()
        events.unbind('thumbnails.create', 'test')
Example #3
    def setUp(self):
        base.TestCase.setUp(self)

        # Create some test documents with an item
        admin = {
            'email': '*****@*****.**',
            'login': '******',
            'firstName': 'Admin',
            'lastName': 'Last',
            'password': '******',
            'admin': True
        }
        self.admin = User().createUser(**admin)

        user = {
            'email': '*****@*****.**',
            'login': '******',
            'firstName': 'First',
            'lastName': 'Last',
            'password': '******',
            'admin': False
        }
        self.user = User().createUser(**user)

        folders = Folder().childFolders(parent=self.admin, parentType='user', user=self.admin)
        for folder in folders:
            if folder['public'] is True:
                self.publicFolder = folder
            else:
                self.privateFolder = folder

        path = os.path.join(ROOT_DIR, 'girder', 'web_client', 'src', 'assets', 'Girder_Mark.png')
        with open(path, 'rb') as file:
            self.image = file.read()
        events.unbind('thumbnails.create', 'test')
Example #4
    def tearDown(self):
        if self.testForFinalizeUpload:
            self.assertTrue(self.finalizeUploadBeforeCalled)
            self.assertTrue(self.finalizeUploadAfterCalled)

            events.unbind("model.file.finalizeUpload.before", "_testFinalizeUploadBefore")
            events.unbind("model.file.finalizeUpload.after", "_testFinalizeUploadAfter")
Example #5
    def testModelSaveHooks(self):
        """
        This tests the general correctness of the model save hooks
        """
        self.ctr = 0

        def preSave(event):
            if '_id' not in event.info:
                self.ctr += 1

        def postSave(event):
            self.ctr += 2

        events.bind('model.user.save', 'test', preSave)

        user = self.model('user').createUser(login='******',
                                             password='******',
                                             firstName='A',
                                             lastName='A',
                                             email='*****@*****.**')
        self.assertEqual(self.ctr, 1)

        events.bind('model.user.save.after', 'test', postSave)
        self.ctr = 0

        user = self.model('user').save(user, triggerEvents=False)
        self.assertEqual(self.ctr, 0)

        self.model('user').save(user)
        self.assertEqual(self.ctr, 2)

        events.unbind('model.user.save', 'test')
        events.unbind('model.user.save.after', 'test')
Example #6
    def testModelSaveHooks(self):
        """
        This tests the general correctness of the model save hooks
        """
        self.ctr = 0

        def preSave(event):
            if '_id' not in event.info:
                self.ctr += 1

        def postSave(event):
            self.ctr += 2

        events.bind('model.user.save', 'test', preSave)

        user = self.model('user').createUser(
            login='******', password='******', firstName='A', lastName='A',
            email='*****@*****.**')
        self.assertEqual(self.ctr, 1)

        events.bind('model.user.save.after', 'test', postSave)
        self.ctr = 0

        user = self.model('user').save(user, triggerEvents=False)
        self.assertEqual(self.ctr, 0)

        self.model('user').save(user)
        self.assertEqual(self.ctr, 2)

        events.unbind('model.user.save', 'test')
        events.unbind('model.user.save.after', 'test')
Example #7
def load(info):
    setDefaults()

    settings = Setting()

    homeDirsRoot = settings.get(PluginSettings.HOME_DIRS_ROOT)
    logger.info('WT Home Dirs root: %s' % homeDirsRoot)
    startDAVServer(homeDirsRoot, HomeDirectoryInitializer, HomeAuthorizer, HomePathMapper())

    taleDirsRoot = settings.get(PluginSettings.TALE_DIRS_ROOT)
    logger.info('WT Tale Dirs root: %s' % taleDirsRoot)
    startDAVServer(taleDirsRoot, TaleDirectoryInitializer, TaleAuthorizer, TalePathMapper())

    runsDirsRoot = settings.get(PluginSettings.RUNS_DIRS_ROOT)
    if runsDirsRoot:
        logger.info('WT Runs Dirs root: %s' % runsDirsRoot)
        startDAVServer(runsDirsRoot, RunsDirectoryInitializer, RunsAuthorizer, RunsPathMapper())

    events.unbind('model.user.save.created', CoreEventHandler.USER_DEFAULT_FOLDERS)
    events.bind('model.user.save.created', 'wt_home_dirs', setHomeFolderMapping)
    events.bind('model.tale.save.created', 'wt_home_dirs', setTaleFolderMapping)
    events.bind('model.tale.remove', 'wt_home_dirs', deleteWorkspace)

    hdp = Homedirpass()
    info['apiRoot'].homedirpass = hdp
    info['apiRoot'].homedirpass.route('GET', ('generate',), hdp.generatePassword)
    info['apiRoot'].homedirpass.route('PUT', ('set',), hdp.setPassword)

    Tale().exposeFields(level=AccessType.READ, fields={"workspaceId"})
Example #8
    def testSynchronousEvents(self):
        name = '_test.event'
        handlerName = '_test.handler'
        events.bind(name, handlerName, self._increment)

        # Bind an event to increment the counter
        self.assertEqual(self.ctr, 0)
        event = events.trigger(name, {'amount': 2})
        self.assertEqual(self.ctr, 2)
        self.assertTrue(event.propagate)
        self.assertFalse(event.defaultPrevented)
        self.assertEqual(event.responses, [])

        # The event should still be bound here if a different handler unbinds
        events.unbind(name, 'not the handler name')
        events.trigger(name, {'amount': 2})
        self.assertEqual(self.ctr, 4)

        # Actually unbind the event; it should now no longer execute
        events.unbind(name, handlerName)
        events.trigger(name, {'amount': 2})
        self.assertEqual(self.ctr, 4)

        # Bind an event that prevents the default action and passes a response
        events.bind(name, handlerName, self._eatEvent)
        events.bind(name, 'other handler name', self._shouldNotBeCalled)
        event = events.trigger(name)
        self.assertTrue(event.defaultPrevented)
        self.assertFalse(event.propagate)
        self.assertEqual(event.responses, [{'foo': 'bar'}])
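The snippets in this listing all exercise the same bind/trigger/unbind surface of Girder's girder.events module: events.bind(name, handlerName, handler), events.trigger(name, info) and events.unbind(name, handlerName), plus event.preventDefault() and, judging by the assertions on event.propagate and event.responses, handler-side calls along the lines of stopPropagation() and addResponse(). As a rough orientation only, the stand-in below mimics that surface using nothing but the standard library; it is an illustrative sketch, not Girder's implementation, and every name in it is an assumption.

# Minimal stand-in for the bind/trigger/unbind pattern exercised above.
# Illustrative sketch only -- NOT Girder's girder.events implementation.
from collections import OrderedDict

_mapping = {}  # event name -> OrderedDict(handler name -> callback)


class Event(object):
    def __init__(self, name, info=None):
        self.name = name
        self.info = info
        self.propagate = True
        self.defaultPrevented = False
        self.responses = []

    def preventDefault(self):
        self.defaultPrevented = True
        return self

    def stopPropagation(self):
        self.propagate = False
        return self

    def addResponse(self, response):
        self.responses.append(response)


def bind(eventName, handlerName, handler):
    _mapping.setdefault(eventName, OrderedDict())[handlerName] = handler


def unbind(eventName, handlerName):
    # Unbinding a name that was never bound is a harmless no-op, which is
    # what the 'not the handler name' steps in the tests above rely on.
    _mapping.get(eventName, OrderedDict()).pop(handlerName, None)


def trigger(eventName, info=None):
    event = Event(eventName, info)
    for handler in list(_mapping.get(eventName, OrderedDict()).values()):
        handler(event)
        if not event.propagate:
            break
    return event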
Example #9
    def __init__(self, templatePath=None):
        if not templatePath:
            templatePath = os.path.join(constants.PACKAGE_DIR, 'api',
                                        'api_docs.mako')
        super(ApiDocs, self).__init__(templatePath)

        curConfig = config.getConfig()
        mode = curConfig['server'].get('mode', '')

        self.vars = {
            'apiRoot': '',
            'staticRoot': '',
            'brandName': Setting().get(SettingKey.BRAND_NAME),
            'mode': mode
        }

        events.unbind('model.setting.save.after',
                      CoreEventHandler.WEBROOT_SETTING_CHANGE)
        events.bind('model.setting.save.after',
                    CoreEventHandler.WEBROOT_SETTING_CHANGE,
                    self._onSettingSave)
        events.unbind('model.setting.remove',
                      CoreEventHandler.WEBROOT_SETTING_CHANGE)
        events.bind('model.setting.remove',
                    CoreEventHandler.WEBROOT_SETTING_CHANGE,
                    self._onSettingRemove)
Example #10
    def setUp(self):
        base.TestCase.setUp(self)
        # Create some test documents with an item
        admin = {
            'email': '*****@*****.**',
            'login': '******',
            'firstName': 'Admin',
            'lastName': 'Last',
            'password': '******',
            'admin': True
        }
        self.admin = self.model('user').createUser(**admin)

        user = {
            'email': '*****@*****.**',
            'login': '******',
            'firstName': 'First',
            'lastName': 'Last',
            'password': '******',
            'admin': False
        }
        self.user = self.model('user').createUser(**user)

        folders = self.model('folder').childFolders(
            parent=self.admin, parentType='user', user=self.admin)
        for folder in folders:
            if folder['public'] is True:
                self.publicFolder = folder
            else:
                self.privateFolder = folder

        events.unbind('thumbnails.create', 'test')
Example #11
    def setUp(self):
        base.TestCase.setUp(self)
        # Create some test documents with an item
        admin = {
            'email': '*****@*****.**',
            'login': '******',
            'firstName': 'Admin',
            'lastName': 'Last',
            'password': '******',
            'admin': True
        }
        self.admin = self.model('user').createUser(**admin)

        user = {
            'email': '*****@*****.**',
            'login': '******',
            'firstName': 'First',
            'lastName': 'Last',
            'password': '******',
            'admin': False
        }
        self.user = self.model('user').createUser(**user)

        folders = self.model('folder').childFolders(parent=self.admin,
                                                    parentType='user',
                                                    user=self.admin)
        for folder in folders:
            if folder['public'] is True:
                self.publicFolder = folder
            else:
                self.privateFolder = folder

        events.unbind('thumbnails.create', 'test')
Example #12
    def testModelSaveHooks(self):
        """
        This tests the general correctness of the model save hooks
        """
        self.ctr = 0

        def preSave(event):
            if "_id" not in event.info:
                self.ctr += 1

        def postSave(event):
            self.ctr += 2

        events.bind("model.user.save", "test", preSave)

        user = self.model("user").createUser(
            login="******", password="******", firstName="A", lastName="A", email="*****@*****.**"
        )
        self.assertEqual(self.ctr, 1)

        events.bind("model.user.save.after", "test", postSave)
        self.ctr = 0

        user = self.model("user").save(user, triggerEvents=False)
        self.assertEqual(self.ctr, 0)

        self.model("user").save(user)
        self.assertEqual(self.ctr, 2)

        events.unbind("model.user.save", "test")
        events.unbind("model.user.save.after", "test")
Example #13
    def testFilesystemAssetstoreUpload(self):
        self._testUpload()
        # Test that a delete during an upload still results in one file
        adapter = assetstore_utilities.getAssetstoreAdapter(self.assetstore)
        size = 101
        data = six.BytesIO(b' ' * size)
        files = []
        files.append(
            self.model('upload').uploadFromFile(data,
                                                size,
                                                'progress',
                                                parentType='folder',
                                                parent=self.folder,
                                                assetstore=self.assetstore))
        fullPath0 = adapter.fullPath(files[0])
        conditionRemoveDone = threading.Condition()
        conditionInEvent = threading.Condition()

        def waitForCondition(*args, **kwargs):
            # Signal that we are in the event and then wait to be told that
            # the delete has occurred before returning.
            with conditionInEvent:
                conditionInEvent.notify()
            with conditionRemoveDone:
                conditionRemoveDone.wait()

        def uploadFileWithWait():
            size = 101
            data = six.BytesIO(b' ' * size)
            files.append(
                self.model('upload').uploadFromFile(
                    data,
                    size,
                    'progress',
                    parentType='folder',
                    parent=self.folder,
                    assetstore=self.assetstore))

        events.bind('model.file.finalizeUpload.before', 'waitForCondition',
                    waitForCondition)
        # We create an upload that is bound to an event that waits during the
        # finalizeUpload.before event so that the remove will be executed
        # during this time.
        with conditionInEvent:
            t = threading.Thread(target=uploadFileWithWait)
            t.start()
            conditionInEvent.wait()
        self.assertTrue(os.path.exists(fullPath0))
        self.model('file').remove(files[0])
        # We shouldn't actually remove the file here
        self.assertTrue(os.path.exists(fullPath0))
        with conditionRemoveDone:
            conditionRemoveDone.notify()
        t.join()

        events.unbind('model.file.finalizeUpload.before', 'waitForCondition')
        fullPath1 = adapter.fullPath(files[0])
        self.assertEqual(fullPath0, fullPath1)
        self.assertTrue(os.path.exists(fullPath1))
Example #14
def testDeleteIncompleteTile(server, admin, user, fsAssetstore,
                             unavailableWorker):  # noqa
    # Test the large_image/settings end point
    resp = server.request(method='DELETE',
                          path='/large_image/tiles/incomplete',
                          user=user)
    assert utilities.respStatus(resp) == 403
    resp = server.request(method='DELETE',
                          path='/large_image/tiles/incomplete',
                          user=admin)
    assert utilities.respStatus(resp) == 200
    results = resp.json
    assert results['removed'] == 0

    file = utilities.uploadTestFile('yb10kx5k.png', admin, fsAssetstore)
    itemId = str(file['itemId'])
    resp = server.request(method='POST',
                          path='/item/%s/tiles' % itemId,
                          user=admin)
    resp = server.request(method='DELETE',
                          path='/large_image/tiles/incomplete',
                          user=admin)
    assert utilities.respStatus(resp) == 200
    results = resp.json
    assert results['removed'] == 1

    def preventCancel(evt):
        job = evt.info['job']
        params = evt.info['params']
        if (params.get('status') and params.get('status') != job['status']
                and params['status']
                in (JobStatus.CANCELED, CustomJobStatus.CANCELING)):
            evt.preventDefault()

    # Prevent a job from cancelling
    events.bind('jobs.job.update', 'testDeleteIncompleteTile', preventCancel)
    # Create a job and mark it as running
    resp = server.request(method='POST',
                          path='/item/%s/tiles' % itemId,
                          user=admin)
    job = Job().load(id=resp.json['_id'], force=True)
    Job().updateJob(job, status=JobStatus.RUNNING)

    resp = server.request(method='DELETE',
                          path='/large_image/tiles/incomplete',
                          user=admin)
    events.unbind('jobs.job.update', 'testDeleteIncompleteTile')
    assert utilities.respStatus(resp) == 200
    results = resp.json
    assert results['removed'] == 0
    assert 'could not be canceled' in results['message']
    # Now we should be able to cancel the job
    resp = server.request(method='DELETE',
                          path='/large_image/tiles/incomplete',
                          user=admin)
    assert utilities.respStatus(resp) == 200
    results = resp.json
    assert results['removed'] == 1
Example #15
            def tempListener(self, girderEvent):
                job = girderEvent.info['job']

                if (job['type'] == 'slicer_cli_web_job' and
                        job['status'] in (JobStatus.SUCCESS, JobStatus.ERROR)):
                    self.assertEqual(job['status'], status,
                                     'The status of the job should match')
                    events.unbind('jobs.job.update.after', 'slicer_cli_web_del')
                    job_status[0] = job['status']
                    event.set()
Example #16
            def tempListener(self, girderEvent):
                job = girderEvent.info['job']

                if (job['type'] == 'slicer_cli_web_job' and
                        job['status'] in (JobStatus.SUCCESS, JobStatus.ERROR)):
                    self.assertEqual(job['status'], status,
                                     'The status of the job should match')
                    events.unbind('jobs.job.update.after', 'HistomicsTK_del')
                    # del self.delHandler
                    event.set()
Example #17
        def tempListener(self, girderEvent):
            job = girderEvent.info['job']

            if (job['type'] == 'slicer_cli_web_job'
                    and job['status'] in (JobStatus.SUCCESS, JobStatus.ERROR)):
                assert job[
                    'status'] == status, 'The status of the job should match'
                job_status[0] = job['status']

                events.unbind('jobs.job.update.after', 'slicer_cli_web_add')

                # wait 5 seconds before continuing
                threading.Timer(5, lambda: event.set()).start()
Example #18
    def testDeleteIncompleteTile(self):
        # Test the large_image/settings end point
        resp = self.request(method='DELETE',
                            path='/large_image/tiles/incomplete',
                            user=self.user)
        self.assertStatus(resp, 403)
        resp = self.request(method='DELETE',
                            path='/large_image/tiles/incomplete',
                            user=self.admin)
        self.assertStatusOk(resp)
        results = resp.json
        self.assertEqual(results['removed'], 0)

        file = self._uploadFile(
            os.path.join(os.path.dirname(__file__), 'test_files',
                         'yb10kx5k.png'))
        itemId = str(file['itemId'])
        # Use a simulated cherrypy request, as it won't complete properly
        resp = self.request(method='POST',
                            path='/item/%s/tiles' % itemId,
                            user=self.admin)
        resp = self.request(method='DELETE',
                            path='/large_image/tiles/incomplete',
                            user=self.admin)
        self.assertStatusOk(resp)
        results = resp.json
        self.assertEqual(results['removed'], 1)

        def preventCancel(evt):
            evt.preventDefault()

        # Prevent a job from cancelling
        events.bind('jobs.cancel', 'testDeleteIncompleteTile', preventCancel)
        resp = self.request(method='POST',
                            path='/item/%s/tiles' % itemId,
                            user=self.admin)
        resp = self.request(method='DELETE',
                            path='/large_image/tiles/incomplete',
                            user=self.admin)
        self.assertStatusOk(resp)
        results = resp.json
        self.assertEqual(results['removed'], 0)
        self.assertIn('could not be canceled', results['message'])
        events.unbind('jobs.cancel', 'testDeleteIncompleteTile')
        # Now we should be able to cancel the job
        resp = self.request(method='DELETE',
                            path='/large_image/tiles/incomplete',
                            user=self.admin)
        self.assertStatusOk(resp)
        results = resp.json
        self.assertEqual(results['removed'], 1)
Example #19
    def testFilesystemAssetstoreUpload(self):
        self._testUpload()
        # Test that a delete during an upload still results in one file
        adapter = assetstore_utilities.getAssetstoreAdapter(self.assetstore)
        size = 101
        data = six.BytesIO(b' ' * size)
        files = []
        files.append(Upload().uploadFromFile(
            data, size, 'progress', parentType='folder', parent=self.folder,
            assetstore=self.assetstore))
        fullPath0 = adapter.fullPath(files[0])
        conditionRemoveDone = threading.Condition()
        conditionInEvent = threading.Condition()

        def waitForCondition(*args, **kwargs):
            # Signal that we are in the event and then wait to be told that
            # the delete has occurred before returning.
            with conditionInEvent:
                conditionInEvent.notify()
            with conditionRemoveDone:
                conditionRemoveDone.wait()

        def uploadFileWithWait():
            size = 101
            data = six.BytesIO(b' ' * size)
            files.append(Upload().uploadFromFile(
                data, size, 'progress', parentType='folder', parent=self.folder,
                assetstore=self.assetstore))

        events.bind('model.file.finalizeUpload.before', 'waitForCondition',
                    waitForCondition)
        # We create an upload that is bound to an event that waits during the
        # finalizeUpload.before event so that the remove will be executed
        # during this time.
        with conditionInEvent:
            t = threading.Thread(target=uploadFileWithWait)
            t.start()
            conditionInEvent.wait()
        self.assertTrue(os.path.exists(fullPath0))
        File().remove(files[0])
        # We shouldn't actually remove the file here
        self.assertTrue(os.path.exists(fullPath0))
        with conditionRemoveDone:
            conditionRemoveDone.notify()
        t.join()

        events.unbind('model.file.finalizeUpload.before', 'waitForCondition')
        fullPath1 = adapter.fullPath(files[0])
        self.assertEqual(fullPath0, fullPath1)
        self.assertTrue(os.path.exists(fullPath1))
Example #20
    def unbindModels(self, resources={}):
        """Unbind any models that were bound and aren't listed as needed.
        :param resources: resources that shouldn't be unbound."""
        # iterate over a list so that we can change the dictionary as we use it
        for oldresource in list(six.viewkeys(self.boundResources)):
            if oldresource not in resources:
                # Unbind this and remove it from the api
                events.unbind('model.{}.save'.format(oldresource), 'provenance')
                events.unbind('model.{}.copy.prepare'.format(oldresource),
                              'provenance')
                if hasattr(self.loadInfo['apiRoot'], oldresource):
                    getattr(self.loadInfo['apiRoot'], oldresource).removeRoute(
                        'GET', (':id', 'provenance'),
                        self.getGetHandler(oldresource))
                del self.boundResources[oldresource]
Example #21
    def unbindModels(self, resources={}):
        """Unbind any models that were bound and aren't listed as needed.
        :param resources: resources that shouldn't be unbound."""
        # iterate over a list so that we can change the dictionary as we use it
        for oldresource in list(six.viewkeys(self.boundResources)):
            if oldresource not in resources:
                # Unbind this and remove it from the api
                events.unbind('model.%s.save' % oldresource, 'provenance')
                events.unbind('model.%s.copy.prepare' % oldresource,
                              'provenance')
                if hasattr(self.loadInfo['apiRoot'], oldresource):
                    getattr(self.loadInfo['apiRoot'], oldresource).removeRoute(
                        'GET', (':id', 'provenance'),
                        self.getGetHandler(oldresource))
                del self.boundResources[oldresource]
Example #22
    def testDeleteIncompleteTile(self):
        # Test the large_image/settings end point
        resp = self.request(
            method='DELETE', path='/large_image/tiles/incomplete',
            user=self.user)
        self.assertStatus(resp, 403)
        resp = self.request(
            method='DELETE', path='/large_image/tiles/incomplete',
            user=self.admin)
        self.assertStatusOk(resp)
        results = resp.json
        self.assertEqual(results['removed'], 0)

        file = self._uploadFile(os.path.join(
            os.path.dirname(__file__), 'test_files', 'yb10kx5k.png'))
        itemId = str(file['itemId'])
        # Use a simulated cherrypy request, as it won't complete properly
        resp = self.request(
            method='POST', path='/item/%s/tiles' % itemId, user=self.admin)
        resp = self.request(
            method='DELETE', path='/large_image/tiles/incomplete',
            user=self.admin)
        self.assertStatusOk(resp)
        results = resp.json
        self.assertEqual(results['removed'], 1)

        def preventCancel(evt):
            evt.preventDefault()

        # Prevent a job from cancelling
        events.bind('jobs.cancel', 'testDeleteIncompleteTile', preventCancel)
        resp = self.request(
            method='POST', path='/item/%s/tiles' % itemId, user=self.admin)
        resp = self.request(
            method='DELETE', path='/large_image/tiles/incomplete',
            user=self.admin)
        self.assertStatusOk(resp)
        results = resp.json
        self.assertEqual(results['removed'], 0)
        self.assertIn('could not be canceled', results['message'])
        events.unbind('jobs.cancel', 'testDeleteIncompleteTile')
        # Now we should be able to cancel the job
        resp = self.request(
            method='DELETE', path='/large_image/tiles/incomplete',
            user=self.admin)
        self.assertStatusOk(resp)
        results = resp.json
        self.assertEqual(results['removed'], 1)
Example #23
    def unbindModels(self, resources={}):
        """
        Unbind any models that were bound and aren't listed as needed.
        :param resources: resources that shouldn't be unbound.
        """
        # iterate over a list so that we can change the dictionary as we use it
        for oldresource in list(six.viewkeys(self.boundResources)):
            if oldresource not in resources:
                # Unbind this and remove it from the api
                events.unbind("model.%s.save" % oldresource, "provenance")
                events.unbind("model.%s.copy.prepare" % oldresource, "provenance")
                if hasattr(self.loadInfo["apiRoot"], oldresource):
                    getattr(self.loadInfo["apiRoot"], oldresource).removeRoute(
                        "GET", (":id", "provenance"), self.getGetHandler(oldresource)
                    )
                del self.boundResources[oldresource]
Example #24
    def testUploadWithInitialChunk(self):
        """
        Upload a file to the server and several partial files.  Test that we
        can delete a partial upload but not a completed upload. Test that we
        can delete partial uploads that are older than a certain date.
        """
        self._uploadFileWithInitialChunk('upload1')
        self._uploadFileWithInitialChunk('upload2', oneChunk=True)
        # test uploading large files
        self._uploadFileWithInitialChunk('upload3', largeFile=True)
        partialUploads = []
        for largeFile in (False, True):
            for partial in range(1, 3):
                partialUploads.append(
                    self._uploadFileWithInitialChunk(
                        'partial_upload_%d_%s' % (partial, str(largeFile)),
                        partial, largeFile))
        # check that a user cannot list partial uploads
        resp = self.request(path='/system/uploads',
                            method='GET',
                            user=self.user)
        self.assertStatus(resp, 403)
        # The admin user should see all of the partial uploads, but not the
        # complete upload
        resp = self.request(path='/system/uploads',
                            method='GET',
                            user=self.admin)
        self.assertStatusOk(resp)
        foundUploads = resp.json
        self.assertEqual(len(foundUploads), len(partialUploads))
        # Check that the upload model is saved when we are using one chunk
        self._uploadWasSaved = 0

        def trackUploads(*args, **kwargs):
            self._uploadWasSaved += 1

        events.bind('model.upload.save', 'uploadWithInitialChunk',
                    trackUploads)
        self._uploadFileWithInitialChunk('upload4', oneChunk=True)
        # This can be changed to assertEqual if one chunk uploads aren't saved
        self.assertGreater(self._uploadWasSaved, 0)
        self._uploadWasSaved = 0
        # But that it is saved when using multiple chunks
        self._uploadFileWithInitialChunk('upload5')
        self.assertGreater(self._uploadWasSaved, 0)
        events.unbind('model.upload.save', 'uploadWithInitialChunk')
Example #25
    def testSynchronousEvents(self):
        name, failname = '_test.event', '_test.failure'
        handlerName = '_test.handler'
        with events.bound(name, handlerName, self._increment), \
                events.bound(failname, handlerName, self._raiseException):
            # Make sure our exception propagates out of the handler
            try:
                events.trigger(failname)
                self.assertTrue(False)
            except Exception as e:
                self.assertEqual(e.args[0], 'Failure condition')

            # Bind an event to increment the counter
            self.assertEqual(self.ctr, 0)
            event = events.trigger(name, {'amount': 2})
            self.assertEqual(self.ctr, 2)
            self.assertTrue(event.propagate)
            self.assertFalse(event.defaultPrevented)
            self.assertEqual(event.responses, [])

            # The event should still be bound here if another handler unbinds
            events.unbind(name, 'not the handler name')
            events.trigger(name, {'amount': 2})
            self.assertEqual(self.ctr, 4)

        # Actually unbind the event, by going out of scope of "bound"
        events.trigger(name, {'amount': 2})
        self.assertEqual(self.ctr, 4)

        # Bind an event that prevents the default action and passes a response
        with events.bound(name, handlerName, self._eatEvent), \
                events.bound(name, 'other handler name',
                             self._shouldNotBeCalled):
            event = events.trigger(name)
            self.assertTrue(event.defaultPrevented)
            self.assertFalse(event.propagate)
            self.assertEqual(event.responses, [{'foo': 'bar'}])

        # Test that the context manager unbinds after an unhandled exception
        try:
            with events.bound(failname, handlerName, self._raiseException):
                events.trigger(failname)
        except Exception:
            # The event should be unbound at this point
            events.trigger(failname)
Example #26
    def testSynchronousEvents(self):
        name, failname = '_test.event', '_test.failure'
        handlerName = '_test.handler'
        with events.bound(name, handlerName, self._increment), \
                events.bound(failname, handlerName, self._raiseException):
            # Make sure our exception propagates out of the handler
            try:
                events.trigger(failname)
                self.assertTrue(False)
            except Exception as e:
                self.assertEqual(e.args[0], 'Failure condition')

            # Bind an event to increment the counter
            self.assertEqual(self.ctr, 0)
            event = events.trigger(name, {'amount': 2})
            self.assertEqual(self.ctr, 2)
            self.assertTrue(event.propagate)
            self.assertFalse(event.defaultPrevented)
            self.assertEqual(event.responses, [])

            # The event should still be bound here if another handler unbinds
            events.unbind(name, 'not the handler name')
            events.trigger(name, {'amount': 2})
            self.assertEqual(self.ctr, 4)

        # Actually unbind the event, by going out of scope of "bound"
        events.trigger(name, {'amount': 2})
        self.assertEqual(self.ctr, 4)

        # Bind an event that prevents the default action and passes a response
        with events.bound(name, handlerName, self._eatEvent), \
                events.bound(name, 'other handler name',
                             self._shouldNotBeCalled):
            event = events.trigger(name)
            self.assertTrue(event.defaultPrevented)
            self.assertFalse(event.propagate)
            self.assertEqual(event.responses, [{'foo': 'bar'}])

        # Test that the context manager unbinds after an unhandled exception
        try:
            with events.bound(failname, handlerName, self._raiseException):
                events.trigger(failname)
        except Exception:
            # The event should be unbound at this point
            events.trigger(failname)
Example #27
def waitForProcessing(server, hachoirLogger):
    counter = {
        'count': 0
    }

    def postUpload(event):
        counter['count'] += 1

    def wait(count):
        startTime = time.time()
        while time.time()-startTime < 1:
            if counter['count'] >= count:
                break
            time.sleep(0.1)
        assert counter['count'] >= count

    events.bind('data.process', 'metadata_extractor_test', postUpload)
    yield wait
    events.unbind('data.process', 'metadata_extractor_test')
Example #28
def testSynchronousEvents(eventsHelper):
    name, failname = '_test.event', '_test.failure'
    handlerName = '_test.handler'
    with events.bound(name, handlerName, eventsHelper._increment), \
            events.bound(failname, handlerName, eventsHelper._raiseException):
        # Make sure our exception propagates out of the handler
        with pytest.raises(Exception, match='^Failure condition$'):
            events.trigger(failname)

        # Bind an event to increment the counter
        assert eventsHelper.ctr == 0
        event = events.trigger(name, {'amount': 2})
        assert eventsHelper.ctr == 2
        assert event.propagate
        assert not event.defaultPrevented
        assert event.responses == []

        # The event should still be bound here if another handler unbinds
        events.unbind(name, 'not the handler name')
        events.trigger(name, {'amount': 2})
        assert eventsHelper.ctr == 4

    # Actually unbind the event, by going out of scope of "bound"
    events.trigger(name, {'amount': 2})
    assert eventsHelper.ctr == 4

    # Bind an event that prevents the default action and passes a response
    with events.bound(name, handlerName, eventsHelper._eatEvent), \
            events.bound(name, 'other handler name',
                         eventsHelper._shouldNotBeCalled):
        event = events.trigger(name)
        assert event.defaultPrevented
        assert not event.propagate
        assert event.responses == [{'foo': 'bar'}]

    # Test that the context manager unbinds after an unhandled exception
    try:
        with events.bound(failname, handlerName, eventsHelper._raiseException):
            events.trigger(failname)
    except Exception:
        # The event should be unbound at this point
        events.trigger(failname)
Example #29
def testSynchronousEvents(eventsHelper):
    name, failname = '_test.event', '_test.failure'
    handlerName = '_test.handler'
    with events.bound(name, handlerName, eventsHelper._increment), \
            events.bound(failname, handlerName, eventsHelper._raiseException):
        # Make sure our exception propagates out of the handler
        with pytest.raises(Exception, match='^Failure condition$'):
            events.trigger(failname)

        # Bind an event to increment the counter
        assert eventsHelper.ctr == 0
        event = events.trigger(name, {'amount': 2})
        assert eventsHelper.ctr == 2
        assert event.propagate
        assert not event.defaultPrevented
        assert event.responses == []

        # The event should still be bound here if another handler unbinds
        events.unbind(name, 'not the handler name')
        events.trigger(name, {'amount': 2})
        assert eventsHelper.ctr == 4

    # Actually unbind the event, by going out of scope of "bound"
    events.trigger(name, {'amount': 2})
    assert eventsHelper.ctr == 4

    # Bind an event that prevents the default action and passes a response
    with events.bound(name, handlerName, eventsHelper._eatEvent), \
            events.bound(name, 'other handler name',
                         eventsHelper._shouldNotBeCalled):
        event = events.trigger(name)
        assert event.defaultPrevented
        assert not event.propagate
        assert event.responses == [{'foo': 'bar'}]

    # Test that the context manager unbinds after an unhandled exception
    try:
        with events.bound(failname, handlerName, eventsHelper._raiseException):
            events.trigger(failname)
    except Exception:
        # The event should be unbound at this point
        events.trigger(failname)
Example #30
    def testUploadWithInitialChunk(self):
        """
        Upload a file to the server and several partial files.  Test that we
        can delete a partial upload but not a completed upload. Test that we
        can delete partial uploads that are older than a certain date.
        """
        self._uploadFileWithInitialChunk('upload1')
        self._uploadFileWithInitialChunk('upload2', oneChunk=True)
        # test uploading large files
        self._uploadFileWithInitialChunk('upload3', largeFile=True)
        partialUploads = []
        for largeFile in (False, True):
            for partial in range(1, 3):
                partialUploads.append(self._uploadFileWithInitialChunk(
                    'partial_upload_%d_%s' % (partial, str(largeFile)),
                    partial, largeFile))
        # check that a user cannot list partial uploads
        resp = self.request(path='/system/uploads', method='GET',
                            user=self.user)
        self.assertStatus(resp, 403)
        # The admin user should see all of the partial uploads, but not the
        # complete upload
        resp = self.request(path='/system/uploads', method='GET',
                            user=self.admin)
        self.assertStatusOk(resp)
        foundUploads = resp.json
        self.assertEqual(len(foundUploads), len(partialUploads))
        # Check that the upload model is saved when we are using one chunk
        self._uploadWasSaved = 0

        def trackUploads(*args, **kwargs):
            self._uploadWasSaved += 1

        events.bind('model.upload.save', 'uploadWithInitialChunk', trackUploads)
        self._uploadFileWithInitialChunk('upload4', oneChunk=True)
        # This can be changed to assertEqual if one chunk uploads aren't saved
        self.assertGreater(self._uploadWasSaved, 0)
        self._uploadWasSaved = 0
        # But that it is saved when using multiple chunks
        self._uploadFileWithInitialChunk('upload5')
        self.assertGreater(self._uploadWasSaved, 0)
        events.unbind('model.upload.save', 'uploadWithInitialChunk')
Example #31
    def callback(event):
        job = event.info['job']

        if job['kwargs'].get('fileId') != str(file_id):
            return

        SUCCESS = JobStatus.SUCCESS
        ERROR = JobStatus.ERROR
        CANCELED = JobStatus.CANCELED

        if job['status'] == SUCCESS:
            item_id = job['kwargs']['attachToId']
            item = Item().load(item_id, user=user, level=AccessType.READ)
            thumbnails = item.get("_thumbnails", [])

            if len(thumbnails) > 0:
                thumbnail_id = thumbnails.pop()

                # remove old thumbnails
                if len(thumbnails) > 0:
                    Item().update({'_id': item_id},
                                  {'$set': {
                                      '_thumbnails': [thumbnail_id]
                                  }})
                    for thumb_id in thumbnails:
                        file = File().load(thumb_id,
                                           user=user,
                                           level=AccessType.WRITE)
                        File().remove(file)

                query = {'_id': model_id}
                updates = {}
                updates.setdefault('$set', {})[prop_name] = thumbnail_id
                update_result = super(Base, self).update(query, updates)
                if update_result.matched_count == 0:
                    raise ValidationException('Invalid id (%s)' % model_id)

        if job['status'] in [SUCCESS, ERROR, CANCELED]:
            events.unbind('jobs.job.update.after', str(file_id))

        return
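The handler name passed to events.unbind in this callback is str(file_id), so the matching registration elsewhere in the module presumably looks like the single line below. This counterpart is inferred from the unbind call and is not part of the original snippet.

# Assumed counterpart of the unbind above (not shown in the original snippet):
# the callback is registered under the file id so it can unbind itself once
# the job reaches a terminal state.
events.bind('jobs.job.update.after', str(file_id), callback)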
Example #32
def load(info):
    info['apiRoot'].wholetale = wholeTale()
    info['apiRoot'].instance = Instance()
    info['apiRoot'].tale = Tale()

    from girder.plugins.wholetale.models.tale import Tale as TaleModel
    from girder.plugins.wholetale.models.tale import _currentTaleFormat
    q = {
        '$or': [{
            'format': {
                '$exists': False
            }
        }, {
            'format': {
                '$lt': _currentTaleFormat
            }
        }]
    }
    for obj in TaleModel().find(q):
        TaleModel().save(obj, validate=True)

    info['apiRoot'].recipe = Recipe()
    info['apiRoot'].dataset = Dataset()
    image = Image()
    info['apiRoot'].image = image
    events.bind('jobs.job.update.after', 'wholetale', image.updateImageStatus)
    events.unbind('model.user.save.created',
                  CoreEventHandler.USER_DEFAULT_FOLDERS)
    events.bind('model.user.save.created', 'wholetale', addDefaultFolders)
    info['apiRoot'].repository = Repository()
    info['apiRoot'].folder.route('GET', ('registered', ), listImportedData)
    info['apiRoot'].folder.route('GET', (':id', 'listing'), listFolder)
    info['apiRoot'].folder.route('GET', (':id', 'dataset'), getDataSet)
    info['apiRoot'].item.route('GET', (':id', 'listing'), listItem)
    info['apiRoot'].resource.route('GET', (), listResources)

    info['apiRoot'].user.route('PUT', ('settings', ), setUserMetadata)
    info['apiRoot'].user.route('GET', ('settings', ), getUserMetadata)
    ModelImporter.model('user').exposeFields(level=AccessType.WRITE,
                                             fields=('meta', ))
Example #33
    def testSynchronousEvents(self):
        name, failname = '_test.event', '_test.failure'
        handlerName = '_test.handler'
        events.bind(name, handlerName, self._increment)
        events.bind(failname, handlerName, self._raiseException)

        # Make sure our exception propagates out of the handler
        try:
            events.trigger(failname)
            self.assertTrue(False)
        except Exception as e:
            self.assertEqual(e.args[0], 'Failure condition')

        # Bind an event to increment the counter
        self.assertEqual(self.ctr, 0)
        event = events.trigger(name, {'amount': 2})
        self.assertEqual(self.ctr, 2)
        self.assertTrue(event.propagate)
        self.assertFalse(event.defaultPrevented)
        self.assertEqual(event.responses, [])

        # The event should still be bound here if a different handler unbinds
        events.unbind(name, 'not the handler name')
        events.trigger(name, {'amount': 2})
        self.assertEqual(self.ctr, 4)

        # Actually unbind the event; it should now no longer execute
        events.unbind(name, handlerName)
        events.trigger(name, {'amount': 2})
        self.assertEqual(self.ctr, 4)

        # Bind an event that prevents the default action and passes a response
        events.bind(name, handlerName, self._eatEvent)
        events.bind(name, 'other handler name', self._shouldNotBeCalled)
        event = events.trigger(name)
        self.assertTrue(event.defaultPrevented)
        self.assertFalse(event.propagate)
        self.assertEqual(event.responses, [{'foo': 'bar'}])
Example #34
    def testSynchronousEvents(self):
        name, failname = '_test.event', '_test.failure'
        handlerName = '_test.handler'
        events.bind(name, handlerName, self._increment)
        events.bind(failname, handlerName, self._raiseException)

        # Make sure our exception propagates out of the handler
        try:
            events.trigger(failname)
            self.assertTrue(False)
        except Exception as e:
            self.assertEqual(e.message, 'Failure condition')

        # Bind an event to increment the counter
        self.assertEqual(self.ctr, 0)
        event = events.trigger(name, {'amount': 2})
        self.assertEqual(self.ctr, 2)
        self.assertTrue(event.propagate)
        self.assertFalse(event.defaultPrevented)
        self.assertEqual(event.responses, [])

        # The event should still be bound here if a different handler unbinds
        events.unbind(name, 'not the handler name')
        events.trigger(name, {'amount': 2})
        self.assertEqual(self.ctr, 4)

        # Actually unbind the event; it should now no longer execute
        events.unbind(name, handlerName)
        events.trigger(name, {'amount': 2})
        self.assertEqual(self.ctr, 4)

        # Bind an event that prevents the default action and passes a response
        events.bind(name, handlerName, self._eatEvent)
        events.bind(name, 'other handler name', self._shouldNotBeCalled)
        event = events.trigger(name)
        self.assertTrue(event.defaultPrevented)
        self.assertFalse(event.propagate)
        self.assertEqual(event.responses, [{'foo': 'bar'}])
Example #35
def load(info):
    info['apiRoot'].wholetale = wholeTale()
    info['apiRoot'].instance = Instance()
    info['apiRoot'].tale = Tale()
    info['apiRoot'].recipe = Recipe()
    info['apiRoot'].dataset = Dataset()
    image = Image()
    info['apiRoot'].image = image
    events.bind('jobs.job.update.after', 'wholetale', image.updateImageStatus)
    events.unbind('model.user.save.created',
                  CoreEventHandler.USER_DEFAULT_FOLDERS)
    events.bind('model.user.save.created', 'wholetale', addDefaultFolders)
    info['apiRoot'].repository = Repository()
    info['apiRoot'].folder.route('GET', ('registered', ), listImportedData)
    info['apiRoot'].folder.route('GET', (':id', 'listing'), listFolder)
    info['apiRoot'].folder.route('GET', (':id', 'dataset'), getDataSet)
    info['apiRoot'].item.route('GET', (':id', 'listing'), listItem)
    info['apiRoot'].resource.route('GET', (), listResources)

    info['apiRoot'].user.route('PUT', ('settings', ), setUserMetadata)
    info['apiRoot'].user.route('GET', ('settings', ), getUserMetadata)
    ModelImporter.model('user').exposeFields(level=AccessType.WRITE,
                                             fields=('meta', ))
Example #36
def unbindGirderEventsByHandlerName(handlerName):
    for eventName in events._mapping:
        events.unbind(eventName, handlerName)
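This helper sweeps every event type currently registered in events._mapping and drops whatever was bound under a single handler name, which is handy for plugin teardown. A hypothetical call, with the handler name 'my_plugin' chosen purely for illustration, would be:

# Hypothetical teardown call; 'my_plugin' stands in for whatever handler name
# the plugin used in its events.bind(...) calls.
unbindGirderEventsByHandlerName('my_plugin')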
Example #37
    def testMoveBetweenAssetstores(self):
        folder = six.next(self.model('folder').childFolders(
            self.admin, parentType='user', force=True, filters={
                'name': 'Public'
            }))

        resp = self.request(path='/assetstore', method='GET', user=self.admin)
        self.assertStatusOk(resp)
        fs_assetstore = resp.json[0]

        # Clear any old DB data
        base.dropGridFSDatabase('girder_test_assetstore_move_assetstore')
        params = {
            'name': 'New Name',
            'type': AssetstoreType.GRIDFS,
            'db': 'girder_test_assetstore_move_assetstore'
        }
        resp = self.request(path='/assetstore', method='POST', user=self.admin,
                            params=params)
        self.assertStatusOk(resp)
        gridfs_assetstore = resp.json

        # Upload a file - it should go to the fs assetstore
        uploadData = 'helloworld'
        params = {
            'parentType': 'folder',
            'parentId': folder['_id'],
            'name': 'sample1',
            'size': len(uploadData),
            'mimeType': 'text/plain'
        }
        resp = self.request(
            path='/file', method='POST', user=self.admin, params=params)
        self.assertStatusOk(resp)
        upload = resp.json
        fields = [('offset', 0), ('uploadId', upload['_id'])]
        files = [('chunk', 'helloWorld.txt', uploadData)]
        resp = self.multipartRequest(
            path='/file/chunk', user=self.admin, fields=fields, files=files)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
        uploadedFiles = [resp.json]

        # Upload it again targeting a different assetstore
        params['assetstoreId'] = gridfs_assetstore['_id']
        resp = self.request(
            path='/file', method='POST', user=self.admin, params=params)
        self.assertStatusOk(resp)
        upload = resp.json
        fields = [('offset', 0), ('uploadId', upload['_id'])]
        files = [('chunk', 'helloWorld.txt', uploadData)]
        resp = self.multipartRequest(
            path='/file/chunk', user=self.admin, fields=fields, files=files)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
        uploadedFiles.append(resp.json)

        # Replace the first file, directing the replacement to a different
        # assetstore
        replaceParams = {
            'size': len(uploadData),
            'assetstoreId': gridfs_assetstore['_id'],
        }
        resp = self.request(
            path='/file/%s/contents' % uploadedFiles[0]['_id'], method='PUT',
            user=self.admin, params=replaceParams)
        self.assertStatusOk(resp)
        upload = resp.json
        fields = [('offset', 0), ('uploadId', upload['_id'])]
        resp = self.multipartRequest(
            path='/file/chunk', user=self.admin, fields=fields, files=files)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
        uploadedFiles[0] = resp.json

        # Move a file from the gridfs assetstore to the filesystem assetstore
        resp = self.request(
            path='/file/%s/move' % uploadedFiles[0]['_id'], method='PUT',
            user=self.admin, params={'assetstoreId': fs_assetstore['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
        uploadedFiles[0] = resp.json

        # Doing it again shouldn't change it.
        resp = self.request(
            path='/file/%s/move' % uploadedFiles[0]['_id'], method='PUT',
            user=self.admin, params={'assetstoreId': fs_assetstore['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
        uploadedFiles[0] = resp.json

        # We should be able to move it back
        resp = self.request(
            path='/file/%s/move' % uploadedFiles[0]['_id'], method='PUT',
            user=self.admin, params={'assetstoreId': gridfs_assetstore['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
        uploadedFiles[0] = resp.json

        # Test moving a file of zero length
        params['size'] = 0
        resp = self.request(
            path='/file', method='POST', user=self.admin, params=params)
        self.assertStatusOk(resp)
        uploadedFiles.append(resp.json)

        resp = self.request(
            path='/file/%s/move' % uploadedFiles[2]['_id'], method='PUT',
            user=self.admin, params={'assetstoreId': fs_assetstore['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
        uploadedFiles[2] = resp.json

        # Test preventing the move via an event
        def stopMove(event):
            event.preventDefault()

        events.bind('model.upload.movefile', 'assetstore_test', stopMove)
        try:
            resp = self.request(
                path='/file/%s/move' % uploadedFiles[0]['_id'], method='PUT',
                user=self.admin, params={'assetstoreId': fs_assetstore['_id']},
                isJson=False)
            self.assertFalse('Move should have been prevented')
        except AssertionError as exc:
            self.assertIn('could not be moved to assetstore', str(exc))
        events.unbind('model.upload.movefile', 'assetstore_test')

        # Test files big enough to be multi-chunk
        chunkSize = self.model('upload')._getChunkSize()
        data = six.BytesIO(b' ' * chunkSize * 2)
        uploadedFiles.append(self.model('upload').uploadFromFile(
            data, chunkSize * 2, 'sample', parentType='folder',
            parent=folder, assetstore=fs_assetstore))
        resp = self.request(
            path='/file/%s/move' % uploadedFiles[3]['_id'], method='PUT',
            user=self.admin, params={'assetstoreId': gridfs_assetstore['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
        uploadedFiles[3] = resp.json
Example #38
def load(info):
    info['apiRoot'].wholetale = wholeTale()
    info['apiRoot'].instance = Instance()

    tale = Tale()
    info['apiRoot'].tale = tale

    from girder.plugins.wholetale.models.tale import Tale as TaleModel
    from girder.plugins.wholetale.models.tale import _currentTaleFormat
    q = {
        '$or': [{
            'format': {
                '$exists': False
            }
        }, {
            'format': {
                '$lt': _currentTaleFormat
            }
        }]
    }
    for obj in TaleModel().find(q):
        try:
            TaleModel().save(obj, validate=True)
        except GirderException as exc:
            logprint(exc)

    info['apiRoot'].dataset = Dataset()
    info['apiRoot'].image = Image()
    events.bind('jobs.job.update.after', 'wholetale', tale.updateBuildStatus)
    events.bind('jobs.job.update.after', 'wholetale', finalizeInstance)
    events.bind('jobs.job.update.after', 'wholetale', updateNotification)
    events.bind('model.file.validate', 'wholetale', validateFileLink)
    events.unbind('model.user.save.created',
                  CoreEventHandler.USER_DEFAULT_FOLDERS)
    events.bind('model.user.save.created', 'wholetale', addDefaultFolders)
    events.bind('model.file.save', 'wholetale', tale.updateWorkspaceModTime)
    events.bind('model.file.save.created', 'wholetale',
                tale.updateWorkspaceModTime)
    events.bind('model.file.remove', 'wholetale', tale.updateWorkspaceModTime)
    events.bind('oauth.auth_callback.after', 'wholetale',
                store_other_globus_tokens)
    info['apiRoot'].account = Account()
    info['apiRoot'].repository = Repository()
    info['apiRoot'].license = License()
    info['apiRoot'].integration = Integration()
    info['apiRoot'].workspace = Workspace()
    info['apiRoot'].folder.route('GET', ('registered', ), listImportedData)
    info['apiRoot'].folder.route('GET', (':id', 'listing'), listFolder)
    info['apiRoot'].folder.route('GET', (':id', 'dataset'), getDataSet)
    info['apiRoot'].job.route('GET', (':id', 'result'), getJobResult)
    info['apiRoot'].item.route('GET', (':id', 'listing'), listItem)
    info['apiRoot'].resource.route('GET', (), listResources)

    info['apiRoot'].user.route('PUT', ('settings', ), setUserMetadata)
    info['apiRoot'].user.route('GET', ('settings', ), getUserMetadata)
    ModelImporter.model('user').exposeFields(level=AccessType.WRITE,
                                             fields=('meta', 'myData'))
    ModelImporter.model('user').exposeFields(level=AccessType.ADMIN,
                                             fields=('otherTokens', ))

    path_to_assets = os.path.join(
        os.path.dirname(os.path.dirname(__file__)),
        "web_client/extra/img",
    )
    for ext_provider in SettingDefault.defaults[
            PluginSettings.EXTERNAL_AUTH_PROVIDERS]:
        logo_path = os.path.join(path_to_assets,
                                 ext_provider["name"] + '_logo.jpg')
        print(logo_path)
        if os.path.isfile(logo_path):
            with open(logo_path, "rb") as image_file:
                ext_provider["logo"] = base64.b64encode(
                    image_file.read()).decode()
Example n. 39
0
    def __exit__(self, *args):
        events.unbind(self.eventName, self.handlerName)
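The fragment above only shows the __exit__ half of a context manager. A minimal sketch of what the enclosing class presumably looks like (the class name BoundEventHandler is an assumption; the attribute names come from the fragment itself):

from girder import events


class BoundEventHandler(object):
    """Bind a Girder event handler on entry and unbind it on exit."""

    def __init__(self, eventName, handlerName, handler):
        self.eventName = eventName
        self.handlerName = handlerName
        self.handler = handler

    def __enter__(self):
        # Register the handler only for the lifetime of the ``with`` block.
        events.bind(self.eventName, self.handlerName, self.handler)
        return self

    def __exit__(self, *args):
        # Always remove the handler, even if the body of the block raised.
        events.unbind(self.eventName, self.handlerName)

Used as ``with BoundEventHandler('model.upload.movefile', 'assetstore_test', stopMove): ...``, this guarantees the unbind runs even when an assertion inside the block fails.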
Example n. 40
0
    def testDeleteIncompleteTile(self):
        from girder.plugins.jobs.constants import JobStatus
        from girder.plugins.jobs.models.job import Job
        from girder.plugins.worker import CustomJobStatus

        # Test the large_image/settings end point
        resp = self.request(method='DELETE',
                            path='/large_image/tiles/incomplete',
                            user=self.user)
        self.assertStatus(resp, 403)
        resp = self.request(method='DELETE',
                            path='/large_image/tiles/incomplete',
                            user=self.admin)
        self.assertStatusOk(resp)
        results = resp.json
        self.assertEqual(results['removed'], 0)

        file = self._uploadFile(
            os.path.join(os.path.dirname(__file__), 'test_files',
                         'yb10kx5k.png'))
        itemId = str(file['itemId'])
        # Use a simulated cherrypy request, as it won't complete properly
        resp = self.request(method='POST',
                            path='/item/%s/tiles' % itemId,
                            user=self.admin)
        resp = self.request(method='DELETE',
                            path='/large_image/tiles/incomplete',
                            user=self.admin)
        self.assertStatusOk(resp)
        results = resp.json
        self.assertEqual(results['removed'], 1)

        def preventCancel(evt):
            job = evt.info['job']
            params = evt.info['params']
            if (params.get('status') and params.get('status') != job['status']
                    and params['status']
                    in (JobStatus.CANCELED, CustomJobStatus.CANCELING)):
                evt.preventDefault()

        # Prevent a job from cancelling
        events.bind('jobs.job.update', 'testDeleteIncompleteTile',
                    preventCancel)
        # Create a job and mark it as running
        resp = self.request(method='POST',
                            path='/item/%s/tiles' % itemId,
                            user=self.admin)
        job = Job().load(id=resp.json['_id'], force=True)
        Job().updateJob(job, status=JobStatus.RUNNING)

        resp = self.request(method='DELETE',
                            path='/large_image/tiles/incomplete',
                            user=self.admin)
        events.unbind('jobs.job.update', 'testDeleteIncompleteTile')
        self.assertStatusOk(resp)
        results = resp.json
        self.assertEqual(results['removed'], 0)
        self.assertIn('could not be canceled', results['message'])
        # Now we should be able to cancel the job
        resp = self.request(method='DELETE',
                            path='/large_image/tiles/incomplete',
                            user=self.admin)
        self.assertStatusOk(resp)
        results = resp.json
        self.assertEqual(results['removed'], 1)
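The preventCancel handler above works because any handler bound to a Girder event can veto the default action by calling event.preventDefault(), and the code that triggered the event can check defaultPrevented on the event object that events.trigger returns. A small self-contained sketch (the event name and the 'protected' key are made up for illustration):

from girder import events


def vetoIfProtected(event):
    # Prevent the default action when the payload is marked as protected.
    if event.info.get('protected'):
        event.preventDefault()


events.bind('example.resource.delete', 'example_handler', vetoIfProtected)
try:
    event = events.trigger('example.resource.delete', {'protected': True})
    if event.defaultPrevented:
        print('The delete was vetoed by a bound handler.')
finally:
    events.unbind('example.resource.delete', 'example_handler')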
Example n. 41
0
    def testMoveBetweenAssetstores(self):
        folder = six.next(
            self.model('folder').childFolders(self.admin,
                                              parentType='user',
                                              force=True,
                                              filters={'name': 'Public'}))

        resp = self.request(path='/assetstore', method='GET', user=self.admin)
        self.assertStatusOk(resp)
        fs_assetstore = resp.json[0]

        # Clear any old DB data
        base.dropGridFSDatabase('girder_test_assetstore_move_assetstore')
        params = {
            'name': 'New Name',
            'type': AssetstoreType.GRIDFS,
            'db': 'girder_test_assetstore_move_assetstore'
        }
        resp = self.request(path='/assetstore',
                            method='POST',
                            user=self.admin,
                            params=params)
        self.assertStatusOk(resp)
        gridfs_assetstore = resp.json

        # Upload a file - it should go to the fs assetstore
        uploadData = 'helloworld'
        params = {
            'parentType': 'folder',
            'parentId': folder['_id'],
            'name': 'sample1',
            'size': len(uploadData),
            'mimeType': 'text/plain'
        }
        resp = self.request(path='/file',
                            method='POST',
                            user=self.admin,
                            params=params)
        self.assertStatusOk(resp)
        upload = resp.json
        fields = [('offset', 0), ('uploadId', upload['_id'])]
        files = [('chunk', 'helloWorld.txt', uploadData)]
        resp = self.multipartRequest(path='/file/chunk',
                                     user=self.admin,
                                     fields=fields,
                                     files=files)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
        uploadedFiles = [resp.json]

        # Upload it again targeting a different assetstore
        params['assetstoreId'] = gridfs_assetstore['_id']
        resp = self.request(path='/file',
                            method='POST',
                            user=self.admin,
                            params=params)
        self.assertStatusOk(resp)
        upload = resp.json
        fields = [('offset', 0), ('uploadId', upload['_id'])]
        files = [('chunk', 'helloWorld.txt', uploadData)]
        resp = self.multipartRequest(path='/file/chunk',
                                     user=self.admin,
                                     fields=fields,
                                     files=files)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
        uploadedFiles.append(resp.json)

        # Replace the first file, directing the replacement to a different
        # assetstore
        replaceParams = {
            'size': len(uploadData),
            'assetstoreId': gridfs_assetstore['_id'],
        }
        resp = self.request(path='/file/%s/contents' % uploadedFiles[0]['_id'],
                            method='PUT',
                            user=self.admin,
                            params=replaceParams)
        self.assertStatusOk(resp)
        upload = resp.json
        fields = [('offset', 0), ('uploadId', upload['_id'])]
        resp = self.multipartRequest(path='/file/chunk',
                                     user=self.admin,
                                     fields=fields,
                                     files=files)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
        uploadedFiles[0] = resp.json

        # Move a file from the gridfs assetstore to the filesystem assetstore
        resp = self.request(path='/file/%s/move' % uploadedFiles[0]['_id'],
                            method='PUT',
                            user=self.admin,
                            params={'assetstoreId': fs_assetstore['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
        uploadedFiles[0] = resp.json

        # Doing it again shouldn't change it.
        resp = self.request(path='/file/%s/move' % uploadedFiles[0]['_id'],
                            method='PUT',
                            user=self.admin,
                            params={'assetstoreId': fs_assetstore['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
        uploadedFiles[0] = resp.json

        # We should be able to move it back
        resp = self.request(path='/file/%s/move' % uploadedFiles[0]['_id'],
                            method='PUT',
                            user=self.admin,
                            params={'assetstoreId': gridfs_assetstore['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
        uploadedFiles[0] = resp.json

        # Test moving a file of zero length
        params['size'] = 0
        resp = self.request(path='/file',
                            method='POST',
                            user=self.admin,
                            params=params)
        self.assertStatusOk(resp)
        uploadedFiles.append(resp.json)

        resp = self.request(path='/file/%s/move' % uploadedFiles[2]['_id'],
                            method='PUT',
                            user=self.admin,
                            params={'assetstoreId': fs_assetstore['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
        uploadedFiles[2] = resp.json

        # Test preventing the move via an event
        def stopMove(event):
            event.preventDefault()

        events.bind('model.upload.movefile', 'assetstore_test', stopMove)
        try:
            resp = self.request(path='/file/%s/move' % uploadedFiles[0]['_id'],
                                method='PUT',
                                user=self.admin,
                                params={'assetstoreId': fs_assetstore['_id']},
                                isJson=False)
            self.assertFalse('Move should have been prevented')
        except AssertionError as exc:
            self.assertIn('could not be moved to assetstore', str(exc))
        events.unbind('model.upload.movefile', 'assetstore_test')

        # Test files big enough to be multi-chunk
        chunkSize = self.model('upload')._getChunkSize()
        data = six.BytesIO(b' ' * chunkSize * 2)
        uploadedFiles.append(
            self.model('upload').uploadFromFile(data,
                                                chunkSize * 2,
                                                'sample',
                                                parentType='folder',
                                                parent=folder,
                                                assetstore=fs_assetstore))
        resp = self.request(path='/file/%s/move' % uploadedFiles[3]['_id'],
                            method='PUT',
                            user=self.admin,
                            params={'assetstoreId': gridfs_assetstore['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
        uploadedFiles[3] = resp.json
Example n. 42
0
    def __exit__(self, *args):
        events.unbind(self.eventName, self.handlerName)
Example n. 43
0
    def createOrUpdateExtension(self, app_id, os, arch, baseName,
                                repository_type, repository_url, revision,
                                app_revision, packagetype, codebase,
                                description, release, icon_url,
                                development_status, category, enabled,
                                homepage, screenshots, contributors):
        """
        Upload an extension package into the database, into the release whose
        metadata revision matches ``app_revision``, or by default into the
        **'Nightly'** folder.

        :param app_id: The ID of the application.
        :param os: The operating system targeted by the extension.
        :param arch: The architecture compatible with the extension.
        :param baseName: The base name of the extension.
        :param repository_type: The type of repository (github, gitlab, ...).
        :param repository_url: The URL of the repository.
        :param revision: The revision of the extension.
        :param app_revision: The revision of the application.
        :param packagetype: The type of the extension package.
        :param codebase: The codebase baseName.
        :param description: The description of the extension.
        :return: The created or updated extension document.
        """
        creator = self.getCurrentUser()
        application = self._model.load(app_id, user=creator)
        release_folder = None
        # Find the release by metadata revision
        releases = self._model.childFolders(application,
                                            'Folder',
                                            user=creator)
        for folder in releases:
            if 'meta' in folder:
                if folder['meta']['revision'] == app_revision:
                    release_folder = folder
                    break
        if not release_folder:
            # Fall back to the nightly release folder
            release_folder = list(
                self._model.childFolders(
                    application,
                    'Folder',
                    user=creator,
                    filters={'name': constants.NIGHTLY_RELEASE_NAME}))
            if not release_folder:
                raise Exception('The %s folder was not found.' %
                                constants.NIGHTLY_RELEASE_NAME)
            release_folder = release_folder[0]

        params = {
            'app_id': app_id,
            'baseName': baseName,
            'os': os,
            'arch': arch,
            'repository_type': repository_type,
            'repository_url': repository_url,
            'revision': revision,
            'app_revision': app_revision,
            'packagetype': packagetype,
            'codebase': codebase,
            'description': description
        }
        if release:
            params['release'] = release
        if icon_url:
            params['icon_url'] = icon_url
        if development_status:
            params['development_status'] = development_status
        if category:
            params['category'] = category
        if enabled:
            params['enabled'] = enabled
        if homepage:
            params['homepage'] = homepage
        if screenshots:
            params['screenshots'] = screenshots
        if contributors:
            params['contributors'] = contributors

        name = application['meta']['extensionNameTemplate'].format(**params)
        filters = {'name': name}
        # Only one extension should be in this list
        extensions = list(ExtensionModel().get(release_folder,
                                               filters=filters))
        if not len(extensions):
            # The extension doesn't exist yet:
            extension = ExtensionModel().createExtension(
                name, creator, release_folder, params)
        elif len(extensions) == 1:
            extension = extensions[0]
        else:
            raise Exception(
                'Too many extensions found for the same name :"%s"' % name)

        # Check the file inside the extension Item
        files = Item().childFiles(extension)
        if files.count() == 1:
            old_file = files.next()
            # Catch the upload-success event and remove the old file once the
            # new upload has finalized; the handler must be a callable that
            # takes the event, not the result of calling File().remove().
            events.bind('model.file.finalizeUpload.after', 'application',
                        lambda event: File().remove(old_file))
        elif not files.count():
            # Extension new or empty
            pass
        else:
            raise Exception("More than 1 binary file in the extension.")

        old_meta = {
            'baseName': extension['meta']['baseName'],
            'os': extension['meta']['os'],
            'arch': extension['meta']['arch'],
            'revision': extension['meta']['revision'],
            'app_revision': extension['meta']['app_revision']
        }
        identifier_meta = {
            'baseName': baseName,
            'os': os,
            'arch': arch,
            'revision': revision,
            'app_revision': app_revision
        }
        if identifier_meta == old_meta and len(extensions):
            # The revision is the same as before; no need to upload again
            extension = ExtensionModel().setMetadata(extension, params)
            events.unbind('model.file.finalizeUpload.after', 'application')

        # Ready to upload the binary file
        return extension
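One detail worth noting in createOrUpdateExtension above: events.bind expects a callable that will be invoked with the event, so deferring a cleanup step such as deleting the previous binary is best expressed as a small handler. A sketch, assuming the Girder 3.x model import path and that old_file is the single existing File document found on the extension item:

from girder import events
from girder.models.file import File


def bindOldFileCleanup(old_file):
    """Remove ``old_file`` only after the replacement upload finalizes."""
    def _removeOldFile(event):
        # Runs when 'model.file.finalizeUpload.after' fires, so the old
        # binary is deleted only once the new one is safely in place.
        File().remove(old_file)

    events.bind('model.file.finalizeUpload.after', 'application',
                _removeOldFile)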
Example n. 44
0
    def testMoveBetweenAssetstores(self):
        folder = six.next(self.model('folder').childFolders(
            self.admin, parentType='user', force=True, filters={
                'name': 'Public'
            }))

        resp = self.request(path='/assetstore', method='GET', user=self.admin)
        self.assertStatusOk(resp)
        fs_assetstore = resp.json[0]

        # Clear any old DB data
        base.dropGridFSDatabase('girder_test_assetstore_move_assetstore')
        params = {
            'name': 'New Name',
            'type': AssetstoreType.GRIDFS,
            'db': 'girder_test_assetstore_move_assetstore'
        }
        resp = self.request(path='/assetstore', method='POST', user=self.admin,
                            params=params)
        self.assertStatusOk(resp)
        gridfs_assetstore = resp.json

        # Upload a file - it should go to the fs assetstore
        uploadData = 'helloworld'
        params = {
            'parentType': 'folder',
            'parentId': folder['_id'],
            'name': 'sample1',
            'size': len(uploadData),
            'mimeType': 'text/plain'
        }
        resp = self.request(
            path='/file', method='POST', user=self.admin, params=params)
        self.assertStatusOk(resp)
        upload = resp.json
        fields = [('offset', 0), ('uploadId', upload['_id'])]
        files = [('chunk', 'helloWorld.txt', uploadData)]
        resp = self.multipartRequest(
            path='/file/chunk', user=self.admin, fields=fields, files=files)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
        uploadedFiles = [resp.json]

        # Upload it again targeting a different assetstore
        params['assetstoreId'] = gridfs_assetstore['_id']
        resp = self.request(
            path='/file', method='POST', user=self.admin, params=params)
        self.assertStatusOk(resp)
        upload = resp.json
        fields = [('offset', 0), ('uploadId', upload['_id'])]
        files = [('chunk', 'helloWorld.txt', uploadData)]
        resp = self.multipartRequest(
            path='/file/chunk', user=self.admin, fields=fields, files=files)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
        uploadedFiles.append(resp.json)

        # Replace the first file, directing the replacement to a different
        # assetstore
        replaceParams = {
            'size': len(uploadData),
            'assetstoreId': gridfs_assetstore['_id'],
        }
        resp = self.request(
            path='/file/%s/contents' % uploadedFiles[0]['_id'], method='PUT',
            user=self.admin, params=replaceParams)
        self.assertStatusOk(resp)
        upload = resp.json
        fields = [('offset', 0), ('uploadId', upload['_id'])]
        resp = self.multipartRequest(
            path='/file/chunk', user=self.admin, fields=fields, files=files)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
        uploadedFiles[0] = resp.json

        # Move a file from the gridfs assetstore to the filesystem assetstore
        resp = self.request(
            path='/file/%s/move' % uploadedFiles[0]['_id'], method='PUT',
            user=self.admin, params={'assetstoreId': fs_assetstore['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
        uploadedFiles[0] = resp.json

        # Doing it again shouldn't change it.
        resp = self.request(
            path='/file/%s/move' % uploadedFiles[0]['_id'], method='PUT',
            user=self.admin, params={'assetstoreId': fs_assetstore['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
        uploadedFiles[0] = resp.json

        # We should be able to move it back
        resp = self.request(
            path='/file/%s/move' % uploadedFiles[0]['_id'], method='PUT',
            user=self.admin, params={'assetstoreId': gridfs_assetstore['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
        uploadedFiles[0] = resp.json

        # Test moving a file of zero length
        params['size'] = 0
        resp = self.request(
            path='/file', method='POST', user=self.admin, params=params)
        self.assertStatusOk(resp)
        uploadedFiles.append(resp.json)

        resp = self.request(
            path='/file/%s/move' % uploadedFiles[2]['_id'], method='PUT',
            user=self.admin, params={'assetstoreId': fs_assetstore['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
        uploadedFiles[2] = resp.json

        # Test preventing the move via an event
        def stopMove(event):
            event.preventDefault()

        events.bind('model.upload.movefile', 'assetstore_test', stopMove)
        try:
            resp = self.request(
                path='/file/%s/move' % uploadedFiles[0]['_id'], method='PUT',
                user=self.admin, params={'assetstoreId': fs_assetstore['_id']},
                isJson=False)
            self.assertFalse('Move should have been prevented')
        except AssertionError as exc:
            self.assertIn('could not be moved to assetstore', str(exc))
        events.unbind('model.upload.movefile', 'assetstore_test')

        # Test files big enough to be multi-chunk
        chunkSize = self.model('upload')._getChunkSize()
        data = six.BytesIO(b' ' * chunkSize * 2)
        uploadedFiles.append(self.model('upload').uploadFromFile(
            data, chunkSize * 2, 'sample', parentType='folder',
            parent=folder, assetstore=fs_assetstore))
        resp = self.request(
            path='/file/%s/move' % uploadedFiles[3]['_id'], method='PUT',
            user=self.admin, params={'assetstoreId': gridfs_assetstore['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
        uploadedFiles[3] = resp.json

        # Test progress
        size = chunkSize * 2
        chunkSize = self.model('upload')._getChunkSize()
        data = six.BytesIO(b' ' * size)
        upload = self.model('upload').uploadFromFile(
            data, size, 'progress', parentType='folder',
            parent=folder, assetstore=fs_assetstore)
        params = {
            'assetstoreId': gridfs_assetstore['_id'],
            'progress': True
        }
        resp = self.request(
            path='/file/%s/move' % upload['_id'], method='PUT',
            user=self.admin, params=params)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])

        resp = self.request(
            path='/notification/stream', method='GET', user=self.admin,
            isJson=False, params={'timeout': 1})
        messages = self.getSseMessages(resp)
        self.assertEqual(len(messages), 1)
        self.assertEqual(messages[0]['type'], 'progress')
        self.assertEqual(messages[0]['data']['current'], size)

        # Test moving imported file

        # Create assetstore to import file into
        params = {
            'name': 'ImportTest',
            'type': AssetstoreType.FILESYSTEM,
            'root': os.path.join(fs_assetstore['root'], 'import')
        }
        resp = self.request(path='/assetstore', method='POST', user=self.admin,
                            params=params)
        self.assertStatusOk(resp)
        import_assetstore = resp.json

        # Import file
        params = {
            'importPath': os.path.join(ROOT_DIR, 'tests', 'cases', 'py_client',
                                       'testdata', 'world.txt'),
            'destinationType': 'folder',
        }

        self.model('assetstore').importData(
            import_assetstore, parent=folder, parentType='folder', params=params,
            progress=ProgressContext(False), user=self.admin, leafFoldersAsItems=False)

        file = path_util.lookUpPath('/user/admin/Public/world.txt/world.txt',
                                    self.admin, False)['document']

        # Move file
        params = {
            'assetstoreId': fs_assetstore['_id'],
        }
        resp = self.request(
            path='/file/%s/move' % file['_id'], method='PUT',
            user=self.admin, params=params)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])

        # Check that we can still download the file
        resp = self.request(
            path='/file/%s/download' % file['_id'], user=self.admin, isJson=False)
        self.assertStatusOk(resp)
Example n. 45
0
    def testMoveBetweenAssetstores(self):
        folder = six.next(Folder().childFolders(self.admin,
                                                parentType='user',
                                                force=True,
                                                filters={'name': 'Public'}))

        resp = self.request(path='/assetstore', method='GET', user=self.admin)
        self.assertStatusOk(resp)
        fs_assetstore = resp.json[0]

        # Clear any old DB data
        base.dropGridFSDatabase('girder_test_assetstore_move_assetstore')
        params = {
            'name': 'New Name',
            'type': AssetstoreType.GRIDFS,
            'db': 'girder_test_assetstore_move_assetstore'
        }
        resp = self.request(path='/assetstore',
                            method='POST',
                            user=self.admin,
                            params=params)
        self.assertStatusOk(resp)
        gridfs_assetstore = resp.json

        # Upload a file - it should go to the fs assetstore
        uploadData = 'helloworld'
        params = {
            'parentType': 'folder',
            'parentId': folder['_id'],
            'name': 'sample1',
            'size': len(uploadData),
            'mimeType': 'text/plain'
        }
        resp = self.request(path='/file',
                            method='POST',
                            user=self.admin,
                            params=params)
        self.assertStatusOk(resp)
        upload = resp.json
        resp = self.request(path='/file/chunk',
                            method='POST',
                            user=self.admin,
                            body=uploadData,
                            params={'uploadId': upload['_id']},
                            type='text/plain')
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
        uploadedFiles = [resp.json]

        # Upload it again targeting a different assetstore
        params['assetstoreId'] = gridfs_assetstore['_id']
        resp = self.request(path='/file',
                            method='POST',
                            user=self.admin,
                            params=params)
        self.assertStatusOk(resp)
        upload = resp.json
        resp = self.request(path='/file/chunk',
                            method='POST',
                            user=self.admin,
                            body=uploadData,
                            params={'uploadId': upload['_id']},
                            type='text/plain')
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
        uploadedFiles.append(resp.json)

        # Replace the first file, directing the replacement to a different
        # assetstore
        replaceParams = {
            'size': len(uploadData),
            'assetstoreId': gridfs_assetstore['_id'],
        }
        resp = self.request(path='/file/%s/contents' % uploadedFiles[0]['_id'],
                            method='PUT',
                            user=self.admin,
                            params=replaceParams)
        self.assertStatusOk(resp)
        upload = resp.json

        resp = self.request(path='/file/chunk',
                            method='POST',
                            user=self.admin,
                            body=uploadData,
                            params={'uploadId': upload['_id']},
                            type='text/plain')
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
        uploadedFiles[0] = resp.json

        # Move a file from the gridfs assetstore to the filesystem assetstore
        resp = self.request(path='/file/%s/move' % uploadedFiles[0]['_id'],
                            method='PUT',
                            user=self.admin,
                            params={'assetstoreId': fs_assetstore['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
        uploadedFiles[0] = resp.json

        # Doing it again shouldn't change it.
        resp = self.request(path='/file/%s/move' % uploadedFiles[0]['_id'],
                            method='PUT',
                            user=self.admin,
                            params={'assetstoreId': fs_assetstore['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
        uploadedFiles[0] = resp.json

        # We should be able to move it back
        resp = self.request(path='/file/%s/move' % uploadedFiles[0]['_id'],
                            method='PUT',
                            user=self.admin,
                            params={'assetstoreId': gridfs_assetstore['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
        uploadedFiles[0] = resp.json

        # Test moving a file of zero length
        params['size'] = 0
        resp = self.request(path='/file',
                            method='POST',
                            user=self.admin,
                            params=params)
        self.assertStatusOk(resp)
        uploadedFiles.append(resp.json)

        resp = self.request(path='/file/%s/move' % uploadedFiles[2]['_id'],
                            method='PUT',
                            user=self.admin,
                            params={'assetstoreId': fs_assetstore['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
        uploadedFiles[2] = resp.json

        # Test preventing the move via an event
        def stopMove(event):
            event.preventDefault()

        events.bind('model.upload.movefile', 'assetstore_test', stopMove)
        try:
            resp = self.request(path='/file/%s/move' % uploadedFiles[0]['_id'],
                                method='PUT',
                                user=self.admin,
                                params={'assetstoreId': fs_assetstore['_id']},
                                isJson=False)
            self.assertFalse('Move should have been prevented')
        except AssertionError as exc:
            self.assertIn('could not be moved to assetstore', str(exc))
        events.unbind('model.upload.movefile', 'assetstore_test')

        # Test files big enough to be multi-chunk
        chunkSize = Upload()._getChunkSize()
        data = io.BytesIO(b' ' * chunkSize * 2)
        uploadedFiles.append(Upload().uploadFromFile(data,
                                                     chunkSize * 2,
                                                     'sample',
                                                     parentType='folder',
                                                     parent=folder,
                                                     assetstore=fs_assetstore))
        resp = self.request(path='/file/%s/move' % uploadedFiles[3]['_id'],
                            method='PUT',
                            user=self.admin,
                            params={'assetstoreId': gridfs_assetstore['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
        uploadedFiles[3] = resp.json

        # Test progress
        size = chunkSize * 2
        data = io.BytesIO(b' ' * size)
        upload = Upload().uploadFromFile(data,
                                         size,
                                         'progress',
                                         parentType='folder',
                                         parent=folder,
                                         assetstore=fs_assetstore)
        params = {'assetstoreId': gridfs_assetstore['_id'], 'progress': True}
        resp = self.request(path='/file/%s/move' % upload['_id'],
                            method='PUT',
                            user=self.admin,
                            params=params)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])

        resp = self.request(path='/notification/stream',
                            method='GET',
                            user=self.admin,
                            isJson=False,
                            params={'timeout': 1})
        messages = self.getSseMessages(resp)
        self.assertEqual(len(messages), 1)
        self.assertEqual(messages[0]['type'], 'progress')
        self.assertEqual(messages[0]['data']['current'], size)

        # Test moving imported file

        # Create assetstore to import file into
        params = {
            'name': 'ImportTest',
            'type': AssetstoreType.FILESYSTEM,
            'root': os.path.join(fs_assetstore['root'], 'import')
        }
        resp = self.request(path='/assetstore',
                            method='POST',
                            user=self.admin,
                            params=params)
        self.assertStatusOk(resp)
        import_assetstore = resp.json

        # Import file
        params = {
            'importPath':
            os.path.join(ROOT_DIR, 'tests', 'cases', 'py_client', 'testdata',
                         'world.txt'),
            'destinationType':
            'folder',
        }

        Assetstore().importData(import_assetstore,
                                parent=folder,
                                parentType='folder',
                                params=params,
                                progress=ProgressContext(False),
                                user=self.admin,
                                leafFoldersAsItems=False)

        file = path_util.lookUpPath('/user/admin/Public/world.txt/world.txt',
                                    self.admin)['document']

        # Move file
        params = {
            'assetstoreId': fs_assetstore['_id'],
        }
        resp = self.request(path='/file/%s/move' % file['_id'],
                            method='PUT',
                            user=self.admin,
                            params=params)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])

        # Check that we can still download the file
        resp = self.request(path='/file/%s/download' % file['_id'],
                            user=self.admin,
                            isJson=False)
        self.assertStatusOk(resp)
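A pattern shared by every testMoveBetweenAssetstores variant above is binding stopMove to 'model.upload.movefile' and unbinding it once the request has been checked. Wrapping the bind in try/finally (or in a context manager like the one sketched after the __exit__ fragment earlier) keeps the handler from leaking into later tests if an assertion fails in between; a minimal sketch:

from girder import events


def stopMove(event):
    # Veto any attempt to move a file between assetstores.
    event.preventDefault()


events.bind('model.upload.movefile', 'assetstore_test', stopMove)
try:
    # ... issue the PUT /file/{id}/move request here and assert that the
    # server refuses to move the file ...
    pass
finally:
    # Guaranteed cleanup: the binding is removed even if an assertion
    # above fails, so it cannot affect unrelated tests.
    events.unbind('model.upload.movefile', 'assetstore_test')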