def testTaleImportBinderFail(self):
        with mock.patch("girder.plugins.wholetale.lib.pids_to_entities") as mock_pids:
            mock_pids.side_effect = ValueError
            resp = self.request(
                path="/tale/import",
                method="POST",
                user=self.user,
                params={
                    "url": "http://use.yt/upload/ef4cd901",
                    "spawn": False,
                    "imageId": self.image["_id"],
                    "asTale": True,
                    "taleKwargs": json.dumps({"title": "tale should fail"}),
                },
            )
            self.assertStatusOk(resp)
            tale = resp.json

            job = Job().findOne({"type": "wholetale.import_binder"})
            self.assertEqual(
                json.loads(job["kwargs"])["taleId"]["$oid"], tale["_id"]
            )

            for i in range(300):
                if job["status"] in {JobStatus.SUCCESS, JobStatus.ERROR}:
                    break
                time.sleep(0.1)
                job = Job().load(job["_id"], force=True)
            self.assertEqual(job["status"], JobStatus.ERROR)
            Job().remove(job)
        tale = Tale().load(tale["_id"], force=True)
        self.assertEqual(tale["status"], TaleStatus.ERROR)
        Tale().remove(tale)
Example #2
def _onUpload(event):
    """
    Look at uploads containing references related to this plugin. If found,
    they are used to link item task outputs back to a job document.
    """
    try:
        ref = json.loads(event.info.get('reference', ''))
    except ValueError:
        return

    if isinstance(ref, dict) and ref.get('type') == 'item_tasks.output':
        jobModel = Job()
        tokenModel = Token()
        token = event.info['currentToken']

        if tokenModel.hasScope(token, 'item_tasks.job_write:%s' % ref['jobId']):
            job = jobModel.load(ref['jobId'], force=True, exc=True)
        else:
            job = jobModel.load(
                ref['jobId'], level=AccessType.WRITE, user=event.info['currentUser'], exc=True)

        file = event.info['file']
        item = Item().load(file['itemId'], force=True)

        # Add link to job model to the output item
        jobModel.updateJob(job, otherFields={
            'itemTaskBindings.outputs.%s.itemId' % ref['id']: item['_id']
        })

        # Also a link in the item to the job that created it
        item['createdByJob'] = job['_id']
        Item().save(item)
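A hedged sketch of the upload "reference" payload that _onUpload above expects; the jobId and output id values here are purely illustrative, not taken from the plugin source.

import json

# A client attaches this JSON string as the `reference` parameter when
# starting the upload; the handler above then links the resulting item
# back to the job document.
reference = json.dumps({
    'type': 'item_tasks.output',
    'jobId': '5c92fbd472a9910001fbff72',  # hypothetical job id
    'id': 'DetectedPoints',               # hypothetical output binding id
})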
Example #3
def scheduleThumbnailJob(file,
                         attachToType,
                         attachToId,
                         user,
                         width=0,
                         height=0,
                         crop=True):
    """
    Schedule a local thumbnail creation job and return it.
    """
    job = Job().createLocalJob(title='Generate thumbnail for %s' %
                               file['name'],
                               user=user,
                               type='thumbnails.create',
                               public=False,
                               module='girder.plugins.thumbnails.worker',
                               kwargs={
                                   'fileId': str(file['_id']),
                                   'width': width,
                                   'height': height,
                                   'crop': crop,
                                   'attachToType': attachToType,
                                   'attachToId': str(attachToId)
                               })
    Job().scheduleJob(job)
    return job
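A minimal usage sketch for the helper above, assuming `file`, `item`, and `user` documents are already loaded; the dimensions are arbitrary.

# Assumes `file`, `item`, and `user` already exist in the calling context.
job = scheduleThumbnailJob(
    file, 'item', item['_id'], user, width=128, height=128, crop=True)
# The returned job has already been handed to Job().scheduleJob().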
Example #4
    def testWorkerStatusEndpoint(self):
        # Create a job to be handled by the worker plugin
        from girder.plugins.jobs.models.job import Job
        job = Job().createJob(title='title',
                              type='foo',
                              handler='worker_handler',
                              user=self.admin,
                              public=False,
                              args=(),
                              kwargs={})

        job['kwargs'] = {
            'jobInfo':
            utils.jobInfoSpec(job),
            'inputs':
            [utils.girderInputSpec(self.adminFolder, resourceType='folder')],
            'outputs':
            [utils.girderOutputSpec(self.adminFolder, token=self.adminToken)]
        }
        job = Job().save(job)
        self.assertEqual(job['status'], JobStatus.INACTIVE)

        # Schedule the job
        with mock.patch('celery.Celery') as celeryMock:
            instance = celeryMock.return_value
            instance.send_task.return_value = FakeAsyncResult()

            Job().scheduleJob(job)

        # Call the worker status endpoint
        resp = self.request('/worker/status', method='GET', user=self.admin)
        self.assertStatusOk(resp)
        self.assertHasKeys(resp.json,
                           ['report', 'stats', 'ping', 'active', 'reserved'])
Example #5
def schedule(event):
    """
    This is bound to the "jobs.schedule" event, and will be triggered any time
    a job is scheduled. This handler will process any job that has the
    handler field set to "worker_handler".
    """
    job = event.info
    if job['handler'] == 'worker_handler':
        task = job.get('celeryTaskName', 'girder_worker.run')

        # Set the job status to queued
        Job().updateJob(job, status=JobStatus.QUEUED)

        # Send the task to celery
        asyncResult = getCeleryApp().send_task(
            task, job['args'], job['kwargs'], queue=job.get('celeryQueue'), headers={
                'jobInfoSpec': jobInfoSpec(job, job.get('token', None)),
                'apiUrl': getWorkerApiUrl()
            })

        # Record the task ID from celery.
        Job().updateJob(job, otherFields={
            'celeryTaskId': asyncResult.task_id
        })

        # Stop event propagation since we have taken care of scheduling.
        event.stopPropagation()
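The handler above only fires if something binds it to the "jobs.schedule" event; Example #29 below shows the worker plugin doing exactly that. A minimal sketch of the wiring:

from girder import events

def load(info):
    # Run schedule() every time a job is scheduled.
    events.bind('jobs.schedule', 'worker', schedule)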
Example #6
 def createThumbnails(self, params):
     self.requireParams(['spec'], params)
     try:
         spec = json.loads(params['spec'])
         if not isinstance(spec, list):
             raise ValueError()
     except ValueError:
         raise RestException('The spec parameter must be a JSON list.')
     maxThumbnailFiles = int(Setting().get(
         constants.PluginSettings.LARGE_IMAGE_MAX_THUMBNAIL_FILES))
     if maxThumbnailFiles <= 0:
         raise RestException('Thumbnail files are not enabled.')
     jobKwargs = {'spec': spec}
     if params.get('logInterval') is not None:
         jobKwargs['logInterval'] = float(params['logInterval'])
     job = Job().createLocalJob(
         module='girder.plugins.large_image.rest.large_image',
         function='createThumbnailsJob',
         kwargs=jobKwargs,
         title='Create large image thumbnail files.',
         type='large_image_create_thumbnails',
         user=self.getCurrentUser(),
         public=True,
         async=True,
     )
     Job().scheduleJob(job)
     return job
Example #7
    def testCopyWorkspaceFail(self):
        tale = Tale().createTale(
            self.image,
            [],
            creator=self.admin,
            title="tale one",
            public=True,
            config={"memLimit": "2g"},
        )

        job = Job().createLocalJob(
            title='Copy "{title}" workspace'.format(**tale),
            user=self.user,
            type="wholetale.copy_workspace",
            public=False,
            async=True,
            module="girder.plugins.wholetale.tasks.copy_workspace",
            args=(tale["workspaceId"], "non_existing"),
            kwargs={"user": self.user, "tale": tale},
        )
        Job().scheduleJob(job)
        for i in range(300):
            if job["status"] in {JobStatus.SUCCESS, JobStatus.ERROR}:
                break
            time.sleep(0.1)
            job = Job().load(job["_id"], force=True)
        self.assertEqual(job["status"], JobStatus.ERROR)
        Job().remove(job)
        tale = Tale().load(tale["_id"], force=True)
        self.assertEqual(tale["status"], TaleStatus.ERROR)
        Tale().remove(tale)
Example #8
    def testBuildFail(self):
        from girder.plugins.jobs.models.job import Job
        resp = self.request(path='/instance',
                            method='POST',
                            user=self.user,
                            params={
                                'taleId': str(self.tale_one['_id']),
                                'name': 'tale that will fail',
                                'spawn': False
                            })
        self.assertStatusOk(resp)
        instance = resp.json

        job = Job().createJob(title='Fake build job',
                              type='celery',
                              handler='worker_handler',
                              user=self.user,
                              public=False,
                              args=[str(self.tale_one['_id']), False],
                              kwargs={},
                              otherFields={
                                  'wt_notification_id': 'nonexisting',
                                  'instance_id': instance['_id']
                              })
        job = Job().save(job)
        self.assertEqual(job['status'], JobStatus.INACTIVE)
        Job().updateJob(job, log='job queued', status=JobStatus.QUEUED)
        Job().updateJob(job, log='job running', status=JobStatus.RUNNING)
        Job().updateJob(job, log='job failed', status=JobStatus.ERROR)
        instance = Instance().load(instance['_id'], force=True)
        self.assertEqual(instance['status'], InstanceStatus.ERROR)
        Instance().remove(instance)
Example #9
 def _waitForJobToBeRunning(self, job):
     from girder.plugins.jobs.constants import JobStatus
     from girder.plugins.jobs.models.job import Job
     job = Job().load(id=job['_id'], force=True)
     while job['status'] != JobStatus.RUNNING:
         time.sleep(0.01)
         job = Job().load(id=job['_id'], force=True)
     return job
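A minimal usage sketch; Example #26 below uses this helper the same way before canceling a running job.

# Assumes `self` is a test case with the helper above and `job` was just scheduled.
job = self._waitForJobToBeRunning(job)
Job().cancelJob(job)  # only cancel once the job is actually RUNNING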
Example #10
def execGraph(yaml_graph, username):
    # Write YAML graph to a file
    #yaml_path = "graph.yaml";f = open(yaml_path,"w");f.write(yaml_graph);f.close()

    # TODO: How do we cache/lookup this execution again later? Hash the command?

    # Give our job a unique name
    job_name = username + "-" + str(
        datetime.datetime.now().strftime('%Y%m%d-%H%M%S-%f'))
    job_type = 'k8s.io/yggdrasil'

    # Specify the Docker image and command(s) to run
    docker_image = "cropsinsilico/jupyterlab:latest"
    init_command = "mkdir -p /pvc/" + job_name + " && cp -R /pvc/models/* /pvc/" + job_name + " && chown -R 1000:100 /pvc/" + job_name
    command = "echo '" + str(
        yaml_graph
    ) + "' > graph.yml && echo Running in $(pwd): && ls -al && yggrun graph.yml"

    # Encode our username with Jupyter's special homebrew recipe
    username = jupyterUserEncode(username)

    # Job must run in same namespace as the PVC
    namespace = "hub"

    # Specify some arbitrary limits
    timeout = 300
    num_cpus = 2
    max_ram_mb = 8384

    # Create a record in the Job database
    jobModel = JobModel()
    job_model = jobModel.createJob(job_name,
                                   job_type,
                                   async=True,
                                   kwargs={
                                       'name': job_name,
                                       'type': job_type,
                                       'namespace': namespace,
                                       'username': username,
                                       'init_command': init_command,
                                       'command': command,
                                       'image': docker_image,
                                       'timeout': timeout,
                                       'num_cpus': num_cpus,
                                       'max_ram_mb': max_ram_mb,
                                   })

    jobModel.save(job_model)

    # Create and run the job
    k8s_job = KubernetesJob(username, job_name, namespace, timeout,
                            init_command, command, docker_image, num_cpus,
                            max_ram_mb)
    if not k8s_job.is_running():
        jobModel.scheduleJob(job_model)
        k8s_job.submit()

    return job_name
Example #11
    def setUp(self):
        base.TestCase.setUp(self)

        self.users = [User().createUser(
            'usr' + str(n), 'passwd', 'tst', 'usr', '*****@*****.**' % n)
            for n in range(3)]

        from girder.plugins.jobs.models.job import Job
        self.jobModel = Job()
Example #12
    def testWorkerDifferentTask(self):
        # Test the settings
        resp = self.request('/system/setting', method='PUT', params={
            'key': worker.PluginSettings.API_URL,
            'value': 'bad value'
        }, user=self.admin)
        self.assertStatus(resp, 400)
        self.assertEqual(resp.json['message'], 'API URL must start with http:// or https://.')

        resp = self.request('/system/setting', method='PUT', params={
            'list': json.dumps([{
                'key': worker.PluginSettings.BROKER,
                'value': 'amqp://[email protected]'
            }, {
                'key': worker.PluginSettings.BACKEND,
                'value': 'amqp://[email protected]'
            }])
        }, user=self.admin)
        self.assertStatusOk(resp)

        # Create a job to be handled by the worker plugin
        from girder.plugins.jobs.models.job import Job
        jobModel = Job()
        job = jobModel.createJob(
            title='title', type='foo', handler='worker_handler',
            user=self.admin, public=False, args=(), kwargs={},
            otherFields={
                'celeryTaskName': 'some_other.task',
                'celeryQueue': 'my_other_q'
            })

        job['kwargs'] = {
            'jobInfo': utils.jobInfoSpec(job),
            'inputs': [
                utils.girderInputSpec(self.adminFolder, resourceType='folder')
            ],
            'outputs': [
                utils.girderOutputSpec(self.adminFolder, token=self.adminToken)
            ]
        }
        job = jobModel.save(job)

        # Schedule the job, make sure it is sent to celery
        with mock.patch('celery.Celery') as celeryMock:
            instance = celeryMock.return_value
            instance.send_task.return_value = FakeAsyncResult()

            jobModel.scheduleJob(job)

            sendTaskCalls = celeryMock.return_value.send_task.mock_calls
            self.assertEqual(len(sendTaskCalls), 1)
            self.assertEqual(sendTaskCalls[0][1], (
                'some_other.task', job['args'], job['kwargs']))
            self.assertIn('queue', sendTaskCalls[0][2])
            self.assertEqual(sendTaskCalls[0][2]['queue'], 'my_other_q')
Example #13
    def _postTileViaHttp(self, itemId, fileId, jobAction=None):
        """
        When we know we need to process a job, we have to use an actual http
        request rather than the normal simulated request to cherrypy.  This is
        required because cherrypy needs to know how it was reached so that
        girder_worker can reach it when done.

        :param itemId: the id of the item with the file to process.
        :param fileId: the id of the file that should be processed.
        :param jobAction: if 'delete', delete the job immediately.
        :returns: metadata from the tile if the conversion was successful,
                  False if it converted but didn't result in useable tiles, and
                  None if it failed.
        """
        from girder.plugins.jobs.models.job import Job

        headers = [('Accept', 'application/json')]
        self._buildHeaders(headers, None, self.admin, None, None, None)
        headers = {header[0]: header[1] for header in headers}
        req = requests.post('http://127.0.0.1:%d/api/v1/item/%s/tiles' %
                            (int(os.environ['GIRDER_PORT']), itemId),
                            headers=headers,
                            data={'fileId': fileId})
        self.assertEqual(req.status_code, 200)
        # If we ask to create the item again right away, we should be told that
        # either there is already a job running or the item has already been
        # added
        req = requests.post('http://127.0.0.1:%d/api/v1/item/%s/tiles' %
                            (int(os.environ['GIRDER_PORT']), itemId),
                            headers=headers,
                            data={'fileId': fileId})
        self.assertEqual(req.status_code, 400)
        self.assertTrue('Item already has' in req.json()['message']
                        or 'Item is scheduled' in req.json()['message'])

        if jobAction == 'delete':
            Job().remove(Job().find({}, sort=[('_id', SortDir.DESCENDING)])[0])

        starttime = time.time()
        resp = None
        while time.time() - starttime < 30:
            try:
                resp = self.request(path='/item/%s/tiles' % itemId,
                                    user=self.admin)
                self.assertStatusOk(resp)
                break
            except AssertionError as exc:
                if 'didn\'t meet requirements' in exc.args[0]:
                    return False
                if 'No large image file' in exc.args[0]:
                    return None
                self.assertIn('is still pending creation', exc.args[0])
            time.sleep(0.1)
        self.assertStatusOk(resp)
        return resp.json
Example #14
    def testTaleCopy(self):
        from girder.plugins.wholetale.models.tale import Tale
        from girder.plugins.wholetale.constants import TaleStatus
        from girder.plugins.jobs.models.job import Job
        from girder.plugins.jobs.constants import JobStatus
        tale = Tale().createTale(self.image, [],
                                 creator=self.admin,
                                 public=True)
        workspace = Tale().createWorkspace(tale)
        # The line below works around a bug; it will be addressed elsewhere.
        workspace = Folder().setPublic(workspace, True, save=True)

        adapter = assetstore_utilities.getAssetstoreAdapter(self.ws_assetstore)
        size = 101
        data = BytesIO(b' ' * size)
        files = []
        files.append(Upload().uploadFromFile(data,
                                             size,
                                             'file01.txt',
                                             parentType='folder',
                                             parent=workspace,
                                             assetstore=self.ws_assetstore))
        fullPath = adapter.fullPath(files[0])

        # Create a copy
        resp = self.request(path='/tale/{_id}/copy'.format(**tale),
                            method='POST',
                            user=self.user)
        self.assertStatusOk(resp)

        new_tale = resp.json
        self.assertFalse(new_tale['public'])
        self.assertEqual(new_tale['dataSet'], tale['dataSet'])
        self.assertEqual(new_tale['copyOfTale'], str(tale['_id']))
        self.assertEqual(new_tale['imageId'], str(tale['imageId']))
        self.assertEqual(new_tale['creatorId'], str(self.user['_id']))
        self.assertEqual(new_tale['status'], TaleStatus.PREPARING)

        copied_file_path = re.sub(workspace['name'], new_tale['_id'], fullPath)
        job = Job().findOne({'type': 'wholetale.copy_workspace'})
        for i in range(10):
            job = Job().load(job['_id'], force=True)
            if job['status'] == JobStatus.SUCCESS:
                break
            time.sleep(0.1)
        self.assertTrue(os.path.isfile(copied_file_path))
        resp = self.request(path='/tale/{_id}'.format(**new_tale),
                            method='GET',
                            user=self.user)
        self.assertStatusOk(resp)
        new_tale = resp.json
        self.assertEqual(new_tale['status'], TaleStatus.READY)

        Tale().remove(new_tale)
        Tale().remove(tale)
Example #15
    def testWorkerWithParent(self):
        from girder.plugins.jobs.models.job import Job
        jobModel = Job()
        parentJob = jobModel.createJob(
            title='title', type='foo', handler='worker_handler',
            user=self.admin, public=False, otherFields={'celeryTaskId': '1234'})
        childJob = jobModel.createJob(
            title='title', type='foo', handler='worker_handler',
            user=self.admin, public=False, otherFields={'celeryTaskId': '5678',
                                                        'celeryParentTaskId': '1234'})

        self.assertEqual(parentJob['_id'], childJob['parentId'])
Example #16
    def testLocalJob(self):
        # Make sure local jobs still work
        from girder.plugins.jobs.models.job import Job
        job = Job().createLocalJob(title='local',
                                   type='local',
                                   user=self.users[0],
                                   module='plugin_tests.worker_test',
                                   function='local_job')

        Job().scheduleJob(job)

        job = Job().load(job['_id'], force=True, includeLog=True)
        self.assertIn('job ran', job['log'])
Example #17
    def testWorkerCancel(self):
        from girder.plugins.jobs.models.job import Job
        jobModel = Job()
        job = jobModel.createJob(
            title='title', type='foo', handler='worker_handler',
            user=self.admin, public=False, args=(), kwargs={})

        job['kwargs'] = {
            'jobInfo': utils.jobInfoSpec(job),
            'inputs': [
                utils.girderInputSpec(self.adminFolder, resourceType='folder')
            ],
            'outputs': [
                utils.girderOutputSpec(self.adminFolder, token=self.adminToken)
            ]
        }
        job = jobModel.save(job)
        self.assertEqual(job['status'], JobStatus.INACTIVE)

        # Schedule the job, make sure it is sent to celery
        with mock.patch('celery.Celery') as celeryMock, \
                mock.patch('girder.plugins.worker.AsyncResult') as asyncResult:
            instance = celeryMock.return_value
            instance.send_task.return_value = FakeAsyncResult()

            jobModel.scheduleJob(job)
            jobModel.cancelJob(job)

            asyncResult.assert_called_with('fake_id', app=mock.ANY)
            # Check we called revoke
            asyncResult.return_value.revoke.assert_called_once()
            job = jobModel.load(job['_id'], force=True)
            self.assertEqual(job['status'], CustomJobStatus.CANCELING)
Example #18
def run(job):
    jobModel = Job()
    jobModel.updateJob(job, status=JobStatus.RUNNING)

    src_workspace_id, dest_workspace_id = job["args"]
    user = job["kwargs"]["user"]
    tale = job["kwargs"]["tale"]

    try:
        parent = Folder().load(src_workspace_id,
                               user=user,
                               exc=True,
                               level=AccessType.READ)
        workspace = Folder().load(dest_workspace_id, user=user, exc=True)
        Folder().copyFolderComponents(parent, workspace, user, None)
        tale["status"] = TaleStatus.READY
        Tale().updateTale(tale)
        jobModel.updateJob(job,
                           status=JobStatus.SUCCESS,
                           log="Copying finished")
    except Exception:
        tale["status"] = TaleStatus.ERROR
        Tale().updateTale(tale)
        t, val, tb = sys.exc_info()
        log = "%s: %s\n%s" % (t.__name__, repr(val), traceback.extract_tb(tb))
        jobModel.updateJob(job, status=JobStatus.ERROR, log=log)
        raise
Example #19
    def delete(self, item, skipFileIds=None):
        deleted = False
        if 'largeImage' in item:
            job = None
            if 'jobId' in item['largeImage']:
                try:
                    job = Job().load(item['largeImage']['jobId'],
                                     force=True,
                                     exc=True)
                except ValidationException:
                    # The job has been deleted, but we still need to clean up
                    # the rest of the tile information
                    pass
            if (item['largeImage'].get('expected') and job
                    and job.get('status')
                    in (JobStatus.QUEUED, JobStatus.RUNNING)):
                # cannot cleanly remove the large image, since a conversion
                # job is currently in progress
                # TODO: cancel the job
                # TODO: return a failure error code
                return False

            # If this file was created by the worker job, delete it
            if 'jobId' in item['largeImage']:
                if job:
                    # TODO: does this eliminate all traces of the job?
                    # TODO: do we want to remove the original job?
                    Job().remove(job)
                del item['largeImage']['jobId']

            if 'originalId' in item['largeImage']:
                # The large image file should not be the original file
                assert item['largeImage']['originalId'] != \
                    item['largeImage'].get('fileId')

                if ('fileId' in item['largeImage'] and
                    (not skipFileIds
                     or item['largeImage']['fileId'] not in skipFileIds)):
                    file = File().load(id=item['largeImage']['fileId'],
                                       force=True)
                    if file:
                        File().remove(file)
                del item['largeImage']['originalId']

            del item['largeImage']

            item = self.save(item)
            deleted = True
        self.removeThumbnailFiles(item)
        return deleted
Example #20
    def executeTask(self, item, jobTitle, includeJobInfo, inputs, outputs):
        user = self.getCurrentUser()
        if jobTitle is None:
            jobTitle = item['name']
        task, handler = self._validateTask(item)

        if task.get('mode') == 'girder_worker':
            return runCeleryTask(item['meta']['itemTaskImport'], inputs)

        jobModel = Job()
        job = jobModel.createJob(
            title=jobTitle, type='item_task', handler=handler, user=user)

        # If this is a user auth token, we make an IO-enabled token
        token = self.getCurrentToken()
        tokenModel = Token()
        if tokenModel.hasScope(token, TokenScope.USER_AUTH):
            token = tokenModel.createToken(
                user=user, days=7, scope=(TokenScope.DATA_READ, TokenScope.DATA_WRITE))
            job['itemTaskTempToken'] = token['_id']

        token = tokenModel.addScope(token, 'item_tasks.job_write:%s' % job['_id'])

        job.update({
            'itemTaskId': item['_id'],
            'itemTaskBindings': {
                'inputs': inputs,
                'outputs': outputs
            },
            'kwargs': {
                'task': task,
                'inputs': self._transformInputs(inputs, token),
                'outputs': self._transformOutputs(outputs, token, job, task, item['_id']),
                'validate': False,
                'auto_convert': False,
                'cleanup': True
            }
        })

        if includeJobInfo:
            job['kwargs']['jobInfo'] = utils.jobInfoSpec(job)

        if 'itemTaskCeleryQueue' in item.get('meta', {}):
            job['celeryQueue'] = item['meta']['itemTaskCeleryQueue']

        job = jobModel.save(job)
        jobModel.scheduleJob(job)

        return job
Example #21
 def copyTale(self, tale):
     user = self.getCurrentUser()
     image = self.model('image', 'wholetale').load(tale['imageId'],
                                                   user=user,
                                                   level=AccessType.READ,
                                                   exc=True)
     default_author = ' '.join((user['firstName'], user['lastName']))
     new_tale = self._model.createTale(
         image,
         tale['dataSet'],
         creator=user,
         save=True,
         title=tale.get('title'),
         description=tale.get('description'),
         public=False,
         config=tale.get('config'),
         icon=image.get('icon', ('https://raw.githubusercontent.com/'
                                 'whole-tale/dashboard/master/public/'
                                 'images/whole_tale_logo.png')),
         illustration=tale.get('illustration',
                               ('https://raw.githubusercontent.com/'
                                'whole-tale/dashboard/master/public/'
                                'images/demo-graph2.jpg')),
         authors=tale.get('authors', default_author),
         category=tale.get('category', 'science'),
         narrative=tale.get('narrative'),
         licenseSPDX=tale.get('licenseSPDX'),
         status=TaleStatus.PREPARING,
         relatedIdentifiers=tale.get('relatedIdentifiers'),
     )
     new_tale['copyOfTale'] = tale['_id']
     new_tale = self._model.save(new_tale)
     # asynchronously copy the workspace of a source Tale
     tale_workspaceId = self._model.createWorkspace(tale)['_id']
     new_tale_workspaceId = self._model.createWorkspace(new_tale)['_id']
     job = Job().createLocalJob(
         title='Copy "{title}" workspace'.format(**tale),
         user=user,
         type='wholetale.copy_workspace',
         public=False,
         async=True,
         module='girder.plugins.wholetale.tasks.copy_workspace',
         args=(tale_workspaceId, new_tale_workspaceId),
         kwargs={
             'user': user,
             'tale': new_tale
         })
     Job().scheduleJob(job)
     return new_tale
Example #22
def runSlicerCliTasksDescriptionForFolder(self, folder, image, args, pullImage, params):
    jobModel = Job()
    token = Token().createToken(
        days=3, scope='item_task.set_task_spec.%s' % folder['_id'], user=self.getCurrentUser())
    job = jobModel.createJob(
        title='Read docker task specs: %s' % image, type='folder.item_task_slicer_cli_description',
        handler='worker_handler', user=self.getCurrentUser())

    if args[-1:] == ['--xml']:
        args = args[:-1]

    jobOptions = {
        'itemTaskId': folder['_id'],
        'kwargs': {
            'task': {
                'mode': 'docker',
                'docker_image': image,
                'container_args': args + ['--xml'],
                'pull_image': pullImage,
                'outputs': [{
                    'id': '_stdout',
                    'format': 'text'
                }],
            },
            'outputs': {
                '_stdout': {
                    'mode': 'http',
                    'method': 'POST',
                    'format': 'text',
                    'url': '/'.join((utils.getWorkerApiUrl(), 'folder', str(folder['_id']),
                                     'item_task_slicer_cli_xml')),
                    'headers': {'Girder-Token': token['_id']},
                    'params': {
                        'image': image,
                        'args': json.dumps(args),
                        'pullImage': pullImage
                    }
                }
            },
            'jobInfo': utils.jobInfoSpec(job),
            'validate': False,
            'auto_convert': False
        }
    }
    job.update(jobOptions)

    job = jobModel.save(job)
    jobModel.scheduleJob(job)
    return job
Example #23
def wait_for_status(user, job, status):
    """
    Utility to wait for a job model to move into a particular state.
    :param job: The job model to wait on
    :param status: The state to wait for.
    :returns: True if the job model moved into the requested state, False otherwise.
    """
    retries = 0
    jobModel = Job()
    while retries < 10:
        job = jobModel.load(job['_id'], user=user)
        if job['status'] == status:
            return True
        time.sleep(0.5)
        retries += 1

    return False
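A minimal usage sketch, assuming a loaded user and job document and the jobs plugin's JobStatus constants.

from girder.plugins.jobs.constants import JobStatus

# Poll until the job reports success, or give up after the retry budget.
if wait_for_status(user, job, JobStatus.SUCCESS):
    print('job completed successfully')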
Example #24
def attachParentJob(event):
    """Attach parentJob before a model is saved."""
    job = event.info
    if job.get('celeryParentTaskId'):
        celeryParentTaskId = job['celeryParentTaskId']
        parentJob = Job().findOne({'celeryTaskId': celeryParentTaskId})
        event.info['parentId'] = parentJob['_id']
Example #25
def jobInfoSpec(job, token=None, logPrint=True):
    """
    Build the jobInfo specification for a task to write status and log output
    back to a Girder job.

    :param job: The job document representing the worker task.
    :type job: dict
    :param token: The token to use. Creates a job token if not passed.
    :type token: str or dict
    :param logPrint: Whether standard output from the job should be
        written to the job's log.
    """
    if token is None:
        token = Job().createJobToken(job)

    if isinstance(token, dict):
        token = token['_id']

    return {
        'method': 'PUT',
        'url': '/'.join((getWorkerApiUrl(), 'job', str(job['_id']))),
        'reference': str(job['_id']),
        'headers': {
            'Girder-Token': token
        },
        'logPrint': logPrint
    }
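A minimal sketch of how the returned spec gets used, mirroring Examples #4 and #5: it rides along with the task so the worker can report status and log output back to this job.

# Assumes a job document created via Job().createJob(...).
spec = jobInfoSpec(job)          # creates a job token since none is passed
job['kwargs']['jobInfo'] = spec  # as in Example #4
# or pass it as a Celery header instead, as in Example #5:
# headers={'jobInfoSpec': spec}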
Example #26
    def _createThumbnails(self, spec, cancel=False):
        from girder.plugins.jobs.constants import JobStatus
        from girder.plugins.jobs.models.job import Job

        params = {'spec': json.dumps(spec)}
        if cancel:
            params['logInterval'] = 0
        resp = self.request(method='PUT',
                            path='/large_image/thumbnails',
                            user=self.admin,
                            params=params)
        self.assertStatusOk(resp)
        job = resp.json
        if cancel:
            job = self._waitForJobToBeRunning(job)
            job = Job().cancelJob(job)

        starttime = time.time()
        while True:
            self.assertTrue(time.time() - starttime < 30)
            resp = self.request('/job/%s' % str(job['_id']))
            self.assertStatusOk(resp)
            if resp.json.get('status') == JobStatus.SUCCESS:
                return True
            if resp.json.get('status') == JobStatus.ERROR:
                return False
            if resp.json.get('status') == JobStatus.CANCELED:
                return 'canceled'
            time.sleep(0.1)
Example #27
    def startRun(self, run, entrypoint):
        user = self.getCurrentUser()

        if not entrypoint:
            entrypoint = "run.sh"

        runRoot = Folder().load(run['parentId'],
                                user=user,
                                level=AccessType.WRITE)
        tale = Tale().load(runRoot['meta']['taleId'],
                           user=user,
                           level=AccessType.READ)

        resource = {
            'type': 'wt_recorded_run',
            'tale_id': tale['_id'],
            'tale_title': tale['title']
        }

        token = Token().createToken(user=user, days=0.5)

        notification = init_progress(resource, user, 'Recorded run',
                                     'Initializing', RECORDED_RUN_STEP_TOTAL)

        rrTask = recorded_run.signature(
            args=[str(run['_id']),
                  str(tale['_id']), entrypoint],
            girder_job_other_fields={
                'wt_notification_id': str(notification['_id']),
            },
            girder_client_token=str(token['_id']),
        ).apply_async()

        return Job().filter(rrTask.job, user=user)
Example #28
def cancel(event):
    """
    This is bound to the "jobs.cancel" event, and will be triggered any time
    a job is canceled. This handler will process any job that has the
    handler field set to "worker_handler" or "celery_handler".
    """
    job = event.info
    if job['handler'] in ['worker_handler', 'celery_handler']:
        # Stop event propagation and prevent default, we are using a custom state
        event.stopPropagation().preventDefault()

        celeryTaskId = job.get('celeryTaskId')

        if celeryTaskId is None:
            msg = ("Unable to cancel Celery task. Job '%s' doesn't have a Celery task id."
                   % job['_id'])
            logger.warn(msg)
            return

        if job['status'] not in [CustomJobStatus.CANCELING, JobStatus.CANCELED,
                                 JobStatus.SUCCESS, JobStatus.ERROR]:
            # Set the job status to canceling
            Job().updateJob(job, status=CustomJobStatus.CANCELING)

            # Send the revoke request.
            asyncResult = AsyncResult(celeryTaskId, app=getCeleryApp())
            asyncResult.revoke()
Example #29
def load(info):
    events.bind('jobs.schedule', 'worker', schedule)
    events.bind('jobs.status.validate', 'worker', validateJobStatus)
    events.bind('jobs.status.validTransitions', 'worker', validTransitions)
    events.bind('jobs.cancel', 'worker', cancel)
    events.bind('model.job.save.after', 'worker', attachJobInfoSpec)
    events.bind('model.job.save', 'worker', attachParentJob)
    Job().exposeFields(AccessType.SITE_ADMIN, {'celeryTaskId', 'celeryQueue'})
Example #30
    def testTaleImportZip(self):
        image = self.model("image", "wholetale").createImage(
            name="Jupyter Classic",
            creator=self.user,
            public=True,
            config=dict(
                template="base.tpl",
                buildpack="PythonBuildPack",
                user="******",
                port=8888,
                urlPath="",
            ),
        )
        with mock.patch("fs.copy.copy_fs") as mock_copy:
            with open(
                os.path.join(DATA_PATH, "5c92fbd472a9910001fbff72.zip"), "rb"
            ) as fp:
                resp = self.request(
                    path="/tale/import",
                    method="POST",
                    user=self.user,
                    type="application/zip",
                    body=fp.read(),
                )

            self.assertStatusOk(resp)
            tale = resp.json

            from girder.plugins.jobs.models.job import Job

            job = Job().findOne({"type": "wholetale.import_tale"})
            self.assertEqual(
                json.loads(job["kwargs"])["taleId"]["$oid"], tale["_id"]
            )
            for i in range(300):
                if job["status"] in {JobStatus.SUCCESS, JobStatus.ERROR}:
                    break
                time.sleep(0.1)
                job = Job().load(job["_id"], force=True)
            self.assertEqual(job["status"], JobStatus.SUCCESS)
        mock_copy.assert_called_once()
        # TODO: make it more extensive...
        self.assertTrue(
            self.model("tale", "wholetale").findOne({"title": "Water Tale"}) is not None
        )
        self.model("image", "wholetale").remove(image)
Example #31
def _updateJob(event):
    """
    Called when a job is saved, updated, or removed.  If this is a large image
    job and it is ended, clean up after it.
    """
    from girder.plugins.jobs.constants import JobStatus
    from girder.plugins.jobs.models.job import Job

    job = event.info['job'] if event.name == 'jobs.job.update.after' else event.info
    meta = job.get('meta', {})
    if (meta.get('creator') != 'large_image' or not meta.get('itemId')
            or meta.get('task') != 'createImageItem'):
        return
    status = job['status']
    if event.name == 'model.job.remove' and status not in (JobStatus.ERROR,
                                                           JobStatus.CANCELED,
                                                           JobStatus.SUCCESS):
        status = JobStatus.CANCELED
    if status not in (JobStatus.ERROR, JobStatus.CANCELED, JobStatus.SUCCESS):
        return
    item = Item().load(meta['itemId'], force=True)
    if not item or 'largeImage' not in item:
        return
    if item.get('largeImage', {}).get('expected'):
        # We can get a SUCCESS message before we get the upload message, so
        # don't clear the expected status on success.
        if status != JobStatus.SUCCESS:
            del item['largeImage']['expected']
    notify = item.get('largeImage', {}).get('notify')
    msg = None
    if notify:
        del item['largeImage']['notify']
        if status == JobStatus.SUCCESS:
            msg = 'Large image created'
        elif status == JobStatus.CANCELED:
            msg = 'Large image creation canceled'
        else:  # ERROR
            msg = 'FAILED: Large image creation failed'
        msg += ' for item %s' % item['name']
    if (status in (JobStatus.ERROR, JobStatus.CANCELED)
            and 'largeImage' in item):
        del item['largeImage']
    Item().save(item)
    if msg and event.name != 'model.job.remove':
        Job().updateJob(job, progressMessage=msg)
    if notify:
        Notification().createNotification(
            type='large_image.finished_image_item',
            data={
                'job_id': job['_id'],
                'item_id': item['_id'],
                'success': status == JobStatus.SUCCESS,
                'status': status
            },
            user={'_id': job.get('userId')},
            expires=datetime.datetime.utcnow() +
            datetime.timedelta(seconds=30))
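A hedged sketch of how a cleanup handler like _updateJob is typically registered; this wiring is assumed, not taken from the plugin source, but the event names match the ones the function itself inspects.

from girder import events

def load(info):
    events.bind('jobs.job.update.after', 'large_image', _updateJob)
    events.bind('model.job.remove', 'large_image', _updateJob)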
Example #32
    def setUp(self):
        base.TestCase.setUp(self)

        self.users = [User().createUser(
            'usr' + str(n), 'passwd', 'tst', 'usr', '*****@*****.**' % n)
            for n in range(3)]

        from girder.plugins.jobs.models.job import Job
        self.jobModel = Job()
Example #33
    def testTaleImportZipFail(self):
        image = Image().createImage(
            name="Jupyter Classic",
            creator=self.user,
            public=True,
            config=dict(
                template="base.tpl",
                buildpack="PythonBuildPack",
                user="******",
                port=8888,
                urlPath="",
            ),
        )
        with mock.patch("girder.plugins.wholetale.lib.pids_to_entities") as mock_pids:
            mock_pids.side_effect = ValueError
            with open(
                os.path.join(DATA_PATH, "5c92fbd472a9910001fbff72.zip"), "rb"
            ) as fp:
                resp = self.request(
                    path="/tale/import",
                    method="POST",
                    user=self.user,
                    type="application/zip",
                    body=fp.read(),
                )

            self.assertStatusOk(resp)
            tale = resp.json

            job = Job().findOne({"type": "wholetale.import_tale"})
            self.assertEqual(
                json.loads(job["kwargs"])["taleId"]["$oid"], tale["_id"]
            )
            for i in range(300):
                if job["status"] in {JobStatus.SUCCESS, JobStatus.ERROR}:
                    break
                time.sleep(0.1)
                job = Job().load(job["_id"], force=True)
            self.assertEqual(job["status"], JobStatus.ERROR)
            Job().remove(job)
        tale = Tale().load(tale["_id"], force=True)
        self.assertEqual(tale["status"], TaleStatus.ERROR)
        Tale().remove(tale)
        Image().remove(image)
Example #34
    def testWorkerWithParent(self):
        from girder.plugins.jobs.models.job import Job
        jobModel = Job()
        parentJob = jobModel.createJob(title='title',
                                       type='foo',
                                       handler='worker_handler',
                                       user=self.admin,
                                       public=False,
                                       otherFields={'celeryTaskId': '1234'})
        childJob = jobModel.createJob(title='title',
                                      type='foo',
                                      handler='worker_handler',
                                      user=self.admin,
                                      public=False,
                                      otherFields={
                                          'celeryTaskId': '5678',
                                          'celeryParentTaskId': '1234'
                                      })

        self.assertEqual(parentJob['_id'], childJob['parentId'])
Example #35
def run(job):
    jobModel = Job()
    jobModel.updateJob(job, status=JobStatus.RUNNING)

    try:
        newFile = createThumbnail(**job['kwargs'])
        log = 'Created thumbnail file %s.' % newFile['_id']
        jobModel.updateJob(job, status=JobStatus.SUCCESS, log=log)
    except Exception:
        t, val, tb = sys.exc_info()
        log = '%s: %s\n%s' % (t.__name__, repr(val), traceback.extract_tb(tb))
        jobModel.updateJob(job, status=JobStatus.ERROR, log=log)
        raise
Example #36
def setMultiscaleMetaData(jobId, inputFolderId, outputFolderId):
    """Set the multiscale meta data for the jobId.

    Currently, we use this to keep track of the input and output
    folders.

    Returns the updated job.
    """
    # We want to update the job with some multiscale settings.
    # We will put it in the meta data.
    job = Job().findOne({'_id': jobId})
    multiscale_io = {
        'meta': {
            'multiscale_settings': {
                'inputFolderId': inputFolderId,
                'outputFolderId': outputFolderId
            }
        }
    }

    return Job().updateJob(job, otherFields=multiscale_io)
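A minimal usage sketch, assuming the caller already has the job id and the two folder ids to record; `inputFolder` and `outputFolder` are hypothetical folder documents.

# Record which folders a multiscale job read from and wrote to.
updated = setMultiscaleMetaData(job['_id'],
                                str(inputFolder['_id']),
                                str(outputFolder['_id']))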
Example #37
 def buildImage(self, image, params):
     user = self.getCurrentUser()
     recipe = self.model('recipe', 'wholetale').load(image['recipeId'],
                                                     user=user,
                                                     level=AccessType.READ,
                                                     exc=True)
     url = '{}/archive/{}.tar.gz'.format(recipe['url'], recipe['commitId'])
     jobTitle = 'Building image %s' % image['fullName']
     jobModel = Job()
     # Create a job to be handled by the worker plugin
     job = jobModel.createJob(
         title=jobTitle,
         type='build_image',
         handler='worker_handler',
         user=user,
         public=False,
         args=(str(image['_id']), image['fullName'], url),
         kwargs={},
         otherFields={'celeryTaskName': 'gwvolman.tasks.build_image'})
     jobModel.scheduleJob(job)
     return job
Example #38
    def testWorker(self):
        # Test the settings
        resp = self.request('/system/setting', method='PUT', params={
            'list': json.dumps([{
                'key': worker.PluginSettings.BROKER,
                'value': 'amqp://[email protected]'
            }, {
                'key': worker.PluginSettings.BACKEND,
                'value': 'amqp://[email protected]'
            }])
        }, user=self.admin)
        self.assertStatusOk(resp)

        # Create a job to be handled by the worker plugin
        from girder.plugins.jobs.models.job import Job
        jobModel = Job()
        job = jobModel.createJob(
            title='title', type='foo', handler='worker_handler',
            user=self.admin, public=False, args=(), kwargs={})

        job['kwargs'] = {
            'jobInfo': utils.jobInfoSpec(job),
            'inputs': [
                utils.girderInputSpec(self.adminFolder, resourceType='folder')
            ],
            'outputs': [
                utils.girderOutputSpec(self.adminFolder, token=self.adminToken)
            ]
        }
        job = jobModel.save(job)
        self.assertEqual(job['status'], JobStatus.INACTIVE)

        # Schedule the job, make sure it is sent to celery
        with mock.patch('celery.Celery') as celeryMock:
            instance = celeryMock.return_value
            instance.send_task.return_value = FakeAsyncResult()

            jobModel.scheduleJob(job)

            # Make sure we sent the job to celery
            self.assertEqual(len(celeryMock.mock_calls), 2)
            self.assertEqual(celeryMock.mock_calls[0][1], ('girder_worker',))
            self.assertEqual(celeryMock.mock_calls[0][2], {
                'broker': 'amqp://[email protected]',
                'backend': 'amqp://[email protected]'
            })

            sendTaskCalls = celeryMock.return_value.send_task.mock_calls

            self.assertEqual(len(sendTaskCalls), 1)
            self.assertEqual(sendTaskCalls[0][1], (
                'girder_worker.run', job['args'], job['kwargs']))

            self.assertTrue('headers' in sendTaskCalls[0][2])
            self.assertTrue('jobInfoSpec' in sendTaskCalls[0][2]['headers'])

            # Make sure we got and saved the celery task id
            job = jobModel.load(job['_id'], force=True)
            self.assertEqual(job['celeryTaskId'], 'fake_id')
            self.assertEqual(job['status'], JobStatus.QUEUED)
Example #39
def run(job):
    jobModel = Job()
    jobModel.updateJob(job, status=JobStatus.RUNNING)

    try:
        newFile = createThumbnail(**job['kwargs'])
        log = 'Created thumbnail file %s.' % newFile['_id']
        jobModel.updateJob(job, status=JobStatus.SUCCESS, log=log)
    except Exception:
        t, val, tb = sys.exc_info()
        log = '%s: %s\n%s' % (t.__name__, repr(val), traceback.extract_tb(tb))
        jobModel.updateJob(job, status=JobStatus.ERROR, log=log)
        raise
Example #40
def runSlicerCliTasksDescriptionForItem(
        self, item, image, args, setName, setDescription, pullImage, params):
    if 'meta' not in item:
        item['meta'] = {}

    if image is None:
        image = item.get('meta', {}).get('itemTaskSpec', {}).get('docker_image')

    if not image:
        raise RestException(
            'You must pass an image parameter, or set the itemTaskSpec.docker_image '
            'field of the item.')

    jobModel = Job()
    token = Token().createToken(
        days=3, scope='item_task.set_task_spec.%s' % item['_id'])
    job = jobModel.createJob(
        title='Read docker Slicer CLI: %s' % image, type='item.item_task_slicer_cli_description',
        handler='worker_handler', user=self.getCurrentUser())

    if args[-1:] == ['--xml']:
        args = args[:-1]

    job.update({
        'itemTaskId': item['_id'],
        'kwargs': {
            'task': {
                'mode': 'docker',
                'docker_image': image,
                'container_args': args + ['--xml'],
                'pull_image': pullImage,
                'outputs': [{
                    'id': '_stdout',
                    'format': 'text'
                }],
            },
            'outputs': {
                '_stdout': {
                    'mode': 'http',
                    'method': 'PUT',
                    'format': 'text',
                    'url': '/'.join((utils.getWorkerApiUrl(), 'item', str(item['_id']),
                                     'item_task_slicer_cli_xml')),
                    'params': {
                        'setName': setName,
                        'setDescription': setDescription
                    },
                    'headers': {'Girder-Token': token['_id']}
                }
            },
            'jobInfo': utils.jobInfoSpec(job),
            'validate': False,
            'auto_convert': False
        }
    })

    item['meta']['itemTaskSpec'] = {
        'mode': 'docker',
        'docker_image': image
    }

    if args:
        item['meta']['itemTaskSlicerCliArgs'] = args

    Item().save(item)

    job = jobModel.save(job)
    jobModel.scheduleJob(job)

    return job
Example #41
    def testConfigureItemTaskFromSlicerCli(self):
        # Create a new item that will become a task
        item = Item().createItem(name='placeholder', creator=self.admin, folder=self.privateFolder)

        # Create task to introspect container
        with mock.patch('girder.plugins.jobs.models.job.Job.scheduleJob') as scheduleMock:
            resp = self.request(
                '/item/%s/item_task_slicer_cli_description' % item['_id'], method='POST', params={
                    'image': 'johndoe/foo:v5',
                    'args': json.dumps(['--foo', 'bar'])
                }, user=self.admin)
            self.assertStatusOk(resp)
            self.assertEqual(resp.json['_modelType'], 'job')
            self.assertEqual(len(scheduleMock.mock_calls), 1)
            job = scheduleMock.mock_calls[0][1][0]
            self.assertEqual(job['handler'], 'worker_handler')
            self.assertEqual(job['itemTaskId'], item['_id'])
            self.assertEqual(job['kwargs']['outputs']['_stdout']['method'], 'PUT')
            self.assertTrue(job['kwargs']['outputs']['_stdout']['url'].endswith(
                'item/%s/item_task_slicer_cli_xml' % item['_id']))
            token = job['kwargs']['outputs']['_stdout']['headers']['Girder-Token']

        # Task should not be registered until we get the callback
        resp = self.request('/item_task', user=self.admin)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json, [])

        # Image and args should be stored in the item metadata
        item = Item().load(item['_id'], force=True)
        self.assertEqual(item['meta']['itemTaskSpec']['docker_image'], 'johndoe/foo:v5')
        self.assertEqual(item['meta']['itemTaskSlicerCliArgs'], ['--foo', 'bar'])

        # Simulate callback from introspection job
        with open(os.path.join(os.path.dirname(__file__), 'slicer_cli.xml')) as f:
            xml = f.read()

        resp = self.request(
            '/item/%s/item_task_slicer_cli_xml' % item['_id'], method='PUT', params={
                'setName': True,
                'setDescription': True
            }, token=token, body=xml, type='application/xml')
        self.assertStatusOk(resp)

        # We should only be able to see tasks we have read access on
        resp = self.request('/item_task')
        self.assertStatusOk(resp)
        self.assertEqual(resp.json, [])

        resp = self.request('/item_task', user=self.admin)
        self.assertStatusOk(resp)
        self.assertEqual(len(resp.json), 1)
        self.assertEqual(resp.json[0]['_id'], str(item['_id']))

        item = Item().load(item['_id'], force=True)
        self.assertEqual(item['name'], 'PET phantom detector CLI')
        self.assertEqual(
            item['description'],
            u'**Description**: Detects positions of PET/CT pocket phantoms in PET image.\n\n'
            u'**Author(s)**: Girder Developers\n\n**Version**: 1.0\n\n'
            u'**License**: Apache 2.0\n\n**Acknowledgements**: *none*\n\n'
            u'*This description was auto-generated from the Slicer CLI XML specification.*'
        )
        self.assertTrue(item['meta']['isItemTask'])
        self.assertEqual(item['meta']['itemTaskSpec'], {
            'mode': 'docker',
            'docker_image': 'johndoe/foo:v5',
            'container_args': [
                '--foo', 'bar', '--InputImage=$input{--InputImage}',
                '--MaximumLineStraightnessDeviation=$input{--MaximumLineStraightnessDeviation}',
                '--MaximumRadius=$input{--MaximumRadius}',
                '--MaximumSphereDistance=$input{--MaximumSphereDistance}',
                '--MinimumRadius=$input{--MinimumRadius}',
                '--MinimumSphereActivity=$input{--MinimumSphereActivity}',
                '--MinimumSphereDistance=$input{--MinimumSphereDistance}',
                '--SpheresPerPhantom=$input{--SpheresPerPhantom}', '$flag{--StrictSorting}',
                '--DetectedPoints=$output{--DetectedPoints}'
            ],
            'inputs': [{
                'description': 'Input image to be analysed.',
                'format': 'image',
                'name': 'InputImage', 'type': 'image', 'id': '--InputImage',
                'target': 'filepath'
            }, {
                'description': 'Used for eliminating detections which are not in a straight line. '
                               'Unit: multiples of geometric average of voxel spacing',
                'format': 'number',
                'default': {'data': 1.0},
                'type': 'number',
                'id': '--MaximumLineStraightnessDeviation',
                'name': 'MaximumLineStraightnessDeviation'
            }, {
                'description': 'Used for eliminating too big blobs. Unit: millimeter [mm]',
                'format': 'number', 'default': {'data': 20.0},
                'type': 'number',
                'id': '--MaximumRadius',
                'name': 'MaximumRadius'
            }, {
                'description': 'Signifies maximum distance between adjacent sphere centers [mm]. '
                               'Used to separate phantoms from tumors.',
                'format': 'number', 'default': {'data': 40.0},
                'type': 'number',
                'id': '--MaximumSphereDistance',
                'name': 'MaximumSphereDistance'
            }, {
                'description': 'Used for eliminating too small blobs. Unit: millimeter [mm]',
                'format': 'number',
                'default': {'data': 3.0},
                'type': 'number',
                'id': '--MinimumRadius',
                'name': 'MinimumRadius'
            }, {
                'description': 'Used for thresholding in blob detection. '
                               'Unit: becquerels per milliliter [Bq/ml]',
                'format': 'number', 'default': {'data': 5000.0},
                'type': 'number',
                'id': '--MinimumSphereActivity',
                'name': 'MinimumSphereActivity'
            }, {
                'description': 'Signifies minimum distance between adjacent sphere centers [mm]. '
                               'Used to separate phantoms from tumors.',
                'format': 'number',
                'default': {'data': 30.0},
                'type': 'number',
                'id': '--MinimumSphereDistance',
                'name': 'MinimumSphereDistance'
            }, {
                'description': 'What kind of phantom are we working with here?',
                'format': 'number-enumeration',
                'default': {'data': 3},
                'type': 'number-enumeration',
                'id': '--SpheresPerPhantom',
                'name': 'SpheresPerPhantom',
                'values': [2, 3]
            }, {
                'description': 'Controls whether spheres within a phantom must have descending '
                               'activities. If OFF, they can have approximately same activities '
                               '(within 15%).',
                'format': 'boolean',
                'default': {'data': False},
                'type': 'boolean',
                'id': '--StrictSorting',
                'name': 'StrictSorting'
            }],
            'outputs': [{
                'description': 'Fiducial points, one for each detected sphere. '
                               'Will be multiple of 3.',
                'format': 'new-file',
                'name': 'DetectedPoints',
                'type': 'new-file',
                'id': '--DetectedPoints',
                'target': 'filepath'
            }]
        })

        # Shouldn't be able to run the task if we don't have the execute permission flag
        Folder().setUserAccess(
            self.privateFolder, user=self.user, level=AccessType.READ, save=True)
        resp = self.request(
            '/item_task/%s/execution' % item['_id'], method='POST', user=self.user)
        self.assertStatus(resp, 403)

        # Grant the user permission, and run the task
        from girder.plugins.item_tasks.constants import ACCESS_FLAG_EXECUTE_TASK
        Folder().setUserAccess(
            self.privateFolder, user=self.user, level=AccessType.WRITE,
            flags=ACCESS_FLAG_EXECUTE_TASK, currentUser=self.admin, save=True)

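        # Bind the CLI inputs: the image comes from a Girder item, the scalar
        # parameters are passed inline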
        inputs = {
            '--InputImage': {
                'mode': 'girder',
                'resource_type': 'item',
                'id': str(item['_id'])
            },
            '--MaximumLineStraightnessDeviation': {
                'mode': 'inline',
                'data': 1
            },
            '--MaximumRadius': {
                'mode': 'inline',
                'data': 20
            },
            '--MaximumSphereDistance': {
                'mode': 'inline',
                'data': 40
            },
            '--MinimumRadius': {
                'mode': 'inline',
                'data': 3
            },
            '--MinimumSphereActivity': {
                'mode': 'inline',
                'data': 5000
            },
            '--MinimumSphereDistance': {
                'mode': 'inline',
                'data': 30
            },
            '--SpheresPerPhantom': {
                'mode': 'inline',
                'data': 3
            },
            '--StrictSorting': {
                'mode': 'inline',
                'data': False
            }
        }

        outputs = {
            '--DetectedPoints': {
                'mode': 'girder',
                'parent_id': str(self.privateFolder['_id']),
                'parent_type': 'folder',
                'name': 'test.txt'
            }
        }

        # Ensure task was scheduled
        with mock.patch('girder.plugins.jobs.models.job.Job.scheduleJob') as scheduleMock:
            resp = self.request(
                '/item_task/%s/execution' % item['_id'], method='POST', user=self.user, params={
                    'inputs': json.dumps(inputs),
                    'outputs': json.dumps(outputs)
                })
            self.assertEqual(len(scheduleMock.mock_calls), 1)
        self.assertStatusOk(resp)
        job = resp.json
        self.assertEqual(job['_modelType'], 'job')
        self.assertNotIn('kwargs', job)  # ordinary user can't see kwargs

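        # Load the job through the model (force=True) to inspect the kwargs hidden
        # from the REST response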
        from girder.plugins.jobs.models.job import Job
        jobModel = Job()
        job = jobModel.load(job['_id'], force=True)
        output = job['kwargs']['outputs']['--DetectedPoints']

        # Simulate output from the worker
        contents = b'Hello world'
        resp = self.request(
            path='/file', method='POST', token=output['token'], params={
                'parentType': output['parent_type'],
                'parentId': output['parent_id'],
                'name': output['name'],
                'size': len(contents),
                'mimeType': 'text/plain',
                'reference': output['reference']
            })
        self.assertStatusOk(resp)

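        # Complete the upload by sending the contents as a single chunk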
        uploadId = resp.json['_id']
        fields = [('offset', 0), ('uploadId', uploadId)]
        files = [('chunk', output['name'], contents)]
        resp = self.multipartRequest(
            path='/file/chunk', fields=fields, files=files, token=output['token'])
        self.assertStatusOk(resp)
        file = resp.json
        self.assertEqual(file['_modelType'], 'file')
        self.assertEqual(file['size'], 11)
        self.assertEqual(file['mimeType'], 'text/plain')
        file = File().load(file['_id'], force=True)

        # Make sure the temp token is removed once the job status reaches a final state
        job = jobModel.load(job['_id'], force=True)
        self.assertIn('itemTaskTempToken', job)

        from girder.plugins.jobs.constants import JobStatus
        # Transition through states to SUCCESS
        job = jobModel.updateJob(job, status=JobStatus.QUEUED)
        job = jobModel.updateJob(job, status=JobStatus.RUNNING)
        job = jobModel.updateJob(job, status=JobStatus.SUCCESS)

        self.assertNotIn('itemTaskTempToken', job)
        self.assertIn('itemTaskBindings', job)

        # Wait for async data.process event to bind output provenance
        start = time.time()
        while time.time() - start < 15:
            job = jobModel.load(job['_id'], force=True)

            if 'itemId' in job['itemTaskBindings']['outputs']['--DetectedPoints']:
                break
            else:
                time.sleep(0.2)
        else:
            raise Exception('Output binding did not occur in time')

        self.assertEqual(
            job['itemTaskBindings']['outputs']['--DetectedPoints']['itemId'], file['itemId'])
Example #42
class JobsTestCase(base.TestCase):
    def setUp(self):
        base.TestCase.setUp(self)

        self.users = [User().createUser(
            'usr' + str(n), 'passwd', 'tst', 'usr', 'usr%d@example.com' % n)  # placeholder address
            for n in range(3)]

        from girder.plugins.jobs.models.job import Job
        self.jobModel = Job()

    def testJobs(self):
        self.job = None

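        # Stand-in scheduler: when our handler's job is scheduled, mark it as running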
        def schedule(event):
            self.job = event.info
            if self.job['handler'] == 'my_handler':
                self.job['status'] = JobStatus.RUNNING
                self.job = self.jobModel.save(self.job)
                self.assertEqual(self.job['args'], ('hello', 'world'))
                self.assertEqual(self.job['kwargs'], {'a': 'b'})

        events.bind('jobs.schedule', 'test', schedule)

        # Create a job
        job = self.jobModel.createJob(
            title='Job Title', type='my_type', args=('hello', 'world'),
            kwargs={'a': 'b'}, user=self.users[1], handler='my_handler',
            public=False)
        self.assertEqual(self.job, None)
        self.assertEqual(job['status'], JobStatus.INACTIVE)

        # Schedule the job, make sure our handler was invoked
        self.jobModel.scheduleJob(job)
        self.assertEqual(self.job['_id'], job['_id'])
        self.assertEqual(self.job['status'], JobStatus.RUNNING)

        # Since the job is not public, user 2 should not have access
        path = '/job/%s' % job['_id']
        resp = self.request(path, user=self.users[2])
        self.assertStatus(resp, 403)
        resp = self.request(path, user=self.users[2], method='PUT')
        self.assertStatus(resp, 403)
        resp = self.request(path, user=self.users[2], method='DELETE')
        self.assertStatus(resp, 403)

        # If no user is specified, we should get a 401 error
        resp = self.request(path, user=None)
        self.assertStatus(resp, 401)

        # Make sure user who created the job can see it
        resp = self.request(path, user=self.users[1])
        self.assertStatusOk(resp)

        # We should be able to update the job as the user who created it
        resp = self.request(path, method='PUT', user=self.users[1], params={
            'log': 'My log message\n'
        })
        self.assertStatusOk(resp)

        # We should be able to create a job token and use that to update it too
        token = self.jobModel.createJobToken(job)
        resp = self.request(path, method='PUT', params={
            'log': 'append message',
            'token': token['_id']
        })
        self.assertStatusOk(resp)
        # We shouldn't get the log back in this case
        self.assertNotIn('log', resp.json)

        # Do a fetch on the job itself to get the log
        resp = self.request(path, user=self.users[1])
        self.assertStatusOk(resp)
        self.assertEqual(
            resp.json['log'], ['My log message\n', 'append message'])

        # Test overwriting the log and updating status
        resp = self.request(path, method='PUT', params={
            'log': 'overwritten log',
            'overwrite': 'true',
            'status': JobStatus.SUCCESS,
            'token': token['_id']
        })
        self.assertStatusOk(resp)
        self.assertNotIn('log', resp.json)
        self.assertEqual(resp.json['status'], JobStatus.SUCCESS)

        job = self.jobModel.load(job['_id'], force=True, includeLog=True)
        self.assertEqual(job['log'], ['overwritten log'])

        # We should be able to delete the job as the user who created it
        resp = self.request(path, user=self.users[1], method='DELETE')
        self.assertStatusOk(resp)
        job = self.jobModel.load(job['_id'], force=True)
        self.assertIsNone(job)

    def testLegacyLogBehavior(self):
        # Force save a job with a string log to simulate a legacy job record
        job = self.jobModel.createJob(
            title='legacy', type='legacy', user=self.users[1], save=False)
        job['log'] = 'legacy log'
        job = self.jobModel.save(job, validate=False)

        self.assertEqual(job['log'], 'legacy log')

        # Load the record; the log should now come back as a list
        job = self.jobModel.load(job['_id'], force=True, includeLog=True)
        self.assertEqual(job['log'], ['legacy log'])

    def testListJobs(self):
        job = self.jobModel.createJob(title='A job', type='t', user=self.users[1], public=False)
        anonJob = self.jobModel.createJob(title='Anon job', type='t')
        # Ensure timestamp for public job is strictly higher (ms resolution)
        time.sleep(0.1)
        publicJob = self.jobModel.createJob(
            title='Anon job', type='t', public=True)

        # User 1 should be able to see their own jobs
        resp = self.request('/job', user=self.users[1], params={
            'userId': self.users[1]['_id']
        })
        self.assertStatusOk(resp)
        self.assertEqual(len(resp.json), 1)
        self.assertEqual(resp.json[0]['_id'], str(job['_id']))

        # User 2 should not see user 1's jobs in the list
        resp = self.request('/job', user=self.users[2], params={
            'userId': self.users[1]['_id']
        })
        self.assertEqual(resp.json, [])

        # Omitting userId should default to the current user
        resp = self.request('/job', user=self.users[1])
        self.assertStatusOk(resp)
        self.assertEqual(len(resp.json), 1)
        self.assertEqual(resp.json[0]['_id'], str(job['_id']))

        # Explicitly passing 'none' as the userId should list anonymous jobs
        resp = self.request('/job', user=self.users[0], params={
            'userId': 'none'
        })
        self.assertStatusOk(resp)
        self.assertEqual(len(resp.json), 2)
        self.assertEqual(resp.json[0]['_id'], str(publicJob['_id']))
        self.assertEqual(resp.json[1]['_id'], str(anonJob['_id']))

        # An unauthenticated request should only see the public anonymous job
        resp = self.request('/job', params={'userId': 'none'})
        self.assertStatusOk(resp)
        self.assertEqual(len(resp.json), 1)
        self.assertEqual(resp.json[0]['_id'], str(publicJob['_id']))

    def testListAllJobs(self):
        self.jobModel.createJob(title='user 0 job', type='t', user=self.users[0], public=False)
        self.jobModel.createJob(title='user 1 job', type='t', user=self.users[1], public=False)
        self.jobModel.createJob(title='user 1 job', type='t', user=self.users[1], public=True)
        self.jobModel.createJob(title='user 2 job', type='t', user=self.users[2])
        self.jobModel.createJob(title='anonymous job', type='t')
        self.jobModel.createJob(title='anonymous public job', type='t2', public=True)

        # User 0, as a site admin, should be able to see all jobs
        resp = self.request('/job/all', user=self.users[0])
        self.assertStatusOk(resp)
        self.assertEqual(len(resp.json), 6)

        # Test deprecated listAll method
        jobs = list(self.jobModel.listAll(limit=0, offset=0, sort=None, currentUser=self.users[0]))
        self.assertEqual(len(jobs), 6)

        # Filter by type and status
        resp = self.request('/job/all', user=self.users[0], params={
            'types': json.dumps(['t']),
            'statuses': json.dumps([0])
        })
        self.assertStatusOk(resp)
        self.assertEqual(len(resp.json), 5)

        # Filter with conditions that match no jobs
        resp = self.request('/job/all', user=self.users[0], params={
            'types': json.dumps(['nonexisttype'])
        })
        self.assertStatusOk(resp)
        self.assertEqual(len(resp.json), 0)

        # User 1, who is not a site admin, should get HTTP 403 (Forbidden)
        resp = self.request('/job/all', user=self.users[1])
        self.assertStatus(resp, 403)

        # An unauthenticated user should get HTTP 401 (Unauthorized)
        resp = self.request('/job/all')
        self.assertStatus(resp, 401)

    def testFiltering(self):
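        # Fields not explicitly exposed by the model should be stripped from REST responses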
        job = self.jobModel.createJob(title='A job', type='t', user=self.users[1], public=True)

        job['_some_other_field'] = 'foo'
        job = self.jobModel.save(job)

        resp = self.request('/job/%s' % job['_id'])
        self.assertStatusOk(resp)
        self.assertTrue('created' in resp.json)
        self.assertTrue('_some_other_field' not in resp.json)
        self.assertTrue('kwargs' not in resp.json)
        self.assertTrue('args' not in resp.json)

        resp = self.request('/job/%s' % job['_id'], user=self.users[0])
        self.assertTrue('kwargs' in resp.json)
        self.assertTrue('args' in resp.json)

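        # exposeFields/hideFields control which fields appear in filtered output
        # at a given access level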
        self.jobModel.exposeFields(level=AccessType.READ, fields={'_some_other_field'})
        self.jobModel.hideFields(level=AccessType.READ, fields={'created'})

        resp = self.request('/job/%s' % job['_id'])
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['_some_other_field'], 'foo')
        self.assertTrue('created' not in resp.json)

    def testJobProgressAndNotifications(self):
        job = self.jobModel.createJob(title='a job', type='t', user=self.users[1], public=True)

        path = '/job/%s' % job['_id']
        resp = self.request(path)
        self.assertEqual(resp.json['progress'], None)
        self.assertEqual(resp.json['timestamps'], [])

        resp = self.request(path, method='PUT', user=self.users[1], params={
            'progressTotal': 100,
            'progressCurrent': 3,
            'progressMessage': 'Started',
            'notify': 'false',
            'status': JobStatus.QUEUED
        })
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['progress'], {
            'total': 100,
            'current': 3,
            'message': 'Started',
            'notificationId': None
        })

        # The status update should make it so we now have a timestamp
        self.assertEqual(len(resp.json['timestamps']), 1)
        self.assertEqual(resp.json['timestamps'][0]['status'], JobStatus.QUEUED)
        self.assertIn('time', resp.json['timestamps'][0])

        # If the status does not change on update, no timestamp should be added
        resp = self.request(path, method='PUT', user=self.users[1], params={
            'status': JobStatus.QUEUED
        })
        self.assertStatusOk(resp)
        self.assertEqual(len(resp.json['timestamps']), 1)
        self.assertEqual(resp.json['timestamps'][0]['status'], JobStatus.QUEUED)

        # We passed notify=false, so we should only have the job creation notification
        resp = self.request(path='/notification/stream', method='GET',
                            user=self.users[1], isJson=False,
                            params={'timeout': 0})
        messages = self.getSseMessages(resp)
        self.assertEqual(len(messages), 1)

        # Update progress with notify=true (the default)
        resp = self.request(path, method='PUT', user=self.users[1], params={
            'progressCurrent': 50,
            'progressMessage': 'Something bad happened',
            'status': JobStatus.ERROR
        })
        self.assertStatusOk(resp)
        self.assertNotEqual(resp.json['progress']['notificationId'], None)

        # We should now see three notifications: job created, progress, and job status
        resp = self.request(path='/notification/stream', method='GET',
                            user=self.users[1], isJson=False,
                            params={'timeout': 0})
        messages = self.getSseMessages(resp)
        job = self.jobModel.load(job['_id'], force=True)
        self.assertEqual(len(messages), 3)
        creationNotify = messages[0]
        progressNotify = messages[1]
        statusNotify = messages[2]

        self.assertEqual(creationNotify['type'], 'job_created')
        self.assertEqual(creationNotify['data']['_id'], str(job['_id']))
        self.assertEqual(statusNotify['type'], 'job_status')
        self.assertEqual(statusNotify['data']['_id'], str(job['_id']))
        self.assertEqual(int(statusNotify['data']['status']), JobStatus.ERROR)
        self.assertNotIn('kwargs', statusNotify['data'])
        self.assertNotIn('log', statusNotify['data'])

        self.assertEqual(progressNotify['type'], 'progress')
        self.assertEqual(progressNotify['data']['title'], job['title'])
        self.assertEqual(progressNotify['data']['current'], float(50))
        self.assertEqual(progressNotify['data']['state'], 'error')
        self.assertEqual(progressNotify['_id'], str(job['progress']['notificationId']))

    def testDotsInKwargs(self):
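        # Keys containing dots or starting with '$' cannot be stored as-is in MongoDB,
        # so the job model serializes kwargs and they should round-trip unchanged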
        kwargs = {
            '$key.with.dots': 'value',
            'foo': [{
                'moar.dots': True
            }]
        }
        job = self.jobModel.createJob(title='dots', type='x', user=self.users[0], kwargs=kwargs)

        # Make sure we can update a job and notification creation works
        self.jobModel.updateJob(job, status=JobStatus.QUEUED, notify=True)

        self.assertEqual(job['kwargs'], kwargs)

        resp = self.request('/job/%s' % job['_id'], user=self.users[0])
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['kwargs'], kwargs)

        job = self.jobModel.load(job['_id'], force=True)
        self.assertEqual(job['kwargs'], kwargs)
        job = self.jobModel.filter(job, self.users[0])
        self.assertEqual(job['kwargs'], kwargs)
        job = self.jobModel.filter(job, self.users[1])
        self.assertFalse('kwargs' in job)

    def testLocalJob(self):
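        # Local jobs run in-process when scheduled, invoking the handler function
        # in the given module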
        job = self.jobModel.createLocalJob(
            title='local', type='local', user=self.users[0], kwargs={
                'hello': 'world'
            }, module='plugin_tests.local_job_impl')

        self.jobModel.scheduleJob(job)

        job = self.jobModel.load(job['_id'], force=True, includeLog=True)
        self.assertEqual(job['log'], ['job ran!'])

        job = self.jobModel.createLocalJob(
            title='local', type='local', user=self.users[0], kwargs={
                'hello': 'world'
            }, module='plugin_tests.local_job_impl', function='fail')

        self.jobModel.scheduleJob(job)

        job = self.jobModel.load(job['_id'], force=True, includeLog=True)
        self.assertEqual(job['log'], ['job failed'])

    def testValidateCustomStatus(self):
        job = self.jobModel.createJob(title='test', type='x', user=self.users[0])

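        # validateStatus accepts 1234 as a custom status; validTransitions only
        # allows reaching it from INACTIVE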
        def validateStatus(event):
            if event.info == 1234:
                event.preventDefault().addResponse(True)

        def validTransitions(event):
            if event.info['status'] == 1234:
                event.preventDefault().addResponse([JobStatus.INACTIVE])

        with self.assertRaises(ValidationException):
            self.jobModel.updateJob(job, status=1234)  # Should fail

        with events.bound('jobs.status.validate', 'test', validateStatus), \
                events.bound('jobs.status.validTransitions', 'test', validTransitions):
            self.jobModel.updateJob(job, status=1234)  # Should work

            with self.assertRaises(ValidationException):
                self.jobModel.updateJob(job, status=4321)  # Should fail

    def testValidateCustomStrStatus(self):
        job = self.jobModel.createJob(title='test', type='x', user=self.users[0])

        def validateStatus(event):
            states = ['a', 'b', 'c']

            if event.info in states:
                event.preventDefault().addResponse(True)

        def validTransitions(event):
            if event.info['status'] == 'a':
                event.preventDefault().addResponse([JobStatus.INACTIVE])

        with self.assertRaises(ValidationException):
            self.jobModel.updateJob(job, status='a')

        with events.bound('jobs.status.validate', 'test', validateStatus), \
                events.bound('jobs.status.validTransitions', 'test', validTransitions):
            self.jobModel.updateJob(job, status='a')
            self.assertEqual(job['status'], 'a')

        with self.assertRaises(ValidationException), \
                events.bound('jobs.status.validate', 'test', validateStatus):
            self.jobModel.updateJob(job, status='foo')

    def testUpdateOtherFields(self):
        job = self.jobModel.createJob(title='test', type='x', user=self.users[0])
        job = self.jobModel.updateJob(job, otherFields={'other': 'fields'})
        self.assertEqual(job['other'], 'fields')

    def testCancelJob(self):
        job = self.jobModel.createJob(title='test', type='x', user=self.users[0])
        # Add to the log
        job = self.jobModel.updateJob(job, log='entry 1\n')
        # Reload without the log
        job = self.jobModel.load(id=job['_id'], force=True)
        self.assertEqual(len(job.get('log', [])), 0)
        # Cancel
        job = self.jobModel.cancelJob(job)
        self.assertEqual(job['status'], JobStatus.CANCELED)
        # Reloading should still have the log and be canceled
        job = self.jobModel.load(id=job['_id'], force=True, includeLog=True)
        self.assertEqual(job['status'], JobStatus.CANCELED)
        self.assertEqual(len(job.get('log', [])), 1)

    def testCancelJobEndpoint(self):
        job = self.jobModel.createJob(title='test', type='x', user=self.users[0])

        # Ensure the cancel endpoint requires write permission
        jobCancelUrl = '/job/%s/cancel' % job['_id']
        resp = self.request(jobCancelUrl, user=self.users[1], method='PUT')
        self.assertStatus(resp, 403)

        # Try again with the right user
        jobCancelUrl = '/job/%s/cancel' % job['_id']
        resp = self.request(jobCancelUrl, user=self.users[0], method='PUT')
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['status'], JobStatus.CANCELED)

    def testJobsTypesAndStatuses(self):
        self.jobModel.createJob(title='user 0 job', type='t1', user=self.users[0], public=False)
        self.jobModel.createJob(title='user 1 job', type='t2', user=self.users[1], public=False)
        self.jobModel.createJob(title='user 1 job', type='t3', user=self.users[1], public=True)
        self.jobModel.createJob(title='user 2 job', type='t4', user=self.users[2])
        self.jobModel.createJob(title='anonymous job', type='t5')
        self.jobModel.createJob(title='anonymous public job', type='t6', public=True)

        # User 1, who is not a site admin, should get HTTP 403 (Forbidden)
        resp = self.request('/job/typeandstatus/all', user=self.users[1])
        self.assertStatus(resp, 403)

        # Admin user gets all types and statuses
        resp = self.request('/job/typeandstatus/all', user=self.users[0])
        self.assertStatusOk(resp)
        self.assertEqual(len(resp.json['types']), 6)
        self.assertEqual(len(resp.json['statuses']), 1)

        # A standard user only gets the types and statuses of their own jobs
        resp = self.request('/job/typeandstatus', user=self.users[1])
        self.assertStatusOk(resp)
        self.assertEqual(len(resp.json['types']), 2)
        self.assertEqual(len(resp.json['statuses']), 1)

    def testDefaultParentId(self):
        job = self.jobModel.createJob(title='Job', type='Job', user=self.users[0])
        # If not specified, parentId should be None
        self.assertEqual(job['parentId'], None)

    def testIsParentIdCorrect(self):
        parentJob = self.jobModel.createJob(
            title='Parent Job', type='Parent Job', user=self.users[0])

        childJob = self.jobModel.createJob(
            title='Child Job', type='Child Job', user=self.users[0], parentJob=parentJob)
        # During initialization, the parent job should be set correctly
        self.assertEqual(childJob['parentId'], parentJob['_id'])

    def testSetParentCorrectly(self):
        parentJob = self.jobModel.createJob(
            title='Parent Job', type='Parent Job', user=self.users[0])
        childJob = self.jobModel.createJob(title='Child Job', type='Child Job', user=self.users[0])

        self.jobModel.setParentJob(childJob, parentJob)

        # After setParentJob is called, the parent job should be set correctly
        self.assertEqual(childJob['parentId'], parentJob['_id'])

    def testParentCannotBeEqualToChild(self):
        childJob = self.jobModel.createJob(title='Child Job', type='Child Job', user=self.users[0])

        # Cannot set a job as its own parent
        with self.assertRaises(ValidationException):
            self.jobModel.setParentJob(childJob, childJob)

    def testParentIdCannotBeOverridden(self):
        parentJob = self.jobModel.createJob(
            title='Parent Job', type='Parent Job', user=self.users[0])

        anotherParentJob = self.jobModel.createJob(
            title='Another Parent Job', type='Parent Job', user=self.users[0])

        childJob = self.jobModel.createJob(
            title='Child Job', type='Child Job', user=self.users[0], parentJob=parentJob)

        with self.assertRaises(ValidationException):
            # Once a parent job is set, it cannot be overridden
            self.jobModel.setParentJob(childJob, anotherParentJob)

    def testListChildJobs(self):
        parentJob = self.jobModel.createJob(
            title='Parent Job', type='Parent Job', user=self.users[0])

        childJob = self.jobModel.createJob(
            title='Child Job', type='Child Job', user=self.users[0], parentJob=parentJob)

        self.jobModel.createJob(
            title='Another Child Job', type='Child Job', user=self.users[0], parentJob=parentJob)

        # Should return a list with 2 jobs
        self.assertEqual(len(list(self.jobModel.listChildJobs(parentJob))), 2)
        # Should return an empty list
        self.assertEqual(len(list(self.jobModel.listChildJobs(childJob))), 0)

    def testListChildJobsRest(self):
        parentJob = self.jobModel.createJob(
            title='Parent Job', type='Parent Job', user=self.users[0])

        childJob = self.jobModel.createJob(
            title='Child Job', type='Child Job', user=self.users[0], parentJob=parentJob)

        self.jobModel.createJob(
            title='Another Child Job', type='Child Job', user=self.users[0], parentJob=parentJob)

        resp = self.request('/job', user=self.users[0],
                            params={'parentId': str(parentJob['_id'])})
        resp2 = self.request('/job', user=self.users[0],
                             params={'parentId': str(childJob['_id'])})

        self.assertStatusOk(resp)
        self.assertStatusOk(resp2)

        # Should return a list with 2 jobs
        self.assertEqual(len(resp.json), 2)
        # Should return an empty list
        self.assertEqual(len(resp2.json), 0)

    def testCreateJobRest(self):
        resp = self.request('/job', method='POST',
                            user=self.users[0],
                            params={'title': 'job', 'type': 'job'})
        # Without the required token scope, the request is rejected with 403
        self.assertStatus(resp, 403)

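        # A token carrying the job-creation scope is sufficient to create a job
        # without an authenticated user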
        token = Token().createToken(scope=REST_CREATE_JOB_TOKEN_SCOPE)

        resp2 = self.request(
            '/job', method='POST', token=token, params={'title': 'job', 'type': 'job'})
        # With the required token scope, the job is created (200)
        self.assertStatusOk(resp2)

    def testJobStateTransitions(self):
        job = self.jobModel.createJob(
            title='user 0 job', type='t1', user=self.users[0], public=False)

        # We can't move straight to SUCCESS
        with self.assertRaises(ValidationException):
            job = self.jobModel.updateJob(job, status=JobStatus.SUCCESS)

        self.jobModel.updateJob(job, status=JobStatus.QUEUED)
        self.jobModel.updateJob(job, status=JobStatus.RUNNING)
        self.jobModel.updateJob(job, status=JobStatus.ERROR)

        # We shouldn't be able to move backwards
        with self.assertRaises(ValidationException):
            self.jobModel.updateJob(job, status=JobStatus.QUEUED)
        with self.assertRaises(ValidationException):
            self.jobModel.updateJob(job, status=JobStatus.RUNNING)
        with self.assertRaises(ValidationException):
            self.jobModel.updateJob(job, status=JobStatus.INACTIVE)

    def testJobSaveEventModification(self):
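        # 'model.job.save' handlers receive kwargs as a serialized string, so
        # decode, modify, and re-encode them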
        def customSave(event):
            kwargs = json_util.loads(event.info['kwargs'])
            kwargs['key2'] = 'newvalue'
            event.info['kwargs'] = json_util.dumps(kwargs)

        job = self.jobModel.createJob(title='A job', type='t', user=self.users[1], public=True)

        job['kwargs'] = {'key1': 'value1', 'key2': 'value2'}
        with events.bound('model.job.save', 'test', customSave):
            job = self.jobModel.save(job)
            self.assertEqual(job['kwargs']['key2'], 'newvalue')