Example #1
0
def _addReturnParameterFileBinding(bspec, hargs, user, token, job):
    """Add a girder output binding for the simple-return-parameter file.

    The binding is optional: it is only created when the REST request
    supplied both the destination folder id and the output file name for
    the return parameter. The destination folder is loaded with WRITE
    access on behalf of *user* and stashed into ``hargs`` for later use.
    """
    paramName = _return_parameter_file_name
    folderKey = paramName + _girderOutputFolderSuffix
    nameKey = paramName + _girderOutputNameSuffix

    # Skip entirely unless the caller asked for the return parameter file.
    if folderKey not in hargs['params'] or nameKey not in hargs['params']:
        return

    folderModel = ModelImporter.model('folder')
    hargs[paramName] = folderModel.load(
        id=hargs['params'][folderKey], level=AccessType.WRITE, user=user)

    # Tag the upload with a reference so the job result can be identified.
    bspec[paramName] = wutils.girderOutputSpec(
        hargs[paramName],
        token,
        name=hargs['params'][nameKey],
        dataType='string',
        dataFormat='string',
        reference=json.dumps({
            'type': 'slicer_cli.parameteroutput',
            'jobId': str(job['_id'])
        }))
Example #2
0
    def testWorker(self):
        """Scheduling a worker-handled job should submit it to celery."""
        # Point the worker plugin at a dummy broker/backend via settings.
        resp = self.request('/system/setting', method='PUT', params={
            'list': json.dumps([
                {'key': worker.PluginSettings.BROKER,
                 'value': 'amqp://[email protected]'},
                {'key': worker.PluginSettings.BACKEND,
                 'value': 'amqp://[email protected]'},
            ])
        }, user=self.admin)
        self.assertStatusOk(resp)

        # Create a job that the worker plugin should handle.
        from girder.plugins.jobs.models.job import Job
        jobModel = Job()
        job = jobModel.createJob(
            title='title', type='foo', handler='worker_handler',
            user=self.admin, public=False, args=(), kwargs={})

        job['kwargs'] = {
            'jobInfo': utils.jobInfoSpec(job),
            'inputs': [
                utils.girderInputSpec(self.adminFolder, resourceType='folder')],
            'outputs': [
                utils.girderOutputSpec(self.adminFolder, token=self.adminToken)],
        }
        job = jobModel.save(job)
        self.assertEqual(job['status'], JobStatus.INACTIVE)

        # Schedule with celery mocked out; nothing is actually submitted.
        with mock.patch('celery.Celery') as celeryMock:
            celeryMock.return_value.send_task.return_value = FakeAsyncResult()

            jobModel.scheduleJob(job)

            # The Celery app must be constructed once, with the broker and
            # backend configured above.
            self.assertEqual(len(celeryMock.mock_calls), 2)
            self.assertEqual(celeryMock.mock_calls[0][1], ('girder_worker',))
            self.assertEqual(celeryMock.mock_calls[0][2], {
                'broker': 'amqp://[email protected]',
                'backend': 'amqp://[email protected]'
            })

            taskCalls = celeryMock.return_value.send_task.mock_calls
            self.assertEqual(len(taskCalls), 1)
            self.assertEqual(taskCalls[0][1], (
                'girder_worker.run', job['args'], job['kwargs']))

            self.assertTrue('headers' in taskCalls[0][2])
            self.assertTrue('jobInfoSpec' in taskCalls[0][2]['headers'])

            # The celery task id and queued status must be saved on the job.
            job = jobModel.load(job['_id'], force=True)
            self.assertEqual(job['celeryTaskId'], 'fake_id')
            self.assertEqual(job['status'], JobStatus.QUEUED)
Example #3
0
File: rest.py Project: aegor/girder
    def _transformOutputs(self, outputs, token, job, task, taskId):
        """
        Validates and sanitizes the output bindings. If they are Girder outputs, adds
        the necessary token info. If the token does not allow DATA_WRITE, or if the user
        does not have write access to the destination, raises an AccessException.
        """
        transformed = {}
        for outputId, binding in six.viewitems(outputs):
            # Only 'girder' outputs are supported; anything else is rejected.
            if binding['mode'] != 'girder':
                raise ValidationException(
                    'Invalid output mode: %s.' % binding['mode'])

            ensureTokenScopes(token, TokenScope.DATA_WRITE)
            parentType = binding.get('parent_type', 'folder')
            if not self._validateOutputParentType(
                    outputId, parentType, task['outputs']):
                raise ValidationException(
                    'Invalid output parent type: %s.' % parentType)

            # Requires WRITE access on the destination for the current user.
            parent = self.model(parentType).load(
                binding['parent_id'], level=AccessType.WRITE,
                user=self.getCurrentUser(), exc=True)

            transformed[outputId] = utils.girderOutputSpec(
                parent, parentType=parentType, token=token,
                name=binding.get('name'), dataFormat='none',
                reference=json.dumps({
                    'type': 'item_tasks.output',
                    'id': outputId,
                    'jobId': str(job['_id']),
                    'taskId': str(taskId)
                }))

        return transformed
Example #4
0
    def testWorkerCancel(self):
        """Cancelling a scheduled worker job should revoke the celery task."""
        jobModel = self.model('job', 'jobs')
        job = jobModel.createJob(
            title='title', type='foo', handler='worker_handler',
            user=self.admin, public=False, args=(), kwargs={})

        job['kwargs'] = {
            'jobInfo': utils.jobInfoSpec(job),
            'inputs': [
                utils.girderInputSpec(self.adminFolder, resourceType='folder')],
            'outputs': [
                utils.girderOutputSpec(self.adminFolder, token=self.adminToken)],
        }
        job = jobModel.save(job)
        self.assertEqual(job['status'], JobStatus.INACTIVE)

        # Schedule then immediately cancel, with celery mocked out.
        with mock.patch('celery.Celery') as celeryMock, \
                mock.patch('girder.plugins.worker.AsyncResult') as asyncResult:
            celeryMock.return_value.send_task.return_value = FakeAsyncResult()

            jobModel.scheduleJob(job)
            jobModel.cancelJob(job)

            # The task is looked up by its fake id and revoked.
            asyncResult.assert_called_with('fake_id', app=mock.ANY)
            asyncResult.return_value.revoke.assert_called_once()

            job = jobModel.load(job['_id'], force=True)
            self.assertEqual(job['status'], CustomJobStatus.CANCELING)
Example #5
0
    def _transformOutputs(self, outputs, token, job, task, taskId):
        """
        Validates and sanitizes the output bindings. If they are Girder outputs, adds
        the necessary token info. If the token does not allow DATA_WRITE, or if the user
        does not have write access to the destination, raises an AccessException.
        """
        specs = {}
        for key, out in six.viewitems(outputs):
            mode = out['mode']
            if mode == 'girder':
                ensureTokenScopes(token, TokenScope.DATA_WRITE)
                parentType = out.get('parent_type', 'folder')
                if not self._validateOutputParentType(
                        key, parentType, task['outputs']):
                    raise ValidationException(
                        'Invalid output parent type: %s.' % parentType)

                # Destination must be loadable with WRITE access.
                destination = self.model(parentType).load(
                    out['parent_id'], level=AccessType.WRITE,
                    user=self.getCurrentUser(), exc=True)

                # Reference ties the uploaded result back to this job output.
                reference = json.dumps({
                    'type': 'item_tasks.output',
                    'id': key,
                    'jobId': str(job['_id']),
                    'taskId': str(taskId)
                })
                specs[key] = utils.girderOutputSpec(
                    destination, parentType=parentType, token=token,
                    name=out.get('name'), dataFormat='none',
                    reference=reference)
            else:
                raise ValidationException('Invalid output mode: %s.' % mode)

        return specs
Example #6
0
    def testWorkerCancel(self):
        """Cancelling a worker job moves it to CANCELING and revokes the task."""
        from girder.plugins.jobs.models.job import Job
        jobModel = Job()
        job = jobModel.createJob(title='title',
                                 type='foo',
                                 handler='worker_handler',
                                 user=self.admin,
                                 public=False,
                                 args=(),
                                 kwargs={})

        job['kwargs'] = {
            'jobInfo': utils.jobInfoSpec(job),
            'inputs': [
                utils.girderInputSpec(self.adminFolder, resourceType='folder')],
            'outputs': [
                utils.girderOutputSpec(self.adminFolder, token=self.adminToken)],
        }
        job = jobModel.save(job)
        self.assertEqual(job['status'], JobStatus.INACTIVE)

        # Mock celery so scheduling/cancelling touch no real broker.
        with mock.patch('celery.Celery') as celeryMock, \
                mock.patch('girder.plugins.worker.AsyncResult') as asyncResult:
            celeryMock.return_value.send_task.return_value = FakeAsyncResult()

            jobModel.scheduleJob(job)
            jobModel.cancelJob(job)

            # Revocation must target the fake celery task id.
            asyncResult.assert_called_with('fake_id', app=mock.ANY)
            asyncResult.return_value.revoke.assert_called_once()

            job = jobModel.load(job['_id'], force=True)
            self.assertEqual(job['status'], CustomJobStatus.CANCELING)
Example #7
0
    def testWorkerStatusEndpoint(self):
        """The /worker/status endpoint should return the celery status keys."""
        # A worker-handled job must exist before querying status.
        from girder.plugins.jobs.models.job import Job
        job = Job().createJob(
            title='title', type='foo', handler='worker_handler',
            user=self.admin, public=False, args=(), kwargs={})

        job['kwargs'] = {
            'jobInfo': utils.jobInfoSpec(job),
            'inputs': [
                utils.girderInputSpec(self.adminFolder, resourceType='folder')],
            'outputs': [
                utils.girderOutputSpec(self.adminFolder, token=self.adminToken)],
        }
        job = Job().save(job)
        self.assertEqual(job['status'], JobStatus.INACTIVE)

        # Schedule with celery mocked out so nothing is actually submitted.
        with mock.patch('celery.Celery') as celeryMock:
            celeryMock.return_value.send_task.return_value = FakeAsyncResult()
            Job().scheduleJob(job)

        # Query the endpoint and verify the expected report keys exist.
        resp = self.request('/worker/status', method='GET', user=self.admin)
        self.assertStatusOk(resp)
        self.assertHasKeys(
            resp.json, ['report', 'stats', 'ping', 'active', 'reserved'])
def _createOutputParamBindingSpec(param, hargs, user, token):
    """Build the girder output binding spec for an output parameter.

    :param param: CLI parameter object (presumably a ctk-cli parameter;
        only ``name``, ``isExternalType()`` and ``reference`` are used here
        — TODO confirm against the caller).
    :param hargs: handler args; ``hargs['params']`` holds REST parameters
        and ``hargs[<name>]`` holds loaded input resources.
    :param user: user document, used to stamp the reference with a userId.
    :param token: token passed through to ``wutils.girderOutputSpec``.
    :returns: the binding spec dict.
    :raises Exception: if ``param.reference`` names an input that was not
        loaded into ``hargs``.
    """
    curBindingSpec = wutils.girderOutputSpec(
        hargs[param.name],
        token,
        name=hargs['params'][param.name + _girderOutputNameSuffix],
        dataType='string', dataFormat='string'
    )

    if param.isExternalType() and param.reference is not None:

        if param.reference not in hargs:
            # Fixed: the two adjacent string literals were missing a
            # separating space, producing "...attribute value<ref> for...".
            raise Exception(
                'Error: The specified reference attribute value '
                '%s for parameter %s is not a valid input' % (
                    param.reference, param.name)
            )

        curBindingSpec['reference'] = json.dumps(
            {
                'itemId': str(hargs[param.reference]['_id']),
                'userId': str(user['_id'])
            }
        )

    return curBindingSpec
def _createOutputParamBindingSpec(param, hargs, user, token):
    """Build the girder output binding spec for an output parameter.

    :param param: CLI parameter object exposing ``identifier()``,
        ``isExternalType()`` and ``reference`` (presumably ctk-cli —
        TODO confirm against the caller).
    :param hargs: handler args; ``hargs['params']`` holds REST parameters
        and ``hargs[<identifier>]`` holds loaded input resources.
    :param user: user document, used to stamp the reference with a userId.
    :param token: token passed through to ``wutils.girderOutputSpec``.
    :returns: the binding spec dict.
    :raises Exception: if ``param.reference`` names an input that was not
        loaded into ``hargs``.
    """
    curBindingSpec = wutils.girderOutputSpec(
        hargs[param.identifier()],
        token,
        name=hargs['params'][param.identifier() + _girderOutputNameSuffix],
        dataType='string',
        dataFormat='string')

    if param.isExternalType() and param.reference is not None:

        if param.reference not in hargs:
            # Fixed: the adjacent string literals were missing a separating
            # space, producing "...attribute value<ref> for parameter...".
            raise Exception('Error: The specified reference attribute value '
                            '%s for parameter %s is not a valid input' %
                            (param.reference, param.identifier()))

        curBindingSpec['reference'] = json.dumps({
            'itemId': str(hargs[param.reference]['_id']),
            'userId': str(user['_id']),
            'identifier': param.identifier()
        })

    return curBindingSpec
Example #10
0
    def testWorkerStatusEndpoint(self):
        """Worker status endpoint reports broker stats for admin users."""
        from girder.plugins.jobs.models.job import Job
        jobModel = Job()

        # Seed a job handled by the worker plugin.
        job = jobModel.createJob(
            title='title', type='foo', handler='worker_handler',
            user=self.admin, public=False, args=(), kwargs={})
        job['kwargs'] = {
            'jobInfo': utils.jobInfoSpec(job),
            'inputs': [
                utils.girderInputSpec(self.adminFolder, resourceType='folder')],
            'outputs': [
                utils.girderOutputSpec(self.adminFolder, token=self.adminToken)],
        }
        job = jobModel.save(job)
        self.assertEqual(job['status'], JobStatus.INACTIVE)

        # Schedule it against a mocked celery app.
        with mock.patch('celery.Celery') as celeryMock:
            celeryMock.return_value.send_task.return_value = FakeAsyncResult()
            jobModel.scheduleJob(job)

        # The endpoint should respond OK with the expected report keys.
        resp = self.request('/worker/status', method='GET', user=self.admin)
        self.assertStatusOk(resp)
        self.assertHasKeys(
            resp.json, ['report', 'stats', 'ping', 'active', 'reserved'])
Example #11
0
    def testWorker(self):
        """Scheduling a worker job configures celery and submits the task."""
        # Configure a dummy broker/backend through the settings endpoint.
        resp = self.request('/system/setting', method='PUT', params={
            'list': json.dumps([
                {'key': worker.PluginSettings.BROKER,
                 'value': 'amqp://[email protected]'},
                {'key': worker.PluginSettings.BACKEND,
                 'value': 'amqp://[email protected]'},
            ])
        }, user=self.admin)
        self.assertStatusOk(resp)

        # Create a worker-handled job.
        jobModel = self.model('job', 'jobs')
        job = jobModel.createJob(
            title='title', type='foo', handler='worker_handler',
            user=self.admin, public=False, args=(), kwargs={})
        job['kwargs'] = {
            'jobInfo': utils.jobInfoSpec(job),
            'inputs': [
                utils.girderInputSpec(self.adminFolder, resourceType='folder')],
            'outputs': [
                utils.girderOutputSpec(self.adminFolder, token=self.adminToken)],
        }
        job = jobModel.save(job)
        self.assertEqual(job['status'], JobStatus.INACTIVE)

        with mock.patch('celery.Celery') as celeryMock:
            app = celeryMock.return_value
            app.send_task.return_value = FakeAsyncResult()

            jobModel.scheduleJob(job)

            # Exactly one app construction with the configured settings.
            self.assertEqual(len(celeryMock.mock_calls), 2)
            self.assertEqual(celeryMock.mock_calls[0][1], ('girder_worker',))
            self.assertEqual(celeryMock.mock_calls[0][2], {
                'broker': 'amqp://[email protected]',
                'backend': 'amqp://[email protected]'
            })

            calls = app.send_task.mock_calls
            self.assertEqual(len(calls), 1)
            self.assertEqual(calls[0][1], (
                'girder_worker.run', job['args'], job['kwargs']))

            self.assertTrue('headers' in calls[0][2])
            self.assertTrue('jobInfoSpec' in calls[0][2]['headers'])

            # Task id and queued status must be persisted on the job.
            job = jobModel.load(job['_id'], force=True)
            self.assertEqual(job['celeryTaskId'], 'fake_id')
            self.assertEqual(job['status'], JobStatus.QUEUED)
Example #12
0
    def createBlurImage(self, item, params):
        """Create and schedule an ITK blur job for *item*.

        The output image is written back into the item's parent folder.
        Returns the saved, scheduled job document.
        """
        user = self.getCurrentUser()
        token = self.getCurrentToken()
        jobModel = self.model('job', 'jobs')
        folder = self.model('folder').load(item['folderId'], force=True)

        jobTitle = 'ITK blur: ' + item['name']
        job = jobModel.createJob(
            title=jobTitle, type='itk_blur', handler='worker_handler',
            user=user)
        jobToken = jobModel.createJobToken(job)

        # Read the worker script shipped alongside this plugin.
        scriptPath = os.path.join(
            os.path.dirname(os.path.dirname(__file__)),
            'scripts', 'cad_script.py')
        with open(scriptPath, 'r') as fh:
            script = fh.read()

        job['kwargs'] = {
            'task': {
                'name': jobTitle,
                'mode': 'python',
                'script': script,
                'inputs': [{
                    'id': 'inputFileName',
                    'type': 'string',
                    'format': 'text',
                    'target': 'filepath'
                }],
                'outputs': [{
                    'id': 'outputFileName',
                    'format': 'text',
                    'type': 'string',
                    'target': 'filepath'
                }]
            },
            'inputs': {
                'inputFileName': utils.girderInputSpec(
                    item, resourceType='item', token=token)
            },
            'outputs': {
                'outputFileName': utils.girderOutputSpec(
                    folder, token=token, parentType='folder')
            },
            # Job status updates are PUT back to the job endpoint.
            'jobInfo': {
                'method': 'PUT',
                'url': '/'.join((getApiUrl(), 'job', str(job['_id']))),
                'headers': {'Girder-Token': jobToken['_id']},
                'logPrint': True
            },
            'validate': False,
            'auto_convert': False
        }
        job = jobModel.save(job)
        jobModel.scheduleJob(job)
        return job
Example #13
0
    def testWorkerDifferentTask(self):
        """Jobs may override the celery task name and queue via otherFields."""
        # An API URL not starting with http(s):// must be rejected.
        resp = self.request('/system/setting', method='PUT', params={
            'key': worker.PluginSettings.API_URL,
            'value': 'bad value'
        }, user=self.admin)
        self.assertStatus(resp, 400)
        self.assertEqual(
            resp.json['message'],
            'API URL must start with http:// or https://.')

        # Now configure a valid broker/backend.
        resp = self.request('/system/setting', method='PUT', params={
            'list': json.dumps([
                {'key': worker.PluginSettings.BROKER,
                 'value': 'amqp://[email protected]'},
                {'key': worker.PluginSettings.BACKEND,
                 'value': 'amqp://[email protected]'},
            ])
        }, user=self.admin)
        self.assertStatusOk(resp)

        # Create a job that overrides the task name and queue.
        from girder.plugins.jobs.models.job import Job
        jobModel = Job()
        job = jobModel.createJob(
            title='title', type='foo', handler='worker_handler',
            user=self.admin, public=False, args=(), kwargs={},
            otherFields={
                'celeryTaskName': 'some_other.task',
                'celeryQueue': 'my_other_q'
            })
        job['kwargs'] = {
            'jobInfo': utils.jobInfoSpec(job),
            'inputs': [
                utils.girderInputSpec(self.adminFolder, resourceType='folder')],
            'outputs': [
                utils.girderOutputSpec(self.adminFolder, token=self.adminToken)],
        }
        job = jobModel.save(job)

        with mock.patch('celery.Celery') as celeryMock:
            celeryMock.return_value.send_task.return_value = FakeAsyncResult()

            jobModel.scheduleJob(job)

            # The overridden task name and queue must be used for submission.
            calls = celeryMock.return_value.send_task.mock_calls
            self.assertEqual(len(calls), 1)
            self.assertEqual(calls[0][1], (
                'some_other.task', job['args'], job['kwargs']))
            self.assertIn('queue', calls[0][2])
            self.assertEqual(calls[0][2]['queue'], 'my_other_q')
Example #14
0
    def testWorkerDifferentTask(self):
        """otherFields can redirect a job to a custom celery task and queue."""
        # Invalid API URL values are rejected with a 400.
        settingResp = self.request('/system/setting', method='PUT', params={
            'key': worker.PluginSettings.API_URL,
            'value': 'bad value'
        }, user=self.admin)
        self.assertStatus(settingResp, 400)
        self.assertEqual(
            settingResp.json['message'],
            'API URL must start with http:// or https://.')

        settingResp = self.request('/system/setting', method='PUT', params={
            'list': json.dumps([
                {'key': worker.PluginSettings.BROKER,
                 'value': 'amqp://[email protected]'},
                {'key': worker.PluginSettings.BACKEND,
                 'value': 'amqp://[email protected]'},
            ])
        }, user=self.admin)
        self.assertStatusOk(settingResp)

        # Job with overridden celery task name and queue.
        jobModel = self.model('job', 'jobs')
        job = jobModel.createJob(
            title='title', type='foo', handler='worker_handler',
            user=self.admin, public=False, args=(), kwargs={},
            otherFields={
                'celeryTaskName': 'some_other.task',
                'celeryQueue': 'my_other_q'
            })
        job['kwargs'] = {
            'jobInfo': utils.jobInfoSpec(job),
            'inputs': [
                utils.girderInputSpec(self.adminFolder, resourceType='folder')],
            'outputs': [
                utils.girderOutputSpec(self.adminFolder, token=self.adminToken)],
        }
        job = jobModel.save(job)

        with mock.patch('celery.Celery') as celeryMock:
            celeryMock.return_value.send_task.return_value = FakeAsyncResult()

            jobModel.scheduleJob(job)

            sendCalls = celeryMock.return_value.send_task.mock_calls
            self.assertEqual(len(sendCalls), 1)
            # Submission must use the custom task name and queue.
            self.assertEqual(sendCalls[0][1], (
                'some_other.task', job['args'], job['kwargs']))
            self.assertIn('queue', sendCalls[0][2])
            self.assertEqual(sendCalls[0][2]['queue'], 'my_other_q')
Example #15
0
    def testWorker(self):
        """End-to-end check that scheduling a worker job submits it to celery."""
        # Configure the broker/backend through the settings endpoint.
        resp = self.request('/system/setting', method='PUT', params={
            'list': json.dumps([
                {'key': worker.PluginSettings.BROKER,
                 'value': 'amqp://[email protected]'},
                {'key': worker.PluginSettings.BACKEND,
                 'value': 'amqp://[email protected]'},
            ])
        }, user=self.admin)
        self.assertStatusOk(resp)

        # Create a job to be handled by the worker plugin.
        jobModel = self.model('job', 'jobs')
        job = jobModel.createJob(
            title='title', type='foo', handler='worker_handler',
            user=self.admin, public=False, args=(), kwargs={})

        job['kwargs'] = {
            'jobInfo': utils.jobInfoSpec(job),
            'inputs': [
                utils.girderInputSpec(self.adminFolder, resourceType='folder')],
            'outputs': [
                utils.girderOutputSpec(self.adminFolder, token=self.adminToken)],
        }
        job = jobModel.save(job)
        self.assertEqual(job['status'], JobStatus.INACTIVE)

        # Schedule the job against a mocked-out celery.
        with mock.patch('celery.Celery') as celeryMock:
            celeryMock.return_value.send_task.return_value = FakeAsyncResult()

            jobModel.scheduleJob(job)

            # The celery app is built once with the configured settings.
            self.assertEqual(len(celeryMock.mock_calls), 2)
            self.assertEqual(celeryMock.mock_calls[0][1], ('girder_worker',))
            self.assertEqual(celeryMock.mock_calls[0][2], {
                'broker': 'amqp://[email protected]',
                'backend': 'amqp://[email protected]'
            })

            sendTaskCalls = celeryMock.return_value.send_task.mock_calls
            self.assertEqual(len(sendTaskCalls), 1)
            self.assertEqual(sendTaskCalls[0][1], (
                'girder_worker.run', job['args'], job['kwargs']))

            # The celery task id must be recorded and the job queued.
            job = jobModel.load(job['_id'], force=True)
            self.assertEqual(job['celeryTaskId'], 'fake_id')
            self.assertEqual(job['status'], JobStatus.QUEUED)
Example #16
0
    def testOutputs(self, folder, item, params):
        """Schedule a docker test job that writes out.txt back into *folder*."""
        token = self.getCurrentToken()
        jobModel = self.model('job', 'jobs')

        job = jobModel.createJob(
            title='docker output test: %s' % folder['name'],
            type='docker_test', handler='worker_handler',
            user=self.getCurrentUser())
        jobToken = jobModel.createJobToken(job)

        # Task definition plus input/output bindings for the worker.
        job['kwargs'] = {
            'task': {
                'mode': 'docker',
                'docker_image': 'testoutputs:latest',
                'pull_image': False,
                'inputs': [{
                    'id': 'input',
                    'target': 'filepath',
                    'format': 'text',
                    'type': 'string'
                }],
                'outputs': [{
                    'id': 'out.txt',
                    'target': 'filepath',
                    'format': 'text',
                    'type': 'string'
                }]
            },
            'inputs': {
                'input': utils.girderInputSpec(
                    item, resourceType='item', token=token)
            },
            'outputs': {
                'out.txt': utils.girderOutputSpec(folder, token)
            },
            'jobInfo': utils.jobInfoSpec(job, jobToken)
        }
        job = jobModel.save(job)
        jobModel.scheduleJob(job)
        return job
Example #17
0
    def testOutputs(self, folder, item, params):
        """Run the docker output test image, binding out.txt into *folder*."""
        token = self.getCurrentToken()
        jobModel = self.model('job', 'jobs')

        job = jobModel.createJob(
            title='docker output test: %s' % folder['name'],
            type='docker_test',
            handler='worker_handler',
            user=self.getCurrentUser())
        jobToken = jobModel.createJobToken(job)

        payload = {
            'task': {
                'mode': 'docker',
                'docker_image': 'testoutputs:latest',
                'pull_image': False,
                'inputs': [{
                    'id': 'input',
                    'target': 'filepath',
                    'format': 'text',
                    'type': 'string'
                }],
                'outputs': [{
                    'id': 'out.txt',
                    'target': 'filepath',
                    'format': 'text',
                    'type': 'string'
                }]
            },
            # Bind the Girder item as the container input file.
            'inputs': {
                'input': utils.girderInputSpec(
                    item, resourceType='item', token=token)
            },
            # Upload the produced out.txt back into the folder.
            'outputs': {
                'out.txt': utils.girderOutputSpec(folder, token)
            },
            'jobInfo': utils.jobInfoSpec(job, jobToken)
        }
        job['kwargs'] = payload
        job = jobModel.save(job)
        jobModel.scheduleJob(job)
        return job
Example #18
0
def _addReturnParameterFileBinding(bspec, hargs, user, token):
    """Add an output binding for the optional return parameter file.

    Does nothing unless the REST request supplied both the destination
    folder id and the output file name for the return parameter.
    """
    paramName = _return_parameter_file_name
    folderKey = paramName + _girderOutputFolderSuffix
    nameKey = paramName + _girderOutputNameSuffix

    params = hargs['params']
    # The binding is opt-in: require both pieces of the request.
    if folderKey not in params or nameKey not in params:
        return

    folderModel = ModelImporter.model('folder')
    hargs[paramName] = folderModel.load(
        id=params[folderKey], level=AccessType.WRITE, user=user)

    bspec[paramName] = wutils.girderOutputSpec(
        hargs[paramName],
        token,
        name=params[nameKey],
        dataType='string',
        dataFormat='string')
def _addReturnParameterFileBinding(bspec, hargs, user, token):
    """Attach an output binding for the return parameter file.

    The binding is created only when the REST request supplied both the
    destination folder id and the output file name; when either parameter
    is absent, the function returns without modifying ``bspec``.
    """
    key = _return_parameter_file_name
    folderParam = key + _girderOutputFolderSuffix
    nameParam = key + _girderOutputNameSuffix

    # The return parameter file is optional; bail out if it was not
    # requested in the REST request.
    if folderParam not in hargs['params'] or nameParam not in hargs['params']:
        return

    folderModel = ModelImporter.model('folder')
    hargs[key] = folderModel.load(
        id=hargs['params'][folderParam], level=AccessType.WRITE, user=user)

    bspec[key] = wutils.girderOutputSpec(
        hargs[key],
        token,
        name=hargs['params'][nameParam],
        dataType='string', dataFormat='string'
    )
Example #20
0
    def _generateSuperpixels(self, image):
        """Schedule a worker job that computes superpixels for an image.

        Runs the ``_generate_superpixels.py`` script as a girder_worker
        'python' task, reading the image's original file and uploading the
        encoded superpixels PNG back onto the image item.

        :param image: the image document; must contain 'creatorId', 'name'
            and '_id'.
        :returns: the scheduled job document.
        """
        SUPERPIXEL_VERSION = 3.0

        user = User().load(image['creatorId'], force=True, exc=True)
        # Use admin user, to ensure that worker always has access
        token = Token().createToken(
            user=getAdminUser(),
            days=1,
            scope=[TokenScope.DATA_READ, TokenScope.DATA_WRITE])

        # Load the worker script from the file that ships alongside this
        # module.
        with open(os.path.join(
                os.path.dirname(__file__),
                '_generate_superpixels.py'), 'r') as scriptStream:
            script = scriptStream.read()

        title = 'superpixels v%s generation: %s' % (
            SUPERPIXEL_VERSION, image['name'])
        job = Job().createJob(
            title=title,
            type='isic_archive_superpixels',
            handler='worker_handler',
            kwargs={
                'jobInfo': None,  # will be filled after job is created
                'task': {
                    'mode': 'python',
                    'script': script,
                    'name': title,
                    'inputs': [{
                        'id': 'originalFile',
                        'type': 'string',
                        'format': 'text',
                        'target': 'filepath'
                    }, {
                        'id': 'segmentation_helpersPath',
                        'type': 'string',
                        'format': 'text',
                    }],
                    'outputs': [{
                        'id': 'superpixelsEncodedBytes',
                        'type': 'string',
                        'format': 'text',
                        'target': 'memory'
                    }]
                },
                'inputs': {
                    'originalFile': workerUtils.girderInputSpec(
                        resource=self.originalFile(image),
                        resourceType='file',
                        token=token),
                    # Pass the local path of the segmentation_helpers
                    # package inline so the worker script can import it.
                    'segmentation_helpersPath': {
                        'mode': 'inline',
                        'format': 'text',
                        'data': segmentation_helpers.__path__[0]
                    }
                },
                'outputs': {
                    # The result is uploaded as a new file on the image item.
                    'superpixelsEncodedBytes': workerUtils.girderOutputSpec(
                        parent=image,
                        token=token,
                        parentType='item',
                        name='%s_superpixels_v%s.png' %
                             (image['name'], SUPERPIXEL_VERSION),
                        reference=''
                    )
                },
                'auto_convert': False,
                'validate': False
            },
            user=user,
            public=False,
            save=True  # must save to create an _id for workerUtils.jobInfoSpec
        )
        # jobInfo needs the job's _id, so it can only be built after the
        # initial save above.
        job['kwargs']['jobInfo'] = workerUtils.jobInfoSpec(
            job,
            Job().createJobToken(job),
            logPrint=True
        )
        # Metadata used by clients to identify and track this job.
        job['meta'] = {
            'creator': 'isic_archive',
            'task': 'generateSuperpixels',
            'imageId': image['_id'],
            'imageName': image['name'],
            'superpixelsVersion': SUPERPIXEL_VERSION
        }
        job = Job().save(job)

        Job().scheduleJob(job)
        return job
Example #21
0
    def runTaskSpec(self, key, params, **kwargs):
        """Create a job from the given task spec.

        Validates the ``INPUT(...)``/``OUTPUT(...)`` bindings supplied in
        ``params`` against the task spec, builds girder_worker input/output
        specs from them, and schedules the job.

        :param key: name of the task spec to run.
        :param params: request parameters holding the bindings and an
            optional 'title'.
        :returns: dict with the filtered job, the scratch folder, and the
            token id.
        :raises RestException: if the task does not exist, a required input
            is unbound, or a payload does not match the binding grammar.
        """
        task_spec = get_task_spec(key)
        if task_spec is None:
            raise RestException('No task named %s.' % key)

        # validate input bindings
        for input_spec in task_spec['inputs']:
            input_name = input_spec['name']
            input_key = 'INPUT({})'.format(input_name)

            try:
                payload = params[input_key]
            except KeyError:
                # Check to see if the input spec provides a default.
                # If not, raise an exception.
                if 'default' not in input_spec:
                    raise RestException(
                        'No binding provided for input "{}".'.format(
                            input_name))
                # A default exists and no binding was provided, so there is
                # no payload to validate. (Fixed: previously this fell
                # through and referenced an unbound ``payload``.)
                continue

            if RE_ARG_SPEC.match(payload) is None:
                raise RestException(
                    'invalid payload for input "{}": "{}"'.format(
                        input_name, payload))

        # validate output bindings
        for output_spec in task_spec['outputs']:
            output_name = output_spec['name']
            output_key = 'OUTPUT({})'.format(output_name)

            try:
                payload = params[output_key]
            except KeyError:
                # Outputs are optional; skip unbound ones.
                continue

            if RE_ARG_SPEC.match(payload) is None:
                raise RestException(
                    'invalid payload for output "{}": "{}"'.format(
                        output_name, payload))

        #
        # validation complete
        #

        job_title = params.get('title', 'sumo {}'.format(task_spec['name']))

        user, token = self.getCurrentUser(True)

        job = self.model('job', 'jobs').createJob(title=job_title,
                                                  type='sumo',
                                                  user=user,
                                                  handler='worker_handler')

        scratchDirectory = self._ensureJobDirectory(user, job['_id'])

        jobToken = self.model('job', 'jobs').createJobToken(job)

        job['kwargs']['jobInfo'] = workerUtils.jobInfoSpec(job=job,
                                                           token=jobToken,
                                                           logPrint=True)

        if not token:
            # It seems like we should be able to use a token without USER_AUTH
            # in its scope, but I'm not sure how.
            token = self.model('token').createToken(user,
                                                    days=1,
                                                    scope=TokenScope.USER_AUTH)

        jobpayload = (self.model('jobpayload', 'osumo').createJobpayload(
            job['_id'], user['_id']))

        # Build girder_worker input specs from the validated bindings.
        job_inputs = {}
        for input_spec in task_spec['inputs']:
            input_name = input_spec['name']
            input_key = 'INPUT({})'.format(input_name)

            payload = params.get(input_key)
            if payload is None:
                # Unbound input: the task spec's default applies.
                continue

            job_input = {}

            # Payload grammar: colon-separated positional args, optionally
            # followed by extra JSON object members.
            m = RE_ARG_SPEC.match(payload)
            pos_args, extra_args = m.group(1), m.group(3)
            pos_args = pos_args.split(':')
            if extra_args:
                extra_args = json.loads('{{{}}}'.format(extra_args))
            else:
                extra_args = {}

            input_type = pos_args[0]

            if input_type in ('FILE', 'ITEM'):
                resource_id = pos_args[1]
                resource_type = input_type.lower()
                data_type = extra_args.get('type',
                                           input_spec.get('type', 'string'))
                data_format = extra_args.get('format',
                                             input_spec.get('format', 'text'))

                job_input.update(
                    workerUtils.girderInputSpec(self._getResource(
                        resource_type, resource_id, user),
                                                resourceType=resource_type,
                                                token=token,
                                                dataType=data_type,
                                                dataFormat=data_format))

            elif input_type == 'HTTP':
                # TODO(opadron): maybe we'll want to implement this, someday?
                raise NotImplementedError('HTTP input not implemented')

            elif input_type == 'INTEGER':
                value = pos_args[1]
                job_input['type'] = 'number'
                job_input['format'] = 'number'
                job_input['mode'] = 'inline'
                job_input['data'] = int(value)

            elif input_type == 'FLOAT':
                value = pos_args[1]
                job_input['type'] = 'number'
                job_input['format'] = 'number'
                job_input['mode'] = 'inline'
                job_input['data'] = float(value)

            elif input_type == 'STRING':
                # The value itself may contain colons; rejoin the remainder.
                value = ':'.join(pos_args[1:])
                job_input['type'] = 'string'
                job_input['format'] = 'text'
                job_input['mode'] = 'inline'
                job_input['data'] = value

            elif input_type == 'BOOLEAN':
                value = pos_args[1]
                job_input['type'] = 'boolean'
                job_input['format'] = 'json'
                job_input['mode'] = 'inline'
                job_input['data'] = 'true' if int(value) else 'false'

            else:
                raise NotImplementedError(
                    'Input type "{}" not supported'.format(input_type))

            # Extra JSON members override/extend the generated spec.
            job_input.update(extra_args)
            job_inputs[input_name] = job_input

        job_outputs = {}
        for output_spec in task_spec['outputs']:
            output_name = output_spec['name']
            output_key = 'OUTPUT({})'.format(output_name)

            payload = params.get(output_key)
            if payload is None:
                continue

            job_output = {}

            m = RE_ARG_SPEC.match(payload)
            pos_args, extra_args = m.group(1), m.group(3)
            pos_args = pos_args.split(':')
            if extra_args:
                extra_args = json.loads('{{{}}}'.format(extra_args))
            else:
                extra_args = {}

            output_type = pos_args[0]

            if output_type in ('FILE', 'ITEM'):
                # NOTE(review): parent_id/parent_type are parsed but never
                # used; every file output lands in the job's scratch folder.
                # Confirm whether binding to an arbitrary parent was intended.
                parent_id, resource_name = (pos_args + [None])[1:3]
                parent_type = ('folder' if output_type == 'FILE' else 'file')
                data_type = extra_args.get('type',
                                           output_spec.get('type', 'string'))
                data_format = extra_args.get('format',
                                             output_spec.get('format', 'text'))

                job_output.update(
                    workerUtils.girderOutputSpec(scratchDirectory,
                                                 parentType='folder',
                                                 token=token,
                                                 name=resource_name,
                                                 dataType=data_type,
                                                 dataFormat=data_format))

            elif output_type in ('INTEGER', 'FLOAT', 'STRING', 'BOOLEAN',
                                 'JSON'):
                # Scalar outputs are written directly into the jobpayload
                # document in the application's Mongo database.
                parse_result = urllib.parse.urlparse(
                    getConfig()['database']['uri'])

                job_output['mode'] = 'sumo'
                job_output['db'] = parse_result.path[1:]
                job_output['collection'] = 'jobpayload'
                job_output['host'] = parse_result.netloc
                job_output['id'] = jobpayload['_id']
                job_output['key'] = output_name

                if output_type == 'INTEGER':
                    job_output['type'] = 'number'
                    job_output['format'] = 'number'
                    job_output['converter'] = 'int'

                elif output_type == 'FLOAT':
                    job_output['type'] = 'number'
                    job_output['format'] = 'number'
                    job_output['converter'] = 'float'

                elif output_type == 'STRING':
                    job_output['type'] = 'string'
                    job_output['format'] = 'text'

                elif output_type == 'BOOLEAN':
                    job_output['type'] = 'boolean'
                    job_output['format'] = 'boolean'
                    job_output['converter'] = 'bool'

                elif output_type == 'JSON':
                    job_output['type'] = 'string'
                    job_output['format'] = 'text'
                    job_output['converter'] = 'json'

            else:
                raise NotImplementedError(
                    'Output type "{}" not supported'.format(output_type))

            job_output.update(extra_args)
            job_outputs[output_name] = job_output

        job['kwargs'].update(task=task_spec,
                             inputs=job_inputs,
                             outputs=job_outputs)

        job = self.model('job', 'jobs').save(job)
        self.model('jobuser', 'osumo').createJobuser(job['_id'], user['_id'])
        self.model('job', 'jobs').scheduleJob(job)

        return {
            'job': self.model('job', 'jobs').filter(job, user),
            'folder': self.model('folder').filter(scratchDirectory, user),
            'token': str(token['_id'])
        }
Example #22
0
    def processTask(self, params, **kwargs):
        """Create and schedule a worker job for a named task spec.

        Validates the request parameters against the task's declared inputs
        and parameters, builds girder_worker input/output specs, and
        schedules the job.

        :param params: request parameters; must include 'taskkey' and a
            value for every input that has no default.
        :returns: dict with the filtered job document and the token id.
        :raises RestException: if no task with the given key exists.
        """
        self.requireParams(('taskkey', ), params)
        if getattr(job_specs, params['taskkey'], None) is None:
            raise RestException('No task named %s.' % params['taskkey'])
        # Deep-copy so per-request mutations don't leak into the shared spec.
        task = copy.deepcopy(getattr(job_specs, params['taskkey']))
        data = {}
        # Index inputs and parameters by their key for uniform handling.
        data.update({input['key']: input for input in task['inputs']})
        data.update({input['key']: input for input in task['parameters']})
        # Any input that doesn't have a default is required.
        self.requireParams((key for key in data
                            if 'default' not in data[key]), params)
        user, token = self._getTaskUser(task)
        self._adjustDataTypes(data, params, user)

        job = self.model('job', 'jobs').createJob(
            title='sumo %s' % task.get('name', 'task'),
            type='sumo',
            user=user,
            handler='worker_handler')

        jobToken = self.model('job', 'jobs').createJobToken(job)

        job['kwargs']['jobInfo'] = workerUtils.jobInfoSpec(
            job=job,
            token=jobToken,
            logPrint=True)

        if not token:
            # It seems like we should be able to use a token without USER_AUTH
            # in its scope, but I'm not sure how.
            token = self.model('token').createToken(
                user, days=1, scope=TokenScope.USER_AUTH)

        inputs = {}
        for key in data:
            if data[key].get('input') is False:
                # Entries marked input=False are parameters consumed
                # server-side, not worker inputs.
                continue
            spec = data.get(key, {}).copy()
            if data[key].get('type') in ('file', 'item', 'folder'):
                # Girder resources are passed by reference with a token.
                spec = workerUtils.girderInputSpec(
                    spec['data'], resourceType=data[key]['type'],
                    token=token,
                    dataType=data[key].get('dataType', 'string'),
                    dataFormat=data[key].get('dataFormat', 'text'),
                    )
            inputs[key] = spec

        # TODO(opadron): make a special-purpose token just for this job in case
        # the user logs out before it finishes.
        outputs = {}
        for output in task.get('outputs', {}):
            key = output['key']
            spec = {'token': token}
            # Forward only the keys that girderOutputSpec accepts.
            # NOTE(review): inspect.getargspec was removed in Python 3.11;
            # inspect.getfullargspec is the modern equivalent -- confirm the
            # supported Python versions before changing.
            for subkey in output:
                if (subkey in inspect.getargspec(
                        workerUtils.girderOutputSpec).args):
                    value = output[subkey]
                    # 'parameter:<key>' values are resolved from the
                    # request-derived data bindings.
                    if value.startswith('parameter:'):
                        valuekey = value.split(':', 1)[1]
                        value = data.get(valuekey, {}).get('data')
                    spec[subkey] = value
            outputs[key] = workerUtils.girderOutputSpec(**spec)

        job['kwargs'].update(task=task['task'], inputs=inputs, outputs=outputs)

        job = self.model('job', 'jobs').save(job)
        self.model('job', 'jobs').scheduleJob(job)
        self.jobInfo[str(job['_id'])] = {'user': user}

        return {
            'job': self.model('job', 'jobs').filter(job, user),
            'token': str(token['_id'])
        }
    def _createLargeImageJob(self, item, fileObj, user, token):
        """Schedule a worker job that converts a file to a tiled TIFF.

        Runs the ``create_tiff.py`` script as a girder_worker 'python' task
        and uploads the converted image back onto the item.

        :param item: the item that will receive the converted file.
        :param fileObj: the source file document to convert.
        :param user: the user that owns the job.
        :param token: a Girder token the worker uses for download/upload.
        :returns: the scheduled job document.
        """
        path = os.path.join(os.path.dirname(__file__), '..', 'create_tiff.py')
        with open(path, 'r') as f:
            script = f.read()

        title = 'TIFF conversion: %s' % fileObj['name']
        Job = self.model('job', 'jobs')
        job = Job.createJob(
            title=title, type='large_image_tiff', handler='worker_handler',
            user=user)
        jobToken = Job.createJobToken(job)

        # Avoid clobbering the source file: if the natural output name would
        # equal the input name, append a timestamp.
        outputName = os.path.splitext(fileObj['name'])[0] + '.tiff'
        if outputName == fileObj['name']:
            outputName = (os.path.splitext(fileObj['name'])[0] + '.' +
                          time.strftime('%Y%m%d-%H%M%S') + '.tiff')

        # Task spec: the script reads 'in_path' from disk and writes the
        # converted image to 'out_path'.
        task = {
            'mode': 'python',
            'script': script,
            'name': title,
            'inputs': [{
                'id': 'in_path',
                'target': 'filepath',
                'type': 'string',
                'format': 'text'
            }, {
                'id': 'out_filename',
                'type': 'string',
                'format': 'text'
            }, {
                'id': 'tile_size',
                'type': 'number',
                'format': 'number'
            }, {
                'id': 'quality',
                'type': 'number',
                'format': 'number'
            }],
            'outputs': [{
                'id': 'out_path',
                'target': 'filepath',
                'type': 'string',
                'format': 'text'
            }]
        }

        inputs = {
            # Source file is downloaded by the worker; conversion settings
            # are passed inline.
            'in_path': workerUtils.girderInputSpec(
                fileObj, resourceType='file', token=token),
            'quality': {
                'mode': 'inline',
                'type': 'number',
                'format': 'number',
                'data': 90
            },
            'tile_size': {
                'mode': 'inline',
                'type': 'number',
                'format': 'number',
                'data': 256
            },
            'out_filename': {
                'mode': 'inline',
                'type': 'string',
                'format': 'text',
                'data': outputName
            }
        }

        outputs = {
            # The converted TIFF is uploaded as a new file on the item.
            'out_path': workerUtils.girderOutputSpec(
                parent=item, token=token, parentType='item')
        }

        # TODO: Give the job an owner
        job['kwargs'] = {
            'task': task,
            'inputs': inputs,
            'outputs': outputs,
            'jobInfo': workerUtils.jobInfoSpec(job, jobToken),
            'auto_convert': False,
            'validate': False
        }
        # Metadata used by clients to identify and track this job.
        job['meta'] = {
            'creator': 'large_image',
            'itemId': str(item['_id']),
            'task': 'createImageItem',
        }

        job = Job.save(job)
        Job.scheduleJob(job)

        return job
Example #24
0
    def processVideo(self, id, params):
        """Create (or re-create) the ffmpeg processing job for a video item.

        If a processing job already exists for the item and ``force`` is not
        set, no new job is created and the existing job is returned.  With
        ``force``, files created by the previous job are removed first and a
        new job is scheduled.

        :param id: the item id.
        :param params: request parameters; must contain 'force' and may
            contain 'fileId' to select the source file explicitly.
        :returns: a dict describing the (possibly pre-existing) job.
        :raises RestException: if a supplied fileId does not belong to the
            item, or the item has no files at all.
        """
        force = params['force']
        user, userToken = getCurrentUser(True)

        itemModel = self.model('item')
        fileModel = self.model('file')
        tokenModel = self.model('token')
        jobModel = self.model('job', 'jobs')

        item = itemModel.load(id, user=user, level=AccessType.READ)

        itemVideoData = item.get('video', {})
        jobId = itemVideoData.get('jobId')

        itemAlreadyProcessed = False
        job = None
        if jobId is not None:
            job = jobModel.load(jobId, level=AccessType.READ, user=user)

        if not force:
            if job is not None:
                status = job['status']
                # A job that is queued, running, or succeeded counts as
                # "already processed"; only errored/canceled jobs are redone.
                if status not in (None, JobStatus.ERROR, JobStatus.CANCELED):
                    itemAlreadyProcessed = True

            if itemAlreadyProcessed:
                result = {
                    'video': {
                        'jobCreated': False,
                        'message': 'Processing job already created.'
                    }
                }

                result.update(job)
                return result

        # if user provided fileId, use that one
        fileId = params.get('fileId')
        if fileId is not None:
            # ensure the provided fileId is valid
            inputFile = fileModel.findOne({
                'itemId': ObjectId(id),
                '_id': ObjectId(fileId)
            })

            if inputFile is None:
                raise RestException(
                    'Item with id=%s has no such file with id=%s' %
                    (id, fileId))

        else:
            # User did not provide a fileId.
            #
            # If we're *re*running a processing job (force=True), look
            # for the fileId used by the old job.
            if force and job:
                fileId = job.get('meta', {}).get('video', {}).get('fileId')
                if fileId:
                    # ensure the provided fileId is valid, but in this case,
                    # don't raise an exception if it is not -- just discard the
                    # fileId and move on
                    inputFile = fileModel.findOne({
                        'itemId': ObjectId(id),
                        '_id': ObjectId(fileId)
                    })

                    if inputFile is None:
                        fileId = None

        # if we *still* don't have a fileId, just grab the first one found
        # under the given item.
        if fileId is None:
            inputFile = fileModel.findOne({'itemId': ObjectId(id)})

            # if there *are* no files, bail
            if inputFile is None:
                # Fixed: previously referenced an undefined name ``itemId``,
                # which raised NameError instead of the intended RestException.
                raise RestException('item %s has no files' % id)

            fileId = inputFile['_id']

        # if we are *re*running a processing job (force=True), remove all files
        # from this item that were created by the last processing job...
        #
        # ...unless (for some reason) the user is running the job against that
        # particular file (this is almost certainly user error, but for now,
        # we'll just keep the file around).
        if force:
            fileIdList = itemVideoData.get('createdFiles', [])
            for f in fileIdList:
                if f == fileId:
                    continue

                theFile = fileModel.load(f, level=AccessType.WRITE, user=user)

                if theFile:
                    fileModel.remove(theFile)
            itemVideoData['createdFiles'] = []

        # begin construction of the actual job
        if not userToken:
            # It seems like we should be able to use a token without USER_AUTH
            # in its scope, but I'm not sure how.
            userToken = tokenModel.createToken(
                user, days=1, scope=TokenScope.USER_AUTH)

        jobTitle = 'Video Processing'
        job = jobModel.createJob(
            title=jobTitle, type='video', user=user, handler='worker_handler')
        jobToken = jobModel.createJobToken(job)

        job['kwargs'] = job.get('kwargs', {})
        job['kwargs']['task'] = {
            'mode': 'docker',

            # TODO(opadron): replace this once we have a maintained
            #                image on dockerhub
            'docker_image': 'ffmpeg_local',
            'progress_pipe': True,
            # NOTE(review): this 'a': 'b' entry looks like leftover debug
            # data; confirm whether the worker ignores it before removing.
            'a': 'b',
            'pull_image': False,
            'inputs': [{
                'id': 'input',
                'type': 'string',
                'format': 'text',
                'target': 'filepath'
            }],
            'outputs': [
                {
                    'id': '_stdout',
                    'type': 'string',
                    'format': 'text',
                    'target': 'memory'
                },
                {
                    'id': '_stderr',
                    'type': 'string',
                    'format': 'text',
                    'target': 'memory'
                },
                {
                    'id': 'source',
                    # Fixed: key was misspelled as 'type:'.
                    'type': 'string',
                    'format': 'text',
                    'target': 'filepath',
                    'path': '/mnt/girder_worker/data/source.webm'
                },
                {
                    'id': 'meta',
                    # Fixed: key was misspelled as 'type:'.
                    'type': 'string',
                    'format': 'text',
                    'target': 'filepath',
                    'path': '/mnt/girder_worker/data/meta.json'
                },
            ]
        }

        # Preserve the source extension so ffmpeg can sniff the container.
        _, itemExt = os.path.splitext(item['name'])

        job['kwargs']['inputs'] = {
            'input': workerUtils.girderInputSpec(
                inputFile,
                resourceType='file',
                token=userToken,
                name='input' + itemExt,
                dataType='string',
                dataFormat='text')
        }

        # All outputs are uploaded back onto the item; the 'reference' value
        # lets the upload event handlers recognize plugin-created files.
        job['kwargs']['outputs'] = {
            '_stdout': workerUtils.girderOutputSpec(
                item,
                parentType='item',
                token=userToken,
                name='processing_stdout.txt',
                dataType='string',
                dataFormat='text',
                reference='videoPlugin'),
            '_stderr': workerUtils.girderOutputSpec(
                item,
                parentType='item',
                token=userToken,
                name='processing_stderr.txt',
                dataType='string',
                dataFormat='text',
                reference='videoPlugin'),
            'source': workerUtils.girderOutputSpec(
                item,
                parentType='item',
                token=userToken,
                name='source.webm',
                dataType='string',
                dataFormat='text',
                reference='videoPlugin'),
            'meta': workerUtils.girderOutputSpec(
                item,
                parentType='item',
                token=userToken,
                name='meta.json',
                dataType='string',
                dataFormat='text',
                reference='videoPluginMeta'),
        }

        job['kwargs']['jobInfo'] = workerUtils.jobInfoSpec(
            job=job, token=jobToken, logPrint=True)

        job['meta'] = job.get('meta', {})
        job['meta']['video_plugin'] = {'itemId': id, 'fileId': fileId}

        job = jobModel.save(job)
        jobModel.scheduleJob(job)

        # Remember the job on the item so subsequent calls can find it.
        itemVideoData['jobId'] = str(job['_id'])
        item['video'] = itemVideoData
        itemModel.save(item)

        result = {
            'video': {
                'jobCreated': True,
                'message': 'Processing job created.'
            }
        }

        result.update(job)
        return result
Example #25
0
    def _createLargeImageJob(self, item, fileObj, user, token):
        """Schedule a worker job that converts an item's image to a tiled TIFF.

        Runs the ``create_tiff.py`` script as a girder_worker 'python' task;
        the item is downloaded as the input and the converted TIFF is
        uploaded back onto it.

        :param item: the item that holds the source and receives the output.
        :param fileObj: the source file document (used only for naming).
        :param user: the user that owns the job.
        :param token: a Girder token the worker uses for download/upload.
        :returns: the scheduled job document.
        """
        path = os.path.join(os.path.dirname(__file__), '..', 'create_tiff.py')
        with open(path, 'r') as f:
            script = f.read()

        title = 'TIFF conversion: %s' % fileObj['name']
        Job = self.model('job', 'jobs')
        job = Job.createJob(title=title,
                            type='large_image_tiff',
                            handler='worker_handler',
                            user=user)
        jobToken = Job.createJobToken(job)

        # Task spec: the script reads 'in_path' from disk and writes the
        # converted image to 'out_path'.
        task = {
            'mode':
            'python',
            'script':
            script,
            'name':
            title,
            'inputs': [{
                'id': 'in_path',
                'target': 'filepath',
                'type': 'string',
                'format': 'text'
            }, {
                'id': 'out_filename',
                'type': 'string',
                'format': 'text'
            }, {
                'id': 'tile_size',
                'type': 'number',
                'format': 'number'
            }, {
                'id': 'quality',
                'type': 'number',
                'format': 'number'
            }],
            'outputs': [{
                'id': 'out_path',
                'target': 'filepath',
                'type': 'string',
                'format': 'text'
            }]
        }

        inputs = {
            # The whole item is downloaded as the input; conversion settings
            # are passed inline.
            'in_path':
            workerUtils.girderInputSpec(item, resourceType='item',
                                        token=token),
            'quality': {
                'mode': 'inline',
                'type': 'number',
                'format': 'number',
                'data': 90
            },
            'tile_size': {
                'mode': 'inline',
                'type': 'number',
                'format': 'number',
                'data': 256
            },
            # NOTE(review): if the source file is already named '*.tiff',
            # this output name collides with it -- confirm whether that
            # overwrite is acceptable here.
            'out_filename': {
                'mode': 'inline',
                'type': 'string',
                'format': 'text',
                'data': os.path.splitext(fileObj['name'])[0] + '.tiff'
            }
        }

        outputs = {
            # The converted TIFF is uploaded as a new file on the item.
            'out_path':
            workerUtils.girderOutputSpec(parent=item,
                                         token=token,
                                         parentType='item')
        }

        # TODO: Give the job an owner
        job['kwargs'] = {
            'task': task,
            'inputs': inputs,
            'outputs': outputs,
            'jobInfo': workerUtils.jobInfoSpec(job, jobToken),
            'auto_convert': False,
            'validate': False
        }

        job = Job.save(job)
        Job.scheduleJob(job)

        return job
Example #26
0
    def _generateSuperpixels(self, image):
        """Create and schedule a worker job that computes superpixels for *image*.

        Builds a Girder Worker python-mode task that runs the bundled
        ``_generate_superpixels.py`` script against the image's original file,
        wires up the input/output bindings, then saves and schedules the job.

        :param image: image item document; must contain ``creatorId``,
            ``name``, and ``_id``.
        :return: the saved and scheduled job document.
        """
        jobModel = self.model('job', 'jobs')
        tokenModel = self.model('token')
        userModel = self.model('user', 'isic_archive')

        SUPERPIXEL_VERSION = 3.0

        # The job is attributed to the image's creator...
        creator = userModel.load(image['creatorId'], force=True, exc=True)
        # ...but runs with an admin-scoped token, to ensure that the worker
        # always has access to the data it needs.
        workerToken = tokenModel.createToken(
            user=getAdminUser(),
            days=1,
            scope=[TokenScope.DATA_READ, TokenScope.DATA_WRITE])

        # Load the worker script shipped alongside this module.
        scriptPath = os.path.join(
            os.path.dirname(__file__), '_generate_superpixels.py')
        with open(scriptPath, 'r') as scriptStream:
            script = scriptStream.read()

        title = 'superpixels v%s generation: %s' % (
            SUPERPIXEL_VERSION, image['name'])

        # Task declaration: one file input, one inline path input, and a
        # single in-memory output holding the encoded superpixel PNG bytes.
        taskSpec = {
            'mode': 'python',
            'script': script,
            'name': title,
            'inputs': [{
                'id': 'originalFile',
                'type': 'string',
                'format': 'text',
                'target': 'filepath'
            }, {
                'id': 'segmentation_helpersPath',
                'type': 'string',
                'format': 'text',
            }],
            'outputs': [{
                'id': 'superpixelsEncodedBytes',
                'type': 'string',
                'format': 'text',
                'target': 'memory'
            }]
        }
        inputSpec = {
            'originalFile': workerUtils.girderInputSpec(
                resource=self.originalFile(image),
                resourceType='file',
                token=workerToken),
            'segmentation_helpersPath': {
                'mode': 'inline',
                'format': 'text',
                'data': segmentation_helpers.__path__[0]
            }
        }
        outputSpec = {
            'superpixelsEncodedBytes': workerUtils.girderOutputSpec(
                parent=image,
                token=workerToken,
                parentType='item',
                name='%s_superpixels_v%s.png' %
                     (image['name'], SUPERPIXEL_VERSION),
                reference=''
            )
        }

        job = jobModel.createJob(
            title=title,
            type='isic_archive_superpixels',
            handler='worker_handler',
            kwargs={
                'jobInfo': None,  # filled in below, once the job has an _id
                'task': taskSpec,
                'inputs': inputSpec,
                'outputs': outputSpec,
                'auto_convert': False,
                'validate': False
            },
            user=creator,
            public=False,
            save=True  # must save to create an _id for workerUtils.jobInfoSpec
        )
        # Now that the job exists (and has an _id), attach its status-report
        # binding and bookkeeping metadata, then persist and schedule it.
        job['kwargs']['jobInfo'] = workerUtils.jobInfoSpec(
            job,
            jobModel.createJobToken(job),
            logPrint=True
        )
        job['meta'] = {
            'creator': 'isic_archive',
            'task': 'generateSuperpixels',
            'imageId': image['_id'],
            'imageName': image['name'],
            'superpixelsVersion': SUPERPIXEL_VERSION
        }
        job = jobModel.save(job)

        jobModel.scheduleJob(job)
        return job