Exemplo n.º 1
0
    def test_traditional_task_cancel_in_queue(self, params):
        """Cancel a traditional-style worker task while it is still queued.

        Every worker slot is first occupied by a blocker task so the job
        under test cannot start before the cancel is issued.
        """
        # Occupy all worker slots with short-sleeping cancelable tasks.
        blocking = [
            cancelable.delay(sleep_interval=0.1)
            for _ in range(multiprocessing.cpu_count())
        ]

        jobModel = self.model('job', 'jobs')
        job = jobModel.createJob(
            title='test_traditional_task_cancel', type='worker',
            handler='worker_handler', user=self.getCurrentUser(),
            public=False, args=(self.girder_worker_run_cancelable, ),
            kwargs={'inputs': {}, 'outputs': {}})

        job['kwargs']['jobInfo'] = utils.jobInfoSpec(job)

        jobModel.save(job)
        jobModel.scheduleJob(job)
        jobModel.cancelJob(job)

        # Release the blockers now that the cancel has been issued.
        for task in blocking:
            task.revoke()

        return job
Exemplo n.º 2
0
    def testWorkerCancel(self):
        """Canceling a scheduled worker job must revoke its celery task."""
        jobModel = self.model('job', 'jobs')
        job = jobModel.createJob(
            title='title', type='foo', handler='worker_handler',
            user=self.admin, public=False, args=(), kwargs={})

        job['kwargs'] = {
            'jobInfo': utils.jobInfoSpec(job),
            'inputs': [
                utils.girderInputSpec(self.adminFolder, resourceType='folder')
            ],
            'outputs': [
                utils.girderOutputSpec(self.adminFolder, token=self.adminToken)
            ]
        }
        job = jobModel.save(job)
        self.assertEqual(job['status'], JobStatus.INACTIVE)

        # Schedule then cancel while celery is mocked out.
        with mock.patch('celery.Celery') as celeryMock, \
                mock.patch('girder.plugins.worker.AsyncResult') as asyncResult:
            celeryMock.return_value.send_task.return_value = FakeAsyncResult()

            jobModel.scheduleJob(job)
            jobModel.cancelJob(job)

            # Revocation must target the task id captured at schedule time.
            asyncResult.assert_called_with('fake_id', app=mock.ANY)
            asyncResult.return_value.revoke.assert_called_once()
            job = jobModel.load(job['_id'], force=True)
            self.assertEqual(job['status'], CustomJobStatus.CANCELING)
Exemplo n.º 3
0
    def testWorker(self):
        """Verify that scheduling a worker-handled job dispatches it to celery.

        Checks that the celery app is constructed with the configured broker
        and backend, that ``send_task`` receives the job's args/kwargs plus a
        ``jobInfoSpec`` header, and that the returned task id and QUEUED
        status are persisted on the job.
        """
        # Test the settings
        resp = self.request('/system/setting', method='PUT', params={
            'list': json.dumps([{
                'key': worker.PluginSettings.BROKER,
                'value': 'amqp://[email protected]'
            }, {
                'key': worker.PluginSettings.BACKEND,
                'value': 'amqp://[email protected]'
            }])
        }, user=self.admin)
        self.assertStatusOk(resp)

        # Create a job to be handled by the worker plugin
        from girder.plugins.jobs.models.job import Job
        jobModel = Job()
        job = jobModel.createJob(
            title='title', type='foo', handler='worker_handler',
            user=self.admin, public=False, args=(), kwargs={})

        job['kwargs'] = {
            'jobInfo': utils.jobInfoSpec(job),
            'inputs': [
                utils.girderInputSpec(self.adminFolder, resourceType='folder')
            ],
            'outputs': [
                utils.girderOutputSpec(self.adminFolder, token=self.adminToken)
            ]
        }
        job = jobModel.save(job)
        self.assertEqual(job['status'], JobStatus.INACTIVE)

        # Schedule the job, make sure it is sent to celery
        with mock.patch('celery.Celery') as celeryMock:
            instance = celeryMock.return_value
            instance.send_task.return_value = FakeAsyncResult()

            jobModel.scheduleJob(job)

            # Make sure we sent the job to celery.
            # mock_calls[0] is the Celery(...) constructor call; index [1] is
            # its positional args and [2] its keyword args.
            self.assertEqual(len(celeryMock.mock_calls), 2)
            self.assertEqual(celeryMock.mock_calls[0][1], ('girder_worker',))
            self.assertEqual(celeryMock.mock_calls[0][2], {
                'broker': 'amqp://[email protected]',
                'backend': 'amqp://[email protected]'
            })

            sendTaskCalls = celeryMock.return_value.send_task.mock_calls

            self.assertEqual(len(sendTaskCalls), 1)
            self.assertEqual(sendTaskCalls[0][1], (
                'girder_worker.run', job['args'], job['kwargs']))

            # send_task must carry a jobInfoSpec entry in its headers kwarg.
            self.assertTrue('headers' in sendTaskCalls[0][2])
            self.assertTrue('jobInfoSpec' in sendTaskCalls[0][2]['headers'])

            # Make sure we got and saved the celery task id
            job = jobModel.load(job['_id'], force=True)
            self.assertEqual(job['celeryTaskId'], 'fake_id')
            self.assertEqual(job['status'], JobStatus.QUEUED)
Exemplo n.º 4
0
    def testWorkerCancel(self):
        """Verify that canceling a worker job revokes the underlying task."""
        from girder.plugins.jobs.models.job import Job
        jobModel = Job()
        job = jobModel.createJob(
            title='title', type='foo', handler='worker_handler',
            user=self.admin, public=False, args=(), kwargs={})

        job['kwargs'] = {
            'jobInfo': utils.jobInfoSpec(job),
            'inputs': [
                utils.girderInputSpec(self.adminFolder, resourceType='folder')
            ],
            'outputs': [
                utils.girderOutputSpec(self.adminFolder, token=self.adminToken)
            ]
        }
        job = jobModel.save(job)
        self.assertEqual(job['status'], JobStatus.INACTIVE)

        # Schedule the job with celery mocked, then cancel it.
        with mock.patch('celery.Celery') as celeryMock, \
                mock.patch('girder.plugins.worker.AsyncResult') as asyncResult:
            celery_app = celeryMock.return_value
            celery_app.send_task.return_value = FakeAsyncResult()

            jobModel.scheduleJob(job)
            jobModel.cancelJob(job)

            # AsyncResult must be built from the fake task id, then revoked.
            asyncResult.assert_called_with('fake_id', app=mock.ANY)
            asyncResult.return_value.revoke.assert_called_once()
            job = jobModel.load(job['_id'], force=True)
            self.assertEqual(job['status'], CustomJobStatus.CANCELING)
Exemplo n.º 5
0
    def testWorkerStatusEndpoint(self):
        """The worker status endpoint should report queue statistics."""
        # Build a job that the worker plugin will handle.
        from girder.plugins.jobs.models.job import Job
        job = Job().createJob(
            title='title', type='foo', handler='worker_handler',
            user=self.admin, public=False, args=(), kwargs={})

        job['kwargs'] = {
            'jobInfo': utils.jobInfoSpec(job),
            'inputs': [
                utils.girderInputSpec(self.adminFolder, resourceType='folder')
            ],
            'outputs': [
                utils.girderOutputSpec(self.adminFolder, token=self.adminToken)
            ]
        }
        job = Job().save(job)
        self.assertEqual(job['status'], JobStatus.INACTIVE)

        # Schedule against a mocked celery application.
        with mock.patch('celery.Celery') as celeryMock:
            celeryMock.return_value.send_task.return_value = FakeAsyncResult()
            Job().scheduleJob(job)

        # The status endpoint must respond with the expected report keys.
        resp = self.request('/worker/status', method='GET', user=self.admin)
        self.assertStatusOk(resp)
        self.assertHasKeys(
            resp.json, ['report', 'stats', 'ping', 'active', 'reserved'])
Exemplo n.º 6
0
    def testFetchParent(self, file, params):
        """Schedule a worker job whose input is bound with fetchParent=True."""
        token = self.getCurrentToken()

        jobModel = self.model('job', 'jobs')

        job = jobModel.createJob(
            title='Parent fetch test', type='parent_fetch_test',
            handler='worker_handler', user=self.getCurrentUser())
        jobToken = jobModel.createJobToken(job)

        # Single string input bound to a file path; fetchParent presumably
        # makes the worker download the file's parent resource as well.
        job['kwargs'] = {
            'task': {
                'mode': 'python',
                'script': 'print(fp)\n',
                'inputs': [{
                    'id': 'fp',
                    'target': 'filepath',
                    'format': 'text',
                    'type': 'string'
                }],
                'outputs': []
            },
            'inputs': {
                'fp': utils.girderInputSpec(file, token=token, fetchParent=True)
            },
            'outputs': {},
            'validate': False,
            'auto_convert': False,
            'cleanup': False,
            'jobInfo': utils.jobInfoSpec(job, jobToken)
        }
        job = jobModel.save(job)
        jobModel.scheduleJob(job)
        return job
Exemplo n.º 7
0
    def testStream(self, item, params):
        """Schedule a docker job with streaming input and output pipes."""
        token = self.getCurrentToken()

        jobModel = self.model('job', 'jobs')

        job = jobModel.createJob(
            title='docker stream test', type='docker_test',
            handler='worker_handler', user=self.getCurrentUser())
        jobToken = jobModel.createJobToken(job)
        apiUrl = getApiUrl()

        job['kwargs'] = {
            'task': {
                'mode': 'docker',
                'docker_image': 'testoutputs:latest',
                'pull_image': False,
                'inputs': [{
                    'id': 'input_pipe',
                    'target': 'filepath',
                    'stream': True
                }],
                'outputs': [{
                    'id': 'output_pipe',
                    'target': 'filepath',
                    'stream': True
                }]
            },
            # Stream the item download into the container...
            'inputs': {
                'input_pipe': {
                    'mode': 'http',
                    'method': 'GET',
                    'url': '%s/item/%s/download' % (apiUrl, str(item['_id'])),
                    'headers': {
                        'Girder-Token': str(token['_id'])
                    }
                }
            },
            # ...and POST the container output back to the test callback.
            'outputs': {
                'output_pipe': {
                    'mode': 'http',
                    'method': 'POST',
                    'url': apiUrl + '/docker_test/stream_callback',
                    'headers': {
                        'Girder-Token': str(token['_id'])
                    }
                }
            },
            'validate': False,
            'auto_convert': False,
            'cleanup': False,
            'jobInfo': utils.jobInfoSpec(job, jobToken)
        }
        job = jobModel.save(job)
        jobModel.scheduleJob(job)
        return job
Exemplo n.º 8
0
    def testWorkerStatusEndpoint(self):
        """Worker status endpoint returns a report for the admin user."""
        from girder.plugins.jobs.models.job import Job

        # Create a job to be handled by the worker plugin.
        job = Job().createJob(
            title='title', type='foo', handler='worker_handler',
            user=self.admin, public=False, args=(), kwargs={})

        job['kwargs'] = {
            'jobInfo': utils.jobInfoSpec(job),
            'inputs': [
                utils.girderInputSpec(self.adminFolder, resourceType='folder')],
            'outputs': [
                utils.girderOutputSpec(self.adminFolder, token=self.adminToken)],
        }
        job = Job().save(job)
        self.assertEqual(job['status'], JobStatus.INACTIVE)

        # Schedule the job while celery is mocked out.
        with mock.patch('celery.Celery') as celeryMock:
            celeryMock.return_value.send_task.return_value = FakeAsyncResult()
            Job().scheduleJob(job)

        # Hit the status endpoint and confirm the report structure.
        resp = self.request('/worker/status', method='GET', user=self.admin)
        self.assertStatusOk(resp)
        self.assertHasKeys(resp.json, ['report', 'stats', 'ping', 'active', 'reserved'])
Exemplo n.º 9
0
    def testWorker(self):
        """Verify that scheduling a worker-handled job dispatches it to celery.

        Checks the celery app construction arguments, the ``send_task``
        payload (job args/kwargs plus a ``jobInfoSpec`` header), and that the
        fake task id and QUEUED status are persisted on the job.
        """
        # Test the settings
        resp = self.request('/system/setting', method='PUT', params={
            'list': json.dumps([{
                'key': worker.PluginSettings.BROKER,
                'value': 'amqp://[email protected]'
            }, {
                'key': worker.PluginSettings.BACKEND,
                'value': 'amqp://[email protected]'
            }])
        }, user=self.admin)
        self.assertStatusOk(resp)

        # Create a job to be handled by the worker plugin
        jobModel = self.model('job', 'jobs')
        job = jobModel.createJob(
            title='title', type='foo', handler='worker_handler',
            user=self.admin, public=False, args=(), kwargs={})

        job['kwargs'] = {
            'jobInfo': utils.jobInfoSpec(job),
            'inputs': [
                utils.girderInputSpec(self.adminFolder, resourceType='folder')
            ],
            'outputs': [
                utils.girderOutputSpec(self.adminFolder, token=self.adminToken)
            ]
        }
        job = jobModel.save(job)
        self.assertEqual(job['status'], JobStatus.INACTIVE)

        # Schedule the job, make sure it is sent to celery
        with mock.patch('celery.Celery') as celeryMock:
            instance = celeryMock.return_value
            instance.send_task.return_value = FakeAsyncResult()

            jobModel.scheduleJob(job)

            # Make sure we sent the job to celery.
            # mock_calls[0] is the Celery(...) constructor call; index [1] is
            # its positional args and [2] its keyword args.
            self.assertEqual(len(celeryMock.mock_calls), 2)
            self.assertEqual(celeryMock.mock_calls[0][1], ('girder_worker',))
            self.assertEqual(celeryMock.mock_calls[0][2], {
                'broker': 'amqp://[email protected]',
                'backend': 'amqp://[email protected]'
            })

            sendTaskCalls = celeryMock.return_value.send_task.mock_calls

            self.assertEqual(len(sendTaskCalls), 1)
            self.assertEqual(sendTaskCalls[0][1], (
                'girder_worker.run', job['args'], job['kwargs']))

            # send_task must carry a jobInfoSpec entry in its headers kwarg.
            self.assertTrue('headers' in sendTaskCalls[0][2])
            self.assertTrue('jobInfoSpec' in sendTaskCalls[0][2]['headers'])

            # Make sure we got and saved the celery task id
            job = jobModel.load(job['_id'], force=True)
            self.assertEqual(job['celeryTaskId'], 'fake_id')
            self.assertEqual(job['status'], JobStatus.QUEUED)
Exemplo n.º 10
0
def runSlicerCliTasksDescriptionForFolder(self, folder, image, args, pullImage,
                                          params):
    """Schedule a job that reads Slicer CLI task specs from a docker image."""
    jobModel = Job()
    # Scoped token lets the worker POST the XML back for this folder only.
    token = Token().createToken(
        days=3,
        scope='item_task.set_task_spec.%s' % folder['_id'],
        user=self.getCurrentUser())
    job = jobModel.createJob(
        title='Read docker task specs: %s' % image,
        type='folder.item_task_slicer_cli_description',
        handler='worker_handler',
        user=self.getCurrentUser())

    # Strip a trailing '--xml' so it is not doubled when re-added below.
    if args[-1:] == ['--xml']:
        args = args[:-1]

    job.update({
        'itemTaskId': folder['_id'],
        'kwargs': {
            'task': {
                'mode': 'docker',
                'docker_image': image,
                'container_args': args + ['--xml'],
                'pull_image': pullImage,
                'outputs': [{
                    'id': '_stdout',
                    'format': 'text'
                }],
            },
            # The container's stdout (the CLI XML) is POSTed back to girder.
            'outputs': {
                '_stdout': {
                    'mode': 'http',
                    'method': 'POST',
                    'format': 'text',
                    'url': '/'.join((utils.getWorkerApiUrl(), 'folder',
                                     str(folder['_id']),
                                     'item_task_slicer_cli_xml')),
                    'headers': {'Girder-Token': token['_id']},
                    'params': {
                        'image': image,
                        'args': json.dumps(args),
                        'pullImage': pullImage
                    }
                }
            },
            'jobInfo': utils.jobInfoSpec(job),
            'validate': False,
            'auto_convert': False
        }
    })

    job = jobModel.save(job)
    jobModel.scheduleJob(job)
    return job
Exemplo n.º 11
0
def create_task_job(job_defaults,
                    sender=None,
                    body=None,
                    exchange=None,
                    routing_key=None,
                    headers=None,
                    properties=None,
                    declare=None,
                    retry_policy=None,
                    **kwargs):
    """Create a girder job record for an outgoing celery task.

    Job attributes are taken from ``girder_job_*`` message headers when
    present, falling back to ``job_defaults``. The created job's info spec is
    written back into ``headers['jobInfoSpec']`` so the worker can report
    status. ``body`` is expected to be the celery message body, i.e.
    ``(args, kwargs, ...)`` — TODO confirm against the caller.

    :param job_defaults: fallback values for the ``girder_job_*`` settings.
    :param headers: celery message headers; mutated in place (keys popped,
        ``jobInfoSpec`` added).
    :returns: the created job document.
    """
    from girder.utility.model_importer import ModelImporter
    from girder.api.rest import getCurrentUser
    try:
        # girder v2 worker plugin
        from girder.plugins.worker import utils
    except ImportError:
        # girder v3 worker plugin
        from girder_worker.girder_plugin import utils

    job_model = ModelImporter.model('job', 'jobs')

    user = headers.pop('girder_user', getCurrentUser())

    # Sanitize any Transform objects
    task_args = tuple(_walk_obj(body[0], _maybe_model_repr))
    task_kwargs = _walk_obj(body[1], _maybe_model_repr)

    def _setting(key, fallback=''):
        # Headers win over job defaults; both are optional.
        return headers.pop(key, job_defaults.get(key, fallback))

    # BUG FIX: the fallback for other fields was previously '', which made
    # dict(celeryTaskId=..., **'') raise TypeError whenever neither the
    # header nor the default was present; an empty mapping is intended.
    other_fields = _setting('girder_job_other_fields', {})

    job = job_model.createJob(
        title=_setting('girder_job_title'),
        type=_setting('girder_job_type'),
        handler=_setting('girder_job_handler'),
        # NOTE(review): '' as the public fallback looks odd (a bool is
        # expected) but is preserved from the original behavior.
        public=_setting('girder_job_public'),
        user=user,
        args=task_args,
        kwargs=task_kwargs,
        otherFields=dict(celeryTaskId=headers['id'], **other_fields))

    headers['jobInfoSpec'] = utils.jobInfoSpec(job)
    return job
Exemplo n.º 12
0
    def executeTask(self, item, jobTitle, includeJobInfo, inputs, outputs):
        """Create and schedule an item-task job for *item*.

        :param item: the item document holding the task specification.
        :param jobTitle: title for the job; defaults to the item's name.
        :param includeJobInfo: if True, attach a jobInfo spec so the worker
            can report status back to girder.
        :param inputs: raw input bindings supplied by the caller.
        :param outputs: raw output bindings supplied by the caller.
        :returns: the saved and scheduled job document.
        """
        user = self.getCurrentUser()
        if jobTitle is None:
            jobTitle = item['name']
        # _validateTask yields both the task spec and the handler name.
        task, handler = self._validateTask(item)

        jobModel = Job()
        job = jobModel.createJob(title=jobTitle,
                                 type='item_task',
                                 handler=handler,
                                 user=user)

        # If this is a user auth token, we make an IO-enabled token
        token = self.getCurrentToken()
        tokenModel = Token()
        if tokenModel.hasScope(token, TokenScope.USER_AUTH):
            token = tokenModel.createToken(user=user,
                                           days=7,
                                           scope=(TokenScope.DATA_READ,
                                                  TokenScope.DATA_WRITE))
            job['itemTaskTempToken'] = token['_id']

        # Allow status writes scoped to this specific job only.
        token = tokenModel.addScope(token,
                                    'item_tasks.job_write:%s' % job['_id'])

        job.update({
            'itemTaskId': item['_id'],
            'itemTaskBindings': {
                'inputs': inputs,
                'outputs': outputs
            },
            'kwargs': {
                'task':
                task,
                'inputs':
                self._transformInputs(inputs, token),
                'outputs':
                self._transformOutputs(outputs, token, job, task, item['_id']),
                'validate':
                False,
                'auto_convert':
                False,
                'cleanup':
                True
            }
        })

        if includeJobInfo:
            job['kwargs']['jobInfo'] = utils.jobInfoSpec(job)

        # Route to a specific celery queue when the item requests one.
        if 'itemTaskCeleryQueue' in item.get('meta', {}):
            job['celeryQueue'] = item['meta']['itemTaskCeleryQueue']

        job = jobModel.save(job)
        jobModel.scheduleJob(job)

        return job
Exemplo n.º 13
0
    def testStream(self, item, params):
        """Exercise streaming docker pipes end to end."""
        token = self.getCurrentToken()

        jobModel = self.model('job', 'jobs')

        job = jobModel.createJob(
            title='docker stream test', type='docker_test',
            handler='worker_handler', user=self.getCurrentUser())
        jobToken = jobModel.createJobToken(job)
        apiUrl = getApiUrl()

        # Container declares one streaming input pipe and one output pipe.
        taskSpec = {
            'mode': 'docker',
            'docker_image': 'testoutputs:latest',
            'pull_image': False,
            'inputs': [
                {'id': 'input_pipe', 'target': 'filepath', 'stream': True}],
            'outputs': [
                {'id': 'output_pipe', 'target': 'filepath', 'stream': True}]
        }
        # The item download is streamed in over HTTP...
        inputSpec = {
            'input_pipe': {
                'mode': 'http',
                'method': 'GET',
                'url': '%s/item/%s/download' % (apiUrl, str(item['_id'])),
                'headers': {'Girder-Token': str(token['_id'])}
            }
        }
        # ...and the container output is POSTed to the test callback.
        outputSpec = {
            'output_pipe': {
                'mode': 'http',
                'method': 'POST',
                'url': apiUrl + '/docker_test/stream_callback',
                'headers': {'Girder-Token': str(token['_id'])}
            }
        }
        job['kwargs'] = {
            'task': taskSpec,
            'inputs': inputSpec,
            'outputs': outputSpec,
            'validate': False,
            'auto_convert': False,
            'cleanup': False,
            'jobInfo': utils.jobInfoSpec(job, jobToken)
        }
        job = jobModel.save(job)
        jobModel.scheduleJob(job)
        return job
Exemplo n.º 14
0
    def runJsonTasksDescription(self, folder, image, pullImage, params):
        """Schedule a job that extracts JSON task specs from a docker image."""
        jobModel = self.model('job', 'jobs')
        # Scoped token allows the worker to POST specs back for this folder.
        token = self.model('token').createToken(
            days=3,
            scope='item_task.set_task_spec.%s' % folder['_id'],
            user=self.getCurrentUser())
        job = jobModel.createJob(
            title='Read docker task specs: %s' % image,
            type='item_task.json_description',
            handler='worker_handler',
            user=self.getCurrentUser())

        job.update({
            'itemTaskId': folder['_id'],
            'kwargs': {
                'task': {
                    'mode': 'docker',
                    'docker_image': image,
                    'container_args': [],
                    'pull_image': pullImage,
                    'outputs': [{
                        'id': '_stdout',
                        'format': 'text'
                    }],
                },
                # The container's stdout (JSON specs) is POSTed back.
                'outputs': {
                    '_stdout': {
                        'mode': 'http',
                        'method': 'POST',
                        'format': 'text',
                        'url': '/'.join((utils.getWorkerApiUrl(),
                                         self.resourceName,
                                         str(folder['_id']), 'json_specs')),
                        'headers': {'Girder-Token': token['_id']},
                        'params': {
                            'image': image,
                            'pullImage': pullImage
                        }
                    }
                },
                'jobInfo': utils.jobInfoSpec(job),
                'validate': False,
                'auto_convert': False,
                'cleanup': True
            }
        })

        job = jobModel.save(job)
        jobModel.scheduleJob(job)
        return job
Exemplo n.º 15
0
    def testWorkerDifferentTask(self):
        """Jobs with a custom celery task name/queue are routed accordingly."""
        # An API URL not starting with http(s):// must be rejected.
        resp = self.request('/system/setting', method='PUT', params={
            'key': worker.PluginSettings.API_URL,
            'value': 'bad value'
        }, user=self.admin)
        self.assertStatus(resp, 400)
        self.assertEqual(resp.json['message'], 'API URL must start with http:// or https://.')

        resp = self.request('/system/setting', method='PUT', params={
            'list': json.dumps([{
                'key': worker.PluginSettings.BROKER,
                'value': 'amqp://[email protected]'
            }, {
                'key': worker.PluginSettings.BACKEND,
                'value': 'amqp://[email protected]'
            }])
        }, user=self.admin)
        self.assertStatusOk(resp)

        # Create a job that overrides the celery task name and queue.
        from girder.plugins.jobs.models.job import Job
        jobModel = Job()
        job = jobModel.createJob(
            title='title', type='foo', handler='worker_handler',
            user=self.admin, public=False, args=(), kwargs={},
            otherFields={
                'celeryTaskName': 'some_other.task',
                'celeryQueue': 'my_other_q'
            })

        job['kwargs'] = {
            'jobInfo': utils.jobInfoSpec(job),
            'inputs': [
                utils.girderInputSpec(self.adminFolder, resourceType='folder')
            ],
            'outputs': [
                utils.girderOutputSpec(self.adminFolder, token=self.adminToken)
            ]
        }
        job = jobModel.save(job)

        # Schedule with celery mocked; the overridden name and queue must
        # appear in the send_task call.
        with mock.patch('celery.Celery') as celeryMock:
            celeryMock.return_value.send_task.return_value = FakeAsyncResult()

            jobModel.scheduleJob(job)

            sendTaskCalls = celeryMock.return_value.send_task.mock_calls
            self.assertEqual(len(sendTaskCalls), 1)
            self.assertEqual(
                sendTaskCalls[0][1],
                ('some_other.task', job['args'], job['kwargs']))
            self.assertIn('queue', sendTaskCalls[0][2])
            self.assertEqual(sendTaskCalls[0][2]['queue'], 'my_other_q')
Exemplo n.º 16
0
    def testWorker(self):
        # Test the settings
        resp = self.request(
            "/system/setting",
            method="PUT",
            params={
                "list": json.dumps(
                    [
                        {"key": worker.PluginSettings.BROKER, "value": "amqp://[email protected]"},
                        {"key": worker.PluginSettings.BACKEND, "value": "amqp://[email protected]"},
                    ]
                )
            },
            user=self.admin,
        )
        self.assertStatusOk(resp)

        # Create a job to be handled by the worker plugin
        jobModel = self.model("job", "jobs")
        job = jobModel.createJob(
            title="title", type="foo", handler="worker_handler", user=self.admin, public=False, args=(), kwargs={}
        )

        job["kwargs"] = {
            "jobInfo": utils.jobInfoSpec(job),
            "inputs": [utils.girderInputSpec(self.adminFolder, resourceType="folder")],
            "outputs": [utils.girderOutputSpec(self.adminFolder, token=self.adminToken)],
        }
        job = jobModel.save(job)
        self.assertEqual(job["status"], JobStatus.INACTIVE)

        # Schedule the job, make sure it is sent to celery
        with mock.patch("celery.Celery") as celeryMock:
            instance = celeryMock.return_value
            instance.send_task.return_value = FakeAsyncResult()

            jobModel.scheduleJob(job)

            # Make sure we sent the job to celery
            self.assertEqual(len(celeryMock.mock_calls), 2)
            self.assertEqual(celeryMock.mock_calls[0][1], ("girder_worker",))
            self.assertEqual(
                celeryMock.mock_calls[0][2],
                {"broker": "amqp://[email protected]", "backend": "amqp://[email protected]"},
            )

            sendTaskCalls = celeryMock.return_value.send_task.mock_calls
            self.assertEqual(len(sendTaskCalls), 1)
            self.assertEqual(sendTaskCalls[0][1], ("girder_worker.run", job["args"], job["kwargs"]))

            # Make sure we got and saved the celery task id
            job = jobModel.load(job["_id"], force=True)
            self.assertEqual(job["celeryTaskId"], "fake_id")
            self.assertEqual(job["status"], JobStatus.QUEUED)
Exemplo n.º 17
0
    def testWorkerDifferentTask(self):
        """Custom celeryTaskName/celeryQueue fields override the defaults."""
        # A malformed API URL setting must be rejected with a 400.
        resp = self.request('/system/setting', method='PUT', params={
            'key': worker.PluginSettings.API_URL,
            'value': 'bad value'
        }, user=self.admin)
        self.assertStatus(resp, 400)
        self.assertEqual(resp.json['message'], 'API URL must start with http:// or https://.')

        resp = self.request('/system/setting', method='PUT', params={
            'list': json.dumps([{
                'key': worker.PluginSettings.BROKER,
                'value': 'amqp://[email protected]'
            }, {
                'key': worker.PluginSettings.BACKEND,
                'value': 'amqp://[email protected]'
            }])
        }, user=self.admin)
        self.assertStatusOk(resp)

        # Create a job carrying the task-name and queue overrides.
        jobModel = self.model('job', 'jobs')
        job = jobModel.createJob(
            title='title', type='foo', handler='worker_handler',
            user=self.admin, public=False, args=(), kwargs={},
            otherFields={
                'celeryTaskName': 'some_other.task',
                'celeryQueue': 'my_other_q'
            })

        job['kwargs'] = {
            'jobInfo': utils.jobInfoSpec(job),
            'inputs': [
                utils.girderInputSpec(self.adminFolder, resourceType='folder')
            ],
            'outputs': [
                utils.girderOutputSpec(self.adminFolder, token=self.adminToken)
            ]
        }
        job = jobModel.save(job)

        # Schedule with celery mocked and inspect the send_task call.
        with mock.patch('celery.Celery') as celeryMock:
            celery_app = celeryMock.return_value
            celery_app.send_task.return_value = FakeAsyncResult()

            jobModel.scheduleJob(job)

            sendTaskCalls = celery_app.send_task.mock_calls
            self.assertEqual(len(sendTaskCalls), 1)
            self.assertEqual(sendTaskCalls[0][1], (
                'some_other.task', job['args'], job['kwargs']))
            self.assertIn('queue', sendTaskCalls[0][2])
            self.assertEqual(sendTaskCalls[0][2]['queue'], 'my_other_q')
Exemplo n.º 18
0
    def executeTask(self, item, jobTitle, includeJobInfo, inputs, outputs):
        """Create and schedule an item-task job for *item*.

        girder_worker-mode tasks are dispatched directly via celery; all
        other tasks are wrapped in a job and scheduled through the job model.

        :param item: the item document holding the task specification.
        :param jobTitle: title for the job; defaults to the item's name.
        :param includeJobInfo: if True, attach a jobInfo spec so the worker
            can report status back to girder.
        :param inputs: raw input bindings supplied by the caller.
        :param outputs: raw output bindings supplied by the caller.
        :returns: the saved and scheduled job document.
        """
        user = self.getCurrentUser()
        if jobTitle is None:
            jobTitle = item['name']
        task, handler = self._validateTask(item)

        if task.get('mode') == 'girder_worker':
            return runCeleryTask(item['meta']['itemTaskImport'], inputs)

        # BUG FIX: removed dead assignment `jobModel = self.model('job',
        # 'jobs')` that was immediately overwritten by Job() on the next line.
        jobModel = Job()
        job = jobModel.createJob(
            title=jobTitle, type='item_task', handler=handler, user=user)

        # If this is a user auth token, we make an IO-enabled token
        token = self.getCurrentToken()
        tokenModel = Token()
        if tokenModel.hasScope(token, TokenScope.USER_AUTH):
            token = tokenModel.createToken(
                user=user, days=7, scope=(TokenScope.DATA_READ, TokenScope.DATA_WRITE))
            job['itemTaskTempToken'] = token['_id']

        # Allow status writes scoped to this specific job only.
        token = tokenModel.addScope(token, 'item_tasks.job_write:%s' % job['_id'])

        job.update({
            'itemTaskId': item['_id'],
            'itemTaskBindings': {
                'inputs': inputs,
                'outputs': outputs
            },
            'kwargs': {
                'task': task,
                'inputs': self._transformInputs(inputs, token),
                'outputs': self._transformOutputs(outputs, token, job, task, item['_id']),
                'validate': False,
                'auto_convert': False,
                'cleanup': True
            }
        })

        if includeJobInfo:
            job['kwargs']['jobInfo'] = utils.jobInfoSpec(job)

        # Route to a specific celery queue when the item requests one.
        if 'itemTaskCeleryQueue' in item.get('meta', {}):
            job['celeryQueue'] = item['meta']['itemTaskCeleryQueue']

        job = jobModel.save(job)
        jobModel.scheduleJob(job)

        return job
Exemplo n.º 19
0
def runSlicerCliTasksDescriptionForFolder(self, folder, image, args, pullImage, params):
    """Schedule a worker job that runs *image* with ``--xml`` and POSTs the
    resulting Slicer CLI task spec back to the folder's endpoint.
    """
    jobModel = self.model('job', 'jobs')
    token = self.model('token').createToken(
        days=3, scope='item_task.set_task_spec.%s' % folder['_id'],
        user=self.getCurrentUser())
    job = jobModel.createJob(
        title='Read docker task specs: %s' % image, type='folder.item_task_slicer_cli_description',
        handler='worker_handler', user=self.getCurrentUser())

    # Drop any trailing '--xml'; it is re-appended unconditionally below.
    if args[-1:] == ['--xml']:
        args = args[:-1]

    xmlCallbackUrl = '/'.join((utils.getWorkerApiUrl(), 'folder', str(folder['_id']),
                               'item_task_slicer_cli_xml'))
    stdoutSpec = {
        'mode': 'http',
        'method': 'POST',
        'format': 'text',
        'url': xmlCallbackUrl,
        'headers': {'Girder-Token': token['_id']},
        'params': {
            'image': image,
            'args': json.dumps(args),
            'pullImage': pullImage
        }
    }
    job.update({
        'itemTaskId': folder['_id'],
        'kwargs': {
            'task': {
                'mode': 'docker',
                'docker_image': image,
                'container_args': args + ['--xml'],
                'pull_image': pullImage,
                'outputs': [{
                    'id': '_stdout',
                    'format': 'text'
                }],
            },
            'outputs': {'_stdout': stdoutSpec},
            'jobInfo': utils.jobInfoSpec(job),
            'validate': False,
            'auto_convert': False
        }
    })

    job = jobModel.save(job)
    jobModel.scheduleJob(job)
    return job
Exemplo n.º 20
0
    def testOutputs(self, folder, item, params):
        """Schedule a docker test job exercising file-target input/output."""
        token = self.getCurrentToken()
        jobModel = self.model('job', 'jobs')

        job = jobModel.createJob(
            title='docker output test: %s' % folder['name'],
            type='docker_test', handler='worker_handler',
            user=self.getCurrentUser())
        jobToken = jobModel.createJobToken(job)

        # Both the input and the output are text files on disk.
        fileIoSpec = {'target': 'filepath', 'format': 'text', 'type': 'string'}
        taskSpec = {
            'mode': 'docker',
            'docker_image': 'testoutputs:latest',
            'pull_image': False,
            'inputs': [dict(id='input', **fileIoSpec)],
            'outputs': [dict(**{'id': 'out.txt'}, **fileIoSpec)]
        }

        job['kwargs'] = {
            'task': taskSpec,
            'inputs': {
                'input': utils.girderInputSpec(item, resourceType='item', token=token)
            },
            'outputs': {
                'out.txt': utils.girderOutputSpec(folder, token)
            },
            'jobInfo': utils.jobInfoSpec(job, jobToken)
        }
        job = jobModel.save(job)
        jobModel.scheduleJob(job)
        return job
Exemplo n.º 21
0
def girder_before_task_publish(sender=None, body=None, exchange=None,
                               routing_key=None, headers=None, properties=None,
                               declare=None, retry_policy=None, **kwargs):
    """Celery ``before_task_publish`` signal handler.

    When publishing from within a girder REST request (detected by whether the
    girder packages import successfully), create the job document directly and
    stash its ``jobInfoSpec`` and the worker API URL into the message headers
    so the consuming worker can report status back. If a ``jobInfoSpec`` is
    already present in the headers, the message is left untouched.

    NOTE(review): assumes ``headers`` is a dict and ``body`` is the standard
    celery (args, kwargs, ...) tuple — confirm against the celery protocol
    version in use.
    """
    if 'jobInfoSpec' not in headers:
        try:
            # Note: If we can import these objects from the girder packages we
            # assume our producer is in a girder REST request. This allows
            # us to create the job model's directly. Otherwise there will be an
            # ImportError and we can create the job via a REST request using
            # the jobInfoSpec in headers.
            from girder.utility.model_importer import ModelImporter
            from girder.plugins.worker import utils
            from girder.api.rest import getCurrentUser

            job_model = ModelImporter.model('job', 'jobs')

            # Pop girder-specific headers so they are not forwarded to celery.
            user = headers.pop('girder_user', getCurrentUser())
            token = headers.pop('girder_token', None)

            task_args, task_kwargs = body[0], body[1]

            # Fall back to the Task class defaults for any job attribute that
            # was not explicitly provided in the headers.
            job = job_model.createJob(
                **{'title': headers.get('girder_job_title', Task._girder_job_title),
                   'type': headers.get('girder_job_type', Task._girder_job_type),
                   'handler': headers.get('girder_job_handler', Task._girder_job_handler),
                   'public': headers.get('girder_job_public', Task._girder_job_public),
                   'user': user,
                   'args': task_args,
                   'kwargs': task_kwargs,
                   'otherFields': dict(celeryTaskId=headers['id'],
                                       **headers.get('girder_job_other_fields',
                                                     Task._girder_job_other_fields))})
            # If we don't have a token from girder_token kwarg,  use
            # the job token instead. Otherwise no token
            if token is None:
                token = job.get('token', None)

            headers['jobInfoSpec'] = utils.jobInfoSpec(job, token)
            headers['apiUrl'] = utils.getWorkerApiUrl()

        except ImportError:
            # TODO: Check for self.job_manager to see if we have
            #       tokens etc to contact girder and create a job model
            #       we may be in a chain or a chord or some-such
            pass
Exemplo n.º 22
0
    def testOutputs(self, folder, item, params):
        """Create and schedule a docker job that writes out.txt into *folder*."""
        token = self.getCurrentToken()
        jobModel = self.model('job', 'jobs')

        job = jobModel.createJob(
            title='docker output test: %s' % folder['name'], type='docker_test',
            handler='worker_handler', user=self.getCurrentUser())
        jobToken = jobModel.createJobToken(job)

        taskSpec = {
            'mode': 'docker',
            'docker_image': 'testoutputs:latest',
            'pull_image': False,
            'inputs': [{
                'id': 'input',
                'target': 'filepath',
                'format': 'text',
                'type': 'string'
            }],
            'outputs': [{
                'id': 'out.txt',
                'target': 'filepath',
                'format': 'text',
                'type': 'string'
            }]
        }
        inputSpecs = {
            'input': utils.girderInputSpec(item, resourceType='item', token=token)
        }
        outputSpecs = {'out.txt': utils.girderOutputSpec(folder, token)}

        job['kwargs'] = {
            'task': taskSpec,
            'inputs': inputSpecs,
            'outputs': outputSpecs,
            'jobInfo': utils.jobInfoSpec(job, jobToken)
        }
        job = jobModel.save(job)
        jobModel.scheduleJob(job)
        return job
Exemplo n.º 23
0
    def test_traditional_job_custom_task_name_fails(self, params):
        """Schedule a traditional job bound to a celery task that fails."""
        jobModel = self.model('job', 'jobs')

        extraFields = {
            'celeryTaskName': 'common_tasks.test_tasks.fail.fail_after'
        }
        job = jobModel.createJob(
            title='test_traditional_job_custom_task_name_fails',
            type='traditional', handler='worker_handler',
            user=self.getCurrentUser(), public=False,
            args=(), kwargs={}, otherFields=extraFields)

        # Attach the jobInfo spec so the worker can report status back.
        job['kwargs']['jobInfo'] = utils.jobInfoSpec(job)

        jobModel.save(job)
        jobModel.scheduleJob(job)

        return job
Exemplo n.º 24
0
    def test_traditional_job_girder_worker_run_fails(self, params):
        """Schedule a girder_worker_run job whose analysis is expected to fail."""
        jobModel = self.model('job', 'jobs')

        runKwargs = {
            'inputs': self.girder_worker_run_inputs,
            'outputs': self.girder_worker_run_outputs
        }
        job = jobModel.createJob(
            title='test_traditional_job_girder_worker_run_fails',
            type='traditional', handler='worker_handler',
            user=self.getCurrentUser(), public=False,
            args=(self.girder_worker_run_failing_analysis, ),
            kwargs=runKwargs)

        # Attach the jobInfo spec so the worker can report status back.
        job['kwargs']['jobInfo'] = utils.jobInfoSpec(job)

        jobModel.save(job)
        jobModel.scheduleJob(job)

        return job
Exemplo n.º 25
0
    def test_traditional_task_cancel(self, params):
        """Start a cancelable task, wait for it to run, then cancel it."""
        jobModel = self.model('job', 'jobs')
        job = jobModel.createJob(
            title='test_traditional_task_cancel', type='worker',
            handler='worker_handler', user=self.getCurrentUser(),
            public=False, args=(self.girder_worker_run_cancelable, ),
            kwargs={'inputs': {}, 'outputs': {}})

        job['kwargs']['jobInfo'] = utils.jobInfoSpec(job)

        jobModel.save(job)
        jobModel.scheduleJob(job)
        # Only cancel once the task has actually started running.
        assert wait_for_status(self.getCurrentUser(), job, JobStatus.RUNNING)
        jobModel.cancelJob(job)

        return job
Exemplo n.º 26
0
    def testFetchParent(self, file, params):
        """Schedule a python job whose input spec fetches its parent resources."""
        token = self.getCurrentToken()
        jobModel = self.model('job', 'jobs')

        job = jobModel.createJob(
            title='Parent fetch test', type='parent_fetch_test',
            handler='worker_handler', user=self.getCurrentUser())
        jobToken = jobModel.createJobToken(job)

        taskSpec = {
            'mode': 'python',
            'script': 'print(fp)\n',
            'inputs': [{
                'id': 'fp',
                'target': 'filepath',
                'format': 'text',
                'type': 'string'
            }],
            'outputs': []
        }

        job['kwargs'] = {
            'task': taskSpec,
            'inputs': {
                # fetchParent=True pulls the file's parent item along with it.
                'fp': utils.girderInputSpec(file, token=token, fetchParent=True)
            },
            'outputs': {},
            'validate': False,
            'auto_convert': False,
            'cleanup': False,
            'jobInfo': utils.jobInfoSpec(job, jobToken)
        }
        job = jobModel.save(job)
        jobModel.scheduleJob(job)
        return job
Exemplo n.º 27
0
    def runTaskSpec(self, key, params, **kwargs):
        """Create a job from the given task spec.

        Input bindings arrive in *params* as ``INPUT(<name>)`` entries and
        output bindings as ``OUTPUT(<name>)`` entries, each encoded as a
        colon-separated positional spec optionally followed by extra JSON
        arguments (parsed with ``RE_ARG_SPEC``).

        :param key: name of a registered task spec.
        :param params: request parameters carrying the bindings.
        :returns: dict with the filtered job, the scratch folder, and a token.
        :raises RestException: if the task is unknown, a required input has no
            binding, or a binding payload is malformed.
        """
        task_spec = get_task_spec(key)
        if task_spec is None:
            raise RestException('No task named %s.' % key)

        # validate input bindings
        for input_spec in task_spec['inputs']:
            input_name = input_spec['name']
            input_key = 'INPUT({})'.format(input_name)

            try:
                payload = params[input_key]
            except KeyError:
                # Check to see if the input spec provides a default.
                # If not, raise an exception.
                if 'default' not in input_spec:
                    raise RestException(
                        'No binding provided for input "{}".'.format(
                            input_name))
                # BUG FIX: the original fell through here and validated a
                # stale (or unbound) ``payload`` from a previous iteration.
                # With a default present there is nothing to validate.
                continue

            if RE_ARG_SPEC.match(payload) is None:
                raise RestException(
                    'invalid payload for input "{}": "{}"'.format(
                        input_name, payload))

        # validate output bindings (outputs are always optional)
        for output_spec in task_spec['outputs']:
            output_name = output_spec['name']
            output_key = 'OUTPUT({})'.format(output_name)

            try:
                payload = params[output_key]
            except KeyError:
                continue

            if RE_ARG_SPEC.match(payload) is None:
                raise RestException(
                    'invalid payload for output "{}": "{}"'.format(
                        output_name, payload))

        #
        # validation complete
        #

        job_title = params.get('title', 'sumo {}'.format(task_spec['name']))

        user, token = self.getCurrentUser(True)

        job = self.model('job', 'jobs').createJob(title=job_title,
                                                  type='sumo',
                                                  user=user,
                                                  handler='worker_handler')

        # Per-job scratch folder that receives file/item outputs.
        scratchDirectory = self._ensureJobDirectory(user, job['_id'])

        jobToken = self.model('job', 'jobs').createJobToken(job)

        job['kwargs']['jobInfo'] = workerUtils.jobInfoSpec(job=job,
                                                           token=jobToken,
                                                           logPrint=True)

        if not token:
            # It seems like we should be able to use a token without USER_AUTH
            # in its scope, but I'm not sure how.
            token = self.model('token').createToken(user,
                                                    days=1,
                                                    scope=TokenScope.USER_AUTH)

        # Payload document that inline (non-file) outputs are written into.
        jobpayload = (self.model('jobpayload', 'osumo').createJobpayload(
            job['_id'], user['_id']))

        # Build the worker input specs from the validated bindings.
        job_inputs = {}
        for input_spec in task_spec['inputs']:
            input_name = input_spec['name']
            input_key = 'INPUT({})'.format(input_name)

            payload = params.get(input_key)
            if payload is None:
                continue

            job_input = {}

            m = RE_ARG_SPEC.match(payload)
            pos_args, extra_args = m.group(1), m.group(3)
            pos_args = pos_args.split(':')
            if extra_args:
                extra_args = json.loads('{{{}}}'.format(extra_args))
            else:
                extra_args = {}

            input_type = pos_args[0]

            if input_type in ('FILE', 'ITEM'):
                resource_id = pos_args[1]
                resource_type = input_type.lower()
                # Binding-level overrides win over the spec's declared
                # type/format.
                data_type = extra_args.get('type',
                                           input_spec.get('type', 'string'))
                data_format = extra_args.get('format',
                                             input_spec.get('format', 'text'))

                job_input.update(
                    workerUtils.girderInputSpec(self._getResource(
                        resource_type, resource_id, user),
                                                resourceType=resource_type,
                                                token=token,
                                                dataType=data_type,
                                                dataFormat=data_format))

            elif input_type == 'HTTP':
                # TODO(opadron): maybe we'll want to implement this, someday?
                raise NotImplementedError('HTTP input not implemented')

            elif input_type == 'INTEGER':
                value = pos_args[1]
                job_input['type'] = 'number'
                job_input['format'] = 'number'
                job_input['mode'] = 'inline'
                job_input['data'] = int(value)

            elif input_type == 'FLOAT':
                value = pos_args[1]
                job_input['type'] = 'number'
                job_input['format'] = 'number'
                job_input['mode'] = 'inline'
                job_input['data'] = float(value)

            elif input_type == 'STRING':
                # The value itself may contain ':'; rejoin the remainder.
                value = ':'.join(pos_args[1:])
                job_input['type'] = 'string'
                job_input['format'] = 'text'
                job_input['mode'] = 'inline'
                job_input['data'] = value

            elif input_type == 'BOOLEAN':
                value = pos_args[1]
                job_input['type'] = 'boolean'
                job_input['format'] = 'json'
                job_input['mode'] = 'inline'
                job_input['data'] = 'true' if int(value) else 'false'

            else:
                raise NotImplementedError(
                    'Input type "{}" not supported'.format(input_type))

            job_input.update(extra_args)
            job_inputs[input_name] = job_input

        # Build the worker output specs from the validated bindings.
        job_outputs = {}
        for output_spec in task_spec['outputs']:
            output_name = output_spec['name']
            output_key = 'OUTPUT({})'.format(output_name)

            payload = params.get(output_key)
            if payload is None:
                continue

            job_output = {}

            m = RE_ARG_SPEC.match(payload)
            pos_args, extra_args = m.group(1), m.group(3)
            pos_args = pos_args.split(':')
            if extra_args:
                extra_args = json.loads('{{{}}}'.format(extra_args))
            else:
                extra_args = {}

            output_type = pos_args[0]

            if output_type in ('FILE', 'ITEM'):
                # NOTE(review): parent_id is parsed but not used; output is
                # always written into the job's scratch folder.
                parent_id, resource_name = (pos_args + [None])[1:3]
                data_type = extra_args.get('type',
                                           output_spec.get('type', 'string'))
                data_format = extra_args.get('format',
                                             output_spec.get('format', 'text'))

                job_output.update(
                    workerUtils.girderOutputSpec(scratchDirectory,
                                                 parentType='folder',
                                                 token=token,
                                                 name=resource_name,
                                                 dataType=data_type,
                                                 dataFormat=data_format))

            elif output_type in ('INTEGER', 'FLOAT', 'STRING', 'BOOLEAN',
                                 'JSON'):
                # Inline outputs are written straight into the jobpayload
                # collection of the girder mongo database.
                parse_result = urllib.parse.urlparse(
                    getConfig()['database']['uri'])

                job_output['mode'] = 'sumo'
                job_output['db'] = parse_result.path[1:]
                job_output['collection'] = 'jobpayload'
                job_output['host'] = parse_result.netloc
                job_output['id'] = jobpayload['_id']
                job_output['key'] = output_name

                if output_type == 'INTEGER':
                    job_output['type'] = 'number'
                    job_output['format'] = 'number'
                    job_output['converter'] = 'int'

                elif output_type == 'FLOAT':
                    job_output['type'] = 'number'
                    job_output['format'] = 'number'
                    job_output['converter'] = 'float'

                elif output_type == 'STRING':
                    job_output['type'] = 'string'
                    job_output['format'] = 'text'

                elif output_type == 'BOOLEAN':
                    job_output['type'] = 'boolean'
                    job_output['format'] = 'boolean'
                    job_output['converter'] = 'bool'

                elif output_type == 'JSON':
                    job_output['type'] = 'string'
                    job_output['format'] = 'text'
                    job_output['converter'] = 'json'

            else:
                raise NotImplementedError(
                    'Output type "{}" not supported'.format(output_type))

            job_output.update(extra_args)
            job_outputs[output_name] = job_output

        job['kwargs'].update(task=task_spec,
                             inputs=job_inputs,
                             outputs=job_outputs)

        job = self.model('job', 'jobs').save(job)
        self.model('jobuser', 'osumo').createJobuser(job['_id'], user['_id'])
        self.model('job', 'jobs').scheduleJob(job)

        return {
            'job': self.model('job', 'jobs').filter(job, user),
            'folder': self.model('folder').filter(scratchDirectory, user),
            'token': str(token['_id'])
        }
Exemplo n.º 28
0
    def _createLargeImageJob(self, item, fileObj, user, token):
        """Build and schedule a worker job converting *fileObj* to a tiled TIFF."""
        scriptPath = os.path.join(os.path.dirname(__file__), '..', 'create_tiff.py')
        with open(scriptPath, 'r') as scriptFile:
            script = scriptFile.read()

        title = 'TIFF conversion: %s' % fileObj['name']
        jobModel = self.model('job', 'jobs')
        job = jobModel.createJob(
            title=title, type='large_image_tiff', handler='worker_handler',
            user=user)
        jobToken = jobModel.createJobToken(job)

        # Avoid clobbering the source file: add a timestamp if the output
        # name would collide with the input name.
        baseName = os.path.splitext(fileObj['name'])[0]
        outputName = baseName + '.tiff'
        if outputName == fileObj['name']:
            outputName = (baseName + '.' +
                          time.strftime('%Y%m%d-%H%M%S') + '.tiff')

        task = {
            'mode': 'python',
            'script': script,
            'name': title,
            'inputs': [
                {'id': 'in_path', 'target': 'filepath', 'type': 'string',
                 'format': 'text'},
                {'id': 'out_filename', 'type': 'string', 'format': 'text'},
                {'id': 'tile_size', 'type': 'number', 'format': 'number'},
                {'id': 'quality', 'type': 'number', 'format': 'number'},
            ],
            'outputs': [
                {'id': 'out_path', 'target': 'filepath', 'type': 'string',
                 'format': 'text'},
            ]
        }

        def inlineNumber(value):
            # Inline numeric constant passed straight to the worker.
            return {'mode': 'inline', 'type': 'number', 'format': 'number',
                    'data': value}

        inputs = {
            'in_path': workerUtils.girderInputSpec(
                fileObj, resourceType='file', token=token),
            'quality': inlineNumber(90),
            'tile_size': inlineNumber(256),
            'out_filename': {
                'mode': 'inline',
                'type': 'string',
                'format': 'text',
                'data': outputName
            }
        }

        outputs = {
            'out_path': workerUtils.girderOutputSpec(
                parent=item, token=token, parentType='item')
        }

        # TODO: Give the job an owner
        job['kwargs'] = {
            'task': task,
            'inputs': inputs,
            'outputs': outputs,
            'jobInfo': workerUtils.jobInfoSpec(job, jobToken),
            'auto_convert': False,
            'validate': False
        }
        job['meta'] = {
            'creator': 'large_image',
            'itemId': str(item['_id']),
            'task': 'createImageItem',
        }

        job = jobModel.save(job)
        jobModel.scheduleJob(job)

        return job
Exemplo n.º 29
0
    def processVideo(self, id, params):
        """Create (or re-create) the ffmpeg processing job for a video item.

        :param id: id of the item containing the video file.
        :param params: request parameters; uses ``force`` (re-run even when a
            prior job exists) and optional ``fileId`` (which file to process).
        :returns: a dict describing whether a job was created, merged with the
            job document.
        """
        force = params['force']
        user, userToken = getCurrentUser(True)

        itemModel = self.model('item')
        fileModel = self.model('file')
        tokenModel = self.model('token')
        jobModel = self.model('job', 'jobs')

        item = itemModel.load(id, user=user, level=AccessType.READ)

        itemVideoData = item.get('video', {})
        jobId = itemVideoData.get('jobId')

        itemAlreadyProcessed = False
        job = None
        if jobId is not None:
            job = jobModel.load(jobId, level=AccessType.READ, user=user)

        if not force:
            # A job that is pending, running, or succeeded counts as already
            # processed; only errored/canceled jobs may be re-run implicitly.
            if job is not None:
                status = job['status']
                if status not in (None, JobStatus.ERROR, JobStatus.CANCELED):
                    itemAlreadyProcessed = True

            if itemAlreadyProcessed:
                result = {
                    'video': {
                        'jobCreated': False,
                        'message': 'Processing job already created.'
                    }
                }

                result.update(job)
                return result

        # if user provided fileId, use that one
        fileId = params.get('fileId')
        if fileId is not None:
            # ensure the provided fileId is valid
            inputFile = fileModel.findOne({
                'itemId': ObjectId(id),
                '_id': ObjectId(fileId)
            })

            if inputFile is None:
                raise RestException(
                    'Item with id=%s has no such file with id=%s' %
                    (id, fileId))

        else:
            # User did not provide a fileId.
            #
            # If we're *re*running a processing job (force=True), look
            # for the fileId used by the old job.
            if force and job:
                fileId = job.get('meta', {}).get('video', {}).get('fileId')
                if fileId:
                    # ensure the provided fileId is valid, but in this case,
                    # don't raise an exception if it is not -- just discard the
                    # fileId and move on
                    inputFile = fileModel.findOne({
                        'itemId': ObjectId(id),
                        '_id': ObjectId(fileId)
                    })

                    if inputFile is None:
                        fileId = None

        # if we *still* don't have a fileId, just grab the first one found under
        # the given item.
        if fileId is None:
            inputFile = fileModel.findOne({'itemId': ObjectId(id)})

            # if there *are* no files, bail
            if inputFile is None:
                # BUG FIX: the original referenced an undefined name
                # ``itemId`` here, turning this error path into a NameError.
                raise RestException('item %s has no files' % id)

            fileId = inputFile['_id']

        # if we are *re*running a processing job (force=True), remove all files
        # from this item that were created by the last processing job...
        #
        # ...unless (for some reason) the user is running the job against that
        # particular file (this is almost certainly user error, but for now,
        # we'll just keep the file around).
        if force:
            fileIdList = itemVideoData.get('createdFiles', [])
            for f in fileIdList:
                if f == fileId:
                    continue

                theFile = fileModel.load(f, level=AccessType.WRITE, user=user)

                if theFile:
                    fileModel.remove(theFile)
            itemVideoData['createdFiles'] = []

        # begin construction of the actual job
        if not userToken:
            # It seems like we should be able to use a token without USER_AUTH
            # in its scope, but I'm not sure how.
            userToken = tokenModel.createToken(user,
                                               days=1,
                                               scope=TokenScope.USER_AUTH)

        jobTitle = 'Video Processing'
        job = jobModel.createJob(title=jobTitle,
                                 type='video',
                                 user=user,
                                 handler='worker_handler')
        jobToken = jobModel.createJobToken(job)

        job['kwargs'] = job.get('kwargs', {})
        # BUG FIXES in the task spec below: removed a stray debugging entry
        # ('a': 'b') and corrected the misspelled "'type:'" keys in the
        # 'source' and 'meta' output specs to 'type'.
        job['kwargs']['task'] = {
            'mode': 'docker',

            # TODO(opadron): replace this once we have a maintained
            #                image on dockerhub
            'docker_image': 'ffmpeg_local',
            'progress_pipe': True,
            'pull_image': False,
            'inputs': [{
                'id': 'input',
                'type': 'string',
                'format': 'text',
                'target': 'filepath'
            }],
            'outputs': [
                {
                    'id': '_stdout',
                    'type': 'string',
                    'format': 'text',
                    'target': 'memory'
                },
                {
                    'id': '_stderr',
                    'type': 'string',
                    'format': 'text',
                    'target': 'memory'
                },
                {
                    'id': 'source',
                    'type': 'string',
                    'format': 'text',
                    'target': 'filepath',
                    'path': '/mnt/girder_worker/data/source.webm'
                },
                {
                    'id': 'meta',
                    'type': 'string',
                    'format': 'text',
                    'target': 'filepath',
                    'path': '/mnt/girder_worker/data/meta.json'
                },
            ]
        }

        _, itemExt = os.path.splitext(item['name'])

        job['kwargs']['inputs'] = {
            'input': workerUtils.girderInputSpec(inputFile,
                                                 resourceType='file',
                                                 token=userToken,
                                                 name='input' + itemExt,
                                                 dataType='string',
                                                 dataFormat='text')
        }

        job['kwargs']['outputs'] = {
            '_stdout': workerUtils.girderOutputSpec(item,
                                                    parentType='item',
                                                    token=userToken,
                                                    name='processing_stdout.txt',
                                                    dataType='string',
                                                    dataFormat='text',
                                                    reference='videoPlugin'),
            '_stderr': workerUtils.girderOutputSpec(item,
                                                    parentType='item',
                                                    token=userToken,
                                                    name='processing_stderr.txt',
                                                    dataType='string',
                                                    dataFormat='text',
                                                    reference='videoPlugin'),
            'source': workerUtils.girderOutputSpec(item,
                                                   parentType='item',
                                                   token=userToken,
                                                   name='source.webm',
                                                   dataType='string',
                                                   dataFormat='text',
                                                   reference='videoPlugin'),
            'meta': workerUtils.girderOutputSpec(item,
                                                 parentType='item',
                                                 token=userToken,
                                                 name='meta.json',
                                                 dataType='string',
                                                 dataFormat='text',
                                                 reference='videoPluginMeta'),
        }

        job['kwargs']['jobInfo'] = workerUtils.jobInfoSpec(job=job,
                                                           token=jobToken,
                                                           logPrint=True)

        # Remember which item/file this job belongs to so re-runs can find it.
        job['meta'] = job.get('meta', {})
        job['meta']['video_plugin'] = {'itemId': id, 'fileId': fileId}

        job = jobModel.save(job)
        jobModel.scheduleJob(job)

        itemVideoData['jobId'] = str(job['_id'])
        item['video'] = itemVideoData
        itemModel.save(item)

        result = {
            'video': {
                'jobCreated': True,
                'message': 'Processing job created.'
            }
        }

        result.update(job)
        return result
Exemplo n.º 30
0
def runSlicerCliTasksDescriptionForItem(self, item, image, args, setName,
                                        setDescription, pullImage, params):
    """Queue a worker job that introspects a Slicer CLI docker image.

    The image is run with ``--xml`` appended, and the CLI spec it prints is
    PUT back to this item via the ``item_task_slicer_cli_xml`` endpoint.
    Returns the scheduled job document.
    """
    item.setdefault('meta', {})

    # Fall back to the image previously recorded on the item, if any.
    if image is None:
        image = item.get('meta', {}).get('itemTaskSpec',
                                         {}).get('docker_image')

    if not image:
        raise RestException(
            'You must pass an image parameter, or set the itemTaskSpec.docker_image '
            'field of the item.')

    jobs = Job()
    # Scoped token so the worker may only write this item's task spec.
    specToken = Token().createToken(days=3,
                                    scope='item_task.set_task_spec.%s' %
                                    item['_id'])
    job = jobs.createJob(title='Read docker Slicer CLI: %s' % image,
                         type='item.item_task_slicer_cli_description',
                         handler='worker_handler',
                         user=self.getCurrentUser())

    # A trailing '--xml' would be duplicated below; strip it first.
    if args[-1:] == ['--xml']:
        args = args[:-1]

    xmlUrl = '/'.join((utils.getWorkerApiUrl(), 'item', str(item['_id']),
                       'item_task_slicer_cli_xml'))
    job.update({
        'itemTaskId': item['_id'],
        'kwargs': {
            'task': {
                'mode': 'docker',
                'docker_image': image,
                'container_args': args + ['--xml'],
                'pull_image': pullImage,
                'outputs': [{'id': '_stdout', 'format': 'text'}],
            },
            'outputs': {
                '_stdout': {
                    'mode': 'http',
                    'method': 'PUT',
                    'format': 'text',
                    'url': xmlUrl,
                    'params': {
                        'setName': setName,
                        'setDescription': setDescription
                    },
                    'headers': {'Girder-Token': specToken['_id']}
                }
            },
            'jobInfo': utils.jobInfoSpec(job),
            'validate': False,
            'auto_convert': False
        }
    })

    item['meta']['itemTaskSpec'] = {'mode': 'docker', 'docker_image': image}
    if args:
        item['meta']['itemTaskSlicerCliArgs'] = args
    Item().save(item)

    job = jobs.save(job)
    jobs.scheduleJob(job)

    return job
Exemplo n.º 31
0
def runJsonTasksDescriptionForItem(self, item, image, taskName, setName, setDescription,
                                   pullImage, params):
    """Queue a worker job that reads JSON task specs from a docker image.

    The image is run with no arguments and its stdout is PUT back to this
    item's ``item_task_json_specs`` endpoint.  Returns the scheduled job.
    """
    item.setdefault('meta', {})

    # Reuse the image already recorded on the item when none is given.
    if image is None:
        image = item.get('meta', {}).get('itemTaskSpec', {}).get('docker_image')

    if not image:
        raise RestException(
            'You must pass an image parameter, or set the itemTaskSpec.docker_image '
            'field of the item.')

    currentUser = self.getCurrentUser()
    jobs = self.model('job', 'jobs')
    # Scoped token so the worker may only write this item's task spec.
    specToken = self.model('token').createToken(
        days=3, scope='item_task.set_task_spec.%s' % item['_id'],
        user=currentUser)
    job = jobs.createJob(
        title='Read docker task specs: %s' % image, type='item.item_task_json_description',
        handler='worker_handler', user=currentUser)

    specsUrl = '/'.join((utils.getWorkerApiUrl(), 'item', str(item['_id']),
                         'item_task_json_specs'))
    job.update({
        'itemTaskId': item['_id'],
        'kwargs': {
            'task': {
                'mode': 'docker',
                'docker_image': image,
                'container_args': [],
                'pull_image': pullImage,
                'outputs': [{'id': '_stdout', 'format': 'text'}],
            },
            'outputs': {
                '_stdout': {
                    'mode': 'http',
                    'method': 'PUT',
                    'format': 'text',
                    'url': specsUrl,
                    'headers': {'Girder-Token': specToken['_id']},
                    'params': {
                        'image': image,
                        'taskName': taskName,
                        'setName': setName,
                        'setDescription': setDescription,
                        'pullImage': pullImage
                    }
                }
            },
            'jobInfo': utils.jobInfoSpec(job),
            'validate': False,
            'auto_convert': False
        }
    })

    job = jobs.save(job)
    jobs.scheduleJob(job)
    return job
Exemplo n.º 32
0
    def cliHandler(self, **hargs):
        """Build and schedule a worker job that runs this CLI in docker.

        Uses the closure variables (cliName, dockerImage, cliRelPath and the
        parameter lists) captured from the enclosing CLI registration code.
        Returns the filtered job document.
        """
        user = self.getCurrentUser()
        token = self.getCurrentToken()['_id']

        # create job
        jobModel = self.model('job', 'jobs')
        jobTitle = '.'.join((restResource.resourceName, cliName))
        job = jobModel.createJob(title=jobTitle,
                                 type=jobTitle,
                                 handler='worker_handler',
                                 user=user)
        kwargs = {
            'validate': False,
            'auto_convert': True,
            'cleanup': True,
            'inputs': dict(),
            'outputs': dict()
        }

        # create job info
        jobToken = jobModel.createJobToken(job)
        kwargs['jobInfo'] = wutils.jobInfoSpec(job, jobToken)

        # initialize task spec
        taskSpec = {'name': cliName,
                    'mode': 'docker',
                    'docker_image': dockerImage,
                    'pull_image': False,
                    'inputs': [],
                    'outputs': []}

        _addIndexedInputParamsToTaskSpec(index_input_params, taskSpec)

        _addIndexedOutputParamsToTaskSpec(index_output_params, taskSpec, hargs)

        _addOptionalInputParamsToTaskSpec(opt_input_params, taskSpec)

        _addOptionalOutputParamsToTaskSpec(opt_output_params, taskSpec, hargs)

        if len(simple_out_params) > 0:
            _addReturnParameterFileParamToTaskSpec(taskSpec, hargs)

        kwargs['task'] = taskSpec

        # add input/output parameter bindings
        _addIndexedInputParamBindings(index_input_params,
                                      kwargs['inputs'], hargs, token)

        _addIndexedOutputParamBindings(index_output_params,
                                       kwargs['outputs'], hargs, user, token)

        _addOptionalInputParamBindings(opt_input_params,
                                       kwargs['inputs'], hargs, user, token)

        _addOptionalOutputParamBindings(opt_output_params,
                                        kwargs['outputs'], hargs, user, token)

        if len(simple_out_params) > 0:
            _addReturnParameterFileBinding(kwargs['outputs'],
                                           hargs, user, token)

        # construct container arguments
        containerArgs = [cliRelPath]

        _addOptionalInputParamsToContainerArgs(opt_input_params,
                                               containerArgs, hargs)

        # Fix: pass the optional *output* params here; the original passed
        # opt_input_params, so optional outputs never reached the CLI args.
        _addOptionalOutputParamsToContainerArgs(opt_output_params,
                                                containerArgs, kwargs, hargs)

        _addReturnParameterFileToContainerArgs(containerArgs, kwargs, hargs)

        _addIndexedParamsToContainerArgs(index_params,
                                         containerArgs, hargs)

        taskSpec['container_args'] = containerArgs

        # schedule job
        job['kwargs'] = kwargs
        job = jobModel.save(job)
        jobModel.scheduleJob(job)

        # return result
        return jobModel.filter(job, user)
Exemplo n.º 33
0
    def _generateSuperpixels(self, image):
        """Schedule a worker job that computes superpixels for *image*.

        Runs the bundled _generate_superpixels.py script in the worker and
        uploads the encoded PNG result back onto the image item.  Returns
        the scheduled job document.
        """
        Job = self.model('job', 'jobs')
        Token = self.model('token')
        User = self.model('user', 'isic_archive')

        # Recorded in both the output filename and the job metadata; bump
        # when the superpixel algorithm changes.
        SUPERPIXEL_VERSION = 3.0

        user = User.load(image['creatorId'], force=True, exc=True)
        # Use admin user, to ensure that worker always has access
        token = Token.createToken(
            user=getAdminUser(),
            days=1,
            scope=[TokenScope.DATA_READ, TokenScope.DATA_WRITE])

        # The worker task's python source ships alongside this module.
        with open(os.path.join(
                os.path.dirname(__file__),
                '_generate_superpixels.py'), 'r') as scriptStream:
            script = scriptStream.read()

        title = 'superpixels v%s generation: %s' % (
            SUPERPIXEL_VERSION, image['name'])
        job = Job.createJob(
            title=title,
            type='isic_archive_superpixels',
            handler='worker_handler',
            kwargs={
                'jobInfo': None,  # will be filled after job is created
                'task': {
                    'mode': 'python',
                    'script': script,
                    'name': title,
                    'inputs': [{
                        'id': 'originalFile',
                        'type': 'string',
                        'format': 'text',
                        'target': 'filepath'
                    }, {
                        'id': 'segmentation_helpersPath',
                        'type': 'string',
                        'format': 'text',
                    }],
                    'outputs': [{
                        'id': 'superpixelsEncodedBytes',
                        'type': 'string',
                        'format': 'text',
                        'target': 'memory'
                    }]
                },
                'inputs': {
                    'originalFile': workerUtils.girderInputSpec(
                        resource=self.originalFile(image),
                        resourceType='file',
                        token=token),
                    # Inline value: the local path the worker should add in
                    # order to import segmentation_helpers.
                    'segmentation_helpersPath': {
                        'mode': 'inline',
                        'format': 'text',
                        'data': segmentation_helpers.__path__[0]
                    }
                },
                'outputs': {
                    'superpixelsEncodedBytes': workerUtils.girderOutputSpec(
                        parent=image,
                        token=token,
                        parentType='item',
                        name='%s_superpixels_v%s.png' %
                             (image['name'], SUPERPIXEL_VERSION),
                        reference=''
                    )
                },
                'auto_convert': False,
                'validate': False
            },
            user=user,
            public=False,
            save=True  # must save to create an _id for workerUtils.jobInfoSpec
        )
        # jobInfoSpec needs the saved job's _id plus a job token.
        job['kwargs']['jobInfo'] = workerUtils.jobInfoSpec(
            job,
            Job.createJobToken(job),
            logPrint=True
        )
        job['meta'] = {
            'creator': 'isic_archive',
            'task': 'generateSuperpixels',
            'imageId': image['_id'],
            'imageName': image['name'],
            'superpixelsVersion': SUPERPIXEL_VERSION
        }
        job = Job.save(job)

        Job.scheduleJob(job)
        return job
Exemplo n.º 34
0
def runSlicerCliTasksDescriptionForItem(
        self, item, image, args, setName, setDescription, pullImage, params):
    """Queue a worker job that introspects a Slicer CLI docker image.

    The image is run with '--xml' appended and the XML it prints is PUT
    back to this item's ``item_task_slicer_cli_xml`` endpoint.  Returns
    the scheduled job document.
    """
    item.setdefault('meta', {})

    # Fall back to the image already recorded on the item, if any.
    if image is None:
        image = item.get('meta', {}).get('itemTaskSpec', {}).get('docker_image')

    if not image:
        raise RestException(
            'You must pass an image parameter, or set the itemTaskSpec.docker_image '
            'field of the item.')

    jobs = Job()
    # Scoped token so the worker may only write this item's task spec.
    specToken = Token().createToken(
        days=3, scope='item_task.set_task_spec.%s' % item['_id'])
    job = jobs.createJob(
        title='Read docker Slicer CLI: %s' % image, type='item.item_task_slicer_cli_description',
        handler='worker_handler', user=self.getCurrentUser())

    # A trailing '--xml' would be duplicated below; strip it first.
    if args[-1:] == ['--xml']:
        args = args[:-1]

    xmlUrl = '/'.join((utils.getWorkerApiUrl(), 'item', str(item['_id']),
                       'item_task_slicer_cli_xml'))
    job.update({
        'itemTaskId': item['_id'],
        'kwargs': {
            'task': {
                'mode': 'docker',
                'docker_image': image,
                'container_args': args + ['--xml'],
                'pull_image': pullImage,
                'outputs': [{'id': '_stdout', 'format': 'text'}],
            },
            'outputs': {
                '_stdout': {
                    'mode': 'http',
                    'method': 'PUT',
                    'format': 'text',
                    'url': xmlUrl,
                    'params': {
                        'setName': setName,
                        'setDescription': setDescription
                    },
                    'headers': {'Girder-Token': specToken['_id']}
                }
            },
            'jobInfo': utils.jobInfoSpec(job),
            'validate': False,
            'auto_convert': False
        }
    })

    item['meta']['itemTaskSpec'] = {'mode': 'docker', 'docker_image': image}
    if args:
        item['meta']['itemTaskSlicerCliArgs'] = args
    Item().save(item)

    job = jobs.save(job)
    jobs.scheduleJob(job)

    return job
Exemplo n.º 35
0
    def cliHandler(self, **hargs):
        """Build and schedule a worker job that runs this CLI in docker.

        Uses the closure variables (cliName, dockerImage, cliRelPath and the
        parameter lists) captured from the enclosing CLI registration code.
        Returns the filtered job document.
        """
        user = self.getCurrentUser()
        token = self.getCurrentToken()['_id']

        # create job
        jobModel = self.model('job', 'jobs')
        jobTitle = '.'.join((restResource.resourceName, cliName))
        job = jobModel.createJob(title=jobTitle,
                                 type=jobTitle,
                                 handler='worker_handler',
                                 user=user)
        kwargs = {
            'validate': False,
            'auto_convert': True,
            'cleanup': True,
            'inputs': dict(),
            'outputs': dict()
        }

        # create job info
        jobToken = jobModel.createJobToken(job)
        kwargs['jobInfo'] = wutils.jobInfoSpec(job, jobToken)

        # initialize task spec
        taskSpec = {
            'name': cliName,
            'mode': 'docker',
            'docker_image': dockerImage,
            'pull_image': True,
            'inputs': [],
            'outputs': []
        }

        _addIndexedInputParamsToTaskSpec(index_input_params, taskSpec)

        _addIndexedOutputParamsToTaskSpec(index_output_params, taskSpec, hargs)

        _addOptionalInputParamsToTaskSpec(opt_input_params, taskSpec)

        _addOptionalOutputParamsToTaskSpec(opt_output_params, taskSpec, hargs)

        if len(simple_out_params) > 0:
            _addReturnParameterFileParamToTaskSpec(taskSpec, hargs)

        kwargs['task'] = taskSpec

        # add input/output parameter bindings
        _addIndexedInputParamBindings(index_input_params, kwargs['inputs'],
                                      hargs, token)

        _addIndexedOutputParamBindings(index_output_params, kwargs['outputs'],
                                       hargs, user, token)

        _addOptionalInputParamBindings(opt_input_params, kwargs['inputs'],
                                       hargs, user, token)

        _addOptionalOutputParamBindings(opt_output_params, kwargs['outputs'],
                                        hargs, user, token)

        if len(simple_out_params) > 0:
            _addReturnParameterFileBinding(kwargs['outputs'], hargs, user,
                                           token)

        # construct container arguments
        containerArgs = [cliRelPath]

        _addOptionalInputParamsToContainerArgs(opt_input_params, containerArgs,
                                               hargs)

        # Fix: pass the optional *output* params here; the original passed
        # opt_input_params, so optional outputs never reached the CLI args.
        _addOptionalOutputParamsToContainerArgs(opt_output_params,
                                                containerArgs, kwargs, hargs)

        _addReturnParameterFileToContainerArgs(containerArgs, kwargs, hargs)

        _addIndexedParamsToContainerArgs(index_params, containerArgs, hargs)

        taskSpec['container_args'] = containerArgs

        # schedule job
        job['kwargs'] = kwargs
        job = jobModel.save(job)
        jobModel.scheduleJob(job)

        # return result
        return jobModel.filter(job, user)
Exemplo n.º 36
0
    def processTask(self, params, **kwargs):
        """Create and schedule a worker job for a named sumo task spec.

        :param params: request parameters; must include 'taskkey' naming an
            attribute of job_specs, plus a value for every task input or
            parameter that has no default.
        :returns: dict with the filtered job document and the token id used
            to authenticate the job's inputs/outputs.
        :raises RestException: if no task with that key exists.
        """
        self.requireParams(('taskkey', ), params)
        if getattr(job_specs, params['taskkey'], None) is None:
            raise RestException('No task named %s.' % params['taskkey'])
        # Deep-copy so per-request mutation never leaks into the shared spec.
        task = copy.deepcopy(getattr(job_specs, params['taskkey']))
        data = {}
        # 'spec' instead of 'input' to avoid shadowing the builtin.
        data.update({spec['key']: spec for spec in task['inputs']})
        data.update({spec['key']: spec for spec in task['parameters']})
        # Any input that doesn't have a default is required.
        self.requireParams((key for key in data
                            if 'default' not in data[key]), params)
        user, token = self._getTaskUser(task)
        self._adjustDataTypes(data, params, user)

        job = self.model('job', 'jobs').createJob(
            title='sumo %s' % task.get('name', 'task'),
            type='sumo',
            user=user,
            handler='worker_handler')

        jobToken = self.model('job', 'jobs').createJobToken(job)

        job['kwargs']['jobInfo'] = workerUtils.jobInfoSpec(
            job=job,
            token=jobToken,
            logPrint=True)

        if not token:
            # It seems like we should be able to use a token without USER_AUTH
            # in its scope, but I'm not sure how.
            token = self.model('token').createToken(
                user, days=1, scope=TokenScope.USER_AUTH)

        inputs = {}
        for key in data:
            # Entries explicitly marked input=False are parameters only.
            if data[key].get('input') is False:
                continue
            spec = data.get(key, {}).copy()
            if data[key].get('type') in ('file', 'item', 'folder'):
                spec = workerUtils.girderInputSpec(
                    spec['data'], resourceType=data[key]['type'],
                    token=token,
                    dataType=data[key].get('dataType', 'string'),
                    dataFormat=data[key].get('dataFormat', 'text'),
                    )
            inputs[key] = spec

        # TODO(opadron): make a special-purpose token just for this job in case
        # the user logs out before it finishes.
        outputs = {}
        # Only forward output keys girderOutputSpec actually accepts.  Use
        # getfullargspec (getargspec was removed in Python 3.11) and compute
        # it once instead of per output entry.
        outputArgs = inspect.getfullargspec(workerUtils.girderOutputSpec).args
        for output in task.get('outputs', {}):
            key = output['key']
            spec = {'token': token}
            for subkey in output:
                if subkey in outputArgs:
                    value = output[subkey]
                    # 'parameter:<key>' values are resolved from request data.
                    if value.startswith('parameter:'):
                        valuekey = value.split(':', 1)[1]
                        value = data.get(valuekey, {}).get('data')
                    spec[subkey] = value
            outputs[key] = workerUtils.girderOutputSpec(**spec)

        job['kwargs'].update(task=task['task'], inputs=inputs, outputs=outputs)

        job = self.model('job', 'jobs').save(job)
        self.model('job', 'jobs').scheduleJob(job)
        self.jobInfo[str(job['_id'])] = {'user': user}

        return {
            'job': self.model('job', 'jobs').filter(job, user),
            'token': str(token['_id'])
        }
Exemplo n.º 37
0
    def _generateSuperpixels(self, image):
        """Schedule a worker job that computes superpixels for *image*.

        Runs the bundled _generate_superpixels.py script in the worker and
        uploads the encoded PNG result back onto the image item.  Returns
        the scheduled job document.
        """
        # Recorded in both the output filename and the job metadata; bump
        # when the superpixel algorithm changes.
        SUPERPIXEL_VERSION = 3.0

        user = User().load(image['creatorId'], force=True, exc=True)
        # Use admin user, to ensure that worker always has access
        token = Token().createToken(
            user=getAdminUser(),
            days=1,
            scope=[TokenScope.DATA_READ, TokenScope.DATA_WRITE])

        # The worker task's python source ships alongside this module.
        with open(os.path.join(
                os.path.dirname(__file__),
                '_generate_superpixels.py'), 'r') as scriptStream:
            script = scriptStream.read()

        title = 'superpixels v%s generation: %s' % (
            SUPERPIXEL_VERSION, image['name'])
        job = Job().createJob(
            title=title,
            type='isic_archive_superpixels',
            handler='worker_handler',
            kwargs={
                'jobInfo': None,  # will be filled after job is created
                'task': {
                    'mode': 'python',
                    'script': script,
                    'name': title,
                    'inputs': [{
                        'id': 'originalFile',
                        'type': 'string',
                        'format': 'text',
                        'target': 'filepath'
                    }, {
                        'id': 'segmentation_helpersPath',
                        'type': 'string',
                        'format': 'text',
                    }],
                    'outputs': [{
                        'id': 'superpixelsEncodedBytes',
                        'type': 'string',
                        'format': 'text',
                        'target': 'memory'
                    }]
                },
                'inputs': {
                    'originalFile': workerUtils.girderInputSpec(
                        resource=self.originalFile(image),
                        resourceType='file',
                        token=token),
                    # Inline value: the local path the worker should add in
                    # order to import segmentation_helpers.
                    'segmentation_helpersPath': {
                        'mode': 'inline',
                        'format': 'text',
                        'data': segmentation_helpers.__path__[0]
                    }
                },
                'outputs': {
                    'superpixelsEncodedBytes': workerUtils.girderOutputSpec(
                        parent=image,
                        token=token,
                        parentType='item',
                        name='%s_superpixels_v%s.png' %
                             (image['name'], SUPERPIXEL_VERSION),
                        reference=''
                    )
                },
                'auto_convert': False,
                'validate': False
            },
            user=user,
            public=False,
            save=True  # must save to create an _id for workerUtils.jobInfoSpec
        )
        # jobInfoSpec needs the saved job's _id plus a job token.
        job['kwargs']['jobInfo'] = workerUtils.jobInfoSpec(
            job,
            Job().createJobToken(job),
            logPrint=True
        )
        job['meta'] = {
            'creator': 'isic_archive',
            'task': 'generateSuperpixels',
            'imageId': image['_id'],
            'imageName': image['name'],
            'superpixelsVersion': SUPERPIXEL_VERSION
        }
        job = Job().save(job)

        Job().scheduleJob(job)
        return job
Exemplo n.º 38
0
def runJsonTasksDescriptionForItem(self, item, image, taskName, setName,
                                   setDescription, pullImage, params):
    """Queue a worker job that reads JSON task specs from a docker image.

    The image is run with no arguments and its stdout is PUT back to this
    item's ``item_task_json_specs`` endpoint.  Returns the scheduled job.
    """
    item.setdefault('meta', {})

    # Reuse the image already recorded on the item when none is given.
    if image is None:
        image = item.get('meta', {}).get('itemTaskSpec',
                                         {}).get('docker_image')

    if not image:
        raise RestException(
            'You must pass an image parameter, or set the itemTaskSpec.docker_image '
            'field of the item.')

    currentUser = self.getCurrentUser()
    jobs = self.model('job', 'jobs')
    # Scoped token so the worker may only write this item's task spec.
    specToken = self.model('token').createToken(
        days=3,
        scope='item_task.set_task_spec.%s' % item['_id'],
        user=currentUser)
    job = jobs.createJob(title='Read docker task specs: %s' % image,
                         type='item.item_task_json_description',
                         handler='worker_handler',
                         user=currentUser)

    specsUrl = '/'.join((utils.getWorkerApiUrl(), 'item',
                         str(item['_id']), 'item_task_json_specs'))
    job.update({
        'itemTaskId': item['_id'],
        'kwargs': {
            'task': {
                'mode': 'docker',
                'docker_image': image,
                'container_args': [],
                'pull_image': pullImage,
                'outputs': [{'id': '_stdout', 'format': 'text'}],
            },
            'outputs': {
                '_stdout': {
                    'mode': 'http',
                    'method': 'PUT',
                    'format': 'text',
                    'url': specsUrl,
                    'headers': {'Girder-Token': specToken['_id']},
                    'params': {
                        'image': image,
                        'taskName': taskName,
                        'setName': setName,
                        'setDescription': setDescription,
                        'pullImage': pullImage
                    }
                }
            },
            'jobInfo': utils.jobInfoSpec(job),
            'validate': False,
            'auto_convert': False
        }
    })

    job = jobs.save(job)
    jobs.scheduleJob(job)
    return job
Exemplo n.º 39
0
    def _createLargeImageJob(self, item, fileObj, user, token):
        """Schedule a worker job that converts *fileObj* into a tiled TIFF.

        The worker runs the bundled create_tiff.py script with fixed
        quality/tile-size settings and writes the converted file back into
        *item*.  Returns the scheduled job document.
        """
        scriptPath = os.path.join(
            os.path.dirname(__file__), '..', 'create_tiff.py')
        with open(scriptPath, 'r') as scriptFile:
            script = scriptFile.read()

        title = 'TIFF conversion: %s' % fileObj['name']
        jobModel = self.model('job', 'jobs')
        job = jobModel.createJob(title=title,
                                 type='large_image_tiff',
                                 handler='worker_handler',
                                 user=user)
        jobToken = jobModel.createJobToken(job)

        task = {
            'mode': 'python',
            'script': script,
            'name': title,
            'inputs': [
                {'id': 'in_path', 'target': 'filepath',
                 'type': 'string', 'format': 'text'},
                {'id': 'out_filename', 'type': 'string', 'format': 'text'},
                {'id': 'tile_size', 'type': 'number', 'format': 'number'},
                {'id': 'quality', 'type': 'number', 'format': 'number'},
            ],
            'outputs': [
                {'id': 'out_path', 'target': 'filepath',
                 'type': 'string', 'format': 'text'},
            ],
        }

        def inlineSpec(valueType, valueFormat, data):
            # Inline inputs are passed by value rather than fetched.
            return {'mode': 'inline', 'type': valueType,
                    'format': valueFormat, 'data': data}

        inputs = {
            'in_path': workerUtils.girderInputSpec(
                item, resourceType='item', token=token),
            'quality': inlineSpec('number', 'number', 90),
            'tile_size': inlineSpec('number', 'number', 256),
            'out_filename': inlineSpec(
                'string', 'text',
                os.path.splitext(fileObj['name'])[0] + '.tiff'),
        }

        outputs = {
            'out_path': workerUtils.girderOutputSpec(
                parent=item, token=token, parentType='item'),
        }

        # TODO: Give the job an owner
        job['kwargs'] = {
            'task': task,
            'inputs': inputs,
            'outputs': outputs,
            'jobInfo': workerUtils.jobInfoSpec(job, jobToken),
            'auto_convert': False,
            'validate': False
        }

        job = jobModel.save(job)
        jobModel.scheduleJob(job)

        return job
Exemplo n.º 40
0
    def cliHandler(self, **hargs):
        """Build and schedule a worker job that runs this CLI in docker.

        Like the basic handler, but also records an access entry on the job
        for every group the requesting user belongs to, so that group
        members can access the job.  Uses closure variables (cliName,
        dockerImage, cliRelPath and the parameter lists) captured from the
        CLI registration code.  Returns the filtered job document.
        """
        user = self.getCurrentUser()
        token = self.getCurrentToken()['_id']

        # create job
        jobModel = self.model('job', 'jobs')
        jobTitle = '.'.join((restResource.resourceName, cliName))

        # User Group access control,
        # register group into particular job so that this user can access this job
        groups = list(Group().list(user=user))

        groupsAccess = []
        for eachGroup in groups:
            # level 0 — presumably read-only (AccessType.READ); confirm
            eachGroupAccess = {'id': eachGroup['_id'], 'level': 0}
            groupsAccess.append(eachGroupAccess)

        job = jobModel.createJob(title=jobTitle,
                                 type=jobTitle,
                                 handler='worker_handler',
                                 user=user,
                                 otherFields={'access': {'groups': groupsAccess}})
        kwargs = {
            'validate': False,
            'auto_convert': True,
            'cleanup': True,
            'inputs': dict(),
            'outputs': dict()
        }

        # create job info
        jobToken = jobModel.createJobToken(job)
        kwargs['jobInfo'] = wutils.jobInfoSpec(job, jobToken)

        # initialize task spec
        taskSpec = {'name': cliName,
                    'mode': 'docker',
                    'docker_image': dockerImage,
                    'pull_image': False,
                    'inputs': [],
                    'outputs': []}

        _addIndexedInputParamsToTaskSpec(index_input_params, taskSpec)

        _addIndexedOutputParamsToTaskSpec(index_output_params, taskSpec, hargs)

        _addOptionalInputParamsToTaskSpec(opt_input_params, taskSpec)

        _addOptionalOutputParamsToTaskSpec(opt_output_params, taskSpec, hargs)

        if len(simple_out_params) > 0:
            _addReturnParameterFileParamToTaskSpec(taskSpec, hargs)

        kwargs['task'] = taskSpec

        # add input/output parameter bindings
        _addIndexedInputParamBindings(index_input_params,
                                      kwargs['inputs'], hargs, token)

        _addIndexedOutputParamBindings(index_output_params,
                                       kwargs['outputs'], hargs, user, token)

        _addOptionalInputParamBindings(opt_input_params,
                                       kwargs['inputs'], hargs, user, token)

        _addOptionalOutputParamBindings(opt_output_params,
                                        kwargs['outputs'], hargs, user, token)

        if len(simple_out_params) > 0:
            _addReturnParameterFileBinding(kwargs['outputs'],
                                           hargs, user, token)

        # construct container arguments
        containerArgs = [cliRelPath]

        _addOptionalInputParamsToContainerArgs(opt_input_params,
                                               containerArgs, hargs)

        _addOptionalOutputParamsToContainerArgs(opt_output_params,
                                                containerArgs, kwargs, hargs)

        _addReturnParameterFileToContainerArgs(containerArgs, kwargs, hargs)

        _addIndexedParamsToContainerArgs(index_params,
                                         containerArgs, hargs)

        taskSpec['container_args'] = containerArgs

        # schedule job
        job['kwargs'] = kwargs
        job = jobModel.save(job)
        jobModel.scheduleJob(job)

        # return result
        return jobModel.filter(job, user)
Exemplo n.º 41
0
    def scoreSubmission(self, submission, apiUrl):
        """
        Run a Girder Worker job to score a submission.

        Creates a ``covalic_score`` job that runs the phase's scoring docker
        image over the submission folder and the phase's ground truth folder,
        posting the container's stdout back to the covalic score endpoint.

        :param submission: The submission document to score.
        :param apiUrl: Base API URL used to build the score callback URL.
        :returns: The saved submission document with ``jobId`` populated.
        :raises GirderException: If no scoring user is configured, or the
            configured scoring user ID does not resolve to a user.
        """
        folderModel = self.model('folder')
        jobModel = self.model('job', 'jobs')
        phaseModel = self.model('phase', 'covalic')
        settingModel = self.model('setting')
        tokenModel = self.model('token')
        userModel = self.model('user')

        phase = phaseModel.load(submission['phaseId'], force=True)
        folder = folderModel.load(submission['folderId'], force=True)
        user = userModel.load(submission['creatorId'], force=True)

        # Flag re-scoring runs (submission already has a score) for consumers.
        otherFields = {}
        if 'overallScore' in submission:
            otherFields['rescoring'] = True

        jobTitle = '%s submission: %s' % (phase['name'], folder['name'])
        job = jobModel.createJob(
            title=jobTitle, type='covalic_score', handler='worker_handler',
            user=user, otherFields=otherFields)

        scoreUserId = settingModel.get(PluginSettings.SCORING_USER_ID)
        if not scoreUserId:
            raise GirderException(
                'No scoring user ID is set. Please set one on the plugin configuration page.'
            )

        scoreUser = userModel.load(scoreUserId, force=True)
        if not scoreUser:
            raise GirderException('Invalid scoring user setting (%s).' %
                                  scoreUserId)

        # Short-lived token the worker uses to fetch inputs and post results.
        scoreToken = tokenModel.createToken(user=scoreUser, days=7)
        folderModel.setUserAccess(
            folder, user=scoreUser, level=AccessType.READ, save=True)

        groundTruth = folderModel.load(phase['groundTruthFolderId'],
                                       force=True)

        # Ensure the scoring user can administer the phase and read the
        # ground truth folder before the job runs.
        if not phaseModel.hasAccess(
                phase, user=scoreUser, level=AccessType.ADMIN):
            phaseModel.setUserAccess(
                phase, user=scoreUser, level=AccessType.ADMIN, save=True)

        if not folderModel.hasAccess(
                groundTruth, user=scoreUser, level=AccessType.READ):
            folderModel.setUserAccess(
                groundTruth, user=scoreUser, level=AccessType.READ, save=True)

        # The phase may override the scoring image/arguments; fall back to
        # the stock covalic metrics container otherwise.
        task = phase.get('scoreTask', {})
        image = task.get('dockerImage') or 'girder/covalic-metrics:latest'
        containerArgs = task.get('dockerArgs') or [
            '--groundtruth=$input{groundtruth}',
            '--submission=$input{submission}'
        ]

        taskSpec = {
            'name': jobTitle,
            'mode': 'docker',
            'docker_image': image,
            'container_args': containerArgs,
            'inputs': [{
                'id': 'submission',
                'type': 'string',
                'format': 'text',
                'target': 'filepath',
                'filename': 'submission.zip'
            }, {
                'id': 'groundtruth',
                'type': 'string',
                'format': 'text',
                'target': 'filepath',
                'filename': 'groundtruth.zip'
            }],
            'outputs': [{
                'id': '_stdout',
                'format': 'string',
                'type': 'string'
            }]
        }

        # The container writes its score report to stdout; POST it to the
        # covalic score endpoint for this submission.
        stdoutSpec = {
            'mode': 'http',
            'method': 'POST',
            'format': 'string',
            'url': '/'.join((apiUrl, 'covalic_submission',
                             str(submission['_id']), 'score')),
            'headers': {
                'Girder-Token': scoreToken['_id']
            }
        }

        job['kwargs'] = {
            'task': taskSpec,
            'inputs': {
                'submission':
                    utils.girderInputSpec(folder, 'folder', token=scoreToken),
                'groundtruth':
                    utils.girderInputSpec(groundTruth, 'folder',
                                          token=scoreToken)
            },
            'outputs': {
                '_stdout': stdoutSpec
            },
            'jobInfo': utils.jobInfoSpec(job),
            'validate': False,
            'auto_convert': False,
            'cleanup': True
        }
        job['covalicSubmissionId'] = submission['_id']
        job = jobModel.save(job)
        jobModel.scheduleJob(job)

        submission['jobId'] = job['_id']
        return self.save(submission, validate=False)
Exemplo n.º 42
0
def girder_before_task_publish(sender=None,
                               body=None,
                               exchange=None,
                               routing_key=None,
                               headers=None,
                               properties=None,
                               declare=None,
                               retry_policy=None,
                               **kwargs):
    """
    Celery ``before_task_publish`` signal handler.

    Mutates the outgoing task message ``headers`` in place before it reaches
    the broker:

    * creates a Girder job document for the task (when running inside a
      Girder REST context) and stores its ``jobInfoSpec`` in the headers;
    * fills in ``girder_api_url`` and ``girder_client_token`` when absent;
    * JSON-pickles any ``girder_result_hooks`` so AMQP can serialize them;
    * strips all ``girder_*`` reserved options from the headers.

    The ``girder_*`` header values consumed here are popped as they are read,
    so the order of operations below matters.

    :param body: Celery message body; ``body[0]`` is the task's positional
        args and ``body[1]`` its keyword args.
    :param headers: Celery message headers dict, modified in place.

    Other parameters are the standard ``before_task_publish`` signal
    arguments and are unused here.
    """

    try:
        if 'jobInfoSpec' not in headers:
            try:
                # Note: If we can import these objects from the girder packages we
                # assume our producer is in a girder REST request. This allows
                # us to create the job model's directly. Otherwise there will be an
                # ImportError and we can create the job via a REST request using
                # the jobInfoSpec in headers.
                from girder.utility.model_importer import ModelImporter
                from girder.plugins.worker import utils
                from girder.api.rest import getCurrentUser

                job_model = ModelImporter.model('job', 'jobs')

                # Prefer an explicitly supplied user; fall back to the user
                # of the current REST request.
                user = headers.pop('girder_user', getCurrentUser())

                # Sanitize any Transform objects
                task_args = tuple(_walk_obj(body[0], _maybe_model_repr))
                task_kwargs = _walk_obj(body[1], _maybe_model_repr)

                # Each girder_job_* header overrides the Task class default;
                # popping removes them from the published message.
                job = job_model.createJob(
                    **{
                        'title':
                        headers.pop('girder_job_title',
                                    Task._girder_job_title),
                        'type':
                        headers.pop('girder_job_type', Task._girder_job_type),
                        'handler':
                        headers.pop('girder_job_handler',
                                    Task._girder_job_handler),
                        'public':
                        headers.pop('girder_job_public',
                                    Task._girder_job_public),
                        'user':
                        user,
                        'args':
                        task_args,
                        'kwargs':
                        task_kwargs,
                        'otherFields':
                        dict(celeryTaskId=headers['id'],
                             **headers.pop('girder_job_other_fields',
                                           Task._girder_job_other_fields))
                    })

                headers['jobInfoSpec'] = utils.jobInfoSpec(job)

            except ImportError:
                # TODO: Check for self.job_manager to see if we have
                #       tokens etc to contact girder and create a job model
                #       we may be in a chain or a chord or some-such
                pass

        if 'girder_api_url' not in headers:
            try:
                from girder.plugins.worker import utils
                headers['girder_api_url'] = utils.getWorkerApiUrl()
            except ImportError:
                # TODO: handle situation where girder_worker is producing
                #       the message Note - this may not come up at all
                #       depending on how we pass girder_api_url through to
                #       the next task (e.g. in the context of chaining
                #       events)
                pass

        if 'girder_client_token' not in headers:
            try:
                from girder.utility.model_importer import ModelImporter
                # NOTE(review): this stores the full token document (not just
                # the token string) in the header — confirm consumers expect
                # that shape.
                headers['girder_client_token'] = \
                    ModelImporter.model('token').createToken()
            except ImportError:
                # TODO: handle situation where girder_worker is producing
                #       the message Note - this may not come up at all
                #       depending on how we pass girder_token through to
                #       the next task (e.g. in the context of chaining
                #       events)
                pass

        if 'girder_result_hooks' in headers:
            # Celery task headers are not automatically serialized by celery
            # before being passed off to ampq for byte packing. We will have
            # to do that here.
            p = jsonpickle.pickler.Pickler()
            headers['girder_result_hooks'] = \
                [p.flatten(grh) for grh in headers['girder_result_hooks']]

        # Finally,  remove all reserved_options from headers
        for key in Task.reserved_options:
            headers.pop(key, None)
    except Exception:
        logger.exception('An error occurred in girder_before_task_publish.')
        raise