def _createInputParamBindingSpec(param, hargs, token):
    curBindingSpec = dict()
    if _is_on_girder(param):
        if _SLICER_TYPE_TO_GIRDER_MODEL_MAP[param.typ] == 'url':
            url = hargs['params']['url'].replace('"', '')
            curBindingSpec = wutils.httpInputSpec(url)
        else:
            if param.flag == '-item':
                curBindingSpec = wutils.girderInputSpec(
                    hargs[param.identifier()],
                    resourceType='item',
                    dataType='string', dataFormat='string',
                    token=token, fetchParent=True)
            else:
                curBindingSpec = wutils.girderInputSpec(
                    hargs[param.identifier()],
                    resourceType=_SLICER_TYPE_TO_GIRDER_MODEL_MAP[param.typ],
                    dataType='string', dataFormat='string',
                    token=token, fetchParent=True)
    else:
        # Inputs that are not of type image, file, or directory
        # are passed inline as a string from json.dumps().
        curBindingSpec['mode'] = 'inline'
        curBindingSpec['type'] = _SLICER_TO_GIRDER_WORKER_TYPE_MAP[param.typ]
        curBindingSpec['format'] = 'json'
        curBindingSpec['data'] = hargs['params'][param.identifier()]
    return curBindingSpec
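# _createInputParamBindingSpec (here and in the two variants later in this
# section) leans on module-level maps and a predicate that are not shown.
# A plausible sketch of their shape, inferred from the lookups above; the
# actual contents are assumptions:
_SLICER_TYPE_TO_GIRDER_MODEL_MAP = {
    'image': 'item',
    'file': 'item',
    'directory': 'folder',
    # entries mapping to 'url' are routed through wutils.httpInputSpec
}

_SLICER_TO_GIRDER_WORKER_TYPE_MAP = {
    'integer': 'integer',
    'float': 'number',
    'double': 'number',
    'string': 'string',
    'boolean': 'boolean',
}


def _is_on_girder(param):
    # Girder-resident inputs are exactly those with a model mapping.
    return param.typ in _SLICER_TYPE_TO_GIRDER_MODEL_MAP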
def testGirderInputSpec(self):
    # Set an API_URL so we can use the spec outside of a rest request
    Setting().set(worker.PluginSettings.API_URL, 'http://127.0.0.1')
    Setting().set(worker.PluginSettings.DIRECT_PATH, True)

    spec = utils.girderInputSpec(self.adminFolder, resourceType='folder')
    self.assertEqual(spec['id'], str(self.adminFolder['_id']))
    self.assertEqual(spec['resource_type'], 'folder')
    self.assertFalse(spec['fetch_parent'])
    self.assertNotIn('direct_path', spec)

    spec = utils.girderInputSpec(self.sampleFile, resourceType='file')
    self.assertEqual(spec['id'], str(self.sampleFile['_id']))
    self.assertEqual(spec['resource_type'], 'file')
    self.assertFalse(spec['fetch_parent'])
    self.assertIn('direct_path', spec)

    Setting().set(worker.PluginSettings.DIRECT_PATH, False)
    spec = utils.girderInputSpec(self.sampleFile, resourceType='file')
    self.assertFalse(spec['fetch_parent'])
    self.assertNotIn('direct_path', spec)

    Setting().set(worker.PluginSettings.DIRECT_PATH, True)
    spec = utils.girderInputSpec(self.sampleFile, resourceType='file', fetchParent=True)
    self.assertTrue(spec['fetch_parent'])
    self.assertNotIn('direct_path', spec)
def testGirderInputSpec(self):
    # Set an API_URL so we can use the spec outside of a rest request
    self.model('setting').set(worker.PluginSettings.API_URL, 'http://127.0.0.1')
    self.model('setting').set(worker.PluginSettings.DIRECT_PATH, True)

    spec = utils.girderInputSpec(self.adminFolder, resourceType='folder')
    self.assertEqual(spec['id'], str(self.adminFolder['_id']))
    self.assertEqual(spec['resource_type'], 'folder')
    self.assertFalse(spec['fetch_parent'])
    self.assertNotIn('direct_path', spec)

    spec = utils.girderInputSpec(self.sampleFile, resourceType='file')
    self.assertEqual(spec['id'], str(self.sampleFile['_id']))
    self.assertEqual(spec['resource_type'], 'file')
    self.assertFalse(spec['fetch_parent'])
    self.assertIn('direct_path', spec)

    self.model('setting').set(worker.PluginSettings.DIRECT_PATH, False)
    spec = utils.girderInputSpec(self.sampleFile, resourceType='file')
    self.assertFalse(spec['fetch_parent'])
    self.assertNotIn('direct_path', spec)

    self.model('setting').set(worker.PluginSettings.DIRECT_PATH, True)
    spec = utils.girderInputSpec(self.sampleFile, resourceType='file', fetchParent=True)
    self.assertTrue(spec['fetch_parent'])
    self.assertNotIn('direct_path', spec)
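# The two tests above pin down much of girderInputSpec's observable contract.
# Below is a minimal sketch consistent with those assertions -- an
# illustration, not the worker plugin's real implementation; the 'mode'
# field and the _directPathEnabled/_localFilePath helpers are assumptions:
def girderInputSpec(resource, resourceType='file', token=None,
                    dataType='string', dataFormat='string', fetchParent=False):
    spec = {
        'mode': 'girder',
        'id': str(resource['_id']),
        'resource_type': resourceType,
        'fetch_parent': fetchParent,
        'type': dataType,
        'format': dataFormat,
    }
    # direct_path is advertised only for files, only when the DIRECT_PATH
    # setting is enabled, and only when the parent is not being fetched --
    # exactly the three cases exercised above.
    if resourceType == 'file' and not fetchParent and _directPathEnabled():
        spec['direct_path'] = _localFilePath(resource)  # hypothetical helpers
    return spec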
def testWorkerCancel(self):
    from girder.plugins.jobs.models.job import Job
    jobModel = Job()
    job = jobModel.createJob(
        title='title', type='foo', handler='worker_handler',
        user=self.admin, public=False, args=(), kwargs={})

    job['kwargs'] = {
        'jobInfo': utils.jobInfoSpec(job),
        'inputs': [
            utils.girderInputSpec(self.adminFolder, resourceType='folder')
        ],
        'outputs': [
            utils.girderOutputSpec(self.adminFolder, token=self.adminToken)
        ]
    }
    job = jobModel.save(job)
    self.assertEqual(job['status'], JobStatus.INACTIVE)

    # Schedule the job, make sure it is sent to celery
    with mock.patch('celery.Celery') as celeryMock, \
            mock.patch('girder.plugins.worker.AsyncResult') as asyncResult:
        instance = celeryMock.return_value
        instance.send_task.return_value = FakeAsyncResult()

        jobModel.scheduleJob(job)
        jobModel.cancelJob(job)

        asyncResult.assert_called_with('fake_id', app=mock.ANY)
        # Check we called revoke
        asyncResult.return_value.revoke.assert_called_once()
        job = jobModel.load(job['_id'], force=True)
        self.assertEqual(job['status'], CustomJobStatus.CANCELING)
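# The mocked Celery in these tests returns a FakeAsyncResult whose task id
# ('fake_id') ends up saved as job['celeryTaskId'] and passed to AsyncResult
# above. The helper itself is not shown in this section; a minimal sketch
# consistent with those assertions (attribute names beyond the id are
# assumptions):
class FakeAsyncResult(object):
    def __init__(self, task_id='fake_id'):
        # celery's AsyncResult exposes the id as both .id and .task_id;
        # the scheduling code only needs to read one of them.
        self.id = task_id
        self.task_id = task_id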
def testWorkerStatusEndpoint(self):
    # Create a job to be handled by the worker plugin
    from girder.plugins.jobs.models.job import Job
    job = Job().createJob(
        title='title', type='foo', handler='worker_handler',
        user=self.admin, public=False, args=(), kwargs={})

    job['kwargs'] = {
        'jobInfo': utils.jobInfoSpec(job),
        'inputs': [
            utils.girderInputSpec(self.adminFolder, resourceType='folder')
        ],
        'outputs': [
            utils.girderOutputSpec(self.adminFolder, token=self.adminToken)
        ]
    }
    job = Job().save(job)
    self.assertEqual(job['status'], JobStatus.INACTIVE)

    # Schedule the job
    with mock.patch('celery.Celery') as celeryMock:
        instance = celeryMock.return_value
        instance.send_task.return_value = FakeAsyncResult()
        Job().scheduleJob(job)

    # Call the worker status endpoint
    resp = self.request('/worker/status', method='GET', user=self.admin)
    self.assertStatusOk(resp)
    self.assertHasKeys(resp.json, ['report', 'stats', 'ping', 'active', 'reserved'])
def testFetchParent(self, file, params):
    token = self.getCurrentToken()
    jobModel = self.model('job', 'jobs')
    job = jobModel.createJob(
        title='Parent fetch test', type='parent_fetch_test',
        handler='worker_handler', user=self.getCurrentUser())
    jobToken = jobModel.createJobToken(job)

    kwargs = {
        'task': {
            'mode': 'python',
            'script': 'print(fp)\n',
            'inputs': [{
                'id': 'fp',
                'target': 'filepath',
                'format': 'text',
                'type': 'string'
            }],
            'outputs': []
        },
        'inputs': {
            'fp': utils.girderInputSpec(file, token=token, fetchParent=True)
        },
        'outputs': {},
        'validate': False,
        'auto_convert': False,
        'cleanup': False,
        'jobInfo': utils.jobInfoSpec(job, jobToken)
    }
    job['kwargs'] = kwargs
    job = jobModel.save(job)
    jobModel.scheduleJob(job)

    return job
def _transformInputs(self, inputs, token):
    """
    Validates and sanitizes the input bindings. If they are Girder inputs,
    adds the necessary token info. If the token does not allow DATA_READ,
    or if the user does not have read access to the resource, raises an
    AccessException.
    """
    transformed = {}
    for k, v in six.viewitems(inputs):
        if v['mode'] == 'girder':
            ensureTokenScopes(token, TokenScope.DATA_READ)
            rtype = v.get('resource_type', 'file')
            if rtype not in {'file', 'item', 'folder'}:
                raise ValidationException('Invalid input resource_type: %s.' % rtype)

            resource = self.model(rtype).load(
                v['id'], level=AccessType.READ, user=self.getCurrentUser(),
                exc=True)

            transformed[k] = utils.girderInputSpec(
                resource, resourceType=rtype, token=token, dataFormat='none')
        elif v['mode'] == 'inline':
            transformed[k] = {
                'mode': 'inline',
                'data': v['data']
            }
        else:
            raise ValidationException('Invalid input mode: %s.' % v['mode'])

    return transformed
def testWorker(self):
    # Test the settings
    resp = self.request('/system/setting', method='PUT', params={
        'list': json.dumps([{
            'key': worker.PluginSettings.BROKER,
            'value': 'amqp://[email protected]'
        }, {
            'key': worker.PluginSettings.BACKEND,
            'value': 'amqp://[email protected]'
        }])
    }, user=self.admin)
    self.assertStatusOk(resp)

    # Create a job to be handled by the worker plugin
    from girder.plugins.jobs.models.job import Job
    jobModel = Job()
    job = jobModel.createJob(
        title='title', type='foo', handler='worker_handler',
        user=self.admin, public=False, args=(), kwargs={})

    job['kwargs'] = {
        'jobInfo': utils.jobInfoSpec(job),
        'inputs': [
            utils.girderInputSpec(self.adminFolder, resourceType='folder')
        ],
        'outputs': [
            utils.girderOutputSpec(self.adminFolder, token=self.adminToken)
        ]
    }
    job = jobModel.save(job)
    self.assertEqual(job['status'], JobStatus.INACTIVE)

    # Schedule the job, make sure it is sent to celery
    with mock.patch('celery.Celery') as celeryMock:
        instance = celeryMock.return_value
        instance.send_task.return_value = FakeAsyncResult()

        jobModel.scheduleJob(job)

        # Make sure we sent the job to celery
        self.assertEqual(len(celeryMock.mock_calls), 2)
        self.assertEqual(celeryMock.mock_calls[0][1], ('girder_worker',))
        self.assertEqual(celeryMock.mock_calls[0][2], {
            'broker': 'amqp://[email protected]',
            'backend': 'amqp://[email protected]'
        })

        sendTaskCalls = celeryMock.return_value.send_task.mock_calls
        self.assertEqual(len(sendTaskCalls), 1)
        self.assertEqual(sendTaskCalls[0][1], (
            'girder_worker.run', job['args'], job['kwargs']))

        self.assertTrue('headers' in sendTaskCalls[0][2])
        self.assertTrue('jobInfoSpec' in sendTaskCalls[0][2]['headers'])

        # Make sure we got and saved the celery task id
        job = jobModel.load(job['_id'], force=True)
        self.assertEqual(job['celeryTaskId'], 'fake_id')
        self.assertEqual(job['status'], JobStatus.QUEUED)
def testWorkerCancel(self):
    jobModel = self.model('job', 'jobs')
    job = jobModel.createJob(
        title='title', type='foo', handler='worker_handler',
        user=self.admin, public=False, args=(), kwargs={})

    job['kwargs'] = {
        'jobInfo': utils.jobInfoSpec(job),
        'inputs': [
            utils.girderInputSpec(self.adminFolder, resourceType='folder')
        ],
        'outputs': [
            utils.girderOutputSpec(self.adminFolder, token=self.adminToken)
        ]
    }
    job = jobModel.save(job)
    self.assertEqual(job['status'], JobStatus.INACTIVE)

    # Schedule the job, make sure it is sent to celery
    with mock.patch('celery.Celery') as celeryMock, \
            mock.patch('girder.plugins.worker.AsyncResult') as asyncResult:
        instance = celeryMock.return_value
        instance.send_task.return_value = FakeAsyncResult()

        jobModel.scheduleJob(job)
        jobModel.cancelJob(job)

        asyncResult.assert_called_with('fake_id', app=mock.ANY)
        # Check we called revoke
        asyncResult.return_value.revoke.assert_called_once()
        job = jobModel.load(job['_id'], force=True)
        self.assertEqual(job['status'], CustomJobStatus.CANCELING)
def testWorker(self):
    # Test the settings
    resp = self.request('/system/setting', method='PUT', params={
        'list': json.dumps([{
            'key': worker.PluginSettings.BROKER,
            'value': 'amqp://[email protected]'
        }, {
            'key': worker.PluginSettings.BACKEND,
            'value': 'amqp://[email protected]'
        }])
    }, user=self.admin)
    self.assertStatusOk(resp)

    # Create a job to be handled by the worker plugin
    jobModel = self.model('job', 'jobs')
    job = jobModel.createJob(
        title='title', type='foo', handler='worker_handler',
        user=self.admin, public=False, args=(), kwargs={})

    job['kwargs'] = {
        'jobInfo': utils.jobInfoSpec(job),
        'inputs': [
            utils.girderInputSpec(self.adminFolder, resourceType='folder')
        ],
        'outputs': [
            utils.girderOutputSpec(self.adminFolder, token=self.adminToken)
        ]
    }
    job = jobModel.save(job)
    self.assertEqual(job['status'], JobStatus.INACTIVE)

    # Schedule the job, make sure it is sent to celery
    with mock.patch('celery.Celery') as celeryMock:
        instance = celeryMock.return_value
        instance.send_task.return_value = FakeAsyncResult()

        jobModel.scheduleJob(job)

        # Make sure we sent the job to celery
        self.assertEqual(len(celeryMock.mock_calls), 2)
        self.assertEqual(celeryMock.mock_calls[0][1], ('girder_worker',))
        self.assertEqual(celeryMock.mock_calls[0][2], {
            'broker': 'amqp://[email protected]',
            'backend': 'amqp://[email protected]'
        })

        sendTaskCalls = celeryMock.return_value.send_task.mock_calls
        self.assertEqual(len(sendTaskCalls), 1)
        self.assertEqual(sendTaskCalls[0][1], (
            'girder_worker.run', job['args'], job['kwargs']))

        self.assertTrue('headers' in sendTaskCalls[0][2])
        self.assertTrue('jobInfoSpec' in sendTaskCalls[0][2]['headers'])

        # Make sure we got and saved the celery task id
        job = jobModel.load(job['_id'], force=True)
        self.assertEqual(job['celeryTaskId'], 'fake_id')
        self.assertEqual(job['status'], JobStatus.QUEUED)
def createBlurImage(self, item, params):
    user = self.getCurrentUser()
    token = self.getCurrentToken()
    jobTitle = 'ITK blur: ' + item['name']
    jobModel = self.model('job', 'jobs')
    folder = self.model('folder').load(item['folderId'], force=True)
    job = jobModel.createJob(
        title=jobTitle, type='itk_blur', handler='worker_handler', user=user)
    jobToken = jobModel.createJobToken(job)

    scriptFile = os.path.join(os.path.dirname(os.path.dirname(__file__)),
                              'scripts', 'cad_script.py')
    with open(scriptFile, 'r') as fh:
        script = fh.read()

    kwargs = {
        'task': {
            'name': jobTitle,
            'mode': 'python',
            'script': script,
            'inputs': [{
                'id': 'inputFileName',
                'type': 'string',
                'format': 'text',
                'target': 'filepath'
            }],
            'outputs': [{
                'id': 'outputFileName',
                'format': 'text',
                'type': 'string',
                'target': 'filepath'
            }]
        },
        'inputs': {
            'inputFileName': utils.girderInputSpec(
                item, resourceType='item', token=token)
        },
        'outputs': {
            'outputFileName': utils.girderOutputSpec(
                folder, token=token, parentType='folder')
        },
        'jobInfo': {
            'method': 'PUT',
            'url': '/'.join((getApiUrl(), 'job', str(job['_id']))),
            'headers': {'Girder-Token': jobToken['_id']},
            'logPrint': True
        },
        'validate': False,
        'auto_convert': False
    }
    job['kwargs'] = kwargs
    job = jobModel.save(job)
    jobModel.scheduleJob(job)
    return job
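# createBlurImage builds its jobInfo binding by hand; the other snippets in
# this section get the same structure from utils.jobInfoSpec. A sketch of
# that helper reconstructed from the hand-built dict above -- the real
# helper may mint its own job token and add more fields:
def jobInfoSpec(job, token=None, logPrint=True):
    return {
        'method': 'PUT',
        'url': '/'.join((getApiUrl(), 'job', str(job['_id']))),
        'headers': {'Girder-Token': token['_id']} if token else {},
        'logPrint': logPrint,
    }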
def testWorkerDifferentTask(self):
    # Test the settings
    resp = self.request('/system/setting', method='PUT', params={
        'key': worker.PluginSettings.API_URL,
        'value': 'bad value'
    }, user=self.admin)
    self.assertStatus(resp, 400)
    self.assertEqual(resp.json['message'],
                     'API URL must start with http:// or https://.')

    resp = self.request('/system/setting', method='PUT', params={
        'list': json.dumps([{
            'key': worker.PluginSettings.BROKER,
            'value': 'amqp://[email protected]'
        }, {
            'key': worker.PluginSettings.BACKEND,
            'value': 'amqp://[email protected]'
        }])
    }, user=self.admin)
    self.assertStatusOk(resp)

    # Create a job to be handled by the worker plugin
    from girder.plugins.jobs.models.job import Job
    jobModel = Job()
    job = jobModel.createJob(
        title='title', type='foo', handler='worker_handler',
        user=self.admin, public=False, args=(), kwargs={},
        otherFields={
            'celeryTaskName': 'some_other.task',
            'celeryQueue': 'my_other_q'
        })

    job['kwargs'] = {
        'jobInfo': utils.jobInfoSpec(job),
        'inputs': [
            utils.girderInputSpec(self.adminFolder, resourceType='folder')
        ],
        'outputs': [
            utils.girderOutputSpec(self.adminFolder, token=self.adminToken)
        ]
    }
    job = jobModel.save(job)

    # Schedule the job, make sure it is sent to celery
    with mock.patch('celery.Celery') as celeryMock:
        instance = celeryMock.return_value
        instance.send_task.return_value = FakeAsyncResult()

        jobModel.scheduleJob(job)

        sendTaskCalls = celeryMock.return_value.send_task.mock_calls
        self.assertEqual(len(sendTaskCalls), 1)
        self.assertEqual(sendTaskCalls[0][1], (
            'some_other.task', job['args'], job['kwargs']))
        self.assertIn('queue', sendTaskCalls[0][2])
        self.assertEqual(sendTaskCalls[0][2]['queue'], 'my_other_q')
def testWorker(self):
    # Test the settings
    resp = self.request(
        "/system/setting",
        method="PUT",
        params={
            "list": json.dumps(
                [
                    {"key": worker.PluginSettings.BROKER, "value": "amqp://[email protected]"},
                    {"key": worker.PluginSettings.BACKEND, "value": "amqp://[email protected]"},
                ]
            )
        },
        user=self.admin,
    )
    self.assertStatusOk(resp)

    # Create a job to be handled by the worker plugin
    jobModel = self.model("job", "jobs")
    job = jobModel.createJob(
        title="title", type="foo", handler="worker_handler",
        user=self.admin, public=False, args=(), kwargs={}
    )

    job["kwargs"] = {
        "jobInfo": utils.jobInfoSpec(job),
        "inputs": [utils.girderInputSpec(self.adminFolder, resourceType="folder")],
        "outputs": [utils.girderOutputSpec(self.adminFolder, token=self.adminToken)],
    }
    job = jobModel.save(job)
    self.assertEqual(job["status"], JobStatus.INACTIVE)

    # Schedule the job, make sure it is sent to celery
    with mock.patch("celery.Celery") as celeryMock:
        instance = celeryMock.return_value
        instance.send_task.return_value = FakeAsyncResult()

        jobModel.scheduleJob(job)

        # Make sure we sent the job to celery
        self.assertEqual(len(celeryMock.mock_calls), 2)
        self.assertEqual(celeryMock.mock_calls[0][1], ("girder_worker",))
        self.assertEqual(
            celeryMock.mock_calls[0][2],
            {"broker": "amqp://[email protected]", "backend": "amqp://[email protected]"},
        )

        sendTaskCalls = celeryMock.return_value.send_task.mock_calls
        self.assertEqual(len(sendTaskCalls), 1)
        self.assertEqual(sendTaskCalls[0][1], ("girder_worker.run", job["args"], job["kwargs"]))

        # Make sure we got and saved the celery task id
        job = jobModel.load(job["_id"], force=True)
        self.assertEqual(job["celeryTaskId"], "fake_id")
        self.assertEqual(job["status"], JobStatus.QUEUED)
def testWorkerDifferentTask(self):
    # Test the settings
    resp = self.request('/system/setting', method='PUT', params={
        'key': worker.PluginSettings.API_URL,
        'value': 'bad value'
    }, user=self.admin)
    self.assertStatus(resp, 400)
    self.assertEqual(resp.json['message'],
                     'API URL must start with http:// or https://.')

    resp = self.request('/system/setting', method='PUT', params={
        'list': json.dumps([{
            'key': worker.PluginSettings.BROKER,
            'value': 'amqp://[email protected]'
        }, {
            'key': worker.PluginSettings.BACKEND,
            'value': 'amqp://[email protected]'
        }])
    }, user=self.admin)
    self.assertStatusOk(resp)

    # Create a job to be handled by the worker plugin
    jobModel = self.model('job', 'jobs')
    job = jobModel.createJob(
        title='title', type='foo', handler='worker_handler',
        user=self.admin, public=False, args=(), kwargs={},
        otherFields={
            'celeryTaskName': 'some_other.task',
            'celeryQueue': 'my_other_q'
        })

    job['kwargs'] = {
        'jobInfo': utils.jobInfoSpec(job),
        'inputs': [
            utils.girderInputSpec(self.adminFolder, resourceType='folder')
        ],
        'outputs': [
            utils.girderOutputSpec(self.adminFolder, token=self.adminToken)
        ]
    }
    job = jobModel.save(job)

    # Schedule the job, make sure it is sent to celery
    with mock.patch('celery.Celery') as celeryMock:
        instance = celeryMock.return_value
        instance.send_task.return_value = FakeAsyncResult()

        jobModel.scheduleJob(job)

        sendTaskCalls = celeryMock.return_value.send_task.mock_calls
        self.assertEqual(len(sendTaskCalls), 1)
        self.assertEqual(sendTaskCalls[0][1], (
            'some_other.task', job['args'], job['kwargs']))
        self.assertIn('queue', sendTaskCalls[0][2])
        self.assertEqual(sendTaskCalls[0][2]['queue'], 'my_other_q')
def testOutputs(self, folder, item, params):
    token = self.getCurrentToken()
    jobModel = self.model('job', 'jobs')
    job = jobModel.createJob(
        title='docker output test: %s' % folder['name'], type='docker_test',
        handler='worker_handler', user=self.getCurrentUser())
    jobToken = jobModel.createJobToken(job)

    kwargs = {
        'task': {
            'mode': 'docker',
            'docker_image': 'testoutputs:latest',
            'pull_image': False,
            'inputs': [{
                'id': 'input',
                'target': 'filepath',
                'format': 'text',
                'type': 'string'
            }],
            'outputs': [{
                'id': 'out.txt',
                'target': 'filepath',
                'format': 'text',
                'type': 'string'
            }]
        },
        'inputs': {
            'input': utils.girderInputSpec(item, resourceType='item', token=token)
        },
        'outputs': {
            'out.txt': utils.girderOutputSpec(folder, token)
        },
        'jobInfo': utils.jobInfoSpec(job, jobToken)
    }
    job['kwargs'] = kwargs
    job = jobModel.save(job)
    jobModel.scheduleJob(job)

    return job
def _createInputParamBindingSpec(param, hargs, token):
    curBindingSpec = dict()
    if _is_on_girder(param):
        curBindingSpec = wutils.girderInputSpec(
            hargs[param.name],
            resourceType=_SLICER_TYPE_TO_GIRDER_MODEL_MAP[param.typ],
            dataType='string', dataFormat='string',
            token=token, fetchParent=True)
    else:
        # inputs that are not of type image, file, or directory
        # should be passed inline as string from json.dumps()
        curBindingSpec['mode'] = 'inline'
        curBindingSpec['type'] = _SLICER_TO_GIRDER_WORKER_TYPE_MAP[param.typ]
        curBindingSpec['format'] = 'json'
        curBindingSpec['data'] = hargs['params'][param.name]
    return curBindingSpec
def _createInputParamBindingSpec(param, hargs, token):
    curBindingSpec = dict()
    if _is_on_girder(param):
        curBindingSpec = wutils.girderInputSpec(
            hargs[param.name],
            resourceType=_SLICER_TYPE_TO_GIRDER_MODEL_MAP[param.typ],
            dataType='string', dataFormat='string',
            token=token)
    else:
        # inputs that are not of type image, file, or directory
        # should be passed inline as string from json.dumps()
        curBindingSpec['mode'] = 'inline'
        curBindingSpec['type'] = _SLICER_TO_GIRDER_WORKER_TYPE_MAP[param.typ]
        curBindingSpec['format'] = 'json'
        curBindingSpec['data'] = hargs['params'][param.name]
    return curBindingSpec
def _generateSuperpixels(self, image):
    SUPERPIXEL_VERSION = 3.0

    user = User().load(image['creatorId'], force=True, exc=True)
    # Use admin user, to ensure that worker always has access
    token = Token().createToken(
        user=getAdminUser(), days=1,
        scope=[TokenScope.DATA_READ, TokenScope.DATA_WRITE])

    with open(os.path.join(
            os.path.dirname(__file__),
            '_generate_superpixels.py'), 'r') as scriptStream:
        script = scriptStream.read()

    title = 'superpixels v%s generation: %s' % (
        SUPERPIXEL_VERSION, image['name'])
    job = Job().createJob(
        title=title,
        type='isic_archive_superpixels',
        handler='worker_handler',
        kwargs={
            'jobInfo': None,  # will be filled after job is created
            'task': {
                'mode': 'python',
                'script': script,
                'name': title,
                'inputs': [{
                    'id': 'originalFile',
                    'type': 'string',
                    'format': 'text',
                    'target': 'filepath'
                }, {
                    'id': 'segmentation_helpersPath',
                    'type': 'string',
                    'format': 'text',
                }],
                'outputs': [{
                    'id': 'superpixelsEncodedBytes',
                    'type': 'string',
                    'format': 'text',
                    'target': 'memory'
                }]
            },
            'inputs': {
                'originalFile': workerUtils.girderInputSpec(
                    resource=self.originalFile(image),
                    resourceType='file',
                    token=token),
                'segmentation_helpersPath': {
                    'mode': 'inline',
                    'format': 'text',
                    'data': segmentation_helpers.__path__[0]
                }
            },
            'outputs': {
                'superpixelsEncodedBytes': workerUtils.girderOutputSpec(
                    parent=image,
                    token=token,
                    parentType='item',
                    name='%s_superpixels_v%s.png' % (image['name'], SUPERPIXEL_VERSION),
                    reference=''
                )
            },
            'auto_convert': False,
            'validate': False
        },
        user=user,
        public=False,
        save=True  # must save to create an _id for workerUtils.jobInfoSpec
    )
    job['kwargs']['jobInfo'] = workerUtils.jobInfoSpec(
        job,
        Job().createJobToken(job),
        logPrint=True
    )
    job['meta'] = {
        'creator': 'isic_archive',
        'task': 'generateSuperpixels',
        'imageId': image['_id'],
        'imageName': image['name'],
        'superpixelsVersion': SUPERPIXEL_VERSION
    }
    job = Job().save(job)
    Job().scheduleJob(job)
    return job
def runTaskSpec(self, key, params, **kwargs):
    """Create a job from the given task spec."""
    task_spec = get_task_spec(key)
    if task_spec is None:
        raise RestException('No task named %s.' % key)

    # validate input bindings
    for input_spec in task_spec['inputs']:
        input_name = input_spec['name']
        input_key = 'INPUT({})'.format(input_name)

        try:
            payload = params[input_key]
        except KeyError:
            # Check to see if the input spec provides a default.
            # If not, raise an exception.
            if 'default' not in input_spec:
                raise RestException(
                    'No binding provided for input "{}".'.format(
                        input_name))
            # Fall back to the default; there is no payload to validate.
            continue

        if RE_ARG_SPEC.match(payload) is None:
            raise RestException(
                'invalid payload for input "{}": "{}"'.format(
                    input_name, payload))

    # validate output bindings
    for output_spec in task_spec['outputs']:
        output_name = output_spec['name']
        output_key = 'OUTPUT({})'.format(output_name)

        try:
            payload = params[output_key]
        except KeyError:
            continue

        if RE_ARG_SPEC.match(payload) is None:
            raise RestException(
                'invalid payload for output "{}": "{}"'.format(
                    output_name, payload))

    #
    # validation complete
    #

    job_title = params.get('title', 'sumo {}'.format(task_spec['name']))

    user, token = self.getCurrentUser(True)
    job = self.model('job', 'jobs').createJob(
        title=job_title, type='sumo', user=user, handler='worker_handler')

    scratchDirectory = self._ensureJobDirectory(user, job['_id'])

    jobToken = self.model('job', 'jobs').createJobToken(job)
    job['kwargs']['jobInfo'] = workerUtils.jobInfoSpec(
        job=job, token=jobToken, logPrint=True)

    if not token:
        # It seems like we should be able to use a token without USER_AUTH
        # in its scope, but I'm not sure how.
        token = self.model('token').createToken(
            user, days=1, scope=TokenScope.USER_AUTH)

    jobpayload = (self.model('jobpayload', 'osumo')
                  .createJobpayload(job['_id'], user['_id']))

    job_inputs = {}
    for input_spec in task_spec['inputs']:
        input_name = input_spec['name']
        input_key = 'INPUT({})'.format(input_name)

        payload = params.get(input_key)
        if payload is None:
            continue

        job_input = {}

        m = RE_ARG_SPEC.match(payload)
        pos_args, extra_args = m.group(1), m.group(3)
        pos_args = pos_args.split(':')
        if extra_args:
            extra_args = json.loads('{{{}}}'.format(extra_args))
        else:
            extra_args = {}

        input_type = pos_args[0]
        if input_type in ('FILE', 'ITEM'):
            resource_id = pos_args[1]
            resource_type = input_type.lower()

            data_type = extra_args.get('type', input_spec.get('type', 'string'))
            data_format = extra_args.get('format', input_spec.get('format', 'text'))

            job_input.update(workerUtils.girderInputSpec(
                self._getResource(resource_type, resource_id, user),
                resourceType=resource_type,
                token=token,
                dataType=data_type,
                dataFormat=data_format))
        elif input_type == 'HTTP':
            # TODO(opadron): maybe we'll want to implement this, someday?
            raise NotImplementedError('HTTP input not implemented')
        elif input_type == 'INTEGER':
            value = pos_args[1]
            job_input['type'] = 'number'
            job_input['format'] = 'number'
            job_input['mode'] = 'inline'
            job_input['data'] = int(value)
        elif input_type == 'FLOAT':
            value = pos_args[1]
            job_input['type'] = 'number'
            job_input['format'] = 'number'
            job_input['mode'] = 'inline'
            job_input['data'] = float(value)
        elif input_type == 'STRING':
            value = ':'.join(pos_args[1:])
            job_input['type'] = 'string'
            job_input['format'] = 'text'
            job_input['mode'] = 'inline'
            job_input['data'] = value
        elif input_type == 'BOOLEAN':
            value = pos_args[1]
            job_input['type'] = 'boolean'
            job_input['format'] = 'json'
            job_input['mode'] = 'inline'
            job_input['data'] = 'true' if int(value) else 'false'
        else:
            raise NotImplementedError(
                'Input type "{}" not supported'.format(input_type))

        job_input.update(extra_args)
        job_inputs[input_name] = job_input

    job_outputs = {}
    for output_spec in task_spec['outputs']:
        output_name = output_spec['name']
        output_key = 'OUTPUT({})'.format(output_name)

        payload = params.get(output_key)
        if payload is None:
            continue

        job_output = {}

        m = RE_ARG_SPEC.match(payload)
        pos_args, extra_args = m.group(1), m.group(3)
        pos_args = pos_args.split(':')
        if extra_args:
            extra_args = json.loads('{{{}}}'.format(extra_args))
        else:
            extra_args = {}

        output_type = pos_args[0]
        if output_type in ('FILE', 'ITEM'):
            # Pad pos_args so a missing resource name unpacks as None.
            parent_id, resource_name = (pos_args + [None, None])[1:3]
            parent_type = ('folder' if output_type == 'FILE' else 'file')

            data_type = extra_args.get('type', output_spec.get('type', 'string'))
            data_format = extra_args.get('format', output_spec.get('format', 'text'))

            job_output.update(workerUtils.girderOutputSpec(
                scratchDirectory,
                parentType='folder',
                token=token,
                name=resource_name,
                dataType=data_type,
                dataFormat=data_format))
        elif output_type in ('INTEGER', 'FLOAT', 'STRING', 'BOOLEAN', 'JSON'):
            parse_result = urllib.parse.urlparse(
                getConfig()['database']['uri'])

            job_output['mode'] = 'sumo'
            job_output['db'] = parse_result.path[1:]
            job_output['collection'] = 'jobpayload'
            job_output['host'] = parse_result.netloc
            job_output['id'] = jobpayload['_id']
            job_output['key'] = output_name

            if output_type == 'INTEGER':
                job_output['type'] = 'number'
                job_output['format'] = 'number'
                job_output['converter'] = 'int'
            elif output_type == 'FLOAT':
                job_output['type'] = 'number'
                job_output['format'] = 'number'
                job_output['converter'] = 'float'
            elif output_type == 'STRING':
                job_output['type'] = 'string'
                job_output['format'] = 'text'
            elif output_type == 'BOOLEAN':
                job_output['type'] = 'boolean'
                job_output['format'] = 'boolean'
                job_output['converter'] = 'bool'
            elif output_type == 'JSON':
                job_output['type'] = 'string'
                job_output['format'] = 'text'
                job_output['converter'] = 'json'
        else:
            raise NotImplementedError(
                'Output type "{}" not supported'.format(output_type))

        job_output.update(extra_args)
        job_outputs[output_name] = job_output

    job['kwargs'].update(
        task=task_spec, inputs=job_inputs, outputs=job_outputs)

    job = self.model('job', 'jobs').save(job)
    self.model('jobuser', 'osumo').createJobuser(job['_id'], user['_id'])
    self.model('job', 'jobs').scheduleJob(job)

    return {
        'job': self.model('job', 'jobs').filter(job, user),
        'folder': self.model('folder').filter(scratchDirectory, user),
        'token': str(token['_id'])
    }
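# RE_ARG_SPEC itself is not shown; from how groups 1 and 3 are consumed in
# runTaskSpec, a binding payload appears to be colon-separated positional
# arguments optionally followed by a brace-delimited body whose interior
# (group 3) is parsed as the members of a JSON object. A hypothetical set of
# request params (the ids and the exact grammar are illustrative guesses):
params = {
    'INPUT(iterations)': 'INTEGER:10',
    'INPUT(threshold)': 'FLOAT:0.5',
    'INPUT(image)': 'FILE:5a8b1ec5f8a18f0001e4cafe',
    'INPUT(label)': 'STRING:hello:world',   # colons after the first stay in the value
    'OUTPUT(result)': 'FILE:5a8b1ec5f8a18f0001e4feed:result.csv{"format": "csv"}',
}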
def processTask(self, params, **kwargs):
    self.requireParams(('taskkey', ), params)
    if getattr(job_specs, params['taskkey'], None) is None:
        raise RestException('No task named %s.' % params['taskkey'])
    task = copy.deepcopy(getattr(job_specs, params['taskkey']))

    data = {}
    data.update({input['key']: input for input in task['inputs']})
    data.update({input['key']: input for input in task['parameters']})

    # Any input that doesn't have a default is required.
    self.requireParams((key for key in data if 'default' not in data[key]),
                       params)

    user, token = self._getTaskUser(task)
    self._adjustDataTypes(data, params, user)

    job = self.model('job', 'jobs').createJob(
        title='sumo %s' % task.get('name', 'task'),
        type='sumo',
        user=user,
        handler='worker_handler')
    jobToken = self.model('job', 'jobs').createJobToken(job)

    job['kwargs']['jobInfo'] = workerUtils.jobInfoSpec(
        job=job, token=jobToken, logPrint=True)

    if not token:
        # It seems like we should be able to use a token without USER_AUTH
        # in its scope, but I'm not sure how.
        token = self.model('token').createToken(
            user, days=1, scope=TokenScope.USER_AUTH)

    inputs = {}
    for key in data:
        if data[key].get('input') is False:
            continue
        spec = data.get(key, {}).copy()
        if data[key].get('type') in ('file', 'item', 'folder'):
            spec = workerUtils.girderInputSpec(
                spec['data'],
                resourceType=data[key]['type'],
                token=token,
                dataType=data[key].get('dataType', 'string'),
                dataFormat=data[key].get('dataFormat', 'text'),
            )
        inputs[key] = spec

    # TODO(opadron): make a special-purpose token just for this job in case
    # the user logs out before it finishes.
    outputs = {}
    for output in task.get('outputs', {}):
        key = output['key']
        spec = {'token': token}
        for subkey in output:
            if (subkey in inspect.getargspec(
                    workerUtils.girderOutputSpec).args):
                value = output[subkey]
                if value.startswith('parameter:'):
                    valuekey = value.split(':', 1)[1]
                    value = data.get(valuekey, {}).get('data')
                spec[subkey] = value
        outputs[key] = workerUtils.girderOutputSpec(**spec)

    job['kwargs'].update(task=task['task'], inputs=inputs, outputs=outputs)
    job = self.model('job', 'jobs').save(job)
    self.model('job', 'jobs').scheduleJob(job)
    self.jobInfo[str(job['_id'])] = {'user': user}

    return {
        'job': self.model('job', 'jobs').filter(job, user),
        'token': str(token['_id'])
    }
def _createLargeImageJob(self, item, fileObj, user, token):
    path = os.path.join(os.path.dirname(__file__), '..', 'create_tiff.py')
    with open(path, 'r') as f:
        script = f.read()

    title = 'TIFF conversion: %s' % fileObj['name']
    Job = self.model('job', 'jobs')
    job = Job.createJob(
        title=title, type='large_image_tiff', handler='worker_handler',
        user=user)
    jobToken = Job.createJobToken(job)

    outputName = os.path.splitext(fileObj['name'])[0] + '.tiff'
    if outputName == fileObj['name']:
        outputName = (os.path.splitext(fileObj['name'])[0] + '.' +
                      time.strftime('%Y%m%d-%H%M%S') + '.tiff')

    task = {
        'mode': 'python',
        'script': script,
        'name': title,
        'inputs': [{
            'id': 'in_path',
            'target': 'filepath',
            'type': 'string',
            'format': 'text'
        }, {
            'id': 'out_filename',
            'type': 'string',
            'format': 'text'
        }, {
            'id': 'tile_size',
            'type': 'number',
            'format': 'number'
        }, {
            'id': 'quality',
            'type': 'number',
            'format': 'number'
        }],
        'outputs': [{
            'id': 'out_path',
            'target': 'filepath',
            'type': 'string',
            'format': 'text'
        }]
    }

    inputs = {
        'in_path': workerUtils.girderInputSpec(
            fileObj, resourceType='file', token=token),
        'quality': {
            'mode': 'inline',
            'type': 'number',
            'format': 'number',
            'data': 90
        },
        'tile_size': {
            'mode': 'inline',
            'type': 'number',
            'format': 'number',
            'data': 256
        },
        'out_filename': {
            'mode': 'inline',
            'type': 'string',
            'format': 'text',
            'data': outputName
        }
    }

    outputs = {
        'out_path': workerUtils.girderOutputSpec(
            parent=item, token=token, parentType='item')
    }

    # TODO: Give the job an owner
    job['kwargs'] = {
        'task': task,
        'inputs': inputs,
        'outputs': outputs,
        'jobInfo': workerUtils.jobInfoSpec(job, jobToken),
        'auto_convert': False,
        'validate': False
    }
    job['meta'] = {
        'creator': 'large_image',
        'itemId': str(item['_id']),
        'task': 'createImageItem',
    }
    job = Job.save(job)
    Job.scheduleJob(job)
    return job
def _createLargeImageJob(self, item, fileObj, user, token):
    path = os.path.join(os.path.dirname(__file__), '..', 'create_tiff.py')
    with open(path, 'r') as f:
        script = f.read()

    title = 'TIFF conversion: %s' % fileObj['name']
    Job = self.model('job', 'jobs')
    job = Job.createJob(
        title=title, type='large_image_tiff', handler='worker_handler',
        user=user)
    jobToken = Job.createJobToken(job)

    task = {
        'mode': 'python',
        'script': script,
        'name': title,
        'inputs': [{
            'id': 'in_path',
            'target': 'filepath',
            'type': 'string',
            'format': 'text'
        }, {
            'id': 'out_filename',
            'type': 'string',
            'format': 'text'
        }, {
            'id': 'tile_size',
            'type': 'number',
            'format': 'number'
        }, {
            'id': 'quality',
            'type': 'number',
            'format': 'number'
        }],
        'outputs': [{
            'id': 'out_path',
            'target': 'filepath',
            'type': 'string',
            'format': 'text'
        }]
    }

    inputs = {
        'in_path': workerUtils.girderInputSpec(
            item, resourceType='item', token=token),
        'quality': {
            'mode': 'inline',
            'type': 'number',
            'format': 'number',
            'data': 90
        },
        'tile_size': {
            'mode': 'inline',
            'type': 'number',
            'format': 'number',
            'data': 256
        },
        'out_filename': {
            'mode': 'inline',
            'type': 'string',
            'format': 'text',
            'data': os.path.splitext(fileObj['name'])[0] + '.tiff'
        }
    }

    outputs = {
        'out_path': workerUtils.girderOutputSpec(
            parent=item, token=token, parentType='item')
    }

    # TODO: Give the job an owner
    job['kwargs'] = {
        'task': task,
        'inputs': inputs,
        'outputs': outputs,
        'jobInfo': workerUtils.jobInfoSpec(job, jobToken),
        'auto_convert': False,
        'validate': False
    }
    job = Job.save(job)
    Job.scheduleJob(job)
    return job
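# Both _createLargeImageJob variants run create_tiff.py in girder_worker's
# 'python' mode, where each declared input is bound as a variable in the
# script's namespace and each declared output is read back from it after the
# script runs. The real conversion script is not shown here; a stand-in for
# its contract (bindings defined inline only so the sketch runs standalone,
# the actual conversion elided):
import os

in_path = '/tmp/input.png'    # injected by the worker in practice
out_filename = 'input.tiff'   # injected by the worker in practice
tile_size = 256               # injected by the worker in practice
quality = 90                  # injected by the worker in practice

out_path = os.path.join(os.path.dirname(in_path), out_filename)
# ... convert in_path into a tiled TIFF at out_path, honoring tile_size
# and quality ...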
def processVideo(self, id, params):
    force = params['force']
    user, userToken = getCurrentUser(True)

    itemModel = self.model('item')
    fileModel = self.model('file')
    tokenModel = self.model('token')
    jobModel = self.model('job', 'jobs')

    item = itemModel.load(id, user=user, level=AccessType.READ)

    itemVideoData = item.get('video', {})
    jobId = itemVideoData.get('jobId')

    itemAlreadyProcessed = False
    job = None
    if jobId is not None:
        job = jobModel.load(jobId, level=AccessType.READ, user=user)

    if not force:
        if job is not None:
            status = job['status']
            if status not in (None, JobStatus.ERROR, JobStatus.CANCELED):
                itemAlreadyProcessed = True

        if itemAlreadyProcessed:
            result = {
                'video': {
                    'jobCreated': False,
                    'message': 'Processing job already created.'
                }
            }
            result.update(job)
            return result

    # if user provided fileId, use that one
    fileId = params.get('fileId')
    if fileId is not None:
        # ensure the provided fileId is valid
        inputFile = fileModel.findOne({
            'itemId': ObjectId(id),
            '_id': ObjectId(fileId)
        })
        if inputFile is None:
            raise RestException(
                'Item with id=%s has no such file with id=%s' % (id, fileId))
    else:
        # User did not provide a fileId.
        #
        # If we're *re*running a processing job (force=True), look
        # for the fileId used by the old job.
        if force and job:
            fileId = job.get('meta', {}).get('video', {}).get('fileId')

        if fileId:
            # ensure the provided fileId is valid, but in this case,
            # don't raise an exception if it is not -- just discard the
            # fileId and move on
            inputFile = fileModel.findOne({
                'itemId': ObjectId(id),
                '_id': ObjectId(fileId)
            })
            if inputFile is None:
                fileId = None

    # if we *still* don't have a fileId, just grab the first one found under
    # the given item.
    if fileId is None:
        inputFile = fileModel.findOne({'itemId': ObjectId(id)})

        # if there *are* no files, bail
        if inputFile is None:
            raise RestException('item %s has no files' % id)

        fileId = inputFile['_id']

    # if we are *re*running a processing job (force=True), remove all files
    # from this item that were created by the last processing job...
    #
    # ...unless (for some reason) the user is running the job against that
    # particular file (this is almost certainly user error, but for now,
    # we'll just keep the file around).
    if force:
        fileIdList = itemVideoData.get('createdFiles', [])
        for f in fileIdList:
            if f == fileId:
                continue
            theFile = fileModel.load(f, level=AccessType.WRITE, user=user)
            if theFile:
                fileModel.remove(theFile)
        itemVideoData['createdFiles'] = []

    # begin construction of the actual job
    if not userToken:
        # It seems like we should be able to use a token without USER_AUTH
        # in its scope, but I'm not sure how.
        userToken = tokenModel.createToken(
            user, days=1, scope=TokenScope.USER_AUTH)

    jobTitle = 'Video Processing'
    job = jobModel.createJob(
        title=jobTitle, type='video', user=user, handler='worker_handler')
    jobToken = jobModel.createJobToken(job)

    job['kwargs'] = job.get('kwargs', {})
    job['kwargs']['task'] = {
        'mode': 'docker',

        # TODO(opadron): replace this once we have a maintained
        # image on dockerhub
        'docker_image': 'ffmpeg_local',
        'progress_pipe': True,
        'pull_image': False,
        'inputs': [{
            'id': 'input',
            'type': 'string',
            'format': 'text',
            'target': 'filepath'
        }],
        'outputs': [
            {
                'id': '_stdout',
                'type': 'string',
                'format': 'text',
                'target': 'memory'
            },
            {
                'id': '_stderr',
                'type': 'string',
                'format': 'text',
                'target': 'memory'
            },
            {
                'id': 'source',
                'type': 'string',
                'format': 'text',
                'target': 'filepath',
                'path': '/mnt/girder_worker/data/source.webm'
            },
            {
                'id': 'meta',
                'type': 'string',
                'format': 'text',
                'target': 'filepath',
                'path': '/mnt/girder_worker/data/meta.json'
            },
        ]
    }

    _, itemExt = os.path.splitext(item['name'])

    job['kwargs']['inputs'] = {
        'input': workerUtils.girderInputSpec(
            inputFile,
            resourceType='file',
            token=userToken,
            name='input' + itemExt,
            dataType='string',
            dataFormat='text')
    }

    job['kwargs']['outputs'] = {
        '_stdout': workerUtils.girderOutputSpec(
            item,
            parentType='item',
            token=userToken,
            name='processing_stdout.txt',
            dataType='string',
            dataFormat='text',
            reference='videoPlugin'),
        '_stderr': workerUtils.girderOutputSpec(
            item,
            parentType='item',
            token=userToken,
            name='processing_stderr.txt',
            dataType='string',
            dataFormat='text',
            reference='videoPlugin'),
        'source': workerUtils.girderOutputSpec(
            item,
            parentType='item',
            token=userToken,
            name='source.webm',
            dataType='string',
            dataFormat='text',
            reference='videoPlugin'),
        'meta': workerUtils.girderOutputSpec(
            item,
            parentType='item',
            token=userToken,
            name='meta.json',
            dataType='string',
            dataFormat='text',
            reference='videoPluginMeta'),
    }

    job['kwargs']['jobInfo'] = workerUtils.jobInfoSpec(
        job=job, token=jobToken, logPrint=True)

    job['meta'] = job.get('meta', {})
    job['meta']['video_plugin'] = {'itemId': id, 'fileId': fileId}

    job = jobModel.save(job)
    jobModel.scheduleJob(job)

    itemVideoData['jobId'] = str(job['_id'])
    item['video'] = itemVideoData
    itemModel.save(item)

    result = {
        'video': {
            'jobCreated': True,
            'message': 'Processing job created.'
        }
    }
    result.update(job)
    return result
def _generateSuperpixels(self, image):
    Job = self.model('job', 'jobs')
    Token = self.model('token')
    User = self.model('user', 'isic_archive')

    SUPERPIXEL_VERSION = 3.0

    user = User.load(image['creatorId'], force=True, exc=True)
    # Use admin user, to ensure that worker always has access
    token = Token.createToken(
        user=getAdminUser(), days=1,
        scope=[TokenScope.DATA_READ, TokenScope.DATA_WRITE])

    with open(os.path.join(
            os.path.dirname(__file__),
            '_generate_superpixels.py'), 'r') as scriptStream:
        script = scriptStream.read()

    title = 'superpixels v%s generation: %s' % (
        SUPERPIXEL_VERSION, image['name'])
    job = Job.createJob(
        title=title,
        type='isic_archive_superpixels',
        handler='worker_handler',
        kwargs={
            'jobInfo': None,  # will be filled after job is created
            'task': {
                'mode': 'python',
                'script': script,
                'name': title,
                'inputs': [{
                    'id': 'originalFile',
                    'type': 'string',
                    'format': 'text',
                    'target': 'filepath'
                }, {
                    'id': 'segmentation_helpersPath',
                    'type': 'string',
                    'format': 'text',
                }],
                'outputs': [{
                    'id': 'superpixelsEncodedBytes',
                    'type': 'string',
                    'format': 'text',
                    'target': 'memory'
                }]
            },
            'inputs': {
                'originalFile': workerUtils.girderInputSpec(
                    resource=self.originalFile(image),
                    resourceType='file',
                    token=token),
                'segmentation_helpersPath': {
                    'mode': 'inline',
                    'format': 'text',
                    'data': segmentation_helpers.__path__[0]
                }
            },
            'outputs': {
                'superpixelsEncodedBytes': workerUtils.girderOutputSpec(
                    parent=image,
                    token=token,
                    parentType='item',
                    name='%s_superpixels_v%s.png' % (image['name'], SUPERPIXEL_VERSION),
                    reference=''
                )
            },
            'auto_convert': False,
            'validate': False
        },
        user=user,
        public=False,
        save=True  # must save to create an _id for workerUtils.jobInfoSpec
    )
    job['kwargs']['jobInfo'] = workerUtils.jobInfoSpec(
        job,
        Job.createJobToken(job),
        logPrint=True
    )
    job['meta'] = {
        'creator': 'isic_archive',
        'task': 'generateSuperpixels',
        'imageId': image['_id'],
        'imageName': image['name'],
        'superpixelsVersion': SUPERPIXEL_VERSION
    }
    job = Job.save(job)
    Job.scheduleJob(job)
    return job
def scoreSubmission(self, submission, apiUrl):
    """
    Run a Girder Worker job to score a submission.
    """
    folderModel = self.model('folder')
    jobModel = self.model('job', 'jobs')
    phaseModel = self.model('phase', 'covalic')
    settingModel = self.model('setting')
    tokenModel = self.model('token')
    userModel = self.model('user')

    phase = phaseModel.load(submission['phaseId'], force=True)
    folder = folderModel.load(submission['folderId'], force=True)
    user = userModel.load(submission['creatorId'], force=True)

    otherFields = {}
    if 'overallScore' in submission:
        otherFields['rescoring'] = True

    # e.g. 'p4 submission: submission_5bfb55d1076129165ff185ea_1543198304042'
    jobTitle = '%s submission: %s' % (phase['name'], folder['name'])
    job = jobModel.createJob(
        title=jobTitle, type='covalic_score', handler='worker_handler',
        user=user, otherFields=otherFields)

    scoreUserId = settingModel.get(PluginSettings.SCORING_USER_ID)
    if not scoreUserId:
        raise GirderException(
            'No scoring user ID is set. Please set one on the plugin '
            'configuration page.')

    scoreUser = userModel.load(scoreUserId, force=True)
    if not scoreUser:
        raise GirderException('Invalid scoring user setting (%s).' % scoreUserId)

    scoreToken = tokenModel.createToken(user=scoreUser, days=7)
    folderModel.setUserAccess(
        folder, user=scoreUser, level=AccessType.READ, save=True)

    groundTruth = folderModel.load(phase['groundTruthFolderId'], force=True)

    if not phaseModel.hasAccess(phase, user=scoreUser, level=AccessType.ADMIN):
        phaseModel.setUserAccess(
            phase, user=scoreUser, level=AccessType.ADMIN, save=True)

    if not folderModel.hasAccess(
            groundTruth, user=scoreUser, level=AccessType.READ):
        folderModel.setUserAccess(
            groundTruth, user=scoreUser, level=AccessType.READ, save=True)

    task = phase.get('scoreTask', {})
    image = task.get('dockerImage') or 'girder/covalic-metrics:latest'
    containerArgs = task.get('dockerArgs') or [
        '--groundtruth=$input{groundtruth}',
        '--submission=$input{submission}'
    ]

    kwargs = {
        'task': {
            'name': jobTitle,
            'mode': 'docker',
            'docker_image': image,
            'container_args': containerArgs,
            'inputs': [{
                'id': 'submission',
                'type': 'string',
                'format': 'text',
                'target': 'filepath',
                'filename': 'submission.zip'
            }, {
                'id': 'groundtruth',
                'type': 'string',
                'format': 'text',
                'target': 'filepath',
                'filename': 'groundtruth.zip'
            }],
            'outputs': [{
                'id': '_stdout',
                'format': 'string',
                'type': 'string'
            }]
        },
        'inputs': {
            # positional args map to girderInputSpec(resource, resourceType)
            'submission': utils.girderInputSpec(
                folder, 'folder', token=scoreToken),
            'groundtruth': utils.girderInputSpec(
                groundTruth, 'folder', token=scoreToken)
        },
        'outputs': {
            '_stdout': {
                'mode': 'http',
                'method': 'POST',
                'format': 'string',
                'url': '/'.join((apiUrl, 'covalic_submission',
                                 str(submission['_id']), 'score')),
                'headers': {
                    'Girder-Token': scoreToken['_id']
                }
            }
        },
        'jobInfo': utils.jobInfoSpec(job),
        'validate': False,
        'auto_convert': False,
        'cleanup': True
    }
    job['kwargs'] = kwargs
    job['covalicSubmissionId'] = submission['_id']
    job = jobModel.save(job)
    jobModel.scheduleJob(job)

    submission['jobId'] = job['_id']
    return self.save(submission, validate=False)
def _transformInputs(self, inputs, token):
    """
    Validates and sanitizes the input bindings. If they are Girder inputs,
    adds the necessary token info. If the token does not allow DATA_READ,
    or if the user does not have read access to the resource, raises an
    AccessException.
    """
    transformed = {}
    for k, v in six.viewitems(inputs):
        if v['mode'] == 'girder':
            ensureTokenScopes(token, TokenScope.DATA_READ)
            rtype = v.get('resource_type', 'file')
            if rtype not in {'file', 'item', 'folder'}:
                raise ValidationException(
                    'Invalid input resource_type: %s.' % rtype)
            try:
                resource = self.model(rtype).load(
                    v['id'], level=AccessType.READ,
                    user=self.getCurrentUser(), exc=True)
            except ValidationException:
                # If we asked for a file, we may have been given an item,
                # in which case use the first file within it.
                if rtype != 'file':
                    raise
                item = Item().load(
                    v['id'], level=AccessType.READ,
                    user=self.getCurrentUser(), exc=True)
                # Event handlers can add a response which contains
                # {'file': <file document>}. Otherwise, the first file is
                # used.
                event = events.trigger('item_tasks.transforms.file', {
                    'item': item,
                    'input': v,
                    'input_key': k
                })
                if (len(event.responses) and
                        isinstance(event.responses[-1], dict) and
                        'file' in event.responses[-1]):
                    resource = event.responses[-1]['file']
                else:
                    childFiles = list(Item().childFiles(
                        item, limit=1, sort=[('_id', SortDir.ASCENDING)]))
                    if not len(childFiles):
                        raise ValidationException(
                            'Item %s has no files' % v['id'])
                    resource = childFiles[0]
            transformed[k] = utils.girderInputSpec(
                resource, resourceType=rtype, token=token, dataFormat='none')
        elif v['mode'] == 'inline':
            transformed[k] = {'mode': 'inline', 'data': v['data']}
        else:
            raise ValidationException('Invalid input mode: %s.' % v['mode'])
    return transformed
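# For reference, the client-side bindings that _transformInputs consumes
# follow directly from the keys it reads ('mode', 'resource_type', 'id',
# 'data'); the ids here are hypothetical:
inputs = {
    'image': {
        'mode': 'girder',
        'resource_type': 'item',   # 'file' is the default when omitted
        'id': '5a8b1ec5f8a18f0001e4beef',
    },
    'threshold': {
        'mode': 'inline',
        'data': 0.75,
    },
}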