def testWorkerStatusEndpoint(server, models):
    # Create a job to be handled by the worker plugin
    job = Job().createJob(
        title='title', type='foo', handler='worker_handler',
        user=models['admin'], public=False, args=(), kwargs={})

    job['kwargs'] = {
        'jobInfo': utils.jobInfoSpec(job),
        'inputs': [
            utils.girderInputSpec(models['adminFolder'], resourceType='folder')
        ],
        'outputs': [
            utils.girderOutputSpec(models['adminFolder'], token=models['adminToken'])
        ]
    }
    job = Job().save(job)
    assert job['status'] == JobStatus.INACTIVE

    # Schedule the job
    with mock.patch('celery.Celery') as celeryMock:
        instance = celeryMock.return_value
        instance.send_task.return_value = FakeAsyncResult()

        Job().scheduleJob(job)

    # Call the worker status endpoint
    resp = server.request('/worker/status', method='GET', user=models['admin'])
    assertStatusOk(resp)

    for key in ['report', 'stats', 'ping', 'active', 'reserved']:
        assert key in resp.json
def executeTask(self, item, jobTitle, includeJobInfo, inputs, outputs):
    """
    Run the task defined on an item: either dispatch it directly as a Celery
    task, or build and schedule a girder_worker job with the given input and
    output bindings.
    """
    user = self.getCurrentUser()

    if jobTitle is None:
        jobTitle = item['name']

    task, handler = self._validateTask(item)

    if task.get('mode') == 'girder_worker':
        return runCeleryTask(item['meta']['itemTaskImport'], inputs)

    jobModel = Job()
    job = jobModel.createJob(
        title=jobTitle, type='item_task', handler=handler, user=user)

    # If this is a user auth token, we make an IO-enabled token
    token = self.getCurrentToken()
    tokenModel = Token()
    if tokenModel.hasScope(token, TokenScope.USER_AUTH):
        token = tokenModel.createToken(
            user=user, days=7, scope=(TokenScope.DATA_READ, TokenScope.DATA_WRITE))
        job['itemTaskTempToken'] = token['_id']

    token = tokenModel.addScope(token, 'item_tasks.job_write:%s' % job['_id'])

    job.update({
        'itemTaskId': item['_id'],
        'itemTaskBindings': {
            'inputs': inputs,
            'outputs': outputs
        },
        'kwargs': {
            'task': task,
            'inputs': self._transformInputs(inputs, token),
            'outputs': self._transformOutputs(outputs, token, job, task, item['_id']),
            'validate': False,
            'auto_convert': False,
            'cleanup': True
        }
    })

    if includeJobInfo:
        job['kwargs']['jobInfo'] = utils.jobInfoSpec(job)

    if 'itemTaskCeleryQueue' in item.get('meta', {}):
        job['celeryQueue'] = item['meta']['itemTaskCeleryQueue']

    job = jobModel.save(job)
    jobModel.scheduleJob(job)

    return job
def testWorkerCancel(models):
    jobModel = Job()
    job = jobModel.createJob(
        title='title', type='foo', handler='worker_handler',
        user=models['admin'], public=False, args=(), kwargs={})

    job['kwargs'] = {
        'jobInfo': utils.jobInfoSpec(job),
        'inputs': [
            utils.girderInputSpec(models['adminFolder'], resourceType='folder')
        ],
        'outputs': [
            utils.girderOutputSpec(models['adminFolder'], token=models['adminToken'])
        ]
    }
    job = jobModel.save(job)
    assert job['status'] == JobStatus.INACTIVE

    # Schedule the job, make sure it is sent to celery
    with mock.patch('celery.Celery') as celeryMock, \
            mock.patch('girder_worker.girder_plugin.event_handlers.AsyncResult') as asyncResult:
        instance = celeryMock.return_value
        instance.send_task.return_value = FakeAsyncResult()

        jobModel.scheduleJob(job)

        jobModel.cancelJob(job)

        asyncResult.assert_called_with('fake_id', app=mock.ANY)
        # Check we called revoke
        asyncResult.return_value.revoke.assert_called_once()

        job = jobModel.load(job['_id'], force=True)
        assert job['status'] == CustomJobStatus.CANCELING
def runSlicerCliTasksDescriptionForFolder(self, folder, image, args, pullImage, params):
    """
    Create and schedule a job that runs the docker image with --xml and POSTs
    the resulting Slicer CLI spec back to this folder's
    item_task_slicer_cli_xml endpoint.
    """
    jobModel = Job()
    token = Token().createToken(
        days=3, scope='item_task.set_task_spec.%s' % folder['_id'],
        user=self.getCurrentUser())
    job = jobModel.createJob(
        title='Read docker task specs: %s' % image,
        type='folder.item_task_slicer_cli_description',
        handler='worker_handler',
        user=self.getCurrentUser())

    if args[-1:] == ['--xml']:
        args = args[:-1]

    jobOptions = {
        'itemTaskId': folder['_id'],
        'kwargs': {
            'task': {
                'mode': 'docker',
                'docker_image': image,
                'container_args': args + ['--xml'],
                'pull_image': pullImage,
                'outputs': [{
                    'id': '_stdout',
                    'format': 'text'
                }],
            },
            'outputs': {
                '_stdout': {
                    'mode': 'http',
                    'method': 'POST',
                    'format': 'text',
                    'url': '/'.join((utils.getWorkerApiUrl(), 'folder', str(folder['_id']),
                                     'item_task_slicer_cli_xml')),
                    'headers': {'Girder-Token': token['_id']},
                    'params': {
                        'image': image,
                        'args': json.dumps(args),
                        'pullImage': pullImage
                    }
                }
            },
            'jobInfo': utils.jobInfoSpec(job),
            'validate': False,
            'auto_convert': False
        }
    }
    job.update(jobOptions)

    job = jobModel.save(job)
    jobModel.scheduleJob(job)
    return job
def testWorker(server, models):
    # Test the settings
    resp = server.request('/system/setting', method='PUT', params={
        'list': json.dumps([{
            'key': PluginSettings.BROKER,
            'value': 'amqp://[email protected]'
        }, {
            'key': PluginSettings.BACKEND,
            'value': 'amqp://[email protected]'
        }])
    }, user=models['admin'])
    assertStatusOk(resp)

    # Create a job to be handled by the worker plugin
    jobModel = Job()
    job = jobModel.createJob(
        title='title', type='foo', handler='worker_handler',
        user=models['admin'], public=False, args=(), kwargs={})

    job['kwargs'] = {
        'jobInfo': utils.jobInfoSpec(job),
        'inputs': [
            utils.girderInputSpec(models['adminFolder'], resourceType='folder')
        ],
        'outputs': [
            utils.girderOutputSpec(models['adminFolder'], token=models['adminToken'])
        ]
    }
    job = jobModel.save(job)
    assert job['status'] == JobStatus.INACTIVE

    # Schedule the job, make sure it is sent to celery
    with mock.patch('celery.Celery') as celeryMock:
        instance = celeryMock.return_value
        instance.send_task.return_value = FakeAsyncResult()

        jobModel.scheduleJob(job)

        # Make sure we sent the job to celery
        assert len(celeryMock.mock_calls) == 2
        assert celeryMock.mock_calls[0][1] == ('girder_worker',)
        assert celeryMock.mock_calls[0][2] == {
            'broker': 'amqp://[email protected]',
            'backend': 'amqp://[email protected]'
        }

        sendTaskCalls = celeryMock.return_value.send_task.mock_calls
        assert len(sendTaskCalls) == 1
        assert sendTaskCalls[0][1] == (
            'girder_worker.run', job['args'], job['kwargs'])

        assert 'headers' in sendTaskCalls[0][2]
        assert 'jobInfoSpec' in sendTaskCalls[0][2]['headers']

        # Make sure we got and saved the celery task id
        job = jobModel.load(job['_id'], force=True)
        assert job['celeryTaskId'] == 'fake_id'
        assert job['status'] == JobStatus.QUEUED
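# The mocked tests above return FakeAsyncResult() from Celery's send_task and then expect
# the scheduled job to record 'fake_id' as its celeryTaskId. FakeAsyncResult itself is not
# shown in this section; a minimal sketch consistent with those assertions (an assumed
# stand-in, not necessarily the project's actual test fixture) could be:
class FakeAsyncResult(object):
    def __init__(self):
        # Expose the task id attribute the scheduling handler is expected to read;
        # 'fake_id' matches the value the surrounding tests assert on.
        self.task_id = 'fake_id'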
def testWorkerDifferentTask(server, models):
    # Test the settings
    resp = server.request('/system/setting', method='PUT', params={
        'key': PluginSettings.API_URL,
        'value': 'bad value'
    }, user=models['admin'])
    assertStatus(resp, 400)
    assert resp.json['message'] == 'API URL must start with http:// or https://.'

    resp = server.request('/system/setting', method='PUT', params={
        'list': json.dumps([{
            'key': PluginSettings.BROKER,
            'value': 'amqp://[email protected]'
        }, {
            'key': PluginSettings.BACKEND,
            'value': 'amqp://[email protected]'
        }])
    }, user=models['admin'])
    assertStatusOk(resp)

    # Create a job to be handled by the worker plugin
    jobModel = Job()
    job = jobModel.createJob(
        title='title', type='foo', handler='worker_handler',
        user=models['admin'], public=False, args=(), kwargs={},
        otherFields={
            'celeryTaskName': 'some_other.task',
            'celeryQueue': 'my_other_q'
        })

    job['kwargs'] = {
        'jobInfo': utils.jobInfoSpec(job),
        'inputs': [
            utils.girderInputSpec(models['adminFolder'], resourceType='folder')
        ],
        'outputs': [
            utils.girderOutputSpec(models['adminFolder'], token=models['adminToken'])
        ]
    }
    job = jobModel.save(job)

    # Schedule the job, make sure it is sent to celery
    app = celery.getCeleryApp()
    with mock.patch.object(app, 'send_task') as sendTask:
        sendTask.return_value = FakeAsyncResult()

        jobModel.scheduleJob(job)

        sendTaskCalls = sendTask.mock_calls
        assert len(sendTaskCalls) == 1
        assert sendTaskCalls[0][1] == (
            'some_other.task', job['args'], job['kwargs'])
        assert 'queue' in sendTaskCalls[0][2]
        assert sendTaskCalls[0][2]['queue'] == 'my_other_q'
def runJsonTasksDescriptionForItem(self, item, image, taskName, setName, setDescription,
                                   pullImage, params):
    """
    Create and schedule a job that runs the docker image and PUTs the JSON task
    specs captured from its stdout back to the item's item_task_json_specs
    endpoint.
    """
    if 'meta' not in item:
        item['meta'] = {}

    if image is None:
        image = item.get('meta', {}).get('itemTaskSpec', {}).get('docker_image')

    if not image:
        raise RestException(
            'You must pass an image parameter, or set the itemTaskSpec.docker_image '
            'field of the item.')

    jobModel = Job()
    token = Token().createToken(
        days=3, scope='item_task.set_task_spec.%s' % item['_id'],
        user=self.getCurrentUser())
    job = jobModel.createJob(
        title='Read docker task specs: %s' % image,
        type='item.item_task_json_description',
        handler='worker_handler',
        user=self.getCurrentUser())

    jobOptions = {
        'itemTaskId': item['_id'],
        'kwargs': {
            'task': {
                'mode': 'docker',
                'docker_image': image,
                'container_args': [],
                'pull_image': pullImage,
                'outputs': [{
                    'id': '_stdout',
                    'format': 'text'
                }],
            },
            'outputs': {
                '_stdout': {
                    'mode': 'http',
                    'method': 'PUT',
                    'format': 'text',
                    'url': '/'.join((utils.getWorkerApiUrl(), 'item', str(item['_id']),
                                     'item_task_json_specs')),
                    'headers': {'Girder-Token': token['_id']},
                    'params': {
                        'image': image,
                        'taskName': taskName,
                        'setName': setName,
                        'setDescription': setDescription,
                        'pullImage': pullImage
                    }
                }
            },
            'jobInfo': utils.jobInfoSpec(job),
            'validate': False,
            'auto_convert': False
        }
    }
    job.update(jobOptions)

    job = jobModel.save(job)
    jobModel.scheduleJob(job)
    return job
def scoreSubmission(self, submission, apiUrl):
    """
    Run a Girder Worker job to score a submission.
    """
    phase = Phase().load(submission['phaseId'], force=True)
    folder = Folder().load(submission['folderId'], force=True)
    user = User().load(submission['creatorId'], force=True)

    otherFields = {}
    if 'overallScore' in submission:
        otherFields['rescoring'] = True

    jobTitle = '%s submission: %s' % (phase['name'], folder['name'])
    job = Job().createJob(
        title=jobTitle, type='covalic_score', handler='worker_handler',
        user=user, otherFields=otherFields)

    scoreUserId = Setting().get(PluginSettings.SCORING_USER_ID)
    if not scoreUserId:
        raise GirderException(
            'No scoring user ID is set. Please set one on the plugin configuration page.')

    scoreUser = User().load(scoreUserId, force=True)
    if not scoreUser:
        raise GirderException('Invalid scoring user setting (%s).' % scoreUserId)

    scoreToken = Token().createToken(user=scoreUser, days=7)
    Folder().setUserAccess(
        folder, user=scoreUser, level=AccessType.READ, save=True)

    groundTruth = Folder().load(phase['groundTruthFolderId'], force=True)

    if not Phase().hasAccess(phase, user=scoreUser, level=AccessType.ADMIN):
        Phase().setUserAccess(
            phase, user=scoreUser, level=AccessType.ADMIN, save=True)

    if not Folder().hasAccess(groundTruth, user=scoreUser, level=AccessType.READ):
        Folder().setUserAccess(
            groundTruth, user=scoreUser, level=AccessType.READ, save=True)

    task = phase.get('scoreTask', {})
    image = task.get('dockerImage') or 'girder/covalic-metrics:latest'
    containerArgs = task.get('dockerArgs') or [
        '--groundtruth=$input{groundtruth}',
        '--submission=$input{submission}'
    ]

    kwargs = {
        'task': {
            'name': jobTitle,
            'mode': 'docker',
            'docker_image': image,
            'container_args': containerArgs,
            'inputs': [{
                'id': 'submission',
                'type': 'string',
                'format': 'text',
                'target': 'filepath',
                'filename': 'submission.zip'
            }, {
                'id': 'groundtruth',
                'type': 'string',
                'format': 'text',
                'target': 'filepath',
                'filename': 'groundtruth.zip'
            }],
            'outputs': [{
                'id': '_stdout',
                'format': 'string',
                'type': 'string'
            }]
        },
        'inputs': {
            'submission': utils.girderInputSpec(
                folder, 'folder', token=scoreToken),
            'groundtruth': utils.girderInputSpec(
                groundTruth, 'folder', token=scoreToken)
        },
        'outputs': {
            '_stdout': {
                'mode': 'http',
                'method': 'POST',
                'format': 'string',
                'url': '/'.join((apiUrl, 'covalic_submission',
                                 str(submission['_id']), 'score')),
                'headers': {'Girder-Token': scoreToken['_id']}
            }
        },
        'jobInfo': utils.jobInfoSpec(job),
        'validate': False,
        'auto_convert': False,
        'cleanup': True
    }
    job['kwargs'] = kwargs
    job['covalicSubmissionId'] = submission['_id']
    job = Job().save(job)
    Job().scheduleJob(job)

    submission['jobId'] = job['_id']
    return self.save(submission, validate=False)