def create_task_job(job_defaults, sender=None, body=None, exchange=None,
                    routing_key=None, headers=None, properties=None,
                    declare=None, retry_policy=None, **kwargs):
    """Create a girder job model directly for a task about to be published.

    This variant runs inside a girder server process, so the job is created
    through the model layer rather than the REST API.  The ``girder_job_*``
    message headers are consumed (popped), falling back to ``job_defaults``,
    and ``headers['jobInfoSpec']`` is populated so the worker can report
    status back to the new job.

    :param job_defaults: dict of fallback values for the ``girder_job_*``
        headers (title, type, handler, public, other_fields).
    :param body: celery message body; ``body[0]``/``body[1]`` are the task
        args/kwargs, sanitized of Transform objects before storage.
    :param headers: celery message headers; mutated in place.
    :returns: the created job document.
    """
    from girder.utility.model_importer import ModelImporter
    from girder.api.rest import getCurrentUser

    try:
        # girder v2 worker plugin
        from girder.plugins.worker import utils
    except ImportError:
        # girder v3 worker plugin
        from girder_worker.girder_plugin import utils

    job_model = ModelImporter.model('job', 'jobs')
    user = headers.pop('girder_user', getCurrentUser())

    # Sanitize any Transform objects
    task_args = tuple(_walk_obj(body[0], _maybe_model_repr))
    task_kwargs = _walk_obj(body[1], _maybe_model_repr)

    job = job_model.createJob(
        **{
            'title': headers.pop('girder_job_title',
                                 job_defaults.get('girder_job_title', '')),
            'type': headers.pop('girder_job_type',
                                job_defaults.get('girder_job_type', '')),
            'handler': headers.pop('girder_job_handler',
                                   job_defaults.get('girder_job_handler', '')),
            'public': headers.pop('girder_job_public',
                                  job_defaults.get('girder_job_public', '')),
            'user': user,
            'args': task_args,
            'kwargs': task_kwargs,
            # BUG FIX: the fallback default must be a mapping.  With the old
            # default of '' this raised "TypeError: argument after ** must be
            # a mapping" whenever neither the headers nor job_defaults
            # supplied other fields.
            'otherFields': dict(
                celeryTaskId=headers['id'],
                **headers.pop('girder_job_other_fields',
                              job_defaults.get('girder_job_other_fields', {})))
        })

    headers['jobInfoSpec'] = utils.jobInfoSpec(job)
    return job
def _resolve_direct_file_paths(args, kwargs):
    """Collect host file paths for direct-mount girder file transforms.

    Walks the task's positional and keyword arguments looking for
    ``DirectGirderFileIdToVolume`` transforms; every one that resolves to a
    local file path contributes that path to the returned list so it can be
    mounted directly into the task container.

    :returns: list of resolved host paths (possibly empty).
    """
    found_paths = []

    def _collect(item, **_ignored):
        # _walk_obj may pass extra keyword context; we only care about the
        # object itself, which must be returned unchanged.
        if isinstance(item, DirectGirderFileIdToVolume):
            direct_path = item.resolve_direct_file_path()
            if direct_path:
                found_paths.append(direct_path)
        return item

    _walk_obj(args, _collect)
    _walk_obj(kwargs, _collect)

    return found_paths
def __call__(self, *args, **kwargs):
    """Run the docker task with volume transforms and guaranteed cleanup.

    Sets up a per-request default temporary volume, transforms any Volume
    objects passed via ``kwargs['volumes']`` into the dict form docker-py
    expects, runs the task, and then removes every temporary volume
    directory that was actually created on the host.
    """
    default_temp_volume = _RequestDefaultTemporaryVolume()
    self.request._default_temp_volume = default_temp_volume

    volumes = kwargs.setdefault('volumes', {})
    # If we have a list of volumes, the user provided a list of Volume
    # objects, and we need to transform them.
    temp_volumes = []
    if isinstance(volumes, list):
        # See if we have been passed any TemporaryVolume instances.
        for v in volumes:
            if isinstance(v, TemporaryVolume):
                temp_volumes.append(v)

        # First call the transform method; this replaces default temp
        # volumes with the instance associated with this task created
        # above, i.e. any reference to TemporaryVolume.default.
        _walk_obj(volumes, self._maybe_transform_argument)

        # Now convert them to JSON
        def _json(volume):
            return volume._repr_json_()

        volumes = _walk_obj(volumes, _json)
        # We then need to merge them into a single dict so it is ready
        # for docker-py.
        volumes = {k: v for volume in volumes for k, v in volume.items()}
        kwargs['volumes'] = volumes

    volumes.update(default_temp_volume._repr_json_())

    try:
        super(DockerTask, self).__call__(*args, **kwargs)
    finally:
        # BUG FIX: cleanup previously ran only on success, leaking host
        # temp directories whenever the task raised.  Running it in a
        # finally block guarantees removal either way.
        # Set the permission to allow cleanup of temp directories
        temp_volumes = [v for v in temp_volumes if os.path.exists(v.host_path)]
        to_chmod = temp_volumes[:]
        # If our default_temp_volume instance has been transformed then we
        # know it has been used and we have to clean it up.
        if default_temp_volume._transformed:
            to_chmod.append(default_temp_volume)
            temp_volumes.append(default_temp_volume)

        if len(to_chmod) > 0:
            utils.chmod_writable([v.host_path for v in to_chmod])

        for v in temp_volumes:
            shutil.rmtree(v.host_path)
def __call__(self, *args, **kwargs):
    """Transform arguments, execute the task, then transform its results.

    Transform objects in ``args``/``kwargs`` are resolved before the task
    body runs.  If the request carries ``girder_result_hooks``, the return
    value is passed through the matching result transforms (element-wise
    for tuple results, as index 0 otherwise).  On failure every result
    hook's ``exception()`` handler fires before the error is re-raised;
    argument and hook cleanup always runs in the ``finally`` block.
    """
    try:
        transformed_args = _walk_obj(args, self._maybe_transform_argument)
        transformed_kwargs = _walk_obj(kwargs, self._maybe_transform_argument)
        outcome = super(Task, self).__call__(
            *transformed_args, **transformed_kwargs)

        if hasattr(self.request, 'girder_result_hooks'):
            if not isinstance(outcome, tuple):
                # A single (non-tuple) result is transformed as index 0.
                outcome = self._maybe_transform_result(0, outcome)
            else:
                outcome = tuple(
                    self._maybe_transform_result(index, value)
                    for index, value in enumerate(outcome))
        return outcome
    except Exception:
        # Give every result hook a chance to react to the failure before
        # propagating the original exception.
        if hasattr(self.request, 'girder_result_hooks'):
            for hook in self.request.girder_result_hooks:
                hook.exception()
        raise
    finally:
        _walk_obj(args, self._maybe_cleanup)
        _walk_obj(kwargs, self._maybe_cleanup)
        if hasattr(self.request, 'girder_result_hooks'):
            for hook in self.request.girder_result_hooks:
                self._maybe_cleanup(hook)
def __call__(self, *args, **kwargs):
    """Transform arguments, execute the task, then transform its results.

    Transform objects in ``args``/``kwargs`` are resolved before the task
    body runs.  If the request carries ``girder_result_hooks``, the return
    value is passed through the matching result transforms.  Argument
    cleanup always runs in the ``finally`` block.
    """
    try:
        _t_args = _walk_obj(args, self._maybe_transform_argument)
        _t_kwargs = _walk_obj(kwargs, self._maybe_transform_argument)
        results = super(Task, self).__call__(*_t_args, **_t_kwargs)
        if hasattr(self.request, 'girder_result_hooks'):
            # BUG FIX: a non-tuple result used to be wrapped in a 1-tuple
            # and returned as a tuple, silently changing the task's return
            # type.  Transform a scalar result in place instead, matching
            # the other __call__ implementation in this file.
            if isinstance(results, tuple):
                results = tuple(
                    self._maybe_transform_result(i, r)
                    for i, r in enumerate(results))
            else:
                results = self._maybe_transform_result(0, results)
        return results
    finally:
        _walk_obj(args, self._maybe_cleanup)
        _walk_obj(kwargs, self._maybe_cleanup)
def girder_before_task_publish(sender=None, body=None, exchange=None,
                               routing_key=None, headers=None,
                               properties=None, declare=None,
                               retry_policy=None, **kwargs):
    """Celery ``before_task_publish`` handler that attaches girder context.

    When the producer is a girder REST process (detected by whether the
    girder imports succeed), this creates a job model directly and stores
    its ``jobInfoSpec`` in the message headers, and also fills in
    ``girder_api_url`` and ``girder_client_token`` when missing.  Result
    hooks are jsonpickle-flattened for the wire, and all reserved
    ``girder_*`` options are stripped from the headers before publishing.

    :param body: celery message body; ``body[0]``/``body[1]`` are the task
        args/kwargs.
    :param headers: celery message headers; mutated in place.
    """
    try:
        if 'jobInfoSpec' not in headers:
            try:
                # Note: If we can import these objects from the girder packages we
                # assume our producer is in a girder REST request. This allows
                # us to create the job model's directly. Otherwise there will be an
                # ImportError and we can create the job via a REST request using
                # the jobInfoSpec in headers.
                from girder.utility.model_importer import ModelImporter
                from girder.plugins.worker import utils
                from girder.api.rest import getCurrentUser

                job_model = ModelImporter.model('job', 'jobs')

                user = headers.pop('girder_user', getCurrentUser())

                # Sanitize any Transform objects
                task_args = tuple(_walk_obj(body[0], _maybe_model_repr))
                task_kwargs = _walk_obj(body[1], _maybe_model_repr)

                # Header values win; Task class attributes supply defaults.
                job = job_model.createJob(
                    **{'title': headers.pop('girder_job_title',
                                            Task._girder_job_title),
                       'type': headers.pop('girder_job_type',
                                           Task._girder_job_type),
                       'handler': headers.pop('girder_job_handler',
                                              Task._girder_job_handler),
                       'public': headers.pop('girder_job_public',
                                             Task._girder_job_public),
                       'user': user,
                       'args': task_args,
                       'kwargs': task_kwargs,
                       'otherFields': dict(
                           celeryTaskId=headers['id'],
                           **headers.pop('girder_job_other_fields',
                                         Task._girder_job_other_fields))})

                headers['jobInfoSpec'] = utils.jobInfoSpec(job)

            except ImportError:
                # TODO: Check for self.job_manager to see if we have
                # tokens etc to contact girder and create a job model
                # we may be in a chain or a chord or some-such
                pass

        if 'girder_api_url' not in headers:
            try:
                from girder.plugins.worker import utils
                headers['girder_api_url'] = utils.getWorkerApiUrl()
            except ImportError:
                # TODO: handle situation where girder_worker is producing
                # the message Note - this may not come up at all
                # depending on how we pass girder_api_url through to
                # the next task (e.g. in the context of chaining
                # events)
                pass

        if 'girder_client_token' not in headers:
            try:
                from girder.utility.model_importer import ModelImporter
                # NOTE(review): createToken() is called with no user/scope
                # arguments here -- confirm an anonymous token is intended.
                headers['girder_client_token'] = \
                    ModelImporter.model('token').createToken()
            except ImportError:
                # TODO: handle situation where girder_worker is producing
                # the message Note - this may not come up at all
                # depending on how we pass girder_token through to
                # the next task (e.g. in the context of chaining
                # events)
                pass

        if 'girder_result_hooks' in headers:
            # Celery task headers are not automatically serialized by celery
            # before being passed off to ampq for byte packing. We will have
            # to do that here.
            p = jsonpickle.pickler.Pickler()
            headers['girder_result_hooks'] = \
                [p.flatten(grh) for grh in headers['girder_result_hooks']]

        # Finally, remove all reserved_options from headers
        for key in Task.reserved_options:
            headers.pop(key, None)
    except Exception:
        logger.exception('An error occurred in girder_before_task_publish.')
        raise
def create_task_job(job_defaults, sender=None, body=None, exchange=None,
                    routing_key=None, headers=None, properties=None,
                    declare=None, retry_policy=None, **kwargs):
    """Create a girder job via REST for a task published from another task.

    Uses the parent task's girder context (``girder_api_url`` and
    ``girder_client_token`` on its request) to POST a new job through the
    girder REST API, then stores the returned ``jobInfoSpec`` in the
    outgoing message headers.  If any required piece of context is missing,
    the job is simply not created (logged as a warning) and publishing
    continues.

    :param job_defaults: dict of fallback values for the ``girder_job_*``
        headers (title, type, handler, public, other_fields).
    :param headers: celery message headers; mutated in place.
    """
    parent_task = current_app.current_task
    try:
        # Validate everything we need to reach girder and to link the new
        # job to both this task and its parent.
        if parent_task is None:
            raise MissingJobArguments('Parent task is None')
        if parent_task.request is None:
            raise MissingJobArguments("Parent task's request is None")
        if not hasattr(parent_task.request, 'girder_api_url'):
            raise MissingJobArguments(
                "Parent task's request does not contain girder_api_url")
        if not hasattr(parent_task.request, 'girder_client_token'):
            raise MissingJobArguments(
                "Parent task's request does not contain girder_client_token")
        if not hasattr(parent_task.request, 'id'):
            raise MissingJobArguments(
                "Parent task's request does not contain id")
        if 'id' not in headers:
            raise MissingJobArguments('id is not in headers')

        gc = GirderClient(apiUrl=parent_task.request.girder_api_url)
        gc.token = parent_task.request.girder_client_token

        # Sanitize any Transform objects
        task_args = tuple(_walk_obj(body[0], _maybe_model_repr))
        task_kwargs = _walk_obj(body[1], _maybe_model_repr)
        parameters = {
            'title': headers.pop('girder_job_title',
                                 job_defaults.get('girder_job_title', '')),
            'type': headers.pop('girder_job_type',
                                job_defaults.get('girder_job_type', '')),
            'handler': headers.pop('girder_job_handler',
                                   job_defaults.get('girder_job_handler', '')),
            'public': headers.pop('girder_job_public',
                                  job_defaults.get('girder_job_public', '')),
            'args': json.dumps(task_args),
            # NOTE(review): kwargs is sent un-serialized while args and
            # otherFields are json.dumps'd -- confirm the job endpoint
            # accepts a raw mapping here before changing it.
            'kwargs': task_kwargs,
            # BUG FIX: the fallback default must be a mapping.  With the
            # old default of '' this raised "TypeError: argument after **
            # must be a mapping" whenever no other fields were supplied.
            'otherFields': json.dumps(
                dict(celeryTaskId=headers['id'],
                     celeryParentTaskId=parent_task.request.id,
                     **headers.pop(
                         'girder_job_other_fields',
                         job_defaults.get('girder_job_other_fields', {}))))
        }

        try:
            response = gc.post('job', parameters=parameters, jsonResp=False)
            if response.ok:
                headers['jobInfoSpec'] = response.json().get('jobInfoSpec')
        except requests.exceptions.RequestException as e:
            # Logger.warn is a deprecated alias; use warning().
            logger.warning('Failed to post job: {}'.format(e))
    except MissingJobArguments as e:
        logger.warning('Girder job not created: {}'.format(str(e)))