def girder_before_task_publish(sender=None, body=None, exchange=None,
                               routing_key=None, headers=None, properties=None,
                               declare=None, retry_policy=None, **kwargs):
    """Attach Girder job and auth metadata to an outgoing task's headers.

    Unless the headers already carry them, this creates a task job
    (``jobInfoSpec``), attaches the Girder API URL and client token,
    serializes any ``girder_result_hooks`` with jsonpickle, and finally
    strips all reserved options from the headers before publish.
    """
    if is_builtin_celery_task(sender):
        return

    # Every context helper below takes the same set of signal arguments,
    # so collect them once and forward them uniformly.
    signal_kwargs = dict(
        sender=sender, body=body, exchange=exchange, routing_key=routing_key,
        headers=headers, properties=properties, declare=declare,
        retry_policy=retry_policy, **kwargs)

    job = None
    try:
        context = get_context()

        if 'jobInfoSpec' not in headers:
            job = context.create_task_job(
                Task.girder_job_defaults(), **signal_kwargs)

        if 'girder_api_url' not in headers:
            context.attach_girder_api_url(**signal_kwargs)

        if 'girder_client_token' not in headers:
            context.attach_girder_client_token(**signal_kwargs)

        if 'girder_result_hooks' in headers:
            # Bind the freshly created job (if any) to each result transform.
            if job is not None:
                for result_hook in headers['girder_result_hooks']:
                    if isinstance(result_hook, ResultTransform):
                        result_hook.job = job

            # Celery task headers are not automatically serialized by celery
            # before being passed off to ampq for byte packing. We will have
            # to do that here.
            pickler = jsonpickle.pickler.Pickler()
            headers['girder_result_hooks'] = [
                pickler.flatten(hook)
                for hook in headers['girder_result_hooks']]

        # Finally, remove all reserved_options from headers
        for reserved_key in Task.reserved_options:
            headers.pop(reserved_key, None)
    except Exception:
        logger.exception('An error occurred in girder_before_task_publish.')
        raise
def gw_task_failure(sender=None, exception=None, traceback=None, **rest):
    """On task failure, write the formatted traceback to the job log and
    move the job to the ERROR state."""
    if is_builtin_celery_task(sender.name):
        return

    try:
        formatted_tb = ''.join(tb.format_tb(traceback))
        msg = '%s: %s\n%s' % (
            exception.__class__.__name__, exception, formatted_tb)
        sender.job_manager.write(msg)
        _update_status(sender, JobStatus.ERROR)
    except AttributeError:
        # Best effort: the sender may not have a job_manager attached.
        pass
def apply_async(self, args=None, kwargs=None, task_id=None, producer=None,
                link=None, link_error=None, shadow=None, **options):
    """Dispatch the task, routing Girder job metadata through the headers.

    Built-in celery tasks are forwarded untouched; everything else gets
    the girder job defaults, any reserved headers/options pulled out of
    ``kwargs``/``options``, and the ``girder_io`` serializer.
    """
    if is_builtin_celery_task(self.name):
        return super(Task, self).apply_async(
            args=args, kwargs=kwargs, task_id=task_id, producer=producer,
            link=link, link_error=link_error, shadow=shadow, **options)

    # Pass girder related job information through to the signals by adding
    # this information to options['headers']. These defaults come from the
    # class attributes, or values defined by the girder_job() decorator.
    headers = {
        'girder_job_title': self._girder_job_title,
        'girder_job_type': self._girder_job_type,
        'girder_job_public': self._girder_job_public,
        'girder_job_handler': self._girder_job_handler,
        'girder_job_other_fields': self._girder_job_other_fields,
    }

    # Certain keys may show up in either kwargs (e.g. via
    # .delay(girder_token='foo')) or in options (e.g.
    # .apply_async(args=(), kwargs={}, girder_token='foo')). For those
    # special headers, pop them out of kwargs or options and put them in
    # headers so they can be picked up by the before_task_publish signal.
    # A value found in options wins over the same key in kwargs.
    for key in self.reserved_headers + self.reserved_options:
        if kwargs is not None and key in kwargs:
            headers[key] = kwargs.pop(key)
        if key in options:
            headers[key] = options.pop(key)

    # Merge into any caller-supplied headers, treating an explicit None
    # (or a missing key) as "no headers yet".
    if options.get('headers') is None:
        options['headers'] = headers
    else:
        options['headers'].update(headers)

    return super(Task, self).apply_async(
        args=args, kwargs=kwargs, task_id=task_id, producer=producer,
        link=link, link_error=link_error, shadow=shadow,
        serializer='girder_io', **options)
def gw_task_prerun(task=None, sender=None, task_id=None,
                   args=None, kwargs=None, **rest):
    """Deserialize the jobInfoSpec passed in through the headers.

    This provides a JobManager class as an attribute of the task before
    task execution.  Decorated functions may bind to their task and have
    access to the job_manager for logging and updating their status in
    girder.
    """
    if is_builtin_celery_task(sender.name):
        return

    try:
        task.job_manager = _job_manager(task.request, task.request.headers)
        _update_status(task, JobStatus.RUNNING)

    except JobSpecNotFound:
        task.job_manager = None
        # logger.warn is a deprecated alias of logger.warning
        logger.warning('No jobInfoSpec. Setting job_manager to None.')

    except StateTransitionException:
        # Fetch the current status of the job
        status = task.job_manager.refreshStatus()
        # If we are canceling we want to stay in that state
        if status != JobStatus.CANCELING:
            raise

    try:
        task.girder_client = GirderClient(apiUrl=task.request.girder_api_url)
        task.girder_client.token = task.request.girder_client_token
    except AttributeError:
        # No girder API url/token on the request; proceed without a client.
        task.girder_client = None

    # Deserialize girder_result_hooks if they exist
    if hasattr(task.request, 'girder_result_hooks'):
        u = jsonpickle.unpickler.Unpickler()
        task.request.girder_result_hooks = \
            [u.restore(grh) for grh in task.request.girder_result_hooks]
def gw_task_success(sender=None, **rest):
    """On task success, mark the job SUCCESS — or CANCELED if the task was
    revoked (directly, or while already in the CANCELING state)."""
    if is_builtin_celery_task(sender.name):
        return

    try:
        # Tasks revoked directly end up CANCELED rather than SUCCESS.
        final_status = (JobStatus.CANCELED if is_revoked(sender)
                        else JobStatus.SUCCESS)
        _update_status(sender, final_status)
    except AttributeError:
        pass
    except StateTransitionException:
        # Fetch the current status of the job
        status = sender.job_manager.refreshStatus()
        # If we are in CANCELING move to CANCELED
        if status == JobStatus.CANCELING or is_revoked(sender):
            _update_status(sender, JobStatus.CANCELED)
        else:
            raise