def _class_from_path(self, task_path): """Return the task class given a task's path. :param task_path: string for a class, e.g., mytask.MyTask :return: """ if task_path not in self._task_mapper: task_class = utils.class_import_from_path(task_path) self._task_mapper[task_path] = task_class return self._task_mapper[task_path]
def __init__(self, queue=None, body=''):
    """Constructor.

    :param queue: queue this message is associated with.
    :param body: string for message body.
    """
    message.RawMessage.__init__(self, queue=queue, body=body)

    # Lazily instantiate the task mapper: import each configured task
    # class the first time a message is constructed.
    if not self._task_mapper:
        mapper = {}
        for path_key, class_path in six.iteritems(settings.TASK_MAPPER):
            mapper[path_key] = utils.class_import_from_path(class_path)
        self._task_mapper = mapper
def __init__(self):
    """Initialize a worker instance."""
    self._consumer = consumer.Consumer()

    # Build the queue selector from the configured queue metadata.
    q_cls = utils.class_import_from_path(settings.QUEUE_CLASS)
    q_info = queue_info.QueueInfo(
        config_file=settings.QUEUE_CONFIG,
        sqs_talk=self._consumer,
        queue_cls=q_cls)
    selector_cls = utils.class_import_from_path(settings.QUEUE_SELECTOR)
    self._queue_selector = selector_cls(q_info)

    # The worker will publish permanently failed tasks to a
    # dead-letter-queue.
    self._publisher = self._get_publisher()

    # Total messages processed over the worker's lifetime.
    self._total_messages_processed = 0

    # Per-batch bookkeeping, reset for each batch of messages.
    self._incomplete_messages = []
    self._successful_messages = []
    self._failed_messages = []
    self._permanent_failures = []
    self._batch_stop_time = time.time()
    self._batch_queue = None

    # Monitors whether the worker has been exposed to tasks and may
    # have bloated in memory.
    self._dirty = False

    # Register a cleanup handler for each signal we care about.
    for sig in SIGNALS_TO_HANDLE:
        signal.signal(sig, self._cleanup_worker)

    # Client hook: allow any setup before the worker starts.
    settings.ON_WORKER_STARTUP()
def __init__(self):
    """Initialize a worker instance."""
    self._consumer = consumer.Consumer()

    # Assemble queue metadata and the selection strategy from settings.
    queue_cls = utils.class_import_from_path(settings.QUEUE_CLASS)
    info = queue_info.QueueInfo(
        config_file=settings.QUEUE_CONFIG,
        sqs_talk=self._consumer,
        queue_cls=queue_cls)
    selector_class = utils.class_import_from_path(settings.QUEUE_SELECTOR)
    self._queue_selector = selector_class(info)

    # The worker will publish permanently failed tasks to a
    # dead-letter-queue.
    self._publisher = publisher.Publisher()

    # Total messages processed over the worker's lifetime.
    self._total_messages_processed = 0

    # Per-batch bookkeeping, reset for each batch of messages.
    self._incomplete_messages = []
    self._successful_messages = []
    self._failed_messages = []
    self._permanent_failures = []
    self._batch_stop_time = time.time()
    self._batch_queue = None

    # Monitors whether the worker has been exposed to tasks and may
    # have bloated in memory.
    self._dirty = False

    # Register a cleanup handler for each signal we care about.
    for handled_signal in SIGNALS_TO_HANDLE:
        signal.signal(handled_signal, self._cleanup_worker)

    # Client hook: allow any setup before the worker starts.
    settings.ON_WORKER_STARTUP()
def publish(self, task_class, task_id, payload,
            current_retry_num=None, current_failure_num=None,
            delay_sec=None):
    """Publish the given task type to the queue with the provided payload.

    :param obj task_class: class of the task that we are publishing.
    :param str task_id: unique identifying string for this task.
    :param dict payload: dictionary for the task payload.
    :param int current_retry_num: current task retry count. If 0, this is
        the first attempt to run the task.
    :param int current_failure_num: current task failure count.
    :param int delay_sec: time (in seconds) that a task should stay
        in the queue before being released to consumers.
    :raises InvalidTaskDelayException: if delay_sec exceeds the maximum.
    :raises InvalidTimeLimitTaskException: if the task's time limit is not
        below the queue's visibility timeout.
    :raises: TaskTooChubbyException: This task is outrageously chubby.
        The publisher of the task should handle this exception and
        determine how to proceed.
    """
    if (delay_sec is not None
            and delay_sec > settings.SQS_MAX_TASK_DELAY_SEC):
        raise exceptions.InvalidTaskDelayException(
            'Invalid task delay_sec (%d > %d).' % (
                delay_sec, settings.SQS_MAX_TASK_DELAY_SEC))

    queue_class = utils.class_import_from_path(settings.QUEUE_CLASS)
    q_info = queue_info.QueueInfo(settings.QUEUE_CONFIG, self, queue_class)
    queue_obj = q_info.get_queue(task_class.queue)

    # The task must be able to finish before its message becomes visible
    # to other consumers again.
    if task_class.time_limit >= queue_obj.visibility_timeout_sec:
        raise exceptions.InvalidTimeLimitTaskException(
            'Invalid task time limit: %d >= %d from %s' % (
                task_class.time_limit, queue_obj.visibility_timeout_sec,
                settings.QUEUE_CONFIG))

    sqs_queue = self._get_or_create_queue(queue_obj.name)
    kale_msg = message.KaleMessage(
        task_class=task_class,
        task_id=task_id,
        payload=payload,
        current_retry_num=current_retry_num,
        current_failure_num=current_failure_num)

    # BUG FIX: was `delay_sec or 1`, which imposed a 1-second delay when
    # no delay was requested and silently turned an explicit 0 into 1.
    # SQS's default DelaySeconds is 0, so fall back to 0 instead.
    sqs_queue.send_message(
        MessageBody=kale_msg.encode(),
        DelaySeconds=delay_sec or 0)

    # Lazy %-style args: the string is only built if debug logging is on.
    logger.debug('Published task. Task id: %s; Task name: %s.%s',
                 task_id, task_class.__module__, task_class.__name__)
def __init__(self, queue=None, body=''):
    """Constructor.

    :param queue: queue this message is associated with.
    :param body: string for message body.
    """
    message.RawMessage.__init__(self, queue=queue, body=body)

    # Lazily instantiate the task mapper.
    # BUG FIX: dict.iteritems() does not exist on Python 3 and would
    # raise AttributeError here; .items() works on both Python 2 and 3.
    if not self._task_mapper:
        self._task_mapper = {
            k: utils.class_import_from_path(v)
            for k, v in settings.TASK_MAPPER.items()
        }
def publish(self, task_class, task_id, payload, current_retry_num=None,
            delay_sec=None):
    """Publish the given task type to the queue with the provided payload.

    :param obj task_class: class of the task that we are publishing.
    :param str task_id: unique identifying string for this task.
    :param dict payload: dictionary for the task payload.
    :param int current_retry_num: current task retry count. If 0, this is
        the first attempt to run the task.
    :param int delay_sec: time (in seconds) that a task should stay
        in the queue before being released to consumers.
    :raises InvalidTaskDelayException: if delay_sec exceeds the maximum.
    :raises InvalidTimeLimitTaskException: if the task's time limit is not
        below the queue's visibility timeout.
    :raises: TaskTooChubbyException: This task is outrageously chubby.
        The publisher of the task should handle this exception and
        determine how to proceed.
    """
    # BUG FIX: delay_sec defaults to None, and `None > int` raises
    # TypeError on Python 3 (on Python 2 it silently compared False).
    # Guard with an explicit None check, consistent with the other
    # publish() implementation in this codebase.
    if (delay_sec is not None
            and delay_sec > settings.SQS_MAX_TASK_DELAY_SEC):
        raise exceptions.InvalidTaskDelayException(
            'Invalid task delay_sec (%d > %d).' % (
                delay_sec, settings.SQS_MAX_TASK_DELAY_SEC))

    queue_class = utils.class_import_from_path(settings.QUEUE_CLASS)
    q_info = queue_info.QueueInfo(settings.QUEUE_CONFIG, self, queue_class)
    queue_obj = q_info.get_queue(task_class.queue)

    # The task must be able to finish before its message becomes visible
    # to other consumers again.
    if task_class.time_limit >= queue_obj.visibility_timeout_sec:
        raise exceptions.InvalidTimeLimitTaskException(
            'Invalid task time limit: %d >= %d from %s' % (
                task_class.time_limit, queue_obj.visibility_timeout_sec,
                settings.QUEUE_CONFIG))

    # Renamed local from `queue` to avoid shadowing the stdlib/builtin name.
    sqs_queue = self._get_or_create_queue(queue_obj.name)
    sqs_msg = message.KaleMessage.create_message(
        task_class=task_class,
        task_id=task_id,
        payload=payload,
        queue=sqs_queue,
        current_retry_num=current_retry_num)

    sqs_queue.write(sqs_msg, delay_seconds=delay_sec)

    logger.info('Published task. Task id: %s; Task name: %s' % (
        task_id, '%s.%s' % (task_class.__module__, task_class.__name__)))
def __init__(self, sqs_message_id=None, sqs_receipt_handle=None,
             sqs_queue_name=None, task_class=None, task_name=None,
             task_id=None, payload=None, current_retry_num=None,
             enqueued_time=None, publisher_data=None,
             instantiate_task=False, delete_func=None):
    """Constructor.

    :param task_class: Class of task.
    :param task_name: Name of task. Required if task_class is not set.
    :param task_id: Id of task.
    :param payload: Payload holds the data that the task's run_task
        method will be called with.
    :param current_retry_num: Current task retry. This will be 0 from new
        tasks and will be incremented for each retry.
    :param enqueued_time: Timestamp of when message was queued. If not
        provided then value set from setting's timestamp function.
    :param publisher_data: Str containing information about the publisher.
        If not provided the value from settings used.
    :param instantiate_task: Whether create instance of task_class.
        Default is false.
    :param delete_func: Delete function from the SQS message.
    """
    self._validate_task_payload(payload)

    self.id = sqs_message_id
    self.sqs_queue_name = sqs_queue_name
    self.sqs_receipt_handle = sqs_receipt_handle

    # This represents the path to the task. The consumer will have a
    # dictionary mapping these values to task classes.
    if task_class is None:
        self.task_name = task_name
    else:
        self.task_name = '.'.join(
            [task_class.__module__, task_class.__name__])

    self.task_id = task_id
    self.task_args = payload.get('args')
    self.task_kwargs = payload.get('kwargs')
    self.task_app_data = payload.get('app_data')
    self.task_retry_num = current_retry_num or 0
    self._enqueued_time = enqueued_time or _get_current_timestamp()
    self._publisher_data = publisher_data or _get_publisher_data()

    # Lazily instantiate the task mapper.
    if not self._task_mapper:
        self._task_mapper = {
            k: utils.class_import_from_path(v)
            for k, v in six.iteritems(settings.TASK_MAPPER)}

    # Optionally instantiate the task now.
    if instantiate_task:
        self.task_inst = self._class_from_path(self.task_name)(
            self._get_message_body())

    self.delete_func = delete_func