def _execute_task(task_id):
    """Look up the Task row for *task_id*; log and return on any failure.

    Any lookup error (missing row, bad pk, database error) is deliberately
    swallowed so one bad task id cannot kill the worker loop.
    """
    try:
        # Side effect we must keep: the DB lookup itself. 'task' is unused
        # in this visible span — NOTE(review): chunk may be truncated.
        task = Task.objects.get(pk=task_id)
    except Exception as e:  # 'except X, e' is Python-2-only; 'as' works on 2.6+ and 3.x
        # Lazy %-args: message is only formatted when INFO is enabled;
        # rendered output is identical to the original '%' formatting.
        logger.info('Could not get task with id %s:\n%s', task_id, e)
        return
def _handle(self):
    """Run the worker's receive loop: block forever pulling task ids
    off the configured ZeroMQ socket.

    NOTE(review): this span looks truncated — the 'while True' body
    presumably continues (dispatching the received task) beyond the
    visible chunk.
    """
    logger.info("Worker listening on %s." % (settings.ZTASK_WORKER_URL,))
    # PULL socket: this worker is the downstream end of a PUSH/PULL pipeline.
    socket = context.socket(PULL)
    socket.connect(settings.ZTASK_WORKER_URL)
    while True:
        # recv_pyobj unpickles the incoming message — safe only if the
        # sending side is trusted (pickle on untrusted input is unsafe).
        # Presumably the payload is a Task pk; verify against the sender.
        task_id = socket.recv_pyobj()
        logger.info('Worker received task (%s)' % (str(task_id),))
# use a zmq queue instead of the database for the work pipeline _func_cache = {} def _execute_task(task_id): try: task = Task.objects.get(pk=task_id) except Exception, e: logger.info('Could not get task with id %s:\n%s' % (task_id, e)) return function_name = task.function_name args = pickle.loads(str(task.args)) kwargs = pickle.loads(str(task.kwargs)) logger.info('Executing task function (%s)' % function_name) try: function = _func_cache[function_name] except KeyError: parts = function_name.split('.') module_name = '.'.join(parts[:-1]) member_name = parts[-1] if not module_name in sys.modules: __import__(module_name) function = getattr(sys.modules[module_name], member_name) _func_cache[function_name] = function try: function(*args, **kwargs) except Exception, e: