def EnqueueTasks(tasks, task_tag):
  """Enqueues a list of tasks in the Google Cloud task queue, for consumption
  by Google Compute Engine.

  Tasks are pushed asynchronously in fixed-size batches; all RPCs are issued
  first and their results collected afterwards so the batches overlap.

  Args:
    tasks: list of task objects exposing ToJsonString().
    task_tag: tag applied to every enqueued pull task.

  Returns:
    bool: True when every batch was enqueued, False on any failure.
  """
  queue = taskqueue.Queue('clovis-queue')
  # Add tasks to the queue by groups.
  # TODO(droger): This supports thousands of tasks, but maybe not millions.
  # Defer the enqueuing if it times out.
  batch_size = 100
  pending_rpcs = []
  try:
    # Fire off one async add per batch, collecting the RPC handles.
    for start in range(0, len(tasks), batch_size):
      batch = [
          taskqueue.Task(payload=t.ToJsonString(), method='PULL',
                         tag=task_tag)
          for t in tasks[start:start + batch_size]
      ]
      rpc = taskqueue.create_rpc()
      queue.add_async(task=batch, rpc=rpc)
      pending_rpcs.append(rpc)
    # Wait for every batch to complete; any failure raises here.
    for rpc in pending_rpcs:
      rpc.get_result()
  except Exception as e:
    clovis_logger.error('Exception:' + type(e).__name__ + ' ' + str(e.args))
    return False
  clovis_logger.info('Pushed %i tasks with tag: %s.' % (len(tasks), task_tag))
  return True
def EnqueueTasks(tasks, task_tag):
  """Enqueues a list of tasks in the Google Cloud task queue, for consumption
  by Google Compute Engine.

  Splits the task list into chunks of 100, issues an asynchronous add for
  each chunk, then blocks on all of the outstanding RPCs.

  Args:
    tasks: list of task objects exposing ToJsonString().
    task_tag: tag attached to each pull task.

  Returns:
    bool: True on success, False if any enqueue failed.
  """
  clovis_queue = taskqueue.Queue('clovis-queue')
  # Add tasks to the queue by groups.
  # TODO(droger): This supports thousands of tasks, but maybe not millions.
  # Defer the enqueuing if it times out.
  chunk_size = 100
  in_flight = []
  try:
    index = 0
    while index < len(tasks):
      chunk = tasks[index:index + chunk_size]
      index += chunk_size
      payload_tasks = []
      for task in chunk:
        payload_tasks.append(
            taskqueue.Task(payload=task.ToJsonString(), method='PULL',
                           tag=task_tag))
      rpc = taskqueue.create_rpc()
      clovis_queue.add_async(task=payload_tasks, rpc=rpc)
      in_flight.append(rpc)
    # Block until every async add has finished; errors surface here.
    for rpc in in_flight:
      rpc.get_result()
  except Exception as e:
    clovis_logger.error('Exception:' + type(e).__name__ + ' ' + str(e.args))
    return False
  clovis_logger.info('Pushed %i tasks with tag: %s.' % (len(tasks), task_tag))
  return True
def EnqueueTasks(tasks, task_tag):
  """Enqueues a list of tasks in the Google Cloud task queue, for consumption
  by Google Compute Engine.

  Args:
    tasks: list of task objects exposing ToJsonString().
    task_tag: tag applied to every enqueued pull task.

  Returns:
    str: a human-readable status message ending in a newline, describing
    either the number of tasks pushed or the exception that occurred.
  """
  q = taskqueue.Queue('clovis-queue')
  retry_options = taskqueue.TaskRetryOptions(task_retry_limit=3)
  # Add tasks to the queue by groups.
  # TODO(droger): This supports thousands of tasks, but maybe not millions.
  # Defer the enqueuing if it times out.
  group_size = 100
  callbacks = []
  try:
    for i in range(0, len(tasks), group_size):
      group = tasks[i:i + group_size]
      # The Task payload must be a string: serialize with ToJsonString(),
      # not ToJsonDict() (a dict is not a valid payload).
      taskqueue_tasks = [
          taskqueue.Task(payload=task.ToJsonString(), method='PULL',
                         tag=task_tag, retry_options=retry_options)
          for task in group]
      rpc = taskqueue.create_rpc()
      q.add_async(task=taskqueue_tasks, rpc=rpc)
      callbacks.append(rpc)
    # Wait for all asynchronous adds to complete; failures raise here.
    for callback in callbacks:
      callback.get_result()
  except Exception as e:
    return 'Exception:' + type(e).__name__ + ' ' + str(e.args) + '\n'
  return 'pushed %i tasks with tag: %s\n' % (len(tasks), task_tag)
def set_up(self):
  """Asynchronously adds self._task to the queue named self._queue_name.

  Stores the outstanding RPC in self._rpc so a later call can wait on it.
  """
  rpc = taskqueue.create_rpc()
  self._rpc = rpc
  Queue(self._queue_name).add_async(self._task, rpc=rpc)