Example 1
def task_batch_handle_notifications():
    """Batches notifications from pull queue, and forwards to push queue."""

    # Number of seconds to lease the tasks. Once the lease expires, the
    # tasks become available again for the next worker.
    LEASE_SEC = 60
    # The maximum number of tasks to lease from the pull queue.
    MAX_TASKS = 1000
    queue = taskqueue.Queue('es-notify-tasks-batch')
    tasks = queue.lease_tasks(LEASE_SEC, MAX_TASKS)
    if not tasks:
        return
    requests = {}
    tasks_per_scheduler = collections.defaultdict(list)
    for task in tasks:
        proto = plugin_pb2.NotifyTasksRequest()
        payload = json.loads(task.payload)
        json_format.Parse(payload['request_json'], proto)
        s_tuple = (proto.scheduler_id, payload['es_host'])
        tasks_per_scheduler[s_tuple].append(task)
        if s_tuple not in requests:
            requests[s_tuple] = proto
        else:
            requests[s_tuple].notifications.extend(proto.notifications)

    for s_id, address in requests:
        request_json = json_format.MessageToJson(requests[s_id, address])
        enqueued = utils.enqueue_task(
            '/internal/taskqueue/important/external_scheduler/notify-tasks',
            'es-notify-tasks',
            params={
                'es_host': address,
                'request_json': request_json
            },
            transactional=ndb.in_transaction())
        if not enqueued:
            logging.warning(
                'Failed to enqueue external scheduler task, skipping')
            continue
        queue.delete_tasks(tasks_per_scheduler[s_id, address])
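The grouping logic in this handler can be exercised on its own. Below is a minimal, standalone sketch of the merge step: notifications destined for the same (scheduler_id, es_host) pair are folded into a single request. Plain dicts stand in for plugin_pb2.NotifyTasksRequest protos, and the payload field names are assumptions for illustration only.

import collections

def merge_payloads(payloads):
    # merged: one combined payload per (scheduler_id, es_host) key.
    # grouped: the original payloads that contributed to each key.
    merged = {}
    grouped = collections.defaultdict(list)
    for p in payloads:
        key = (p['scheduler_id'], p['es_host'])
        grouped[key].append(p)
        if key not in merged:
            merged[key] = {'scheduler_id': p['scheduler_id'],
                           'notifications': list(p['notifications'])}
        else:
            merged[key]['notifications'].extend(p['notifications'])
    return merged, grouped

payloads = [
    {'scheduler_id': 's1', 'es_host': 'http://es-a', 'notifications': ['n1']},
    {'scheduler_id': 's1', 'es_host': 'http://es-a', 'notifications': ['n2']},
    {'scheduler_id': 's2', 'es_host': 'http://es-b', 'notifications': ['n3']},
]
merged, grouped = merge_payloads(payloads)
assert merged[('s1', 'http://es-a')]['notifications'] == ['n1', 'n2']
assert len(grouped[('s1', 'http://es-a')]) == 2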
Example 2
def post(self):
    """Handles a notify-tasks push task: decodes the request and forwards it."""
    es_host = self.request.get('es_host')
    request_json = self.request.get('request_json')
    request = plugin_pb2.NotifyTasksRequest()
    json_format.Parse(request_json, request)
    external_scheduler.notify_request_now(es_host, request)
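The core of this push-queue handler is a JSON-to-proto round trip via json_format. The sketch below shows the same round trip using the well-known Struct message instead of plugin_pb2.NotifyTasksRequest, so it runs without the Swarming proto stubs; the field values are illustrative.

from google.protobuf import json_format
from google.protobuf import struct_pb2

# Serialize a message to JSON, then parse it back into a fresh message; the
# handler above performs the same kind of round trip before forwarding.
original = struct_pb2.Struct()
original.update({'es_host': 'http://localhost:1', 'task_count': 3})
request_json = json_format.MessageToJson(original)

parsed = struct_pb2.Struct()
json_format.Parse(request_json, parsed)
assert parsed == original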
Example 3
def notify_requests(es_cfg, requests, use_tq, is_callback, batch_mode=False):
    """Calls external scheduler to notify it of a task state.

    Arguments:
      - es_cfg: pools_config.ExternalSchedulerConfig for the external scheduler
          to notify.
      - requests:
          A list of (task_request.TaskRequest,
                     task_result.TaskResultSummary or task_result.TaskRunResult)
          tuples.
      - use_tq: If true, make this call on a task queue (within the current
          datastore transaction).
      - is_callback: If true, indicates that this notification was in response
          to an external-scheduler-requested callback.
      - batch_mode: If true, the notifications will be sent in a batched mode
          along with others, to reduce traffic to the external scheduler. Only
          valid when use_tq and the global config's
          enable_batch_es_notifications are true.

    Returns: Nothing.
    """
    logging.debug(
        'notify_requests(es_cfg=(%s,%s), requests=%s, use_tq=%s, '
        'is_callback=%s, batch_mode=%s)', es_cfg.address, es_cfg.id,
        [r.task_id for r, _ in requests], use_tq, is_callback, batch_mode)

    req = plugin_pb2.NotifyTasksRequest()
    req.is_callback = is_callback

    for request, result_summary in requests:
        item = req.notifications.add()
        # TODO(akeshet): This time should possibly come from the read time from
        # datastore, rather than the local server clock.
        item.time.FromDatetime(utils.utcnow())
        item.task.id = request.task_id
        item.task.tags.extend(request.tags)
        item.task.enqueued_time.FromDatetime(request.created_ts)
        for i in range(request.num_task_slices):
            s = request.task_slice(i)
            flat_dimensions = task_queues.bot_dimensions_to_flat(
                s.properties.dimensions)
            s_pb = item.task.slices.add()
            s_pb.dimensions.extend(flat_dimensions)

        res = swarming_pb2.TaskResult()
        result_summary.to_proto(res)
        item.task.state = res.state
        if result_summary.bot_id:
            # TODO(akeshet): We should only actually set this if state is running.
            item.task.bot_id = result_summary.bot_id

    req.scheduler_id = es_cfg.id

    if not use_tq:
        # Ignore return value, the response proto is empty.
        notify_request_now(es_cfg.address, req)
        return

    request_json = json_format.MessageToJson(req)
    # If enable_batch_es_notifications is true, the notifications will be sent
    # in a batched mode along with others, to reduce traffic to the external
    # scheduler.
    if batch_mode and config.settings().enable_batch_es_notifications:
        payload = {'es_host': es_cfg.address, 'request_json': request_json}
        pull_task = taskqueue.Task(payload=json.dumps(payload), method='PULL')
        if not pull_task.add(queue_name='es-notify-tasks-batch',
                             transactional=ndb.in_transaction()):
            raise datastore_utils.CommitError('Failed to enqueue task')
        stats = taskqueue.QueueStatistics.fetch('es-notify-kick')
        # Add a kicker task if there are fewer than 10 minutes worth.
        if stats.tasks < 600:
            job_enqueued = utils.enqueue_task(
                '/internal/taskqueue/important/external_scheduler/notify-kick',
                'es-notify-kick',
                transactional=ndb.in_transaction())
            if not job_enqueued:
                logging.info('Failed to add a notify-kick for request.')
        return

    enqueued = utils.enqueue_task(
        '/internal/taskqueue/important/external_scheduler/notify-tasks',
        'es-notify-tasks',
        params={
            'es_host': es_cfg.address,
            'request_json': request_json
        },
        transactional=ndb.in_transaction())
    if not enqueued:
        raise datastore_utils.CommitError('Failed to enqueue task')
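When batch_mode is active, the request is parked on a pull queue instead of being dispatched right away. A stripped-down sketch of that enqueue follows, assuming a pull queue named 'es-notify-tasks-batch' is defined in queue.yaml and no datastore transaction is in flight; the host value is a placeholder.

import json
from google.appengine.api import taskqueue

# Park a JSON payload on the pull queue; task_batch_handle_notifications
# (Example 1) later leases these tasks and merges them per scheduler.
payload = {'es_host': 'http://es.example.com', 'request_json': '{}'}
pull_task = taskqueue.Task(payload=json.dumps(payload), method='PULL')
pull_task.add(queue_name='es-notify-tasks-batch')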
Example 4
def test_notify_request_now(self):
    r = plugin_pb2.NotifyTasksRequest()
    res = external_scheduler.notify_request_now("http://localhost:1", r)
    self.assertEqual(plugin_pb2.NotifyTasksResponse(), res)
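This test drives notify_request_now against an unreachable address, so it only passes in a harness that stubs out the network layer. Queue-side behaviour, such as the pull-queue enqueue in Example 3, can be checked with the App Engine testbed; the sketch below is illustrative only and is not the actual Swarming test setup.

import unittest
from google.appengine.api import taskqueue
from google.appengine.ext import testbed

class PullQueueTest(unittest.TestCase):
    def setUp(self):
        # The testbed provides an in-process task queue stub; root_path must
        # point at a directory whose queue.yaml defines the queue used below.
        self.tb = testbed.Testbed()
        self.tb.activate()
        self.tb.init_taskqueue_stub(root_path='.')
        self.tq_stub = self.tb.get_stub(testbed.TASKQUEUE_SERVICE_NAME)

    def tearDown(self):
        self.tb.deactivate()

    def test_pull_task_is_enqueued(self):
        taskqueue.Task(payload='{}', method='PULL').add(
            queue_name='es-notify-tasks-batch')
        tasks = self.tq_stub.get_filtered_tasks(
            queue_names=['es-notify-tasks-batch'])
        self.assertEqual(1, len(tasks))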