Example no. 1
def warmup():
    """Precaches configuration in local memory, to be called from warmup handler.

  This call is optional. Everything works even if 'warmup' is never called.
  """
    settings()
    utils.get_task_queue_host()
    utils.get_app_version()
Example no. 2
def warmup():
  """Precaches configuration in local memory, to be called from warmup handler.

  This call is optional. Everything works even if 'warmup' is never called.
  """
  settings()
  utils.get_task_queue_host()
  utils.get_app_version()
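The warmup() above only touches lazily-initialized configuration so that a fresh instance starts hot. A minimal sketch of how such a function is typically wired into an App Engine warmup handler, assuming webapp2 and a hypothetical config module that defines warmup() as in the example:

import webapp2

import config  # hypothetical module exposing warmup() from the example above


class WarmupHandler(webapp2.RequestHandler):
  def get(self):
    # App Engine hits /_ah/warmup before routing traffic to a new instance.
    config.warmup()
    self.response.write('ok')


app = webapp2.WSGIApplication([('/_ah/warmup', WarmupHandler)])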
Example no. 3
def register_subtoken(subtoken, rule, intent, caller_ip):
    """Creates new AuthDelegationSubtoken entity in the datastore, returns its ID.

  Args:
    subtoken: delegation_pb2.Subtoken describing the token.
    intent: intent supplied when creating the token.
    rule: config_pb2.DelegationConfig.Rule that allows the operation
    caller_ip: ipaddr.IP of the caller.

  Returns:
    int64 with ID of the new entity.
  """
    entity = AuthDelegationSubtoken(
        subtoken=subtoken.SerializeToString(),
        rule=rule.SerializeToString(),
        intent=intent,
        caller_ip=ipaddr.ip_to_string(caller_ip),
        auth_service_version=utils.get_app_version(),
        delegated_identity=subtoken.delegated_identity,
        creation_time=utils.timestamp_to_datetime(subtoken.creation_time *
                                                  1e6),
        services=list(subtoken.services),
        requestor_identity=subtoken.requestor_identity)
    entity.put(use_cache=False, use_memcache=False)
    subtoken_id = entity.key.integer_id()

    # Keep a logging entry (extractable via BigQuery) too.
    logging.info(
        'subtoken: subtoken_id=%d caller_ip=%s '
        'delegated_identity=%s requestor_identity=%s', subtoken_id,
        entity.caller_ip, entity.delegated_identity, entity.requestor_identity)

    return subtoken_id
Example no. 4
    def post(self):
        res = self._process()
        bot_management.bot_event(
            event_type='bot_connected',
            bot_id=res.bot_id,
            external_ip=self.request.remote_addr,
            authenticated_as=auth.get_peer_identity().to_bytes(),
            dimensions=res.dimensions,
            state=res.state,
            version=res.version,
            quarantined=bool(res.quarantined_msg),
            maintenance_msg=res.maintenance_msg,
            task_id='',
            task_name=None,
            message=res.quarantined_msg)

        data = {
            'bot_version': bot_code.get_bot_version(self.request.host_url)[0],
            'server_version': utils.get_app_version(),
            'bot_group_cfg_version': res.bot_group_cfg.version,
            'bot_group_cfg': {
                # Let the bot know its server-side dimensions (from bots.cfg file).
                'dimensions': res.bot_group_cfg.dimensions,
            },
        }
        if res.bot_group_cfg.bot_config_script_content:
            logging.info('Injecting %s: %d bytes',
                         res.bot_group_cfg.bot_config_script,
                         len(res.bot_group_cfg.bot_config_script_content))
            data['bot_config'] = res.bot_group_cfg.bot_config_script_content
        self.send_response(data)
Example no. 5
 def get(self):
   self.send_response({
     'app_id': app_identity.get_application_id(),
     'app_runtime': 'python27',
     'app_version': utils.get_app_version(),
     'service_account_name': app_identity.get_service_account_name(),
   })
Example no. 6
def get_bot_version(host):
    """Retrieves the current bot version (SHA256) loaded on this server.

  The memcache is first checked for the version, otherwise the value
  is generated and then stored in the memcache.

  Returns:
    tuple(hash of the current bot version, dict of additional files).
  """
    signature = _get_signature(host)
    version = memcache.get('version-' + signature, namespace='bot_code')
    if version:
        return version, None

    # Need to calculate it.
    additionals = {'config/bot_config.py': get_bot_config().content}
    bot_dir = os.path.join(ROOT_DIR, 'swarming_bot')
    version = bot_archive.get_swarming_bot_version(bot_dir, host,
                                                   utils.get_app_version(),
                                                   additionals,
                                                   local_config.settings())
    memcache.set('version-' + signature,
                 version,
                 namespace='bot_code',
                 time=60)
    return version, additionals
Example no. 7
    def post(self):
        """Responds with access token and server version."""
        try:
            request = json.loads(self.request.body)
            client_protocol = str(request["protocol_version"])
            client_app_version = str(request["client_app_version"])
            pusher = request.get("pusher", True)
            fetcher = request.get("fetcher", True)
        except (ValueError, KeyError) as exc:
            return self.send_error("Invalid body of /handshake call.\nError: %s." % exc)

        # This access token will be used to validate each subsequent request.
        access_token = self.generate_xsrf_token({"v": client_protocol})

        # Log details of the handshake to the server log.
        logging_info = {
            "Access Id": auth.get_current_identity().to_bytes(),
            "Client app version": client_app_version,
            "Client is fetcher": fetcher,
            "Client is pusher": pusher,
            "Client protocol version": client_protocol,
            "Token": access_token,
        }
        logging.info("\n".join("%s: %s" % (k, logging_info[k]) for k in sorted(logging_info)))

        # Send back the response.
        self.send_json(
            {
                "access_token": access_token,
                "protocol_version": ISOLATE_PROTOCOL_VERSION,
                "server_app_version": utils.get_app_version(),
            }
        )
Example no. 8
    def post(self):
        """Responds with access token and server version."""
        try:
            request = json.loads(self.request.body)
            client_protocol = str(request['protocol_version'])
            client_app_version = str(request['client_app_version'])
            pusher = request.get('pusher', True)
            fetcher = request.get('fetcher', True)
        except (ValueError, KeyError) as exc:
            return self.send_error(
                'Invalid body of /handshake call.\nError: %s.' % exc)

        # This access token will be used to validate each subsequent request.
        access_token = self.generate_xsrf_token({'v': client_protocol})

        # Log details of the handshake to the server log.
        logging_info = {
            'Access Id': auth.get_current_identity().to_bytes(),
            'Client app version': client_app_version,
            'Client is fetcher': fetcher,
            'Client is pusher': pusher,
            'Client protocol version': client_protocol,
            'Token': access_token,
        }
        logging.info('\n'.join('%s: %s' % (k, logging_info[k])
                               for k in sorted(logging_info)))

        # Send back the response.
        self.send_json({
            'access_token': access_token,
            'protocol_version': ISOLATE_PROTOCOL_VERSION,
            'server_app_version': utils.get_app_version(),
        })
Example no. 9
def get_bot_version(host):
  """Retrieves the bot version (SHA-1) loaded on this server.

  The memcache is first checked for the version, otherwise the value
  is generated and then stored in the memcache.

  Returns:
    The hash of the current bot version.
  """
  # This is invalidated every time bot_config is uploaded.
  bot_versions = memcache.get('versions', namespace='bot_code') or {}
  # CURRENT_VERSION_ID is unique per upload so it can be trusted.
  app_ver = host + '-' + os.environ['CURRENT_VERSION_ID']
  bot_version = bot_versions.get(app_ver)
  if bot_version:
    return bot_version

  # Need to calculate it.
  additionals = {'config/bot_config.py': get_bot_config().content}
  bot_dir = os.path.join(ROOT_DIR, 'swarming_bot')
  bot_version = bot_archive.get_swarming_bot_version(
      bot_dir, host, utils.get_app_version(), additionals)
  if len(bot_versions) > 100:
    # Lazy discard when too large.
    bot_versions = {}
  bot_versions[app_ver] = bot_version
  memcache.set('versions', bot_versions, namespace='bot_code')
  return bot_version
Example no. 10
def get_swarming_bot_zip(host):
    """Returns a zipped file of all the files a bot needs to run.

  Returns:
    A string representing the zipped file's contents.
  """
    version, additionals, bot_config_rev = get_bot_version(host)
    cached_content, cached_bot_config_rev = get_cached_swarming_bot_zip(
        version)
    # TODO(crbug.com/1087981): Compare the bot config revisions.
    # Separate deployment to be safe.
    if cached_content and cached_bot_config_rev:
        logging.debug(
            'memcached bot code %s; %d bytes with bot_config.py rev: %s',
            version, len(cached_content), cached_bot_config_rev)
        return cached_content

    # Get the start bot script from the database, if present. Pass an empty
    # file if the file isn't present.
    bot_config, bot_config_rev = get_bot_config()
    additionals = additionals or {
        'config/bot_config.py': bot_config.content,
    }
    bot_dir = os.path.join(ROOT_DIR, 'swarming_bot')
    content, version = bot_archive.get_swarming_bot_zip(
        bot_dir, host, utils.get_app_version(), additionals,
        local_config.settings())
    logging.info('generated bot code %s; %d bytes with bot_config.py rev: %s',
                 version, len(content), bot_config_rev)
    cache_swarming_bot_zip(version, content, bot_config_rev)
    return content
Example no. 11
 def setUp(self):
     super(IsolateServiceTest, self).setUp()
     self.testbed.init_blobstore_stub()
     self.testbed.init_urlfetch_stub()
     # It seems like there is a singleton state preserved across the tests,
     # making it hard to re-run the complete setUp procedure. Therefore we pre-
     # register all the possible identities being used in the tests.
     all_authed_ids = [
         auth.Identity(auth.IDENTITY_USER, '*****@*****.**'),
         auth.Identity(auth.IDENTITY_USER,
                       '*****@*****.**'),
         auth.Identity(auth.IDENTITY_SERVICE, 'adminapp'),
     ]
     admin = all_authed_ids[0]
     full_access_group = config.settings().auth.full_access_group
     auth.bootstrap_group(full_access_group, all_authed_ids)
     auth_testing.mock_get_current_identity(self, admin)
     version = utils.get_app_version()
     self.mock(utils, 'get_task_queue_host', lambda: version)
     self.testbed.setup_env(current_version_id='testbed.version')
     self.source_ip = '127.0.0.1'
      # It is needed solely for self.execute_tasks(), which processes task queues
     # on the backend application.
     self.app = webtest.TestApp(
         handlers_backend.create_application(debug=True),
         extra_environ={'REMOTE_ADDR': self.source_ip})
     # add a private key; signing depends on config.settings()
     make_private_key()
     # Remove the check for dev server in should_push_to_gs().
     self.mock(utils, 'is_local_dev_server', lambda: False)
Example no. 12
  def post(self):
    """Responds with access token and server version."""
    try:
      request = json.loads(self.request.body)
      client_protocol = str(request['protocol_version'])
      client_app_version = str(request['client_app_version'])
      pusher = request.get('pusher', True)
      fetcher = request.get('fetcher', True)
    except (ValueError, KeyError) as exc:
      return self.send_error(
          'Invalid body of /handshake call.\nError: %s.' % exc)

    # This access token will be used to validate each subsequent request.
    access_token = self.generate_xsrf_token({'v': client_protocol})

    # Log details of the handshake to the server log.
    logging_info = {
      'Access Id': auth.get_current_identity().to_bytes(),
      'Client app version': client_app_version,
      'Client is fetcher': fetcher,
      'Client is pusher': pusher,
      'Client protocol version': client_protocol,
      'Token': access_token,
    }
    logging.info(
        '\n'.join('%s: %s' % (k, logging_info[k])
        for k in sorted(logging_info)))

    # Send back the response.
    self.send_json(
        {
          'access_token': access_token,
          'protocol_version': ISOLATE_PROTOCOL_VERSION,
          'server_app_version': utils.get_app_version(),
        })
Example no. 13
 def get(self):
   self.send_response({
     'app_id': app_identity.get_application_id(),
     'app_runtime': 'python27',
     'app_version': utils.get_app_version(),
     'service_account_name': utils.get_service_account_name(),
   })
Example no. 14
    def post(self):
        res = self._process()
        bot_management.bot_event(
            event_type='bot_connected',
            bot_id=res.bot_id,
            external_ip=self.request.remote_addr,
            authenticated_as=auth.get_peer_identity().to_bytes(),
            dimensions=res.dimensions,
            state=res.state,
            version=res.version,
            quarantined=bool(res.quarantined_msg),
            task_id='',
            task_name=None,
            message=res.quarantined_msg)

        data = {
            'bot_version': bot_code.get_bot_version(self.request.host_url),
            'server_version': utils.get_app_version(),
            'bot_group_cfg_version': res.bot_group_cfg.version,
            'bot_group_cfg': {
                # Let the bot know its server-side dimensions (from bots.cfg file).
                'dimensions': res.bot_group_cfg.dimensions,
            },
        }
        self.send_response(data)
Example no. 15
 def post(self):
   request = self.parse_body()
   log_unexpected_keys(
       self.EXPECTED_KEYS, request, self.request, 'client', 'keys')
   data = {
     # This access token will be used to validate each subsequent request.
     'server_version': utils.get_app_version(),
     'xsrf_token': self.generate_xsrf_token(),
   }
   self.send_response(data)
Example no. 17
def bot_kill_task(run_result_key, bot_id):
    """Terminates a task that is currently running as an internal failure.

  Returns:
    An error message as a str, or None on success.
  """
    result_summary_key = task_pack.run_result_key_to_result_summary_key(
        run_result_key)
    request = task_pack.result_summary_key_to_request_key(
        result_summary_key).get()
    server_version = utils.get_app_version()
    now = utils.utcnow()
    packed = task_pack.pack_run_result_key(run_result_key)

    def run():
        run_result, result_summary = ndb.get_multi(
            (run_result_key, result_summary_key))
        if bot_id and run_result.bot_id != bot_id:
            return None, 'Bot %s sent task kill for task %s owned by bot %s' % (
                bot_id, packed, run_result.bot_id)

        if run_result.state == task_result.State.BOT_DIED:
            # Ignore this failure.
            return None, None

        run_result.signal_server_version(server_version)
        run_result.state = task_result.State.BOT_DIED
        run_result.internal_failure = True
        run_result.abandoned_ts = now
        run_result.modified_ts = now
        result_summary.set_from_run_result(run_result, None)

        futures = ndb.put_multi_async((run_result, result_summary))
        _maybe_pubsub_notify_via_tq(result_summary, request)
        for f in futures:
            f.check_success()

        return run_result, None

    try:
        run_result, msg = datastore_utils.transaction(run)
    except datastore_utils.CommitError as e:
        # At worst, the task will be tagged as BOT_DIED once BOT_PING_TOLERANCE
        # seconds have passed, by the next cron_handle_bot_died cron job.
        return 'Failed killing task %s: %s' % (packed, e)

    if run_result:
        stats.add_run_entry('run_bot_died',
                            run_result.key,
                            bot_id=run_result.bot_id,
                            dimensions=request.properties.dimensions,
                            user=request.user)
    return msg
Example no. 18
def new_result_summary(request):
    """Returns the new and only TaskResultSummary for a TaskRequest.

  The caller must save it in the DB.
  """
    return TaskResultSummary(key=task_pack.request_key_to_result_summary_key(
        request.key),
                             created_ts=request.created_ts,
                             name=request.name,
                             server_versions=[utils.get_app_version()],
                             user=request.user,
                             tags=request.tags)
Example no. 19
def new_run_result(request, try_number, bot_id, bot_version):
    """Returns a new TaskRunResult for a TaskRequest.

  The caller must save it in the DB.
  """
    assert isinstance(request, task_request.TaskRequest)
    summary_key = task_pack.request_key_to_result_summary_key(request.key)
    return TaskRunResult(key=task_pack.result_summary_key_to_run_result_key(
        summary_key, try_number),
                         bot_id=bot_id,
                         started_ts=utils.utcnow(),
                         bot_version=bot_version,
                         server_versions=[utils.get_app_version()])
Example no. 20
def _common_audit_tags():
    """Returns a list of tags that describe circumstances of the RPC call.

  They end up in Token Server's logs and can be used to correlate token server
  requests to Swarming requests.
  """
    # Note: particular names and format of tags is chosen to be consistent with
    # Token Server's logging.
    return [
        'swarming:gae_request_id:%s' % os.getenv('REQUEST_LOG_ID', '?'),
        'swarming:service_version:%s/%s' %
        (app_identity.get_application_id(), utils.get_app_version()),
    ]
Example no. 21
def bot_kill_task(run_result_key, bot_id):
    """Terminates a task that is currently running as an internal failure.

  Returns:
    An error message as a str, or None on success.
  """
    result_summary_key = task_pack.run_result_key_to_result_summary_key(run_result_key)
    request = task_pack.result_summary_key_to_request_key(result_summary_key).get()
    server_version = utils.get_app_version()
    now = utils.utcnow()
    packed = task_pack.pack_run_result_key(run_result_key)

    def run():
        run_result, result_summary = ndb.get_multi((run_result_key, result_summary_key))
        if bot_id and run_result.bot_id != bot_id:
            return None, "Bot %s sent task kill for task %s owned by bot %s" % (bot_id, packed, run_result.bot_id)

        if run_result.state == task_result.State.BOT_DIED:
            # Ignore this failure.
            return None, None

        run_result.signal_server_version(server_version)
        run_result.state = task_result.State.BOT_DIED
        run_result.internal_failure = True
        run_result.abandoned_ts = now
        run_result.modified_ts = now
        result_summary.set_from_run_result(run_result, None)

        futures = ndb.put_multi_async((run_result, result_summary))
        _maybe_pubsub_notify_via_tq(result_summary, request)
        for f in futures:
            f.check_success()

        return run_result, None

    try:
        run_result, msg = datastore_utils.transaction(run)
    except datastore_utils.CommitError as e:
        # At worst, the task will be tagged as BOT_DIED once BOT_PING_TOLERANCE
        # seconds have passed, by the next cron_handle_bot_died cron job.
        return "Failed killing task %s: %s" % (packed, e)

    if run_result:
        stats.add_run_entry(
            "run_bot_died",
            run_result.key,
            bot_id=run_result.bot_id,
            dimensions=request.properties.dimensions,
            user=request.user,
        )
    return msg
Example no. 22
def new_run_result(request, try_number, bot_id, bot_version):
  """Returns a new TaskRunResult for a TaskRequest.

  The caller must save it in the DB.
  """
  assert isinstance(request, task_request.TaskRequest)
  summary_key = task_pack.request_key_to_result_summary_key(request.key)
  return TaskRunResult(
      key=task_pack.result_summary_key_to_run_result_key(
          summary_key, try_number),
      bot_id=bot_id,
      started_ts=utils.utcnow(),
      bot_version=bot_version,
      server_versions=[utils.get_app_version()])
Example no. 23
def log(**kwargs):
  """Adds an error. This will indirectly notify the admins.

  Returns the entity id for the report.
  """
  try:
    identity = auth.get_current_identity().to_bytes()
  except auth.UninitializedError:
    identity = None
  try:
    # Trim all the messages to 4kb to reduce spam.
    LIMIT = 4096
    for key, value in kwargs.items():
      if key not in VALID_ERROR_KEYS:
        logging.error('Dropping unknown detail %s: %s', key, value)
        kwargs.pop(key)
      elif isinstance(value, basestring) and len(value) > LIMIT:
        value = value[:LIMIT-1] + u'\u2026'
        kwargs[key] = value

    if kwargs.get('source') == 'server':
      # Automatically use the version of the server code.
      kwargs.setdefault('version', utils.get_app_version())
      kwargs.setdefault('python_version', platform.python_version())

    error = models.Error(identity=identity, **kwargs)
    error.put()
    key_id = error.key.integer_id()
    logging.error(
        'Got a %s error\nhttps://%s/restricted/ereporter2/errors/%s\n%s',
        error.source,
        app_identity.get_default_version_hostname(),
        key_id,
        error.message)
    return key_id
  except (datastore_errors.BadValueError, TypeError) as e:
    stack = formatter._reformat_stack(traceback.format_exc())
    # That's the error about the error.
    error = models.Error(
        source='server',
        category='exception',
        message='log(%s) caused: %s' % (kwargs, str(e)),
        exception_type=str(type(e)),
        stack=stack)
    error.put()
    key_id = error.key.integer_id()
    logging.error(
        'Failed to log a %s error\n%s\n%s', error.source, key_id, error.message)
    return key_id
Example no. 24
 def commit():
   if change_log_revision_key(auth_db_rev).get():
     logging.warning('Rev %d was already processed concurrently', auth_db_rev)
     return
   rev = AuthDBLogRev(
       key=change_log_revision_key(auth_db_rev),
       when=utils.utcnow(),
       app_version=utils.get_app_version())
   ndb.put_multi(changes + [rev])
   # Enqueue a task to process previous version if not yet done.
   if auth_db_rev > 1:
     prev_rev = auth_db_rev - 1
     if not change_log_revision_key(prev_rev).get():
       logging.info('Enqueuing task to process rev %d', prev_rev)
       enqueue_process_change_task(prev_rev)
Example no. 25
 def commit():
     if change_log_revision_key(auth_db_rev).get():
         logging.warning('Rev %d was already processed concurrently',
                         auth_db_rev)
         return
     rev = AuthDBLogRev(key=change_log_revision_key(auth_db_rev),
                        when=utils.utcnow(),
                        app_version=utils.get_app_version())
     ndb.put_multi(changes + [rev])
     # Enqueue a task to process previous version if not yet done.
     if auth_db_rev > 1:
         prev_rev = auth_db_rev - 1
         if not change_log_revision_key(prev_rev).get():
             logging.info('Enqueuing task to process rev %d', prev_rev)
             enqueue_process_change_task(prev_rev)
Example no. 26
def log(**kwargs):
  """Adds an error. This will indirectly notify the admins.

  Returns the entity id for the report.
  """
  identity = None
  if not auth.get_current_identity().is_anonymous:
    identity = auth.get_current_identity().to_bytes()
  try:
    # Trim all the messages to 4kb to reduce spam.
    LIMIT = 4096
    for key, value in kwargs.items():
      if key not in VALID_ERROR_KEYS:
        logging.error('Dropping unknown detail %s: %s', key, value)
        kwargs.pop(key)
      elif isinstance(value, basestring) and len(value) > LIMIT:
        value = value[:LIMIT-1] + u'\u2026'
        kwargs[key] = value

    if kwargs.get('source') == 'server':
      # Automatically use the version of the server code.
      kwargs.setdefault('version', utils.get_app_version())
      kwargs.setdefault('python_version', platform.python_version())

    error = models.Error(identity=identity, **kwargs)
    error.put()
    key_id = error.key.integer_id()
    logging.error(
        'Got a %s error\nhttps://%s/restricted/ereporter2/errors/%s\n%s',
        error.source,
        app_identity.get_default_version_hostname(),
        key_id,
        error.message)
    return key_id
  except (datastore_errors.BadValueError, TypeError) as e:
    stack = formatter._reformat_stack(traceback.format_exc())
    # That's the error about the error.
    error = models.Error(
        source='server',
        category='exception',
        message='log(%s) caused: %s' % (kwargs, str(e)),
        exception_type=str(type(e)),
        stack=stack)
    error.put()
    key_id = error.key.integer_id()
    logging.error(
        'Failed to log a %s error\n%s\n%s', error.source, key_id, error.message)
    return key_id
Example no. 27
 def setUp(self):
   super(IsolateServiceTest, self).setUp()
   self.testbed.init_blobstore_stub()
   self.testbed.init_urlfetch_stub()
   auth_testing.mock_get_current_identity(self)
   version = utils.get_app_version()
   self.mock(utils, 'get_task_queue_host', lambda: version)
   self.testbed.setup_env(current_version_id='testbed.version')
   self.source_ip = '127.0.0.1'
    # It is needed solely for self.execute_tasks(), which processes task queues
   # on the backend application.
   self.app = webtest.TestApp(
       webapp2.WSGIApplication(handlers_backend.get_routes(), debug=True),
       extra_environ={'REMOTE_ADDR': self.source_ip})
   # add a private key; signing depends on config.settings()
   make_private_key()
Example no. 28
 def setUp(self):
     super(IsolateServiceTest, self).setUp()
     self.testbed.init_blobstore_stub()
     self.testbed.init_urlfetch_stub()
     auth_testing.mock_get_current_identity(self)
     version = utils.get_app_version()
     self.mock(utils, 'get_task_queue_host', lambda: version)
     self.testbed.setup_env(current_version_id='testbed.version')
     self.source_ip = '127.0.0.1'
      # It is needed solely for self.execute_tasks(), which processes task queues
     # on the backend application.
     self.app = webtest.TestApp(
         webapp2.WSGIApplication(handlers_backend.get_routes(), debug=True),
         extra_environ={'REMOTE_ADDR': self.source_ip})
     # add a private key; signing depends on config.settings()
     make_private_key()
Example no. 29
def new_run_result(request, to_run, bot_id, bot_version, bot_dimensions):
    """Returns a new TaskRunResult for a TaskRequest.

  Initializes only the immutable parts.

  The caller must save it in the DB.
  """
    assert isinstance(request, task_request.TaskRequest)
    summary_key = task_pack.request_key_to_result_summary_key(request.key)
    return TaskRunResult(key=task_pack.result_summary_key_to_run_result_key(
        summary_key, to_run.try_number),
                         bot_dimensions=bot_dimensions,
                         bot_id=bot_id,
                         bot_version=bot_version,
                         current_task_slice=to_run.task_slice_index,
                         server_versions=[utils.get_app_version()])
Example no. 30
  def post(self):
    (_request, bot_id, version, state,
        dimensions, quarantined_msg) = self._process()
    bot_management.bot_event(
        event_type='bot_connected', bot_id=bot_id,
        external_ip=self.request.remote_addr, dimensions=dimensions,
        state=state, version=version, quarantined=bool(quarantined_msg),
        task_id='', task_name=None, message=quarantined_msg)

    data = {
      # This access token will be used to validate each subsequent request.
      'bot_version': bot_code.get_bot_version(self.request.host_url),
      'server_version': utils.get_app_version(),
      'xsrf_token': self.generate_xsrf_token(),
    }
    self.send_response(data)
Example no. 32
def bootstrap(paths, global_env=None, filters=None):
  """Resets cached Jinja2 env to pick up new template paths.

  This is purely additive and idempotent, so consecutive calls to this function
  with different arguments are fine.

  Args:
    paths: dict {prefix -> template_dir}, templates under template_dir would be
        accessible as <prefix>/<path relative to template_dir>.
    global_env: dict with variables to add to global template environment.
    filters: dict with filters to add to global filter list.
  """
  assert isinstance(paths, dict), paths
  assert all(
      _TEMPLATE_PATHS.get(k, v) == v for k, v in paths.items()), paths
  assert all(os.path.isabs(p) for p in paths.values()), paths
  assert all(os.path.isdir(p) for p in paths.values()), paths

  if global_env is not None:
    assert isinstance(global_env, dict), global_env
    assert all(isinstance(k, str) for k in global_env), global_env
    assert all(
        _GLOBAL_ENV.get(k, v) == v
        for k, v in global_env.items()), global_env

  if filters is not None:
    assert isinstance(filters, dict), filters
    assert all(
        isinstance(k, str) and callable(v)
        for k, v in filters.items()), filters
    assert all(
        _GLOBAL_FILTERS.get(k, v) == v
        for k, v in filters.items()), filters

  _TEMPLATE_PATHS.update(paths)

  if global_env:
    _GLOBAL_ENV.update(global_env)
  # These are immutable.
  _GLOBAL_ENV.setdefault('app_id', app_identity.get_application_id())
  _GLOBAL_ENV.setdefault('app_version', utils.get_app_version())
  _GLOBAL_ENV.setdefault('app_revision_url', utils.get_app_revision_url())

  if filters:
    _GLOBAL_FILTERS.update(filters)
  utils.clear_cache(get_jinja_env)
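A minimal usage sketch for bootstrap() above, following its docstring; the 'myapp' prefix, the template directory and the extra global are made-up values:

import os

import template  # the module defining bootstrap() above

template.bootstrap(
    # Templates become addressable as 'myapp/<path relative to templates/>'.
    {'myapp': os.path.join(os.path.dirname(os.path.abspath(__file__)), 'templates')},
    global_env={'site_name': 'example'})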
Example no. 33
def yield_swarming_bot_files(root_dir, host, additionals):
  """Yields all the files to map as tuple(filename, content).

  config.json is injected with json data about the server.
  """
  items = {i: None for i in FILES}
  items.update(additionals)
  config = {
    'server': host.rstrip('/'),
    'server_version': utils.get_app_version(),
  }
  items['config.json'] = json.dumps(config)
  for item, content in sorted(items.iteritems()):
    if content is not None:
      yield item, content
    else:
      with open(os.path.join(root_dir, item), 'rb') as f:
        yield item, f.read()
Example no. 34
def yield_swarming_bot_files(root_dir, host, additionals):
    """Yields all the files to map as tuple(filename, content).

  config.json is injected with json data about the server.
  """
    items = {i: None for i in FILES}
    items.update(additionals)
    config = {
        'server': host.rstrip('/'),
        'server_version': utils.get_app_version(),
    }
    items['config.json'] = json.dumps(config)
    for item, content in sorted(items.iteritems()):
        if content is not None:
            yield item, content
        else:
            with open(os.path.join(root_dir, item), 'rb') as f:
                yield item, f.read()
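A hypothetical consumer of the generator above, packing the yielded (filename, content) pairs into an in-memory zip; root_dir and host are assumed to be provided by the caller:

import io
import zipfile

buf = io.BytesIO()
with zipfile.ZipFile(buf, 'w', zipfile.ZIP_DEFLATED) as zf:
  for name, content in yield_swarming_bot_files(root_dir, host, {}):
    # Each yielded pair is either an on-disk bot file or an injected one
    # such as config.json.
    zf.writestr(name, content)
zip_contents = buf.getvalue()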
Example no. 35
def bootstrap(paths, global_env=None, filters=None):
  """Resets cached Jinja2 env to pick up new template paths.

  This is purely additive and idempotent, so consecutive calls to this function
  with different arguments are fine.

  Args:
    paths: dict {prefix -> template_dir}, templates under template_dir would be
        accessible as <prefix>/<path relative to template_dir>.
    global_env: dict with variables to add to global template environment.
    filters: dict with filters to add to global filter list.
  """
  assert isinstance(paths, dict), paths
  assert all(
      _TEMPLATE_PATHS.get(k, v) == v for k, v in paths.iteritems()), paths
  assert all(os.path.isabs(p) for p in paths.itervalues()), paths
  assert all(os.path.isdir(p) for p in paths.itervalues()), paths

  if global_env is not None:
    assert isinstance(global_env, dict), global_env
    assert all(isinstance(k, str) for k in global_env), global_env
    assert all(
        _GLOBAL_ENV.get(k, v) == v
        for k, v in global_env.iteritems()), global_env

  if filters is not None:
    assert isinstance(filters, dict), filters
    assert all(
        isinstance(k, str) and callable(v)
        for k, v in filters.iteritems()), filters
    assert all(
        _GLOBAL_FILTERS.get(k, v) == v
        for k, v in filters.iteritems()), filters

  _TEMPLATE_PATHS.update(paths)

  if global_env:
    _GLOBAL_ENV.update(global_env)
  _GLOBAL_ENV.setdefault('app_version', utils.get_app_version())
  _GLOBAL_ENV.setdefault('app_revision_url', utils.get_app_revision_url())

  if filters:
    _GLOBAL_FILTERS.update(filters)
  utils.clear_cache(get_jinja_env)
Example no. 36
    def setUp(self):
        """Creates a new app instance for every test case."""
        super(MainTest, self).setUp()
        self.testbed.init_user_stub()

        # When called from a task queue task, the call to get_app_version() may
        # fail, so pre-fetch it.
        version = utils.get_app_version()
        self.mock(utils, 'get_task_queue_host', lambda: version)
        self.source_ip = '192.168.0.1'
        self.app_frontend = webtest.TestApp(
            handlers_frontend.create_application(debug=True),
            extra_environ={'REMOTE_ADDR': self.source_ip})
        # This is awkward but both the frontend and backend applications use the
        # same template variables.
        template.reset()
        self.app_backend = webtest.TestApp(
            handlers_backend.create_application(debug=True),
            extra_environ={'REMOTE_ADDR': self.source_ip})
        # Tasks are enqueued on the backend.
        self.app = self.app_backend

        self.auth_app = webtest.TestApp(
            auth.create_wsgi_application(debug=True),
            extra_environ={
                'REMOTE_ADDR': self.source_ip,
                'SERVER_SOFTWARE': os.environ['SERVER_SOFTWARE'],
            })

        full_access_group = config.settings().auth.full_access_group
        readonly_access_group = config.settings().auth.readonly_access_group

        auth.bootstrap_group(
            auth.ADMIN_GROUP,
            [auth.Identity(auth.IDENTITY_USER, '*****@*****.**')])
        auth.bootstrap_group(
            readonly_access_group,
            [auth.Identity(auth.IDENTITY_USER, '*****@*****.**')])
        auth.bootstrap_group(
            full_access_group,
            [auth.Identity(auth.IDENTITY_USER, '*****@*****.**')])
        # TODO(maruel): Create a BOTS_GROUP.

        self.set_as_anonymous()
Example no. 37
def get_swarming_bot_zip(host):
  """Returns a zipped file of all the files a bot needs to run.

  Returns:
    A string representing the zipped file's contents.
  """
  bot_version = get_bot_version(host)
  content = memcache.get('code-%s' % bot_version, namespace='bot_code')
  if content:
    return content

  # Get the start bot script from the database, if present. Pass an empty
  # file if the file isn't present.
  additionals = {'config/bot_config.py': get_bot_config().content}
  bot_dir = os.path.join(ROOT_DIR, 'swarming_bot')
  content, bot_version = bot_archive.get_swarming_bot_zip(
      bot_dir, host, utils.get_app_version(), additionals)
  memcache.set('code-%s' % bot_version, content, namespace='bot_code')
  return content
Example no. 38
    def details(self, _request):
        """Returns information about the server."""
        host = 'https://' + os.environ['HTTP_HOST']

        cfg = config.settings()
        isolate, _cipd = pools_config.get_default_external_services()

        default_isolate_server = cfg.isolate.default_server
        default_isolate_namespace = cfg.isolate.default_namespace
        if isolate:
            default_isolate_server = isolate.server
            default_isolate_namespace = isolate.namespace

        return swarming_rpcs.ServerDetails(
            bot_version=bot_code.get_bot_version(host)[0],
            server_version=utils.get_app_version(),
            display_server_url_template=cfg.display_server_url_template,
            luci_config=config.config.config_service_hostname(),
            default_isolate_server=default_isolate_server,
            default_isolate_namespace=default_isolate_namespace)
Example no. 39
 def setUp(self):
     """Creates a new app instance for every test case."""
     super(MainTest, self).setUp()
     self.testbed.init_blobstore_stub()
     self.testbed.init_urlfetch_stub()
     admin = auth.Identity(auth.IDENTITY_USER, '*****@*****.**')
     full_access_group = config.settings().auth.full_access_group
     auth.bootstrap_group(full_access_group, [admin])
     auth_testing.mock_get_current_identity(self, admin)
     version = utils.get_app_version()
     self.mock(utils, 'get_task_queue_host', lambda: version)
     self.testbed.setup_env(current_version_id='testbed.version')
     self.source_ip = '127.0.0.1'
     self.app = webtest.TestApp(
         handlers_backend.create_application(debug=True),
         extra_environ={'REMOTE_ADDR': self.source_ip})
     # add a private key; signing depends on config.settings()
     make_private_key()
     # Remove the check for dev server in should_push_to_gs().
     self.mock(utils, 'is_local_dev_server', lambda: False)
Example no. 40
 def setUp(self):
     super(IsolateServiceTest, self).setUp()
     self.testbed.init_blobstore_stub()
     self.testbed.init_urlfetch_stub()
     admin = auth.Identity(auth.IDENTITY_USER, '*****@*****.**')
     auth.bootstrap_group(acl.FULL_ACCESS_GROUP, [admin])
     auth_testing.mock_get_current_identity(self, admin)
     version = utils.get_app_version()
     self.mock(utils, 'get_task_queue_host', lambda: version)
     self.testbed.setup_env(current_version_id='testbed.version')
     self.source_ip = '127.0.0.1'
      # It is needed solely for self.execute_tasks(), which processes task queues
     # on the backend application.
     self.app = webtest.TestApp(
         webapp2.WSGIApplication(handlers_backend.get_routes(), debug=True),
         extra_environ={'REMOTE_ADDR': self.source_ip})
     # add a private key; signing depends on config.settings()
     make_private_key()
     # Remove the check for dev server in should_push_to_gs().
     self.mock(utils, 'is_local_dev_server', lambda: False)
Example no. 41
  def setUp(self):
    """Creates a new app instance for every test case."""
    super(MainTest, self).setUp()
    self.testbed.init_user_stub()

    # When called from a task queue task, the call to get_app_version() may
    # fail, so pre-fetch it.
    version = utils.get_app_version()
    self.mock(utils, 'get_task_queue_host', lambda: version)
    self.source_ip = '192.168.0.1'
    self.app_frontend = webtest.TestApp(
        handlers_frontend.create_application(debug=True),
        extra_environ={'REMOTE_ADDR': self.source_ip})
    # This is awkward but both the frontend and backend applications use the
    # same template variables.
    template.reset()
    self.app_backend = webtest.TestApp(
        handlers_backend.create_application(debug=True),
        extra_environ={'REMOTE_ADDR': self.source_ip})
    # Tasks are enqueued on the backend.
    self.app = self.app_backend

    self.auth_app = webtest.TestApp(
        auth.create_wsgi_application(debug=True),
        extra_environ={
          'REMOTE_ADDR': self.source_ip,
          'SERVER_SOFTWARE': os.environ['SERVER_SOFTWARE'],
        })

    auth.bootstrap_group(
        auth.ADMIN_GROUP,
        [auth.Identity(auth.IDENTITY_USER, '*****@*****.**')])
    auth.bootstrap_group(
        acl.READONLY_ACCESS_GROUP,
        [auth.Identity(auth.IDENTITY_USER, '*****@*****.**')])
    auth.bootstrap_group(
        acl.FULL_ACCESS_GROUP,
        [auth.Identity(auth.IDENTITY_USER, '*****@*****.**')])
    # TODO(maruel): Create a BOTS_GROUP.

    self.set_as_anonymous()
Example no. 42
    def setUp(self):
        """Creates a new app instance for every test case."""
        super(MainTest, self).setUp()
        self.testbed.init_user_stub()

        # When called from a task queue task, the call to get_app_version() may
        # fail, so pre-fetch it.
        version = utils.get_app_version()
        self.mock(utils, 'get_task_queue_host', lambda: version)
        self.source_ip = '192.168.0.1'
        self.app_api = webtest.TestApp(
            webapp2.WSGIApplication(handlers_api.get_routes(), debug=True),
            extra_environ={'REMOTE_ADDR': self.source_ip})
        # Do not use handlers_backend.create_application() because it also
        # initializes ereporter2 cron jobs, which requires templates. We want to
        # make sure templates are not needed for APIs.
        self.app_backend = webtest.TestApp(
            webapp2.WSGIApplication(handlers_backend.get_routes(), debug=True),
            extra_environ={'REMOTE_ADDR': self.source_ip})
        # Tasks are enqueued on the backend.
        self.app = self.app_backend
Example no. 43
  def setUp(self):
    """Creates a new app instance for every test case."""
    super(MainTest, self).setUp()
    self.testbed.init_user_stub()

    # When called from a task queue task, the call to get_app_version() may
    # fail, so pre-fetch it.
    version = utils.get_app_version()
    self.mock(utils, 'get_task_queue_host', lambda: version)
    self.source_ip = '192.168.0.1'
    self.app_api = webtest.TestApp(
        webapp2.WSGIApplication(handlers_api.get_routes(), debug=True),
        extra_environ={'REMOTE_ADDR': self.source_ip})
    # Do not use handlers_backend.create_application() because it also
    # initializes ereporter2 cron jobs, which requires templates. We want to
    # make sure templates are not needed for APIs.
    self.app_backend = webtest.TestApp(
        webapp2.WSGIApplication(handlers_backend.get_routes(), debug=True),
        extra_environ={'REMOTE_ADDR': self.source_ip})
    # Tasks are enqueued on the backend.
    self.app = self.app_backend
Example no. 44
    def make_historical_copy(self, deleted, comment):
        """Returns an entity to put in the historical log.

    It's a copy of the original entity, but stored under another key and with
    indexes removed. It also has a bunch of additional properties (defined
    in _AuthDBHistoricalEntity). See 'get_historical_copy_class'.

    The key is derived from auth_db_rev and class and ID of the original entity.
    For example, AuthGroup "admins" modified at rev 123 will be copied to
    the history as ('AuthGlobalConfig', 'root', 'Rev', 123, 'AuthGroupHistory',
    'admins'), where the key prefix (first two pairs) is obtained with
    historical_revision_key(...).
    """
        assert self.key.parent() == root_key() or self.key == root_key(), self.key
        cls = self.get_historical_copy_class()
        entity = cls(id=self.key.id(), parent=historical_revision_key(self.auth_db_rev))
        for prop in self._properties:
            setattr(entity, prop, getattr(self, prop))
        entity.auth_db_deleted = deleted
        entity.auth_db_change_comment = comment
        entity.auth_db_app_version = utils.get_app_version()
        return entity
Example no. 45
    def calculate_etag(self, path):
        """Calculates the hash of the given static file or grabs it from cache.

    Returns:
      Tuple (etag, the body of the file if it was read)
    """
        version = utils.get_app_version()

        # Tainted versions are frequently overwritten, so do not cache their static
        # files for too long. Same for the dev server.
        expiration_sec = 3600
        if '-tainted' in version or utils.is_local_dev_server():
            expiration_sec = 1

        key = '%s:%s' % (version, path)
        value = memcache.get(key, namespace='etag')
        if value:
            return value, None

        body = self.read_static(path)
        value = '"%s"' % hashlib.sha1(body).hexdigest()
        memcache.set(key, value, time=expiration_sec, namespace='etag')
        return value, body
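A sketch of how calculate_etag() above can back a conditional GET; the handler class is hypothetical and assumes calculate_etag() and read_static() from the example are available on it:

import webapp2


class StaticFileHandler(webapp2.RequestHandler):  # hypothetical handler
  def get(self, path):
    etag, body = self.calculate_etag(path)
    if self.request.headers.get('If-None-Match') == etag:
      # The client already holds the current version; skip the body.
      self.response.set_status(304)
      return
    if body is None:
      body = self.read_static(path)
    self.response.headers['ETag'] = etag
    self.response.write(body)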
Example no. 46
  def make_historical_copy(self, deleted, comment):
    """Returns an entity to put in the historical log.

    It's a copy of the original entity, but stored under another key and with
    indexes removed. It also has a bunch of additional properties (defined
    in _AuthDBHistoricalEntity). See 'get_historical_copy_class'.

    The key is derived from auth_db_rev and class and ID of the original entity.
    For example, AuthGroup "admins" modified at rev 123 will be copied to
    the history as ('AuthGlobalConfig', 'root', 'Rev', 123, 'AuthGroupHistory',
    'admins'), where the key prefix (first two pairs) is obtained with
    historical_revision_key(...).
    """
    assert self.key.parent() == root_key() or self.key == root_key(), self.key
    cls = self.get_historical_copy_class()
    entity = cls(
        id=self.key.id(),
        parent=historical_revision_key(self.auth_db_rev))
    for prop in self._properties:
      setattr(entity, prop, getattr(self, prop))
    entity.auth_db_deleted = deleted
    entity.auth_db_change_comment = comment
    entity.auth_db_app_version = utils.get_app_version()
    return entity
Example no. 47
def _handle_dead_bot(run_result_key):
    """Handles a TaskRunResult whose bot has stopped showing signs of life.

  Transactionally updates the entities depending on the state of this task. The
  task may be retried automatically, canceled or left alone.

  Returns:
    True if the task was retried, False if the task was killed, None if no
    action was done.
  """
    result_summary_key = task_pack.run_result_key_to_result_summary_key(run_result_key)
    request_key = task_pack.result_summary_key_to_request_key(result_summary_key)
    request_future = request_key.get_async()
    now = utils.utcnow()
    server_version = utils.get_app_version()
    packed = task_pack.pack_run_result_key(run_result_key)
    request = request_future.get_result()
    to_run_key = task_to_run.request_to_task_to_run_key(request)

    def run():
        """Returns tuple(task_is_retried or None, bot_id)."""
        # Do one GET, one PUT at the end.
        run_result, result_summary, to_run = ndb.get_multi((run_result_key, result_summary_key, to_run_key))
        if run_result.state != task_result.State.RUNNING:
            # It was updated already or not updating last. Likely DB index was stale.
            return None, run_result.bot_id

        run_result.signal_server_version(server_version)
        run_result.modified_ts = now

        notify = False
        if result_summary.try_number != run_result.try_number:
            # Not updating correct run_result, cancel it without touching
            # result_summary.
            to_put = (run_result,)
            run_result.state = task_result.State.BOT_DIED
            run_result.internal_failure = True
            run_result.abandoned_ts = now
            task_is_retried = None
        elif result_summary.try_number == 1 and now < request.expiration_ts:
            # Retry it.
            to_put = (run_result, result_summary, to_run)
            to_run.queue_number = task_to_run.gen_queue_number(request)
            run_result.state = task_result.State.BOT_DIED
            run_result.internal_failure = True
            run_result.abandoned_ts = now
            # Do not sync data from run_result to result_summary, since the task is
            # being retried.
            result_summary.reset_to_pending()
            result_summary.modified_ts = now
            task_is_retried = True
        else:
            # Cancel it, there was more than one try or the task expired in the
            # meantime.
            to_put = (run_result, result_summary)
            run_result.state = task_result.State.BOT_DIED
            run_result.internal_failure = True
            run_result.abandoned_ts = now
            result_summary.set_from_run_result(run_result, request)
            notify = True
            task_is_retried = False

        futures = ndb.put_multi_async(to_put)
        if notify:
            _maybe_pubsub_notify_via_tq(result_summary, request)
        for f in futures:
            f.check_success()

        return task_is_retried, run_result.bot_id

    try:
        task_is_retried, bot_id = datastore_utils.transaction(run)
    except datastore_utils.CommitError:
        task_is_retried, bot_id = None, None
    if task_is_retried is not None:
        task_to_run.set_lookup_cache(to_run_key, task_is_retried)
        if not task_is_retried:
            stats.add_run_entry(
                "run_bot_died",
                run_result_key,
                bot_id=bot_id[0],
                dimensions=request.properties.dimensions,
                user=request.user,
            )
        else:
            logging.info("Retried %s", packed)
    else:
        logging.info("Ignored %s", packed)
    return task_is_retried
Example no. 48
File: ui.py Project: nodirt/luci-py
  def reply(self, path, env=None, status=200):
    """Render template |path| to response using given environment.

    Optional keys from |env| that base.html uses:
      css_file: URL to a file with page-specific styles, relative to the site root.
      js_file: URL to a file with page-specific Javascript code, relative to the
          site root. The file should define a global object named after the
          filename, e.g. '/auth/static/js/api.js' should define a global object
          'api' that encapsulates the functionality implemented in the module.
      navbar_tab_id: id of the navbar tab to highlight.
      page_title: title of an HTML page.

    Args:
      path: path to a template, relative to templates/.
      env: additional environment dict to use when rendering the template.
      status: HTTP status code to return.
    """
    env = (env or {}).copy()
    env.setdefault('css_file', None)
    env.setdefault('js_file', None)
    env.setdefault('navbar_tab_id', None)
    env.setdefault('page_title', 'Untitled')

    # This goes to both Jinja2 env and Javascript config object.
    common = {
      'login_url': users.create_login_url(self.request.path),
      'logout_url': users.create_logout_url('/'),
      'xsrf_token': self.generate_xsrf_token(),
    }

    # Name of Javascript module with page code.
    js_module_name = None
    if env['js_file']:
      assert env['js_file'].endswith('.js')
      js_module_name = os.path.basename(env['js_file'])[:-3]

    # This will be accessible from Javascript as global 'config' variable.
    js_config = {
      'identity': api.get_current_identity().to_bytes(),
    }
    js_config.update(common)

    # Jinja2 environment to use to render a template.
    full_env = {
      'app_name': _ui_app_name,
      'app_revision_url': utils.get_app_revision_url(),
      'app_version': utils.get_app_version(),
      'config': json.dumps(js_config),
      'identity': api.get_current_identity(),
      'js_module_name': js_module_name,
      'navbar': [
        (cls.navbar_tab_id, cls.navbar_tab_title, cls.navbar_tab_url)
        for cls in _ui_navbar_tabs
      ],
    }
    full_env.update(common)
    full_env.update(env)

    # Render it.
    self.response.set_status(status)
    self.response.headers['Content-Type'] = 'text/html; charset=utf-8'
    self.response.write(template.render(path, full_env))
Example no. 49
 def server_details(self, _request):
   return ServerDetails(server_version=utils.get_app_version())
Example no. 50
 def test_server_details_ok(self):
   """Assert that server_details returns the correct version."""
   response = self.call_api('server_details', {}, 200).json
   self.assertEqual(utils.get_app_version(), response['server_version'])
Example no. 51
  def reply(self, path, env=None, status=200):
    """Render template |path| to response using given environment.

    Optional keys from |env| that base.html uses:
      css_file: URL to a file with page-specific styles, relative to the site root.
      js_file: URL to a file with page-specific Javascript code, relative to the
          site root. The file should define a global object named after the
          filename, e.g. '/auth/static/js/api.js' should define a global object
          'api' that encapsulates the functionality implemented in the module.
      navbar_tab_id: id of the navbar tab to highlight.
      page_title: title of an HTML page.

    Args:
      path: path to a template, relative to templates/.
      env: additional environment dict to use when rendering the template.
      status: HTTP status code to return.
    """
    env = (env or {}).copy()
    env.setdefault('css_file', None)
    env.setdefault('js_file', None)
    env.setdefault('navbar_tab_id', None)
    env.setdefault('page_title', 'Untitled')

    # This goes to both Jinja2 env and Javascript config object.
    user = self.get_current_user()
    common = {
      'account_picture': user.picture() if user else None,
      'auth_service_config_locked': False, # overridden in auth_service
      'is_admin': api.is_admin(),
      'login_url': self.create_login_url(self.request.url),
      'logout_url': self.create_logout_url('/'),
      'using_gae_auth': self.auth_method == handler.gae_cookie_authentication,
      'xsrf_token': self.generate_xsrf_token(),
    }
    if _ui_env_callback:
      common.update(_ui_env_callback(self))

    # Name of Javascript module with page code.
    js_module_name = None
    if env['js_file']:
      assert env['js_file'].endswith('.js')
      js_module_name = os.path.basename(env['js_file'])[:-3]

    # This will be accessible from Javascript as global 'config' variable.
    js_config = {
      'identity': api.get_current_identity().to_bytes(),
    }
    js_config.update(common)

    # Prepare URL to explore app API.
    schema, netloc = urlparse.urlparse(self.request.url)[:2]
    api_url = (
        'https://apis-explorer.appspot.com/apis-explorer/?'
        'base=%s://%s/_ah/api' % (schema, netloc))

    # Jinja2 environment to use to render a template.
    full_env = {
      'app_name': _ui_app_name,
      'app_revision_url': utils.get_app_revision_url(),
      'app_version': utils.get_app_version(),
      'config': json.dumps(js_config),
      'identity': api.get_current_identity(),
      'js_module_name': js_module_name,
      'api_url': api_url,
      'navbar': [
        (cls.navbar_tab_id, cls.navbar_tab_title, cls.navbar_tab_url)
        for cls in _ui_navbar_tabs
        if cls.is_visible()
      ],
    }
    full_env.update(common)
    full_env.update(env)

    # Render it.
    self.response.set_status(status)
    self.response.headers['Content-Type'] = 'text/html; charset=utf-8'
    self.response.write(template.render(path, full_env))
Example no. 52
def bot_update_task(
    run_result_key,
    bot_id,
    output,
    output_chunk_start,
    exit_code,
    duration,
    hard_timeout,
    io_timeout,
    cost_usd,
    outputs_ref,
):
    """Updates a TaskRunResult and TaskResultSummary, along with TaskOutput.

  Arguments:
  - run_result_key: ndb.Key to TaskRunResult.
  - bot_id: Self advertised bot id to ensure it's the one expected.
  - output: Data to append to this command output.
  - output_chunk_start: Index of output in the stdout stream.
  - exit_code: Mark that this command is terminated.
  - duration: Time spent in seconds for this command.
  - hard_timeout: Bool set if a hard timeout occurred.
  - io_timeout: Bool set if an I/O timeout occurred.
  - cost_usd: Cost in $USD of this task up to now.
  - outputs_ref: Serialized FilesRef instance or None.

  Invalid states; these are flat-out refused:
  - A command is updated after it already had an exit code assigned.

  Returns:
    tuple(bool, bool); first is if the update succeeded, second is if the task
    completed.
  """
    assert output_chunk_start is None or isinstance(output_chunk_start, int)
    assert output is None or isinstance(output, str)
    if cost_usd is not None and cost_usd < 0.0:
        raise ValueError("cost_usd must be None or greater than or equal to 0")

    packed = task_pack.pack_run_result_key(run_result_key)
    logging.debug(
        "bot_update_task(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
        packed,
        bot_id,
        len(output) if output else output,
        output_chunk_start,
        exit_code,
        duration,
        hard_timeout,
        io_timeout,
        cost_usd,
        outputs_ref,
    )

    result_summary_key = task_pack.run_result_key_to_result_summary_key(run_result_key)
    request_key = task_pack.result_summary_key_to_request_key(result_summary_key)
    request_future = request_key.get_async()
    server_version = utils.get_app_version()
    request = request_future.get_result()
    now = utils.utcnow()

    def run():
        # 2 consecutive GETs, one PUT.
        run_result_future = run_result_key.get_async()
        result_summary_future = result_summary_key.get_async()
        run_result = run_result_future.get_result()
        if not run_result:
            result_summary_future.wait()
            return None, None, False, "is missing"

        if run_result.bot_id != bot_id:
            result_summary_future.wait()
            return None, None, False, ("expected bot (%s) but had update from bot %s" % (run_result.bot_id, bot_id))

        if not run_result.started_ts:
            return None, None, False, "TaskRunResult is broken; %s" % (run_result.to_dict())

        # This happens when an HTTP request is retried: the DB write succeeded
        # but the handler still returned HTTP 500, so the same update is resent.
        if run_result.exit_code is not None and exit_code is not None:
            if run_result.exit_code != exit_code:
                result_summary_future.wait()
                return None, None, False, "got 2 different exit_code; %s then %s" % (run_result.exit_code, exit_code)

        if run_result.durations and duration is not None:
            if run_result.durations[0] != duration:
                result_summary_future.wait()
                return None, None, False, "got 2 different durations; %s then %s" % (run_result.durations[0], duration)

        if (duration is None) != (exit_code is None):
            result_summary_future.wait()
            return (
                None,
                None,
                False,
                (
                    "had unexpected duration; expected iff a command completes\n"
                    "duration: %s vs %s; exit: %s vs %s"
                    % (run_result.durations, duration, run_result.exit_code, exit_code)
                ),
            )

        # If the command completed, record the values unless they were already set.
        if duration is not None and not run_result.durations:
            run_result.durations.append(duration)
        if exit_code is not None and run_result.exit_code is None:
            run_result.exit_codes.append(exit_code)

        if outputs_ref:
            run_result.outputs_ref = task_request.FilesRef(**outputs_ref)

        task_completed = run_result.exit_code is not None
        if run_result.state in task_result.State.STATES_RUNNING:
            if hard_timeout or io_timeout:
                run_result.state = task_result.State.TIMED_OUT
                run_result.completed_ts = now
            elif task_completed:
                run_result.state = task_result.State.COMPLETED
                run_result.completed_ts = now

        run_result.signal_server_version(server_version)
        to_put = [run_result]
        if output:
            # This does 1 multi GET. This also modifies run_result in place.
            to_put.extend(run_result.append_output(0, output, output_chunk_start or 0))

        run_result.cost_usd = max(cost_usd, run_result.cost_usd or 0.0)
        run_result.modified_ts = now

        result_summary = result_summary_future.get_result()
        if result_summary.try_number and result_summary.try_number > run_result.try_number:
            # If a shard is retried but the bot running the previous try somehow
            # reappears and reports success, the result must still show the last
            # try's outcome. We still need to update cost_usd manually.
            result_summary.costs_usd[run_result.try_number - 1] = run_result.cost_usd
            result_summary.modified_ts = now
        else:
            result_summary.set_from_run_result(run_result, request)

        to_put.append(result_summary)
        ndb.put_multi(to_put)

        return result_summary, run_result, task_completed, None

    try:
        smry, run_result, task_completed, error = datastore_utils.transaction(run)
    except datastore_utils.CommitError as e:
        logging.info("Got commit error: %s", e)
        # It is important that the caller correctly surface this error.
        return False, False

    if run_result:
        # Caller must retry if PubSub enqueue fails.
        if not _maybe_pubsub_notify_now(smry, request):
            return False, False
        _update_stats(run_result, bot_id, request, task_completed)
    if error:
        logging.error("Task %s %s", packed, error)
    return True, task_completed
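
As a usage illustration, here is a hedged sketch of how a bot API handler could call bot_update_task() and act on its (success, completed) return value. The handler shape, the request field names and the unpack helper are assumptions, not taken from the original code.

def handle_task_update(params, bot_id):  # hypothetical caller
  # 'params' is assumed to be the already-parsed JSON body of the bot request.
  success, completed = bot_update_task(
      run_result_key=task_pack.unpack_run_result_key(params['task_id']),
      bot_id=bot_id,
      output=params.get('output'),
      output_chunk_start=params.get('output_chunk_start'),
      exit_code=params.get('exit_code'),
      duration=params.get('duration'),
      hard_timeout=params.get('hard_timeout', False),
      io_timeout=params.get('io_timeout', False),
      cost_usd=params.get('cost_usd', 0.0),
      outputs_ref=params.get('outputs_ref'))
  if not success:
    # Per the docstring, the caller must surface the failure so the bot retries.
    raise RuntimeError('Failed to update task %s; please retry' % params['task_id'])
  return {'ok': True, 'task_completed': completed}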
Example no. 53
    def reply(self, path, env=None, status=200):
        """Render template |path| to response using given environment.

    Optional keys from |env| that base.html uses:
      css_file: URL to a file with page specific styles, relative to site root.
      js_file: URL to a file with page specific Javascript code, relative to
          site root. The file should define a global object named the same as
          the filename, i.e. '/auth/static/js/api.js' should define a global
          object 'api' that encapsulates the functionality implemented in the
          module.
      navbar_tab_id: id of the navbar tab to highlight.
      page_title: title of an HTML page.

    Args:
      path: path to a template, relative to templates/.
      env: additional environment dict to use when rendering the template.
      status: HTTP status code to return.
    """
        env = (env or {}).copy()
        env.setdefault("css_file", None)
        env.setdefault("js_file", None)
        env.setdefault("navbar_tab_id", None)
        env.setdefault("page_title", "Untitled")

        # This goes to both Jinja2 env and Javascript config object.
        common = {
            "auth_service_config_locked": False,  # overridden in auth_service
            "login_url": users.create_login_url(self.request.path),
            "logout_url": users.create_logout_url("/"),
            "xsrf_token": self.generate_xsrf_token(),
        }
        if _ui_env_callback:
            common.update(_ui_env_callback(self))

        # Name of Javascript module with page code.
        js_module_name = None
        if env["js_file"]:
            assert env["js_file"].endswith(".js")
            js_module_name = os.path.basename(env["js_file"])[:-3]

        # This will be accessible from Javascript as global 'config' variable.
        js_config = {"identity": api.get_current_identity().to_bytes()}
        js_config.update(common)

        # Prepare URL to explore app API.
        schema, netloc, _, _, _, _ = urlparse.urlparse(self.request.url)
        api_url = "https://apis-explorer.appspot.com/apis-explorer/?base=%s://%s/_ah/api" % (schema, netloc)

        # Jinja2 environment to use to render a template.
        full_env = {
            "app_name": _ui_app_name,
            "app_revision_url": utils.get_app_revision_url(),
            "app_version": utils.get_app_version(),
            "config": json.dumps(js_config),
            "identity": api.get_current_identity(),
            "js_module_name": js_module_name,
            "api_url": api_url,
            "navbar": [(cls.navbar_tab_id, cls.navbar_tab_title, cls.navbar_tab_url) for cls in _ui_navbar_tabs],
        }
        full_env.update(common)
        full_env.update(env)

        # Render it.
        self.response.set_status(status)
        self.response.headers["Content-Type"] = "text/html; charset=utf-8"
        self.response.write(template.render(path, full_env))
Example no. 54
 def details(self, _request):
   """Returns information about the server."""
   return swarming_rpcs.ServerDetails(server_version=utils.get_app_version())
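
The method above returns a swarming_rpcs.ServerDetails message. For context, here is a minimal sketch of what such a protorpc message could look like, assuming it carries only the server_version field used here; the real definition likely has more fields.

from protorpc import messages

class ServerDetails(messages.Message):
  """Server details reported to API clients (sketch with a single field)."""
  server_version = messages.StringField(1)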
Example no. 55
 def test_details(self):
   """Asserts that server_details returns the correct version."""
   response = self.call_api('details')
   self.assertEqual({'server_version': utils.get_app_version()}, response.json)