Example #1
def create_html_app():
  """Returns WSGI app that serves HTML pages."""
  routes = []
  routes.extend(ereporter2.get_frontend_routes())
  routes.extend(ereporter2.get_backend_routes())
  routes.append((r'.*', MainPage))
  return webapp2.WSGIApplication(routes, debug=utils.is_local_dev_server())
Example #2
  def switch_to_dev_mode():
    """Enables GS mock for a local dev server.

    Returns:
      List of webapp2.Routes objects to add to the application.
    """
    assert utils.is_local_dev_server(), 'Must not be run in production'
    if not URLSigner.DEV_MODE_ENABLED:
      # Replace GS_URL with a mocked one.
      URLSigner.GS_URL = (
          'http://%s/_gcs_mock/' % config.get_local_dev_server_host())
      URLSigner.GS_URL += '%(bucket)s/%(filename)s?%(query)s'
      URLSigner.DEV_MODE_ENABLED = True

    class LocalStorageHandler(webapp2.RequestHandler):
      """Handles requests to a mock GS implementation."""

      def get(self, bucket, filepath):
        """Read a file from a mocked GS, return 404 if not found."""
        try:
          with cloudstorage.open('/%s/%s' % (bucket, filepath), 'r') as f:
            self.response.out.write(f.read())
          self.response.headers['Content-Type'] = 'application/octet-stream'
        except cloudstorage.errors.NotFoundError:
          self.abort(404)

      def put(self, bucket, filepath):
        """Stores a file in a mocked GS."""
        with cloudstorage.open('/%s/%s' % (bucket, filepath), 'w') as f:
          f.write(self.request.body)

    endpoint = r'/_gcs_mock/<bucket:[a-z0-9\.\-_]+>/<filepath:.*>'
    return [webapp2.Route(endpoint, LocalStorageHandler)]
Example #3
def get_routes():
  # Use special syntax on dev server to specify where app is running.
  app_id_re = r'[0-9a-zA-Z_\-\:\.]*'
  if utils.is_local_dev_server():
    app_id_re += r'(@localhost:[0-9]+)?'

  # Auth service extends the basic UI and API provided by Auth component.
  routes = []
  routes.extend(rest_api.get_rest_api_routes())
  routes.extend(ui.get_ui_routes())
  routes.extend([
    # UI routes.
    webapp2.Route(
        r'/', webapp2.RedirectHandler, defaults={'_uri': '/auth/groups'}),
    webapp2.Route(r'/_ah/mail/<to:.+>', EmailHandler),
    webapp2.Route(r'/_ah/warmup', WarmupHandler),

    # API routes.
    webapp2.Route(
        r'/auth_service/api/v1/importer/config',
        ImporterConfigHandler),
    webapp2.Route(
        r'/auth_service/api/v1/internal/link_replica',
        LinkRequestHandler),
    webapp2.Route(
        r'/auth_service/api/v1/services',
        ServiceListingHandler),
    webapp2.Route(
        r'/auth_service/api/v1/services/<app_id:%s>/linking_url' % app_id_re,
        GenerateLinkingURL),
  ])
  return routes
Example #4
def get_cas_service():
  """Factory method that returns configured CASService instance.

  If the service is not configured, returns None. Also acts as a mocking point
  for unit tests.
  """
  conf = config.cached()
  if not conf.cas_gs_path or not conf.cas_gs_temp:
    return None
  try:
    cloudstorage.validate_file_path(conf.cas_gs_path.rstrip('/'))
    cloudstorage.validate_file_path(conf.cas_gs_temp.rstrip('/'))
  except ValueError as err:
    logging.error("Invalid CAS config: %s", err)
    return None
  service_account_key = auth.ServiceAccountKey(
      client_email=conf.service_account_email,
      private_key=conf.service_account_pkey,
      private_key_id=conf.service_account_pkey_id)
  if utils.is_local_dev_server():  # pragma: no branch
    from . import hacks
    hacks.patch_cloudstorage_lib(service_account_key)
  return CASService(
      conf.cas_gs_path.rstrip('/'),
      conf.cas_gs_temp.rstrip('/'),
      service_account_key)
Example #5
def patch_cloudstorage_lib(service_account_key):
  """Makes cloudstorage library talk to real GCS using our own token.

  Note that cloudstorage.set_access_token() is partially broken. _RestApi class
  ignores it. See rest_api._RestApi.urlfetch_async (get_token_async call that
  unconditionally overwrites previously set token). Setting the token disables
  the usage of local mocks though, so we set it anyway (to some garbage, it
  doesn't matter).
  """
  assert utils.is_local_dev_server()
  common.set_access_token('lalala')

  global _original_get_token_async
  if _original_get_token_async is None:
    logging.warning('Monkey patching GCS library to use valid token')
    _original_get_token_async = rest_api._RestApi.get_token_async

  # pylint: disable=unused-argument
  @functools.wraps(_original_get_token_async)
  def patched_get_token_async(self, refresh=False):
    fut = ndb.Future()
    fut.set_result(auth.get_access_token(self.scopes, service_account_key)[0])
    return fut

  rest_api._RestApi.get_token_async = patched_get_token_async
Example #6
def get_service_public_certificates(service_url):
  """Returns jsonish object with public certificates of a service.

  Service at |service_url| must have 'auth' component enabled (to serve
  the certificates).
  """
  cache_key = 'pub_certs:%s' % service_url
  certs = memcache.get(cache_key)
  if not certs:
    protocol = 'http://' if utils.is_local_dev_server() else 'https://'
    assert service_url.startswith(protocol)
    result = urlfetch.fetch(
        url='%s/auth/api/v1/server/certificates' % service_url,
        method='GET',
        headers={'X-URLFetch-Service-Id': utils.get_urlfetch_service_id()},
        follow_redirects=False,
        deadline=10,
        validate_certificate=True)
    if result.status_code != 200:
      raise CertificateError(
          'Failed to grab public certs from %s: HTTP %d' %
          (service_url, result.status_code))
    certs = json.loads(result.content)
    memcache.set(cache_key, certs, time=3600)
  return certs
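The lookup above is a cache-aside pattern: check memcache, fetch and parse on a miss, then store the result with a one-hour TTL. A minimal, framework-free sketch of the same pattern, using an in-process dict with expiry in place of memcache (the names here are illustrative, not part of the original code):

import time

_CACHE = {}  # key -> (expires_at, value)

def get_cached(key, fetch, ttl_sec=3600):
  """Returns a cached value, calling fetch() and storing the result on a miss."""
  now = time.time()
  entry = _CACHE.get(key)
  if entry and entry[0] > now:
    return entry[1]
  value = fetch()
  _CACHE[key] = (now + ttl_sec, value)
  return value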
Example #7
def create_application(debug):
  replication.configure_as_primary()
  rest_api.set_config_locked(config.is_remote_configured)

  # Configure UI appearance, add all custom tabs.
  ui.configure_ui(
      app_name='Auth Service',
      ui_tabs=[
        ui.GroupsHandler,
        ui.ChangeLogHandler,
        ServicesHandler,
        ui.OAuthConfigHandler,
        ui.IPWhitelistsHandler,
        ConfigHandler,
      ],
      env_callback=get_additional_ui_environment)
  template.bootstrap({'auth_service': TEMPLATES_DIR})

  # Add a fake admin for local dev server.
  if utils.is_local_dev_server():
    auth.bootstrap_group(
        auth.ADMIN_GROUP,
        [auth.Identity(auth.IDENTITY_USER, '*****@*****.**')],
        'Users that can manage groups')
  return webapp2.WSGIApplication(get_routes(), debug=debug)
Example #8
def get_routes():
  # Namespace can be letters, numbers, '-', '.' and '_'.
  namespace = r'/<namespace:%s>' % model.NAMESPACE_RE
  # Do not enforce a length limit, to support different hashing algorithms. This
  # should represent a valid hex value.
  hashkey = r'/<hash_key:[a-f0-9]{4,}>'
  # This means a complete key is required.
  namespace_key = namespace + hashkey
  routes = [
    webapp2.Route(r'/content-gs/handshake', HandshakeHandler),
    webapp2.Route(
        r'/content-gs/pre-upload/<namespace:.*>',
        PreUploadContentHandler),
    webapp2.Route(
        r'/content-gs/retrieve%s' % namespace_key,
        RetrieveContentHandler),
    webapp2.Route(
        r'/content-gs/store%s' % namespace_key,
        StoreContentHandler,
        name='store-gs'),
  ]
  # Routes added to the WSGIApplication only in dev mode.
  if utils.is_local_dev_server():
    routes.extend(gcs.URLSigner.switch_to_dev_mode())
  return routes
Example #9
def create_endpoints_app():
  """Returns WSGI app that serves cloud endpoints requests."""
  apis = [
    admin.AdminApi,
    cas.CASServiceApi,
    cipd.PackageRepositoryApi,
  ]
  return endpoints.api_server(apis, restricted=not utils.is_local_dev_server())
Example #10
def get_config():
  """Fetches AuthOpenIDConfig from datastore or returns default instance."""
  conf = AuthOpenIDConfig.get_by_id(id='default')
  if not conf:
    uri = '%s://%s/auth/openid/callback' % (
        'http' if utils.is_local_dev_server() else 'https',
        app_identity.get_default_version_hostname())
    conf = AuthOpenIDConfig(id='default', redirect_uri=uri)
  return conf
Example #11
def create_app():
  if utils.is_local_dev_server():
    handler_utils.init_local_dev_server()

  admin_handlers = [
      (r'/admin/(.*)', AdminDispatch),
  ]

  return webapp2.WSGIApplication(admin_handlers, debug=True)
Example #12
def nuke_gae_cookies(response):
    """Removes GAE authentication related cookies.

  This reduces confusion when OpenID cookies are used: having users logged in
  with two different authentication methods at once is confusing.
  """
    response.delete_cookie("SACSID")
    if utils.is_local_dev_server():
        response.delete_cookie("dev_appserver_login")
Example #13
def get_config():
    """Fetches AuthOpenIDConfig from datastore or returns default instance."""
    conf = AuthOpenIDConfig.get_by_id(id="default")
    if not conf:
        uri = "%s://%s/auth/openid/callback" % (
            "http" if utils.is_local_dev_server() else "https",
            app_identity.get_default_version_hostname(),
        )
        conf = AuthOpenIDConfig(id="default", redirect_uri=uri)
    return conf
Example #14
  def wrapper(self, *args, **kwargs):
    assert isinstance(self, webapp2.RequestHandler)
    assert self.request.method == 'GET'
    if model.is_replica():
      primary_url = model.get_replication_state().primary_url
      protocol = 'http://' if utils.is_local_dev_server() else 'https://'
      assert primary_url and primary_url.startswith(protocol), primary_url
      assert self.request.path_qs.startswith('/'), self.request.path_qs
      self.redirect(primary_url.rstrip('/') + self.request.path_qs, abort=True)
    return method(self, *args, **kwargs)
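The redirect target above is the primary's root URL joined with the original path and query string. A sketch of that string manipulation in isolation (names are illustrative only):

def redirect_target(primary_url, path_qs):
  """Joins a primary root URL with the original path and query string."""
  assert path_qs.startswith('/'), path_qs
  return primary_url.rstrip('/') + path_qs

assert (redirect_target('https://primary.example.com/', '/auth/groups?x=1') ==
        'https://primary.example.com/auth/groups?x=1')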
Example #15
def forward_data(data, ip):
  """Forwards the raw data to the backend.

  The request contains all the required headers, including a special
  Endpoint-Url header with the endpoint URL, and the correct
  Authorization: header for that endpoint.

  Args:
    data (str): raw binary data to forward.
    ip (str):   the IP address of the data source (used for traffic split).

  Raises:
    AdminError when endpoint data is not entered in the admin console.
  """
  lb = LoadBalancer()
  module_name = lb.choose_module()
  logging.info('Forwarding request (%d bytes) to module: %s',
               len(data), module_name)
  hostname = app_identity.get_default_version_hostname()
  if utils.is_local_dev_server():
    protocol = 'http'
    hostname = 'localhost:808%s' % module_name[-1]
  else:
    protocol = 'https'

  config_data = _get_config_data()
  if not config_data:
    raise AdminError('Endpoints are not defined')

  # Make the traffic split deterministic in the source IP.

  # TODO(sergeyberezin): make it truly random. Most of our sources
  # are behind NAT boxes, and appear as the same IP.
  random_state = random.getstate()
  random.seed(ip)
  if random.uniform(0, 100) < config_data.secondary_endpoint_load:
    endpoint = config_data.secondary_endpoint
  else:
    endpoint = config_data.primary_endpoint
  random.setstate(random_state)

  url = '%s://%s/%s' % (protocol, hostname, module_name)
  service_account_key = _get_credentials(endpoint.credentials)
  headers = {
      common.ENDPOINT_URL_HEADER: endpoint.url,
      'Content-Type': 'application/x-protobuf',
  }
  headers.update(endpoint.headers)
  net.request(
      url=url,
      method='POST',
      payload=data,
      headers=headers,
      scopes=endpoint.scopes,
      service_account_key=service_account_key)
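The traffic split above is made deterministic by seeding the global random module with the source IP and restoring its state afterwards. A minimal sketch of the same idea that avoids touching global state by using a dedicated random.Random instance (the percentage and the 'primary'/'secondary' labels are hypothetical):

import random

def pick_endpoint(ip, secondary_load_percent):
  """Deterministically picks 'primary' or 'secondary' for a given source IP."""
  rng = random.Random(ip)  # the same IP always yields the same draw
  if rng.uniform(0, 100) < secondary_load_percent:
    return 'secondary'
  return 'primary'

# The same source IP always lands on the same endpoint.
assert pick_endpoint('10.0.0.1', 30) == pick_endpoint('10.0.0.1', 30)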
Example #16
def create_app():
  logging.basicConfig(level=logging.DEBUG)
  if utils.is_local_dev_server():
    handler_utils.init_local_dev_server()

  main_handlers = [
      (r'/', MainHandler),
      (r'/monacq', MonacqHandler),
  ]

  return webapp2.WSGIApplication(main_handlers, debug=True)
Example #17
def bootstrap_loopback_ips():
    """Adds 127.0.0.1 and ::1 to 'bots' IP whitelist.

  Useful on local dev server and in tests. Must not be used in production.

  Returns list of corresponding bot Identities.
  """
    # See api.py, AuthDB.verify_ip_whitelisted for IP -> Identity conversion.
    assert utils.is_local_dev_server()
    bootstrap_ip_whitelist(BOTS_IP_WHITELIST, ["127.0.0.1", "::1"], "Local bots")
    return [Identity(IDENTITY_BOT, "127.0.0.1"), Identity(IDENTITY_BOT, "0-0-0-0-0-0-0-1")]
Example #18
def create_application(debug):
  template.bootstrap()
  utils.set_task_queue_module('default')

  routes = [
      # Frontend pages. They return HTML.
      # Public pages.
      ('/', RootHandler),
      ('/stats', stats_gviz.StatsSummaryHandler),

      # User pages.
      ('/user/tasks', TasksHandler),
      ('/user/task/<task_id:[0-9a-fA-F]+>', TaskHandler),
      ('/user/task/<task_id:[0-9a-fA-F]+>/retry', TaskRetryHandler),
      ('/user/tasks/cancel', TaskCancelHandler),

      # Privileged user pages.
      ('/restricted/bots', BotsListHandler),
      ('/restricted/bot/<bot_id:[^/]+>', BotHandler),
      ('/restricted/bot/<bot_id:[^/]+>/delete', BotDeleteHandler),

      # Admin pages.
      ('/restricted/config', RestrictedConfigHandler),
      ('/restricted/upload/bot_config', UploadBotConfigHandler),
      ('/restricted/upload/bootstrap', UploadBootstrapHandler),

      # Mapreduce related urls.
      (r'/restricted/launch_mapreduce', RestrictedLaunchMapReduceJob),

      # The new APIs:
      ('/swarming/api/v1/stats/summary/<resolution:[a-z]+>',
        stats_gviz.StatsGvizSummaryHandler),
      ('/swarming/api/v1/stats/dimensions/<dimensions:.+>/<resolution:[a-z]+>',
        stats_gviz.StatsGvizDimensionsHandler),

      ('/_ah/mail/<to:.+>', EmailHandler),
      ('/_ah/warmup', WarmupHandler),
  ]
  routes = [webapp2.Route(*i) for i in routes]

  # If running on a local dev server, allow bots to connect without prior
  # groups configuration. Useful when running smoke test.
  if utils.is_local_dev_server():
    acl.bootstrap_dev_server_acls()

  # TODO(maruel): Split backend into a separate module. For now add routes here.
  routes.extend(handlers_backend.get_routes())
  routes.extend(handlers_api.get_routes())
  routes.extend(handlers_bot.get_routes())

  return webapp2.WSGIApplication(routes, debug=debug)
Example #19
def get_service_public_certificates(service_url):
  """Returns jsonish object with public certificates of a service.

  Service at |service_url| must have 'auth' component enabled (to serve
  the certificates).
  """
  cache_key = 'pub_certs:%s' % service_url
  certs = memcache.get(cache_key)
  if certs:
    return certs

  protocol = 'http://' if utils.is_local_dev_server() else 'https://'
  assert service_url.startswith(protocol)
  url = '%s/auth/api/v1/server/certificates' % service_url

  # Retry code is adapted from components/net.py. net.py can't be used directly
  # since it depends on components.auth (and dependency cycles between
  # components are bad).
  attempt = 0
  result = None
  while attempt < 4:
    if attempt:
      logging.info('Retrying...')
    attempt += 1
    logging.info('GET %s', url)
    try:
      result = urlfetch.fetch(
          url=url,
          method='GET',
          headers={'X-URLFetch-Service-Id': utils.get_urlfetch_service_id()},
          follow_redirects=False,
          deadline=5,
          validate_certificate=True)
    except (apiproxy_errors.DeadlineExceededError, urlfetch.Error) as e:
      # Transient network error or URL fetch service RPC deadline.
      logging.warning('GET %s failed: %s', url, e)
      continue
    # It MUST return 200 on success, it can't return 403, 404 or >=500.
    if result.status_code != 200:
      logging.warning(
          'GET %s failed, HTTP %d: %r', url, result.status_code, result.content)
      continue
    # Success.
    certs = json.loads(result.content)
    memcache.set(cache_key, certs, time=3600)
    return certs

  # All attempts failed, give up.
  msg = 'Failed to grab public certs from %s (HTTP code %s)' % (
      service_url, result.status_code if result else '???')
  raise CertificateError(msg, transient=True)
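The retry loop above caps the number of attempts, treats network exceptions and non-200 responses as retriable, and raises only after all attempts fail. A self-contained sketch of that shape, with a hypothetical fetch callable (returning a (status, body) tuple) standing in for urlfetch:

import logging

class FetchError(Exception):
  pass

def fetch_with_retries(fetch, max_attempts=4):
  """Calls fetch() until it returns (200, body) or attempts are exhausted."""
  last_status = None
  for attempt in range(max_attempts):
    if attempt:
      logging.info('Retrying...')
    try:
      status, body = fetch()
    except IOError as e:  # transient transport-level error
      logging.warning('fetch failed: %s', e)
      continue
    last_status = status
    if status != 200:
      logging.warning('fetch failed, HTTP %d', status)
      continue
    return body
  raise FetchError('All %d attempts failed (last HTTP code %s)' %
                   (max_attempts, last_status))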
Example #20
def get_access_token(scopes, service_account_key=None):
  """Returns an OAuth2 access token for a service account.

  If 'service_account_key' is specified, will use it to generate access token
  for corresponding @developer.gserviceaccount.com account. Otherwise will
  invoke app_identity.get_access_token(...) to use app's
  @appspot.gserviceaccount.com account.

  On dev server (if service_account_key is not passed or empty) reads the token
  from 'access_token' DevServerAccessToken entity.

  Args:
    scopes: the requested API scope string, or a list of strings.
    service_account_key: optional instance of ServiceAccountKey.

  Returns:
    Tuple (access token, expiration time in seconds since the epoch). The token
    should be valid for at least 5 minutes. It will be cached across multiple
    calls using memcache (i.e. a get_access_token call can be considered cheap).

  Raises:
    AccessTokenError on errors.
  """
  if service_account_key:
    # Empty private_key_id probably means that the app is not configured yet.
    if not service_account_key.private_key_id:
      # On dev server fallback to reading hardcoded token from the datastore.
      if utils.is_local_dev_server():
        return _get_dev_server_token()
      raise AccessTokenError('Service account secret key is not initialized')
    return _get_jwt_based_token(scopes, service_account_key)

  # app_identity.get_access_token returns nonsense on dev server.
  if utils.is_local_dev_server():
    return _get_dev_server_token()

  # Running on real GAE, and no secret key is passed -> app_identity API.
  return app_identity.get_access_token(scopes)
Example #21
def create_backend_app():  # pragma: no cover
  """Returns WSGI app for backend."""
  routes = handlers.get_backend_routes() + swarming.get_routes()
  app = webapp2.WSGIApplication(routes, debug=utils.is_local_dev_server())
  gae_ts_mon.initialize(app, cron_module='backend')
  gae_ts_mon.register_global_metrics([
      metrics.CURRENTLY_PENDING,
      metrics.CURRENTLY_RUNNING,
      metrics.LEASE_LATENCY,
      metrics.SCHEDULING_LATENCY,
  ])
  gae_ts_mon.register_global_metrics_callback(
      'send_metrics', metrics.send_all_metrics)
  return app
Example #22
  def post(self, app_id):
    # On local dev server |app_id| may use @localhost:8080 to specify where
    # app is running.
    custom_host = None
    if utils.is_local_dev_server():
      app_id, _, custom_host = app_id.partition('@')

    # Generate an opaque ticket that would be passed back to /link_replica.
    # /link_replica will verify HMAC tag and will ensure the request came from
    # application with ID |app_id|.
    ticket = LinkTicketToken.generate([], {'app_id': app_id})

    # ServiceLinkTicket contains information that is needed for Replica
    # to figure out how to contact Primary.
    link_msg = replication_pb2.ServiceLinkTicket()
    link_msg.primary_id = app_identity.get_application_id()
    link_msg.primary_url = self.request.host_url
    link_msg.generated_by = auth.get_current_identity().to_bytes()
    link_msg.ticket = ticket

    # Special case for dev server to simplify local development.
    if custom_host:
      assert utils.is_local_dev_server()
      host = 'http://%s' % custom_host
    else:
      # Use same domain as auth_service. Usually it's just appspot.com.
      current_hostname = app_identity.get_default_version_hostname()
      domain = current_hostname.partition('.')[2]
      naked_app_id = app_id
      if ':' in app_id:
        naked_app_id = app_id[app_id.find(':')+1:]
      host = 'https://%s.%s' % (naked_app_id, domain)

    # URL to a handler on Replica that initiates Replica <-> Primary handshake.
    url = '%s/auth/link?t=%s' % (
        host, tokens.base64_encode(link_msg.SerializeToString()))
    self.send_response({'url': url}, http_code=201)
Example #23
  def post(self):
    job_id = self.request.get('job_id')
    assert job_id in mapreduce_jobs.MAPREDUCE_JOBS
    # Do not use 'backend' module when running from dev appserver. Mapreduce
    # generates URLs that are incompatible with dev appserver URL routing when
    # using custom modules.
    success = utils.enqueue_task(
        url='/internal/taskqueue/mapreduce/launch/%s' % job_id,
        queue_name=mapreduce_jobs.MAPREDUCE_TASK_QUEUE,
        use_dedicated_module=not utils.is_local_dev_server())
    # New tasks should show up on the status page.
    if success:
      self.redirect('/restricted/mapreduce/status')
    else:
      self.abort(500, 'Failed to launch the job')
Example #24
def bootstrap():
  """Adds 127.0.0.1 as a whitelisted IP when testing."""
  if not utils.is_local_dev_server() or auth.is_replica():
    return

  # Allow local bots full access.
  bots = auth.bootstrap_loopback_ips()
  auth.bootstrap_group(
      FULL_ACCESS_GROUP, bots, 'Can read and write from/to Isolate')

  # Add a fake admin for local dev server.
  auth.bootstrap_group(
      auth.ADMIN_GROUP,
      [auth.Identity(auth.IDENTITY_USER, '*****@*****.**')],
      'Users that can manage groups')
Example #25
def _get_dev_server_token():
  """Reads token from DevServerAccessToken entity."""
  assert utils.is_local_dev_server()
  token = DevServerAccessToken.get_or_insert('access_token')

  # Dump the token URL to the log, so that it is easy to find and change it.
  edit_url = 'http://localhost:8000/datastore/edit/%s' % token.key.urlsafe()
  logging.info('Using token from %s', edit_url)

  if not token.access_token:
    raise AccessTokenError(
        'Dev server access token is not initialized: %s' % edit_url)

  # Fake expiration time as 5 min from now.
  return token.access_token, utils.time_time() + 300
Example #26
def get_access_token():  # pragma: no cover
  """Returns OAuth token to use when talking to Gitiles servers."""
  # On real GAE use app service account.
  if not utils.is_local_dev_server():
    return app_identity.get_access_token(
        ['https://www.googleapis.com/auth/gerritcodereview'])[0]
  # On dev server allow custom tokens loaded from local_dev_config. Use 'imp'
  # because dev_appserver tries to emulate app sandbox and hacks 'import' to
  # respect 'skip_files:' section in app.yaml.
  try:
    import imp
    local_dev_config = imp.load_source(
        'local_dev_config', 'local_dev_config.py')
    # Copy your chrome-internal .netrc token there.
    return local_dev_config.GITILES_OAUTH_TOKEN
  except (ImportError, IOError):
    return 'fake_token'
Example #27
def bootstrap_dev_server_acls():
  """Adds localhost to IP whitelist and Swarming groups."""
  assert utils.is_local_dev_server()
  if auth.is_replica():
    return

  bots = auth.bootstrap_loopback_ips()
  auth.bootstrap_group(BOTS_GROUP, bots, 'Swarming bots')
  auth.bootstrap_group(USERS_GROUP, bots, 'Swarming users')

  # Add a swarming admin. [email protected] is used in
  # server_smoke_test.py
  admin = auth.Identity(auth.IDENTITY_USER, '*****@*****.**')
  auth.bootstrap_group(ADMINS_GROUP, [admin], 'Swarming administrators')

  # Add an instance admin (for easier manual testing when running dev server).
  auth.bootstrap_group(
      auth.ADMIN_GROUP,
      [auth.Identity(auth.IDENTITY_USER, '*****@*****.**')],
      'Users that can manage groups')
Example #28
def publish_authdb_change(state):
  """Publishes AuthDB change notification to the topic.

  Args:
    state: AuthReplicationState with version info.
  """
  if utils.is_local_dev_server():
    return

  msg = replication_pb2.ReplicationPushRequest()
  msg.revision.primary_id = app_identity.get_application_id()
  msg.revision.auth_db_rev = state.auth_db_rev
  msg.revision.modified_ts = utils.datetime_to_timestamp(state.modified_ts)

  blob = msg.SerializeToString()
  key_name, sig = signature.sign_blob(blob)

  pubsub.publish(topic_name(), blob, {
    'X-AuthDB-SigKey-v1': key_name,
    'X-AuthDB-SigVal-v1': base64.b64encode(sig),
  })
Example #29
def oauth_authentication(request):
    """OAuth2 based authentication via oauth.get_current_user()."""
    if not request.headers.get("Authorization"):
        return None
    if not utils.is_local_dev_server():
        return api.extract_oauth_caller_identity()

    # OAuth2 library is mocked on dev server to return some nonsense. Use (slow,
    # but real) OAuth2 API endpoint instead to validate access_token. It is also
    # what Cloud Endpoints does on a local server. For simplicity, ignore client_id
    # on dev server.
    header = request.headers["Authorization"].split(" ", 1)
    if len(header) != 2 or header[0] not in ("OAuth", "Bearer"):
        raise api.AuthenticationError("Invalid authorization header")

    # Adapted from endpoints/users_id_tokens.py, _set_bearer_user_vars_local.
    base_url = "https://www.googleapis.com/oauth2/v1/tokeninfo"
    result = urlfetch.fetch(
        url="%s?%s" % (base_url, urllib.urlencode({"access_token": header[1]})),
        follow_redirects=False,
        validate_certificate=True,
    )
    if result.status_code != 200:
        try:
            error = json.loads(result.content)["error_description"]
        except (KeyError, ValueError):
            error = repr(result.content)
        raise api.AuthenticationError("Failed to validate the token: %s" % error)

    token_info = json.loads(result.content)
    if "email" not in token_info:
        raise api.AuthenticationError("Token doesn't include an email address")
    if not token_info.get("verified_email"):
        raise api.AuthenticationError("Token email isn't verified")

    email = token_info["email"]
    try:
        return model.Identity(model.IDENTITY_USER, email)
    except ValueError:
        raise api.AuthenticationError("Unsupported user email: %s" % email)
def get_routes():
    # Use special syntax on dev server to specify where app is running.
    app_id_re = r'[0-9a-zA-Z_\-\:\.]*'
    if utils.is_local_dev_server():
        app_id_re += r'(@localhost:[0-9]+)?'

    # Auth service extends the basic UI and API provided by Auth component.
    routes = []
    routes.extend(rest_api.get_rest_api_routes())
    routes.extend(ui.get_ui_routes())
    routes.extend([
        # UI routes.
        webapp2.Route(r'/',
                      webapp2.RedirectHandler,
                      defaults={'_uri': '/auth/groups'}),
        webapp2.Route(r'/_ah/mail/<to:.+>', EmailHandler),
        webapp2.Route(r'/_ah/warmup', WarmupHandler),

        # API routes.
        webapp2.Route(
            r'/auth_service/api/v1/authdb/revisions/<rev:(latest|[0-9]+)>',
            AuthDBRevisionsHandler),
        webapp2.Route(
            r'/auth_service/api/v1/authdb/subscription/authorization',
            AuthDBSubscriptionAuthHandler),
        webapp2.Route(r'/auth_service/api/v1/importer/config',
                      ImporterConfigHandler),
        webapp2.Route(
            r'/auth_service/api/v1/importer/ingest_tarball/<name:.+>',
            ImporterIngestTarballHandler),
        webapp2.Route(r'/auth_service/api/v1/internal/link_replica',
                      LinkRequestHandler),
        webapp2.Route(r'/auth_service/api/v1/services', ServiceListingHandler),
        webapp2.Route(
            r'/auth_service/api/v1/services/<app_id:%s>/linking_url' %
            app_id_re, GenerateLinkingURL),
    ])
    return routes
Example #31
    def calculate_etag(self, path):
        """Calculates the hash of the given static file or grabs it from cache.

    Returns:
      Tuple (etag, the body of the file if it was read)
    """
        version = utils.get_app_version()

        # Tainted versions are frequently overwritten, so do not cache their static
        # files for too long. The same applies to the dev server.
        expiration_sec = 3600
        if '-tainted' in version or utils.is_local_dev_server():
            expiration_sec = 1

        key = '%s:%s' % (version, path)
        value = memcache.get(key, namespace='etag')
        if value:
            return value, None

        body = self.read_static(path)
        value = '"%s"' % hashlib.sha1(body).hexdigest()
        memcache.set(key, value, time=expiration_sec, namespace='etag')
        return value, body
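The handler above derives a strong ETag from the SHA-1 of the file body and keys the cache on the app version, so redeploys invalidate it. The hashing step in isolation (the surrounding double quotes are required by the HTTP ETag syntax):

import hashlib

def compute_etag(body):
  """Returns a strong ETag value for the given response body (bytes)."""
  return '"%s"' % hashlib.sha1(body).hexdigest()

assert compute_etag(b'hello') == compute_etag(b'hello')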
Example #32
def create_application():
    ereporter2.register_formatter()
    utils.set_task_queue_module('backend')
    template.bootstrap()

    # If running on a local dev server, allow bots to connect without prior
    # groups configuration. Useful when running smoke test.
    if utils.is_local_dev_server():
        acl.bootstrap_dev_server_acls()

    def is_enabled_callback():
        return config.settings().enable_ts_monitoring

    # App that serves HTML pages and old API.
    frontend_app = handlers_frontend.create_application(False)
    gae_ts_mon.initialize(frontend_app, is_enabled_fn=is_enabled_callback)

    # App that contains crons and task queues.
    backend_app = handlers_backend.create_application(False)
    gae_ts_mon.initialize(backend_app, is_enabled_fn=is_enabled_callback)

    # Local import, because it instantiates the mapreduce app.
    from mapreduce import main
    gae_ts_mon.initialize(main.APP, is_enabled_fn=is_enabled_callback)

    # TODO(maruel): Remove this once there is no known client anymore.
    api = endpoints.api_server([
        handlers_endpoints.swarming_api,
        # components.config endpoints for validation and configuring of luci-config
        # service URL.
        config.ConfigApi,
    ])

    event_mon_metrics.initialize()
    ts_mon_metrics.initialize()
    return frontend_app, api, backend_app, main.APP
Example #33
def create_application(debug):
    replication.configure_as_primary()
    rest_api.set_config_locked(config.is_remote_configured)

    # Configure UI appearance, add all custom tabs.
    ui.configure_ui(app_name='Auth Service',
                    ui_tabs=[
                        ui.GroupsHandler,
                        ui.ChangeLogHandler,
                        ServicesHandler,
                        ui.OAuthConfigHandler,
                        ui.IPWhitelistsHandler,
                        ConfigHandler,
                    ],
                    env_callback=get_additional_ui_environment)
    template.bootstrap({'auth_service': TEMPLATES_DIR})

    # Add a fake admin for local dev server.
    if utils.is_local_dev_server():
        auth.bootstrap_group(
            auth.ADMIN_GROUP,
            [auth.Identity(auth.IDENTITY_USER, '*****@*****.**')],
            'Users that can manage groups')
    return webapp2.WSGIApplication(get_routes(), debug=debug)
Example #34
def get_cas_service():
    """Factory method that returns configured CASService instance.

  If the service is not configured, returns None. Also acts as a mocking point
  for unit tests.
  """
    conf = config.cached()
    if not conf.cas_gs_path or not conf.cas_gs_temp:
        return None
    try:
        cloudstorage.validate_file_path(conf.cas_gs_path.rstrip('/'))
        cloudstorage.validate_file_path(conf.cas_gs_temp.rstrip('/'))
    except ValueError as err:
        logging.error("Invalid CAS config: %s", err)
        return None
    service_account_key = auth.ServiceAccountKey(
        client_email=conf.service_account_email,
        private_key=conf.service_account_pkey,
        private_key_id=conf.service_account_pkey_id)
    if utils.is_local_dev_server():  # pragma: no branch
        from . import hacks
        hacks.patch_cloudstorage_lib(service_account_key)
    return CASService(conf.cas_gs_path.rstrip('/'),
                      conf.cas_gs_temp.rstrip('/'), service_account_key)
Example #35
def fetch_file_async(url, oauth_scopes):
  """Fetches a file optionally using OAuth2 for authentication.

  Args:
    url: url to a file to fetch.
    oauth_scopes: list of OAuth scopes to use when generating access_token for
        accessing |url|, if not set or empty - do not use OAuth.

  Returns:
    Byte buffer with file's body.

  Raises:
    BundleImportError on fetch errors.
  """
  if utils.is_local_dev_server():
    protocols = ('http://', 'https://')
  else:
    protocols = ('https://',)
  assert url.startswith(protocols), url

  headers = {}
  if oauth_scopes:
    headers['Authorization'] = 'OAuth %s' % (
        app_identity.get_access_token(oauth_scopes)[0])

  ctx = ndb.get_context()
  result = yield ctx.urlfetch(
      url=url,
      method='GET',
      headers=headers,
      follow_redirects=False,
      deadline=5*60,
      validate_certificate=True)
  if result.status_code != 200:
    raise BundleFetchError(url, result.status_code, result.content)
  raise ndb.Return(result.content)
Example #36
def create_application(debug):
  replication.configure_as_primary()

  # Configure UI appearance, add all custom tabs.
  ui.configure_ui(
      app_name='Auth Service',
      ui_tabs=[
        # Standard tabs provided by auth component.
        ui.GroupsHandler,
        ui.OAuthConfigHandler,
        ui.IPWhitelistsHandler,
        # Additional tabs available only on auth service.
        ConfigHandler,
        ServicesHandler,
      ])
  template.bootstrap({'auth_service': TEMPLATES_DIR})

  # Add a fake admin for local dev server.
  if utils.is_local_dev_server():
    auth.bootstrap_group(
        auth.ADMIN_GROUP,
        [auth.Identity(auth.IDENTITY_USER, '*****@*****.**')],
        'Users that can manage groups')
  return webapp2.WSGIApplication(get_routes(), debug=debug)
Example #37
def _creds():
    """Get the correct credentials argument for this environment."""
    return (None if utils.is_local_dev_server() else
            client.service_account_credentials())
Example #38
import os
import sys
import webapp2

BASE_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, os.path.join(BASE_DIR, 'components', 'third_party'))

from components import ereporter2
from components import utils
from components import auth

import buildbot
# import milotic  # LUCI Endpoints

main_file = ('html/main.html'
             if utils.is_local_dev_server() else 'vulcanized_main.html')
path = os.path.join(os.path.dirname(__file__), main_file)


@utils.cache
def get_main_page():
    with open(path, 'r') as f:
        return f.read()


class MainPage(auth.AuthenticatingHandler):
    @auth.public
    def get(self):
        self.response.headers['Strict-Transport-Security'] = (
            'max-age=10886400; includeSubDomains')
        self.response.headers['Content-Type'] = 'text/html'
Example #39
from components import utils


# Logs prefix.
PREFIX = 'Stats: '


# Supported resolutions. In theory, 'weeks' and 'months' could be added one day
# if desired.
RESOLUTIONS = ('days', 'hours', 'minutes')


# Number of minutes to ignore because they are too fresh. This is done so that
# eventual log consistency doesn't have to be managed explicitly. On the dev
# server, there's no eventual inconsistency so process up to the last minute.
TOO_RECENT = 5 if not utils.is_local_dev_server() else 1


# One handled HTTP request and the associated statistics if any.
StatsEntry = collections.namedtuple('StatsEntry', ('request', 'entries'))


class StatisticsFramework(object):
  def __init__(
      self, root_key_id, snapshot_cls, generate_snapshot,
      max_backtrack_days=5, max_minutes_per_process=120):
    """Creates an instance to do bookkeeping of statistics.

    Arguments:
    - root_key_id: Root key id of the entity to use for transaction. It must be
          unique to the instance and application.
Example #40
def become_replica(ticket, initiated_by):
  """Converts current service to a replica of a primary specified in a ticket.

  Args:
    ticket: replication_pb2.ServiceLinkTicket passed from a primary.
    initiated_by: Identity of a user that accepted linking request, for logging.

  Raises:
    ProtocolError in case the request to primary fails.
  """
  assert model.is_standalone()

  # On dev appserver emulate X-Appengine-Inbound-Appid header.
  headers = {'Content-Type': 'application/octet-stream'}
  protocol = 'https'
  if utils.is_local_dev_server():
    headers['X-Appengine-Inbound-Appid'] = app_identity.get_application_id()
    protocol = 'http'
  headers['X-URLFetch-Service-Id'] = utils.get_urlfetch_service_id()

  # Pass back the ticket for primary to verify it, tell the primary to use
  # default version hostname to talk to us.
  link_request = replication_pb2.ServiceLinkRequest()
  link_request.ticket = ticket.ticket
  link_request.replica_url = (
      '%s://%s' % (protocol, app_identity.get_default_version_hostname()))
  link_request.initiated_by = initiated_by.to_bytes()

  # Primary will look at X-Appengine-Inbound-Appid and compare it to what's in
  # the ticket.
  try:
    result = urlfetch.fetch(
        url='%s/auth_service/api/v1/internal/link_replica' % ticket.primary_url,
        payload=link_request.SerializeToString(),
        method='POST',
        headers=headers,
        follow_redirects=False,
        deadline=30,
        validate_certificate=True)
  except urlfetch.Error as exc:
    raise ProtocolError(
        replication_pb2.ServiceLinkResponse.TRANSPORT_ERROR,
        'URLFetch error (%s): %s' % (exc.__class__.__name__, exc))

  # The protobuf-based protocol does not use HTTP status codes: the handler always
  # replies with HTTP 200, providing error details (if needed) in the serialized body.
  # So any other status code here means there was a transport level error.
  if result.status_code != 200:
    raise ProtocolError(
        replication_pb2.ServiceLinkResponse.TRANSPORT_ERROR,
        'Request to the primary failed with HTTP %d.' % result.status_code)

  link_response = replication_pb2.ServiceLinkResponse.FromString(result.content)
  if link_response.status != replication_pb2.ServiceLinkResponse.SUCCESS:
    message = LINKING_ERRORS.get(
        link_response.status,
        'Request to the primary failed with status %d.' % link_response.status)
    raise ProtocolError(link_response.status, message)

  # Become replica. Auth DB will be overwritten on a first push from Primary.
  state = model.AuthReplicationState(
      key=model.replication_state_key(),
      primary_id=ticket.primary_id,
      primary_url=ticket.primary_url)
  state.put()
Example #41
def create_backend_app():  # pragma: no cover
    """Returns WSGI app for backend."""
    return webapp2.WSGIApplication(handlers.get_backend_routes(),
                                   debug=utils.is_local_dev_server())
Example #42
def create_wsgi_application():
    ui.configure()
    routes = []
    routes.extend(handlers.get_frontend_routes())
    routes.extend(handlers.get_backend_routes())
    return webapp2.WSGIApplication(routes, debug=utils.is_local_dev_server())
Example #43
def authenticated_request_async(url, method='GET', payload=None, params=None):
  """Sends an authenticated JSON API request, returns deserialized response.

  Raises:
    TokenCreationError if request failed or response is malformed.
    TokenAuthorizationError on HTTP 401 or 403 response from service.
  """
  scope = 'https://www.googleapis.com/auth/userinfo.email'
  access_token = get_access_token(scope)[0]
  headers = {
    'Accept': 'application/json; charset=utf-8',
    'Authorization': 'Bearer %s' % access_token,
  }

  if payload is not None:
    assert method in ('CREATE', 'POST', 'PUT'), method
    headers['Content-Type'] = 'application/json; charset=utf-8'
    payload = utils.encode_to_json(payload)

  if utils.is_local_dev_server():
    protocols = ('http://', 'https://')
  else:
    protocols = ('https://',)
  assert url.startswith(protocols) and '?' not in url, url
  if params:
    url += '?' + urllib.urlencode(params)

  try:
    res = yield _urlfetch_async(
        url=url,
        payload=payload,
        method=method,
        headers=headers,
        follow_redirects=False,
        deadline=10,
        validate_certificate=True)
  except (apiproxy_errors.DeadlineExceededError, urlfetch.Error) as e:
    raise exceptions.TokenCreationError(str(e))

  if res.status_code == 404:
    logging.warning('Token server HTTP %d: %s', res.status_code, res.content)
    raise exceptions.NotFoundError(
        'HTTP %d: %s' % (res.status_code, res.content))

  if res.status_code in (401, 403):
    logging.error('Token server HTTP %d: %s', res.status_code, res.content)
    raise exceptions.TokenAuthorizationError(
        'HTTP %d: %s' % (res.status_code, res.content))

  if res.status_code >= 300:
    logging.error('Token server HTTP %d: %s', res.status_code, res.content)
    raise exceptions.TokenCreationError(
        'HTTP %d: %s' % (res.status_code, res.content))

  try:
    content = res.content
    if content.startswith(")]}'\n"):
      content = content[5:]
    json_res = json.loads(content)
  except ValueError as e:
    raise exceptions.TokenCreationError('Bad JSON response: %s' % e)
  raise ndb.Return(json_res)
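The response parsing above strips the )]}' anti-XSSI prefix that some Google JSON APIs prepend before handing the rest to json.loads. That step in isolation, as a small sketch:

import json

_XSSI_PREFIX = ")]}'\n"

def parse_json_response(content):
  """Parses a JSON body, tolerating an optional anti-XSSI prefix."""
  if content.startswith(_XSSI_PREFIX):
    content = content[len(_XSSI_PREFIX):]
  return json.loads(content)

assert parse_json_response(")]}'\n{\"ok\": true}") == {'ok': True}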
Example #44
def create_backend_application():
    app = webapp2.WSGIApplication(handlers.get_backend_routes(),
                                  debug=utils.is_local_dev_server())
    utils.report_memory(app)
    return app
Example #45
def _fetch_pools_config():
    """Loads pools.cfg and parses it into a _PoolsCfg instance."""
    # store_last_good=True tells config components to update the config file
    # in a cron job. Here we just read from the datastore. In case it's the first
    # call ever, or config doesn't exist, it returns (None, None).
    rev, cfg = config.get_self_config(POOLS_CFG_FILENAME,
                                      pools_pb2.PoolsCfg,
                                      store_last_good=True)
    if not cfg:
        if _LOCAL_FAKE_CONFIG:
            assert utils.is_local_dev_server()
            return _LOCAL_FAKE_CONFIG
        logging.error('There is no pools.cfg, no task is accepted')
        return _PoolsCfg({}, (None, None))

    # The config is already validated at this point.

    ctx = validation.Context.logging()
    template_map = _resolve_task_template_inclusions(ctx, cfg.task_template)
    deployment_map = _resolve_task_template_deployments(
        ctx, template_map, cfg.task_template_deployment)
    bot_monitorings = _resolve_bot_monitoring(ctx, cfg.bot_monitoring)

    default_isolate = default_cipd = None
    if cfg.HasField('default_external_services'):
        ext = cfg.default_external_services
        default_isolate = IsolateServer(ext.isolate.server,
                                        ext.isolate.namespace)
        default_cipd = CipdServer(ext.cipd.server,
                                  ext.cipd.client_package.package_name,
                                  ext.cipd.client_package.version)

    pools = {}
    for msg in cfg.pool:
        for name in msg.name:
            pools[name] = init_pool_config(
                name=name,
                rev=rev,
                scheduling_users=frozenset(
                    _to_ident(u) for u in msg.schedulers.user),
                scheduling_groups=frozenset(msg.schedulers.group),
                trusted_delegatees={
                    _to_ident(d.peer_id):
                    TrustedDelegatee(peer_id=_to_ident(d.peer_id),
                                     required_delegation_tags=frozenset(
                                         d.require_any_of.tag))
                    for d in msg.schedulers.trusted_delegation
                },
                service_accounts=frozenset(msg.allowed_service_account),
                service_accounts_groups=tuple(
                    msg.allowed_service_account_group),
                realm=msg.realm if msg.realm else None,
                default_task_realm=(msg.default_task_realm
                                    if msg.default_task_realm else None),
                enforced_realm_permissions=frozenset(
                    msg.enforced_realm_permissions),
                task_template_deployment=_resolve_deployment(
                    ctx, msg, template_map, deployment_map),
                bot_monitoring=bot_monitorings.get(name),
                external_schedulers=_resolve_external_schedulers(
                    msg.external_schedulers),
                default_isolate=default_isolate,
                default_cipd=default_cipd)
    return _PoolsCfg(pools, (default_isolate, default_cipd))
Example #46
def create_backend_app():
    """Returns WSGI app that serves task queue and cron handlers."""
    routes = []
    routes.extend(cas.get_backend_routes())
    routes.extend(cipd.get_backend_routes())
    return webapp2.WSGIApplication(routes, debug=utils.is_local_dev_server())
Example #47
def create_frontend_app():
    """Returns WSGI app that serves HTML pages."""
    routes = [webapp2.Route(r'/', MainHandler)]
    routes.extend(cipd.get_frontend_routes())
    return webapp2.WSGIApplication(routes, debug=utils.is_local_dev_server())
Example #48
def create_endpoints_app():
    """Returns WSGI app that serves cloud endpoints requests."""
    apis = [buildbot.BuildbotApi]
    return endpoints.api_server(apis,
                                restricted=not utils.is_local_dev_server())
Example #49
def initialize_request_auth(remote_address, headers):
    """Grabs caller identity and initializes request local authentication context.

  Called before executing a cloud endpoints method. May raise AuthorizationError
  or AuthenticationError exceptions.
  """
    config.ensure_configured()

    # Endpoints library always does authentication before invoking a method. Just
    # grab the result of that authentication: it doesn't make any RPCs.
    current_user = endpoints.get_current_user()

    # Cloud Endpoints auth on the local dev server works much better than the OAuth
    # library, since endpoints uses a real authentication backend, while the oauth.*
    # API is mocked. It makes API Explorer work with local apps. Always use Cloud
    # Endpoints auth on the dev server. It has a side effect: client_id whitelist
    # is ignored, there's no way to get client_id on dev server via endpoints API.
    identity = None
    if utils.is_local_dev_server():
        if current_user:
            identity = model.Identity(model.IDENTITY_USER,
                                      current_user.email())
        else:
            identity = model.Anonymous
    else:
        # Use OAuth API directly to grab both client_id and email and validate them.
        # endpoints.get_current_user() itself is implemented in terms of OAuth API,
        # with some additional code to handle id_token that we currently skip (see
        # TODO at the top of this file). OAuth API calls below will just reuse
        # cached values without making any additional RPCs.
        if headers.get('Authorization'):
            # Raises error for forbidden client_id, never returns None or Anonymous.
            identity = api.extract_oauth_caller_identity(
                extra_client_ids=[endpoints.API_EXPLORER_CLIENT_ID])
            # Double check that we used same cached values as endpoints did.
            assert identity and not identity.is_anonymous, identity
            assert current_user is not None
            assert identity.name == current_user.email(), (
                identity.name, current_user.email())
        else:
            # 'Authorization' header is missing. Endpoints still could have found
            # id_token in GET request parameters. Ignore it for now, the code is
            # complicated without it already.
            if current_user is not None:
                raise api.AuthenticationError(
                    'Unsupported authentication method')
            identity = model.Anonymous

    # Thread local (and request local) auth state.
    auth_context = api.get_request_cache()

    # Extract caller host name from host token header, if present and valid.
    tok = headers.get(host_token.HTTP_HEADER)
    if tok:
        validated_host = host_token.validate_host_token(tok)
        if validated_host:
            auth_context.set_current_identity_host(validated_host)

    # Verify IP is whitelisted and authenticate requests from bots. It raises
    # AuthorizationError if IP is not allowed.
    assert identity is not None
    assert remote_address
    ip = ipaddr.ip_from_string(remote_address)
    auth_context.set_current_identity_ip(ip)
    auth_context.set_current_identity(api.verify_ip_whitelisted(identity, ip))
Example #50
def endpoints_api(name,
                  version,
                  auth_level=None,
                  allowed_client_ids=None,
                  **kwargs):
    """Same as @endpoints.api but tweaks default auth related properties.

  By default API marked with this decorator will use same authentication scheme
  as non-endpoints request handlers (i.e. fetch a whitelist of OAuth client_id's
  from the datastore, recognize service accounts, etc.), disabling client_id
  checks performed by Cloud Endpoints frontend (and doing them on the backend,
  see 'initialize_auth' below).

  Using service accounts with vanilla Cloud Endpoints auth is somewhat painful:
  every service account should be whitelisted in the 'allowed_client_ids' list
  in the source code of the application (when calling @endpoints.api). By moving
  client_id checks to the backend we can support saner logic.
  """
    # 'audiences' is used with id_token auth, it's not supported yet.
    assert 'audiences' not in kwargs, 'Not supported'

    # On prod, make sure Cloud Endpoints frontend validates OAuth tokens for us.
    # On dev instances we will validate them ourselves to support custom token
    # validation endpoint.
    if auth_level is not None:
        if utils.is_local_dev_server() or utils.is_dev():
            # AUTH_LEVEL.NONE: Frontend authentication will be skipped. If
            # authentication is desired, it will need to be performed by the backend.
            auth_level = endpoints.AUTH_LEVEL.NONE
        else:
            # AUTH_LEVEL.OPTIONAL: Authentication is optional. If authentication
            # credentials are supplied they must be valid. Backend will be called if
            # the request contains valid authentication credentials or no
            # authentication credentials.
            auth_level = endpoints.AUTH_LEVEL.OPTIONAL

    # We love API Explorer.
    if allowed_client_ids is None:
        allowed_client_ids = endpoints.SKIP_CLIENT_ID_CHECK
    if allowed_client_ids != endpoints.SKIP_CLIENT_ID_CHECK:
        allowed_client_ids = sorted(
            set(allowed_client_ids) | set([endpoints.API_EXPLORER_CLIENT_ID]))

    # Someone was looking for job security here:
    # - api() returns _ApiDecorator class instance.
    # - One of the following is done:
    #   - _ApiDecorator.__call__() is called with the remote.Service class as
    #     argument.
    #   - api_class() is explicitly called which returns a function, which is then
    #     called with the  remote.Service class as argument.
    api_decorator = endpoints.api(name,
                                  version,
                                  auth_level=auth_level,
                                  allowed_client_ids=allowed_client_ids,
                                  **kwargs)

    def fn(cls):
        if not cls.all_remote_methods():
            raise TypeError(
                'Service %s must have at least one auth.endpoints_method method'
                % name)
        for method, func in cls.all_remote_methods().items():
            if func and not api.is_decorated(
                    func.remote._RemoteMethodInfo__method):
                raise TypeError(
                    'Method \'%s\' of \'%s\' is not protected by @require or @public '
                    'decorator' % (method, name))
        return cls

    # Monkey patch api_decorator to make 'api_class' to return wrapped decorator.
    orig = api_decorator.api_class

    def patched_api_class(*args, **kwargs):
        wrapper = orig(*args, **kwargs)
        return lambda cls: fn(wrapper(cls))

    api_decorator.api_class = patched_api_class

    return api_decorator
Example #51
def create_html_app():  # pragma: no cover
    """Returns WSGI app that serves HTML pages."""
    return webapp2.WSGIApplication(handlers.get_frontend_routes(),
                                   debug=utils.is_local_dev_server())
Example #52
def request_async(url,
                  method='GET',
                  payload=None,
                  params=None,
                  headers=None,
                  scopes=None,
                  service_account_key=None,
                  delegation_token=None,
                  deadline=None,
                  max_attempts=None):
    """Sends a REST API request, returns raw unparsed response.

  Retries the request on transient errors for up to |max_attempts| times.

  Args:
    url: url to send the request to.
    method: HTTP method to use, e.g. GET, POST, PUT.
    payload: raw data to put in the request body.
    params: dict with query GET parameters (i.e. ?key=value&key=value).
    headers: additional request headers.
    scopes: OAuth2 scopes for the access token, or None to skip auth.
    service_account_key: auth.ServiceAccountKey with credentials.
    delegation_token: delegation token returned by auth.delegate.
    deadline: deadline for a single attempt (10 sec by default).
    max_attempts: how many times to retry on errors (4 times by default).

  Returns:
    Buffer with raw response.

  Raises:
    NotFoundError on 404 response.
    AuthError on 401 or 403 response.
    Error on any other non-transient error.
  """
    deadline = 10 if deadline is None else deadline
    max_attempts = 4 if max_attempts is None else max_attempts

    if utils.is_local_dev_server():
        protocols = ('http://', 'https://')
    else:
        protocols = ('https://', )
    assert url.startswith(protocols) and '?' not in url, url
    if params:
        url += '?' + urllib.urlencode(params)

    if scopes:
        access_token = auth.get_access_token(scopes, service_account_key)[0]
        headers = (headers or {}).copy()
        headers['Authorization'] = 'Bearer %s' % access_token

    if delegation_token:
        if isinstance(delegation_token, auth.DelegationToken):
            delegation_token = delegation_token.token
        assert isinstance(delegation_token, basestring)
        headers[delegation.HTTP_HEADER] = delegation_token

    if payload is not None:
        assert isinstance(payload, str), type(payload)
        assert method in ('CREATE', 'POST', 'PUT'), method

    attempt = 0
    response = None
    last_status_code = None
    while attempt < max_attempts:
        if attempt:
            logging.info('Retrying...')
        attempt += 1
        logging.info('%s %s', method, url)
        try:
            response = yield urlfetch_async(url=url,
                                            payload=payload,
                                            method=method,
                                            headers=headers or {},
                                            follow_redirects=False,
                                            deadline=deadline,
                                            validate_certificate=True)
        except (apiproxy_errors.DeadlineExceededError, urlfetch.Error) as e:
            # Transient network error or URL fetch service RPC deadline.
            logging.warning('%s %s failed: %s', method, url, e)
            continue

        last_status_code = response.status_code

        # Transient error on the other side.
        if is_transient_error(response, url):
            logging.warning('%s %s failed with HTTP %d: %r', method, url,
                            response.status_code, response.content)
            continue

        # Non-transient error.
        if 300 <= response.status_code < 500:
            logging.warning('%s %s failed with HTTP %d: %r', method, url,
                            response.status_code, response.content)
            raise _error_class_for_status(response.status_code)(
                'Failed to call %s: HTTP %d' % (url, response.status_code),
                response.status_code, response.content)

        # Success. Beware of large responses.
        if len(response.content) > 1024 * 1024:
            logging.warning('Response size: %.1f KiB',
                            len(response.content) / 1024.0)
        raise ndb.Return(response.content)

    raise _error_class_for_status(last_status_code)(
        'Failed to call %s after %d attempts' % (url, max_attempts),
        response.status_code if response else None,
        response.content if response else None)
Example #53
def push_to_replica(replica_url, auth_db_blob, key_name, sig):
    """Pushes |auth_db_blob| to a replica via URLFetch POST.

  Args:
    replica_url: root URL of a replica (i.e. https://<host>).
    auth_db_blob: binary blob with serialized Auth DB.
    key_name: name of a RSA key used to generate a signature.
    sig: base64 encoded signature of |auth_db_blob|.

  Returns:
    Tuple:
      AuthDB revision reported by the replica (as replication_pb2.AuthDBRevision).
      Auth component version used by the replica (see components.auth.version).

  Raises:
    FatalReplicaUpdateError if replica rejected the push.
    TransientReplicaUpdateError if push should be retried.
  """
    replica_url = replica_url.rstrip('/')
    logging.info('Updating replica %s', replica_url)
    protocol = 'http://' if utils.is_local_dev_server() else 'https://'
    assert replica_url.startswith(protocol)

    # Pass signature via the headers.
    headers = {
        'Content-Type': 'application/octet-stream',
        'X-URLFetch-Service-Id': utils.get_urlfetch_service_id(),
        'X-AuthDB-SigKey-v1': key_name,
        'X-AuthDB-SigVal-v1': sig,
    }

    # On the dev appserver, emulate the X-Appengine-Inbound-Appid header.
    if utils.is_local_dev_server():
        headers['X-Appengine-Inbound-Appid'] = (
            app_identity.get_application_id())

    # 'follow_redirects' must be False for 'X-Appengine-Inbound-Appid' to work.
    # The 70 sec deadline corresponds to the 60 sec GAE foreground request
    # deadline plus 10 seconds to account for URL Fetch's own lag.
    ctx = ndb.get_context()
    result = yield ctx.urlfetch(url=replica_url +
                                '/auth/api/v1/internal/replication',
                                payload=auth_db_blob,
                                method='POST',
                                headers=headers,
                                follow_redirects=False,
                                deadline=70,
                                validate_certificate=True)

    # Any transport level error is transient.
    if result.status_code != 200:
        raise TransientReplicaUpdateError(
            'Push request failed with HTTP code %d' % result.status_code)

    # Deserialize the response.
    cls = replication_pb2.ReplicationPushResponse
    response = cls.FromString(result.content)
    if not response.HasField('status'):
        raise FatalReplicaUpdateError('Incomplete response, status is missing')

    # Convert errors to exceptions.
    if response.status == cls.TRANSIENT_ERROR:
        raise TransientReplicaUpdateError('Transient error (error code %d).' %
                                          response.error_code)
    if response.status == cls.FATAL_ERROR:
        raise FatalReplicaUpdateError('Fatal error (error code %d).' %
                                      response.error_code)
    if response.status not in (cls.APPLIED, cls.SKIPPED):
        raise FatalReplicaUpdateError('Unexpected response status: %d' %
                                      response.status)

    # The replica applied the update, so current_revision should be set.
    if not response.HasField('current_revision'):
        raise FatalReplicaUpdateError(
            'Incomplete response, current_revision is missing')

    # Extract auth component version used by replica if proto is recent enough.
    auth_code_version = None
    if response.HasField('auth_code_version'):
        auth_code_version = response.auth_code_version

    raise ndb.Return((response.current_revision, auth_code_version))
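
A hedged fan-out sketch (not part of the original snippet): assuming push_to_replica is an ndb tasklet as annotated above, several replicas can be updated in parallel by yielding a list of futures.

# Hypothetical fan-out: push the same signed blob to many replicas at once and
# collect the (current_revision, auth_code_version) pairs they return.
@ndb.tasklet
def push_to_replicas_async(replica_urls, auth_db_blob, key_name, sig):
    futures = [
        push_to_replica(url, auth_db_blob, key_name, sig)
        for url in replica_urls
    ]
    results = yield futures  # ndb waits for all futures concurrently
    raise ndb.Return(results)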
Exemple #54
0
def create_backend_application():
    return webapp2.WSGIApplication(handlers.get_backend_routes(),
                                   debug=utils.is_local_dev_server())
Exemple #55
0
def create_html_app():  # pragma: no cover
    """Returns WSGI app that serves HTML pages."""
    app = webapp2.WSGIApplication(handlers.get_frontend_routes(),
                                  debug=utils.is_local_dev_server())
    gae_ts_mon.initialize(app, cron_module='backend')
    return app
Exemple #56
0
def check_oauth_access_token(headers):
    """Verifies the access token of the current request.

  This function uses slightly different strategies for prod, dev and local
  environments:
    * In prod it always requires real OAuth2 tokens, validated by the GAE
      OAuth2 API.
    * On the local dev server it uses URL Fetch and the prod token info
      endpoint.
    * On '-dev' instances or on the dev server it can also fall back to a
      custom token info endpoint, defined in the AuthDevConfig datastore
      entity. This is useful to "stub out" authentication when running
      integration or load tests.

  In addition to checking the correctness of the OAuth token, this function
  also verifies that the client_id associated with the token is whitelisted in
  the auth config.

  The client_id check is skipped on the local dev server and when using a
  custom token info endpoint (e.g. on '-dev' instances).

  Args:
    headers: a dict with request headers.

  Returns:
    Identity of the caller in case the request was successfully validated.
    Always 'user:...', never anonymous.

  Raises:
    AuthenticationError in case the access token is invalid.
    AuthorizationError in case the access token is not allowed.
  """
    header = headers.get('Authorization')
    if not header:
        raise AuthenticationError('No "Authorization" header')

    # Non-development instances always use real OAuth API.
    if not utils.is_local_dev_server() and not utils.is_dev():
        return extract_oauth_caller_identity()

    # The OAuth2 library is mocked on the dev server to return nonsense. Use the
    # (slow, but real) OAuth2 API endpoint instead to validate the access token.
    # This is also what Cloud Endpoints does on a local server.
    if utils.is_local_dev_server():
        auth_call = lambda: dev_oauth_authentication(header,
                                                     TOKEN_INFO_ENDPOINT)
    else:
        auth_call = extract_oauth_caller_identity

    # Do not fall back to the custom endpoint if it is not configured. This call
    # also has the side effect of initializing the AuthDevConfig entity in the
    # datastore, making it editable in the Datastore UI.
    cfg = model.get_dev_config()
    if not cfg.token_info_endpoint:
        return auth_call()

    # Try the real call first, then fall back to the custom validation endpoint.
    try:
        return auth_call()
    except AuthenticationError:
        ident = dev_oauth_authentication(header, cfg.token_info_endpoint,
                                         '.dev')
        logging.warning('Authenticated as dev account: %s', ident.to_bytes())
        return ident
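
A minimal sketch of calling this check from a request handler; the handler class and route are hypothetical, webapp2 is assumed to be imported, and AuthenticationError / AuthorizationError are the exceptions named in the docstring above.

# Hypothetical handler sketch, not part of the original snippet.
class TokenCheckedHandler(webapp2.RequestHandler):
    def get(self):
        try:
            ident = check_oauth_access_token(self.request.headers)
        except AuthenticationError:
            self.abort(401)  # invalid or missing token
        except AuthorizationError:
            self.abort(403)  # token is valid, but not allowed
        self.response.headers['Content-Type'] = 'text/plain; charset=utf-8'
        self.response.write('Authenticated as %s' % ident.to_bytes())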
Exemple #57
0
# Copyright 2014 The Swarming Authors. All rights reserved.
# Use of this source code is governed by the Apache v2.0 license that can be
# found in the LICENSE file.

import webapp2

from components import auth
from components import utils


class WarmupHandler(webapp2.RequestHandler):
    def get(self):
        auth.warmup()
        self.response.headers['Content-Type'] = 'text/plain; charset=utf-8'
        self.response.write('ok')


assert utils.is_local_dev_server()
auth.disable_process_cache()

# Add a fake admin for local dev server.
if not auth.is_replica():
    auth.bootstrap_group(
        auth.ADMIN_GROUP,
        [auth.Identity(auth.IDENTITY_USER, '*****@*****.**')],
        'Users that can manage groups')

# /_ah/warmup is used by the smoke test to detect that the app is alive.
app = webapp2.WSGIApplication([webapp2.Route(r'/_ah/warmup', WarmupHandler)],
                              debug=True)
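
A small smoke-check sketch against this module (an assumption, not part of the original): the only grounded facts are the /_ah/warmup route and its 'ok' response; the host and port are illustrative.

# Hypothetical smoke check: verify a locally running dev server is alive.
import urllib2

def check_warmup(host='http://localhost:8080'):
    body = urllib2.urlopen(host + '/_ah/warmup').read()
    assert body == 'ok', body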
Exemple #58
0
def _get_client(address):
    """Get a prpc client instance for given address."""
    return client.Client(address,
                         plugin_prpc_pb2.ExternalSchedulerServiceDescription,
                         insecure=utils.is_local_dev_server())
Exemple #59
0
# Copyright 2013 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.

"""Configures includes (components.auth).

https://developers.google.com/appengine/docs/python/tools/appengineconfig
"""

from components import utils

# OpenID requires real accounts, which is not convenient on the dev server,
# especially for smoke tests.
components_auth_USE_OPENID = not utils.is_local_dev_server()

# Auth component UI is tweaked manually, see handlers_frontend.py.
components_auth_UI_CUSTOM_CONFIG = True

# Use backend module and dedicated task queue for change log generation.
components_auth_BACKEND_MODULE = 'backend'
components_auth_PROCESS_CHANGE_TASK_QUEUE = 'process-auth-db-change'
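
For context, a sketch of how components_auth_* settings like these are typically consumed on the component side via GAE's lib_config; the defaults shown here are illustrative assumptions, not the component's real defaults.

# Illustrative consumer-side sketch (assumed defaults): lib_config picks up
# components_auth_* names from appengine_config.py and exposes them as
# attributes of the returned handle.
from google.appengine.api import lib_config

_auth_config = lib_config.register('components_auth', {
    'USE_OPENID': True,
    'UI_CUSTOM_CONFIG': False,
    'BACKEND_MODULE': 'default',
    'PROCESS_CHANGE_TASK_QUEUE': 'default',
})

# With the overrides above, _auth_config.BACKEND_MODULE == 'backend'.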
Exemple #60
0
import datetime

from components import auth
from components import datastore_utils
from components import utils
from server import task_pack


# Maximum acceptable priority value, which is effectively the lowest priority.
MAXIMUM_PRIORITY = 255


# One day in seconds. Add 10s to account for small jitter.
_ONE_DAY_SECS = 24*60*60 + 10


# Minimum value for timeouts.
_MIN_TIMEOUT_SECS = 1 if utils.is_local_dev_server() else 30


# The world started on 2010-01-01 at 00:00:00 UTC. The rationale is that using
# EPOCH (1970) means that 40 years' worth of keys are wasted.
#
# Note: this creates a 'naive' object instead of a formal UTC object. Note that
# datetime.datetime.utcnow() also returns naive objects. That's Python.
_BEGINING_OF_THE_WORLD = datetime.datetime(2010, 1, 1, 0, 0, 0, 0)


# Parameters for make_request().
# The content of the 'data' parameter. This relates to the context of the
# request, e.g. who wants to run a task.
_REQUIRED_DATA_KEYS = frozenset(
    ['name', 'priority', 'properties', 'scheduling_expiration_secs', 'tags',