Example #1
    def test_custom_service_account(self):
        scope = "http://www.googleapis.com/scope"
        account_id = "*****@*****.**"
        m = mox.Mox()
        m.StubOutWithMock(app_identity, "get_access_token")
        app_identity.get_access_token([scope], service_account_id=account_id).AndReturn(("a_token_456", None))
        m.ReplayAll()

        credentials = AppAssertionCredentials(scope, service_account_id=account_id)
        http = httplib2.Http()
        credentials.refresh(http)
        m.VerifyAll()
        m.UnsetStubs()
        self.assertEqual("a_token_456", credentials.access_token)
        self.assertEqual(scope, credentials.scope)
    def refresh(self, request):
        # pylint: disable=unused-argument
        token, ttl = app_identity.get_access_token(
            self._scopes, self._service_account_id)
        expiry = _helpers.utcnow() + datetime.timedelta(seconds=ttl)

        self.token, self.expiry = token, expiry
def update_cq_list(_request):
  """/restricted/update_cq_list - Updates list of known CQs."""
  scope = 'https://www.googleapis.com/auth/userinfo.email'
  url = 'https://commit-queue.appspot.com/api/list'
  auth_token, _ = app_identity.get_access_token(scope)
  response = urlfetch.fetch(
      url, headers={'Authorization': 'Bearer ' + auth_token}, deadline=60)
  if response.status_code != 200:
    msg = 'Received non-200 status code when loading %s' % url
    return HttpResponseServerError(msg, content_type='text/plain')

  try:
    cq_names = json.loads(response.content)
  except ValueError as e:
    msg = 'Failed to parse CQ list from %s: %s' % (url, e)
    return HttpResponseServerError(msg, content_type='text/plain')

  @ndb.transactional
  def update():
    key = ndb.Key(models.CQList, 'singleton')
    cq_list = key.get() or models.CQList(key=key)
    cq_list.names = cq_names
    cq_list.put()

  try:
    update()
  except datastore_errors.TransactionFailedError as e:
    msg = 'Failed to update CQ list in transaction: %s' % e
    return HttpResponseServerError(msg, content_type='text/plain')

  return HttpResponse('success', content_type='text/plain')
@ndb.tasklet
def get(url):
    context = ndb.get_context()

    headers = {
        'accept-encoding': 'gzip, *',
        'x-goog-api-version': '2',
    }

    url_result = urlparse.urlparse(url)
    if url_result.netloc.endswith('.googleapis.com'):
        auth_token, _ = app_identity.get_access_token(
            'https://www.googleapis.com/auth/cloud-platform')
        if auth_token:
            headers['Authorization'] = 'OAuth %s' % auth_token

    for retry in xrange(6):
        result = yield context.urlfetch(url, headers=headers)
        status = result.status_code
        if status == 429 or 500 <= status < 600:
            yield ndb.sleep(2**retry)
            continue
        if status in (200, 206):
            content = result.content
            if result.headers.get('content-encoding') == 'gzip':
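                # wbits=15|16: decode a gzip stream (15 = 32 KiB window, +16 = expect gzip header/trailer)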
                dec = zlib.decompressobj(15 | 16)
                content = dec.decompress(result.content, MAX_SIZE)
                if dec.unconsumed_tail:
                    logging.warning(
                        'only decompressed %d KB, %d KB remain in buffer.',
                        len(content) / 1024,
                        len(dec.unconsumed_tail) / 1024)
            raise ndb.Return(content)
        logging.error("unable to fetch '%s': status code %d", url, status)
        raise ndb.Return(None)
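
A minimal usage sketch for the tasklet above (assuming the @ndb.tasklet decoration shown); the URLs are placeholders, not values from the original code.

future = get('https://www.googleapis.com/storage/v1/b/some-bucket/o/some-object')
content = future.get_result()  # block until the urlfetch (and any retries) finish

# Several fetches can be started concurrently and gathered afterwards.
futures = [get(u) for u in ('https://example.com/a', 'https://example.com/b')]
results = [f.get_result() for f in futures]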
    def refresh(self, request):
        # pylint: disable=unused-argument
        token, ttl = app_identity.get_access_token(
            self._scopes, self._service_account_id)
        expiry = datetime.datetime.utcfromtimestamp(ttl)

        self.token, self.expiry = token, expiry
def get_access_token(scopes, service_account_key=None):
  """Returns an OAuth2 access token for a service account.

  If 'service_account_key' is specified, will use it to generate access token
  for corresponding @developer.gserviceaccount.com account. Otherwise will
  invoke app_identity.get_access_token(...) to use app's
  @appspot.gserviceaccount.com account.

  Args:
    scopes: the requested API scope string, or a list of strings.
    service_account_key: optional instance of ServiceAccountKey.

  Returns:
    Tuple (access token, expiration time in seconds since the epoch). The token
    should be valid for at least 5 minutes. It will be cached across multiple
    calls using memcache (e.g. get_access_token call can be considered cheap).

  Raises:
    AccessTokenError on errors.
  """
  if service_account_key:
    # Empty private_key_id probably means that the app is not configured yet.
    if not service_account_key.private_key_id:
      raise AccessTokenError('Service account secret key is not initialized')
    return _get_jwt_based_token(scopes, service_account_key)
  return app_identity.get_access_token(scopes)
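
A hedged usage sketch for the wrapper above; the scopes are illustrative, and my_service_account_key stands in for whatever ServiceAccountKey instance the surrounding module provides (not shown in this listing).

token, expires_at = get_access_token(
    'https://www.googleapis.com/auth/userinfo.email')

# With an explicit key, a JWT-based token is minted for that service account
# instead of the app's default identity:
# token, expires_at = get_access_token(
#     ['https://www.googleapis.com/auth/devstorage.read_only'],
#     service_account_key=my_service_account_key)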
Example #7
@ndb.tasklet
def get(url):
    context = ndb.get_context()

    headers = {
        'accept-encoding': 'gzip, *',
        'x-goog-api-version': '2',
        }

    url_result = urlparse.urlparse(url)
    if url_result.netloc.endswith('.googleapis.com'):
        auth_token, _ = app_identity.get_access_token(
            'https://www.googleapis.com/auth/cloud-platform')
        if auth_token:
            headers['Authorization'] = 'OAuth %s' % auth_token

    for retry in xrange(6):
        result = yield context.urlfetch(url, headers=headers)
        status = result.status_code
        if status == 429 or 500 <= status < 600:
            yield ndb.sleep(2 ** retry)
            continue
        if status in (200, 206):
            content = result.content
            if result.headers.get('content-encoding') == 'gzip':
                dec = zlib.decompressobj(15 | 16)
                content = dec.decompress(result.content, MAX_SIZE)
                if dec.unconsumed_tail:
                    logging.warning('only decompressed %d KB, %d KB remain in buffer.',
                                    len(content) / 1024,
                                    len(dec.unconsumed_tail) / 1024)
            raise ndb.Return(content)
        logging.error("unable to fetch '%s': status code %d", url, status)
        raise ndb.Return(None)
Example #8
    def _fetch(self,
               path,
               method='GET',
               body=None,
               expect_status=(httplib.OK, httplib.NOT_FOUND)):
        """Makes a single authenticated blocking request using urlfetch.

    Raises
      AuthenticationError if authentication fails.
      Error if response status is not in expect_status tuple.

    Returns parsed json contents.
    """
        if not hasattr(expect_status, '__contains__'):  # pragma: no cover
            expect_status = (expect_status, )

        auth_token, _ = app_identity.get_access_token(AUTH_SCOPE)
        payload = json.dumps(body) if body else None

        assert not path.startswith('/')
        url = urlparse.urljoin('https://' + self.hostname, 'a/' + path)
        request_headers = {
            'Accept': 'application/json',
            'Content-Type': 'application/json',
            'Authorization': 'OAuth %s' % auth_token,
        }

        try:
            logging.debug('%s %s' % (method, url))
            response = urlfetch.fetch(url,
                                      payload=payload,
                                      method=method,
                                      headers=request_headers,
                                      follow_redirects=False,
                                      validate_certificate=True)
        except urlfetch.Error as err:  # pragma: no cover
            raise Error(None, err.message)

        # Check if this is an authentication issue.
        auth_failed = response.status_code in (httplib.UNAUTHORIZED,
                                               httplib.FORBIDDEN)
        if auth_failed:
            reason = 'Authentication failed for %s' % self.hostname
            logging.error(reason)
            raise AuthenticationError(response.status_code, reason)

        if response.status_code not in expect_status:  # pragma: no cover
            raise Error(response.status_code, response.content)

        if response.status_code == httplib.NOT_FOUND:
            return None
        content = response.content
        logging.info('Response: %s' % content)
        if not content.startswith(RESPONSE_PREFIX):
            msg = (
                'Unexpected response format. Expected prefix %s. Received: %s'
                % (RESPONSE_PREFIX, content))
            raise Error(response.status_code, msg)
        content = content[len(RESPONSE_PREFIX):]
        return json.loads(content)
Example #9
def get_access_token(scopes, service_account_key=None):
    """Returns an OAuth2 access token for a service account.

  If 'service_account_key' is specified, will use it to generate access token
  for corresponding @developer.gserviceaccount.com account. Otherwise will
  invoke app_identity.get_access_token(...) to use app's
  @appspot.gserviceaccount.com account.

  Args:
    scopes: the requested API scope string, or a list of strings.
    service_account_key: optional instance of ServiceAccountKey.

  Returns:
    Tuple (access token, expiration time in seconds since the epoch). The token
    should be valid for at least 5 minutes. It will be cached across multiple
    calls using memcache (e.g. get_access_token call can be considered cheap).

  Raises:
    AccessTokenError on errors.
  """
    if service_account_key:
        # Empty private_key_id probably means that the app is not configured yet.
        if not service_account_key.private_key_id:
            raise AccessTokenError(
                'Service account secret key is not initialized')
        return _get_jwt_based_token(scopes, service_account_key)
    return app_identity.get_access_token(scopes)
Example #10
  def post(self):
    payload = {
    }

    authorization_token, _ = app_identity.get_access_token(COMPUTE_SCOPE)
    headers = {
      'Content-Type': 'application/json',
      'Authorization': 'OAuth ' + authorization_token,
    }

    url = INSTANCES_URL + '/' + INSTANCE
    result = urlfetch.fetch(url,
      payload=json.dumps(payload),
      method='DELETE',
      headers=headers,
      follow_redirects=False,
      deadline=60,
      validate_certificate=True)

    pretty_payload=json.dumps(payload, indent=2)
    pretty_headers=json.dumps(headers, indent=2)

    self.response.headers['Content-Type'] = 'text/plain'
    if result.status_code == 200:
      self.response.write('OK. Instance DELETED.\n\n')
    else:
      logging.error('RESPONSE ERROR CODE: {}'.format(result.status_code))
      logging.error('RESPONSE BODY:\n{}'.format(result.content))
      self.response.status_int = result.status_code
      self.response.write('RESPONSE ERROR CODE:\n{}\n\n'.format(result.status_code))
      self.response.write('RESPONSE BODY:\n{}\n\n'.format(result.content))
    self.response.write('-' * 80 + '\n\n')
    self.response.write('REQUEST URL:\n{}\n\n'.format(url))
    self.response.write('REQUEST HEADERS:\n{}\n\n'.format(pretty_headers))
    self.response.write('REQUEST PAYLOAD:\n{}\n\n\n'.format(pretty_payload))
    def refresh(self, request):
        scopes = self._scopes if self._scopes is not None else self._default_scopes
        # pylint: disable=unused-argument
        token, ttl = app_identity.get_access_token(scopes,
                                                   self._service_account_id)
        expiry = datetime.datetime.utcfromtimestamp(ttl)

        self.token, self.expiry = token, expiry
Example #12
 def get_access_token(self):
   '''Get OAuth2 access token to sign requests.'''
   scopes = [
     'https://www.googleapis.com/auth/devstorage.read_write',
     'https://www.googleapis.com/auth/devstorage.full_control',
     'https://www.googleapis.com/auth/devstorage.read_only',
   ]
   return app_identity.get_access_token(scopes)
def is_accessible_bucket_name(bucket_name):
  """Returns True if the application has access to the specified bucket."""
  scope = 'https://www.googleapis.com/auth/devstorage.read_write'
  url = 'https://%s.commondatastorage.googleapis.com/' % bucket_name
  auth_token, _ = app_identity.get_access_token(scope)
  result = urlfetch.fetch(url, method=urlfetch.HEAD, headers={
      'Authorization': 'OAuth %s' % auth_token,
      'x-goog-api-version': '2'})
  return result and result.status_code == 200
Example #14
def is_accessible_bucket_name(bucket_name):
  """Returns True if the application has access to the specified bucket."""
  scope = 'https://www.googleapis.com/auth/devstorage.read_write'
  url = 'https://%s.commondatastorage.googleapis.com/' % bucket_name
  auth_token, _ = app_identity.get_access_token(scope)
  result = urlfetch.fetch(url, method=urlfetch.HEAD, headers={
      'Authorization': 'OAuth %s' % auth_token,
      'x-goog-api-version': '2'})
  return result and result.status_code == 200
Example #15
    def test_custom_service_account(self):
        scope = "http://www.googleapis.com/scope"
        account_id = "*****@*****.**"
        m = mox.Mox()
        m.StubOutWithMock(app_identity, 'get_access_token')
        app_identity.get_access_token([scope],
                                      service_account_id=account_id).AndReturn(
                                          ('a_token_456', None))
        m.ReplayAll()

        credentials = AppAssertionCredentials(scope,
                                              service_account_id=account_id)
        http = httplib2.Http()
        credentials.refresh(http)
        m.VerifyAll()
        m.UnsetStubs()
        self.assertEqual('a_token_456', credentials.access_token)
        self.assertEqual(scope, credentials.scope)
def cloud_datastore_export():
    logging.info("starting export")

    access_token, _ = app_identity.get_access_token('https://www.googleapis.com/auth/datastore')

    app_id = app_identity.get_application_id()
    timestamp = datetime.datetime.now().strftime('%Y%m%d-%H%M%S')

    output_url_prefix = request.args.get('output_url_prefix')
    assert output_url_prefix and output_url_prefix.startswith('gs://')

    if output_url_prefix[-1] != '/':
        output_url_prefix += '/' + timestamp
    else:
        output_url_prefix += timestamp

    entity_filter = {
        'kinds': request.args['kind'].split(','),
        'namespace_ids': request.args.get('namespace_id')
    }

    request_data = {
        'project_id': app_id,
        'output_url_prefix': output_url_prefix,
        'entity_filter': entity_filter
    }

    headers = {
        'Content-Type': 'application/json',
        'Authorization': 'Bearer ' + access_token
    }

    url = 'https://datastore.googleapis.com/v1/projects/%s:export' % app_id

    logging.info("making api call (entity_filter: {}, request_data: {})".format(entity_filter, request_data))

    result = urlfetch.fetch(
        url=url,
        payload=json.dumps(request_data),
        method=urlfetch.POST,
        deadline=60,
        headers=headers
    )

    logging.info("got response with status '{}' and contents '{}'".format(result.status_code, result.content))

    job_state = json.loads(result.content).get('metadata', {}).get('common', {}).get('state')

    if result.status_code == 200 and job_state == "PROCESSING":
        logging.info("export started successfully")
        return '', 204

    logging.info("export failed to start")

    return '', 400
Example #17
def get_gs_object(bucket_name, path):
  """Returns a listing of of a bucket that matches the given prefix."""
  scope = 'https://www.googleapis.com/auth/devstorage.read_only'
  url = 'https://%s.commondatastorage.googleapis.com/%s' % (bucket_name, path)
  auth_token, _ = app_identity.get_access_token(scope)
  result = urlfetch.fetch(url, method=urlfetch.GET, headers={
      'Authorization': 'OAuth %s' % auth_token,
      'x-goog-api-version': '2'})
  if result and result.status_code == 200:
    return result.content
  raise BackupValidationException('Requested path was not found')
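
A minimal usage sketch for get_gs_object(); the bucket name and object path below are placeholders rather than values from the original code.

try:
    info = get_gs_object('my-backup-bucket', 'backups/2020_01_01.backup_info')
except BackupValidationException:
    info = None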
Example #18
def fetch_json(url):
  logging.info('Fetching %s' % url)
  authorization_token, _ = app_identity.get_access_token(
      'https://www.googleapis.com/auth/userinfo.email')
  response = urlfetch.fetch(
      url, follow_redirects=False, validate_certificate=True,
      headers={'Authorization': 'Bearer ' + authorization_token},
      deadline=30)
  logging.debug(response.content)
  # TODO(phajdan.jr): Handle responses other than HTTP 200.
  return json.loads(response.content)
def get_gs_object(bucket_name, path):
  """Returns a listing of of a bucket that matches the given prefix."""
  scope = 'https://www.googleapis.com/auth/devstorage.read_only'
  url = 'https://%s.commondatastorage.googleapis.com/%s' % (bucket_name, path)
  auth_token, _ = app_identity.get_access_token(scope)
  result = urlfetch.fetch(url, method=urlfetch.GET, headers={
      'Authorization': 'OAuth %s' % auth_token,
      'x-goog-api-version': '2'})
  if result and result.status_code == 200:
    return result.content
  raise BackupValidationException('Requested path was not found')
Example #20
    def get(self):  # pylint: disable=g-bad-name

        # Only run backups in prod.
        if not env_utils.RunningInProd():
            logging.info('Datastore backups are only run in prod')
            return

        logging.info('Starting a new Datastore backup')

        access_token, _ = app_identity.get_access_token(
            'https://www.googleapis.com/auth/datastore')
        app_id = app_identity.get_application_id()

        # Configure a backup of all Datastore kinds, stored in a separate Cloud
        # Storage bucket for each day.
        output_url_prefix = 'gs://%s/%s/' % (
            env_utils.ENV.DATASTORE_BACKUP_BUCKET,
            datetime.datetime.utcnow().strftime('%Y_%m_%d'))
        kinds = [k for k in metadata.get_kinds() if not k.startswith('_')]
        request = {
            'project_id': app_id,
            'output_url_prefix': output_url_prefix,
            'entity_filter': {
                'kinds': kinds
            }
        }
        headers = {
            'Content-Type': 'application/json',
            'Authorization': 'Bearer ' + access_token
        }
        url = 'https://datastore.googleapis.com/v1/projects/%s:export' % app_id

        logging.info('Backing up %d kind(s) to %s', len(kinds),
                     output_url_prefix)

        try:
            result = urlfetch.fetch(url=url,
                                    payload=json.dumps(request),
                                    method=urlfetch.POST,
                                    deadline=60,
                                    headers=headers)

            if result.status_code == httplib.OK:
                logging.info(result.content)
                _DATASTORE_BACKUPS.Increment()
            else:
                logging.warning(result.content)

            self.response.status_int = result.status_code

        except urlfetch.Error:
            logging.exception('Datastore backup failed')
            self.response.status_int = httplib.INTERNAL_SERVER_ERROR
Example #21
  def _refresh(self, _):
    """Refresh self.access_token.

    Args:
      _: (ignored) A function matching httplib2.Http.request's signature.
    """
    from google.appengine.api import app_identity
    try:
      token, _ = app_identity.get_access_token(self._scopes)
    except app_identity.Error as e:
      raise exceptions.CredentialsError(str(e))
    self.access_token = token
Example #22
  def _fetch(self, path, method='GET', body=None,
             expect_status=(httplib.OK, httplib.NOT_FOUND)):
    """Makes a single authenticated blocking request using urlfetch.

    Raises
      AuthenticationError if authentication fails.
      Error if response status is not in expect_status tuple.

    Returns parsed json contents.
    """
    if not hasattr(expect_status, '__contains__'):  # pragma: no cover
      expect_status = (expect_status,)

    auth_token, _ = app_identity.get_access_token(AUTH_SCOPE)
    payload = json.dumps(body) if body else None

    assert not path.startswith('/')
    url = urlparse.urljoin('https://' + self.hostname, 'a/' + path)
    request_headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json',
        'Authorization': 'OAuth %s' % auth_token,
    }

    try:
      logging.debug('%s %s' % (method, url))
      response = urlfetch.fetch(url, payload=payload, method=method,
                                headers=request_headers, follow_redirects=False,
                                validate_certificate=True)
    except urlfetch.Error as err:  # pragma: no cover
      raise Error(None, err.message)

    # Check if this is an authentication issue.
    auth_failed = response.status_code in (httplib.UNAUTHORIZED,
                                           httplib.FORBIDDEN)
    if auth_failed:
      reason = 'Authentication failed for %s' % self.hostname
      logging.error(reason)
      raise AuthenticationError(response.status_code, reason)

    if response.status_code not in expect_status:  # pragma: no cover
      raise Error(response.status_code, response.content)

    if response.status_code == httplib.NOT_FOUND:
      return None
    content = response.content
    logging.info('Response: %s' % content)
    if not content.startswith(RESPONSE_PREFIX):
      msg = ('Unexpected response format. Expected prefix %s. Received: %s' %
             (RESPONSE_PREFIX, content))
      raise Error(response.status_code, msg)
    content = content[len(RESPONSE_PREFIX):]
    return json.loads(content)
    def _refresh(self, _):
        """Refresh self.access_token.

    Args:
      _: (ignored) A function matching httplib2.Http.request's signature.
    """
        from google.appengine.api import app_identity  # pylint: disable=g-import-not-at-top
        try:
            token, _ = app_identity.get_access_token(self._scopes)
        except app_identity.Error as e:
            raise exceptions.CredentialsError(str(e))
        self.access_token = token
Example #24
    def get(self):
        GCS_BUCKET_URL_PREFIX = 'gs://'

        access_token, _ = app_identity.get_access_token(
            'https://www.googleapis.com/auth/datastore')
        app_id = app_identity.get_application_id()
        timestamp = datetime.datetime.utcnow().strftime('%Y%m%d-%H%M%S')

        output_url_prefix = self.request.get('output_url_prefix')
        assert output_url_prefix and output_url_prefix.startswith(
            GCS_BUCKET_URL_PREFIX)

        # Look for slash in the portion of the bucket URL that comes
        # after 'gs://'. If not present, then only a bucket name has been
        # provided and we append a trailing slash.
        if '/' not in output_url_prefix[len(GCS_BUCKET_URL_PREFIX):]:
            # Only a bucket name has been provided - no prefix or trailing
            # slash.
            output_url_prefix += '/' + timestamp
        else:
            output_url_prefix += timestamp

        entity_filter = {
            'kinds': self.request.get_all('kind'),
            'namespace_ids': self.request.get_all('namespace_id')
        }
        request = {
            'project_id': app_id,
            'output_url_prefix': output_url_prefix,
            'entity_filter': entity_filter
        }
        headers = {
            'Content-Type': 'application/json',
            'Authorization': 'Bearer %s' % access_token
        }
        url = 'https://datastore.googleapis.com/v1/projects/%s:export' % app_id
        try:
            result = urlfetch.fetch(url=url,
                                    payload=json.dumps(request),
                                    method=urlfetch.POST,
                                    deadline=60,
                                    headers=headers)
            if result.status_code == httplib.OK:
                logging.info(result.content)
            elif result.status_code >= 500:
                logging.error(result.content)
            else:
                logging.warning(result.content)
            self.response.status_int = result.status_code
        except urlfetch.Error:
            logging.exception('Failed to initiate export.')
            self.response.status_int = httplib.INTERNAL_SERVER_ERROR
Example #25
 def get(self):
     docname = self.request.get('docname')
     scope = "https://script.google.com/macros/s/AKfycbxAIMNcXOcTnlz3MS-AAeqJ_iGpPVDogFd9l80u8qEC/dev"
     authorization_token, _ = app_identity.get_access_token(scope)
     result = urlfetch.fetch(
         url="https://script.google.com/macros/s/AKfycbwKLckJael18kaWSQFazXExzsSTSwrlRglbOAKWuuMQ4-nae_ra/exec",
         payload=urllib.urlencode({"docname":docname}),
         method=urlfetch.POST,
         follow_redirects=False,
         headers={'Content-Type': 'application/x-www-form-urlencoded'})
     if result.status_code == 200:
         logging.info(result.content)
         self.response.out.write(result.content)
     else:
         self.response.out.write(result.status_code)
Example #26
    def get(self, instance, db, bucket):
        date = datetime.date.today().strftime(config.iso_date_format)

        access_token, _ = app_identity.get_access_token([
            'https://www.googleapis.com/auth/sqlservice.admin',
            'https://www.googleapis.com/auth/cloud-platform',
        ])

        payload = {
            'exportContext': {
                'fileType':
                'SQL',
                'uri':
                'gs://{bucket}/{db}_{date}.sql'.format(bucket=bucket,
                                                       db=db,
                                                       date=date),
                'databases': [db],
            },
        }
        headers = {
            'Authorization': 'Bearer ' + access_token,
            'Content-Type': 'application/json',
        }
        url = ('https://www.googleapis.com/sql/v1beta4/projects/{app_id}/'
               'instances/{instance}/export').format(
                   app_id=app_identity.get_application_id(),
                   instance=instance,
               )

        try:
            result = urlfetch.fetch(
                url=url,
                payload=json.dumps(payload),
                method=urlfetch.POST,
                deadline=60,
                headers=headers,
            )
            if result.status_code == httplib.OK:
                logging.info(result.content)
            elif result.status_code >= 500:
                logging.error(result.content)
            else:
                logging.warning(result.content)
            self.response.status_int = result.status_code
        except urlfetch.Error:
            raise Exception("Failed to initiate SQL export.")

        self.response.write("SQL export initiated.")
  def get(self):
    logging.info('start export datastore entities')
    access_token, _ = app_identity.get_access_token(
        'https://www.googleapis.com/auth/datastore')
    app_id = app_identity.get_application_id()
    logging.info('app_id: %s' % app_id)
    timestamp = datetime.datetime.now().strftime('%Y%m%d-%H%S')

    output_url_prefix = self.request.get('output_url_prefix')
    assert output_url_prefix and output_url_prefix.startswith('gs://')
    if '/' not in output_url_prefix[5:]:
      # Only a bucket name has been provided - no prefix or trailing slash
      output_url_prefix += '/' + timestamp
    else:
      output_url_prefix += timestamp

    entity_filter = {
        'kinds': self.request.get_all('kind'),
        'namespace_ids': self.request.get_all('namespace_id')
    }
    request = {
        'project_id': app_id,
        'output_url_prefix': output_url_prefix,
        'entity_filter': entity_filter
    }
    headers = {
        'Content-Type': 'application/json',
        'Authorization': 'Bearer ' + access_token
    }
    url = 'https://datastore.googleapis.com/v1beta1/projects/%s:export' % app_id
    try:
      result = urlfetch.fetch(
          url=url,
          payload=json.dumps(request),
          method=urlfetch.POST,
          deadline=60,
          headers=headers)
      logging.info('export project request status: %s' % result.status_code)
      if result.status_code == httplib.OK:
        logging.info(result.content)
      elif result.status_code >= 500:
        logging.error(result.content)
      else:
        logging.warning(result.content)
      self.response.status_int = result.status_code
    except urlfetch.Error:
      logging.exception('Failed to initiate export.')
      self.response.status_int = httplib.INTERNAL_SERVER_ERROR
Example #28
    def remote_content(self):

        from google.appengine.api import app_identity, urlfetch

        prod_url = 'http://storage.googleapis.com' + self.full_path
        LogUtil().info('GCSFile fetch: ' + prod_url)
        scope = 'https://www.googleapis.com/auth/devstorage.full_control'
        token, _ = app_identity.get_access_token(scope)
        response = urlfetch.fetch(
            prod_url,
            deadline=15,
            headers={'Authorization': 'OAuth %s' % token}
        )
        content = response.content

        return content
Example #29
def get_service_account_names():
  """ AppScale: Fetch list of service accounts from IAM API. """
  project_id = app_identity.get_application_id()
  iam_location = 'https://127.0.0.1:17441'
  url = iam_location + '/v1/projects/{}/serviceAccounts'.format(project_id)
  token = app_identity.get_access_token(
      ['https://www.googleapis.com/auth/cloud-platform'])[0]
  headers = {'Authorization': 'Bearer {}'.format(token)}
  response = urlfetch.fetch(url, headers=headers, validate_certificate=False)
  try:
    accounts = json.loads(response.content)['accounts']
  except (KeyError, ValueError):
    raise ValueError('Invalid list of service accounts: '
                     '{}'.format(response.content))

  return tuple(account['email'] for account in accounts)
def create_short_url(long_url):
    scope = "https://www.googleapis.com/auth/urlshortener"
    authorization_token, _ = app_identity.get_access_token(scope)
    logging.info("Using token %s to represent identity %s",
                 authorization_token, app_identity.get_service_account_name())
    payload = json.dumps({"longUrl": long_url})
    response = urlfetch.fetch(
            "https://www.googleapis.com/urlshortener/v1/url?pp=1",
            method=urlfetch.POST,
            payload=payload,
            headers = {"Content-Type": "application/json",
                       "Authorization": "OAuth " + authorization_token})
    if response.status_code == 200:
        result = json.loads(response.content)
        return result["id"]
    raise Exception("Call failed. Status code %s. Body %s",
                    response.status_code, response.content)
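
A minimal usage sketch for create_short_url(); the long URL is a placeholder.

short_url = create_short_url('https://www.example.com/some/very/long/path?q=1')
logging.info('Shortened to %s', short_url)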
Example #31
    def get(self):
        access_token, _ = app_identity.get_access_token(
            "https://www.googleapis.com/auth/datastore")
        app_id = app_identity.get_application_id()
        timestamp = datetime.datetime.now().strftime("%Y-%m-%d")

        output_url_prefix = self.request.get("output_url_prefix")
        assert output_url_prefix and output_url_prefix.startswith("gs://")
        if "/" not in output_url_prefix[5:]:
            # Only a bucket name has been provided - no prefix or trailing slash
            output_url_prefix += "/" + timestamp
        else:
            output_url_prefix += timestamp

        entity_filter = {
            "kinds": self.request.get_all("kind"),
            "namespace_ids": self.request.get_all("namespace_id"),
        }
        request = {
            "project_id": app_id,
            "output_url_prefix": output_url_prefix,
            "entity_filter": entity_filter,
        }
        headers = {
            "Content-Type": "application/json",
            "Authorization": "Bearer " + access_token,
        }
        url = "https://datastore.googleapis.com/v1/projects/%s:export" % app_id
        try:
            result = urlfetch.fetch(
                url=url,
                payload=json.dumps(request),
                method=urlfetch.POST,
                deadline=60,
                headers=headers,
            )
            if result.status_code == httplib.OK:
                logging.info(result.content)
            elif result.status_code >= 500:
                logging.error(result.content)
            else:
                logging.warning(result.content)
            self.response.status_int = result.status_code
        except urlfetch.Error:
            logging.exception("Failed to initiate export.")
            self.response.status_int = httplib.INTERNAL_SERVER_ERROR
def list_bucket_files(
    bucket_name, prefix, max_keys=1000):
  """Returns a listing of of a bucket that matches the given prefix."""
  scope = 'https://www.googleapis.com/auth/devstorage.read_only'
  url = 'https://%s.commondatastorage.googleapis.com/?' % bucket_name
  query = [('max-keys', max_keys)]
  if prefix:
    query.append(('prefix', prefix))
  url += urllib.urlencode(query)
  auth_token, _ = app_identity.get_access_token(scope)
  result = urlfetch.fetch(url, method=urlfetch.GET, headers={
      'Authorization': 'OAuth %s' % auth_token,
      'x-goog-api-version': '2'})
  if result and result.status_code == 200:
    doc = xml.dom.minidom.parseString(result.content)
    return [node.childNodes[0].data for node in doc.getElementsByTagName('Key')]
  raise BackupValidationException('Request to Google Cloud Storage failed')
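
A minimal usage sketch for list_bucket_files(); the bucket name and prefix are placeholders.

try:
    keys = list_bucket_files('my-backup-bucket', prefix='2020_01_01/', max_keys=100)
except BackupValidationException:
    keys = []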
Example #33
def list_bucket_files(
    bucket_name, prefix, max_keys=1000):
  """Returns a listing of of a bucket that matches the given prefix."""
  scope = 'https://www.googleapis.com/auth/devstorage.read_only'
  url = 'https://%s.commondatastorage.googleapis.com/?' % bucket_name
  query = [('max-keys', max_keys)]
  if prefix:
    query.append(('prefix', prefix))
  url += urllib.urlencode(query)
  auth_token, _ = app_identity.get_access_token(scope)
  result = urlfetch.fetch(url, method=urlfetch.GET, headers={
      'Authorization': 'OAuth %s' % auth_token,
      'x-goog-api-version': '2'})
  if result and result.status_code == 200:
    doc = xml.dom.minidom.parseString(result.content)
    return [node.childNodes[0].data for node in doc.getElementsByTagName('Key')]
  raise BackupValidationException('Request to Google Cloud Storage failed')
Example #34
    def do(self):
        kinds = self.request.get_all('kind')
        if not kinds:
            raise Exception("Backup handler requires kinds.")

        bucket = self.request.get('bucket', None)
        if not bucket:
            raise Exception("Backup handler requires bucket.")

        access_token, _ = app_identity.get_access_token(
            'https://www.googleapis.com/auth/datastore')
        app_id = app_identity.get_application_id()

        entity_filter = {
            'kinds': kinds,
            'namespace_ids': self.request.get_all('namespace_id')
        }
        request = {
            'project_id': app_id,
            'output_url_prefix': 'gs://{}'.format(bucket),
            'entity_filter': entity_filter
        }
        headers = {
            'Content-Type': 'application/json',
            'Authorization': 'Bearer ' + access_token
        }
        url = 'https://datastore.googleapis.com/v1/projects/{}:export'.format(
            app_id)

        try:
            result = urlfetch.fetch(url=url,
                                    payload=json.dumps(request),
                                    method=urlfetch.POST,
                                    deadline=60,
                                    headers=headers)
            if result.status_code == httplib.OK:
                logging.info(result.content)
            elif result.status_code >= 500:
                logging.error(result.content)
            else:
                logging.warning(result.content)
            self.response.status_int = result.status_code
        except urlfetch.Error:
            raise Exception('Failed to initiate export.')

        self.response.write("Export initiated.")
Example #35
def get_access_token():  # pragma: no cover
  """Returns OAuth token to use when talking to Gitiles servers."""
  # On real GAE use app service account.
  if not utils.is_local_dev_server():
    return app_identity.get_access_token(
        ['https://www.googleapis.com/auth/gerritcodereview'])[0]
  # On dev server allow custom tokens loaded from local_dev_config. Use 'imp'
  # because dev_appserver tries to emulate app sandbox and hacks 'import' to
  # respect 'skip_files:' section in app.yaml.
  try:
    import imp
    local_dev_config = imp.load_source(
        'local_dev_config', 'local_dev_config.py')
    # Copy your chrome-internal .netrc token there.
    return local_dev_config.GITILES_OAUTH_TOKEN
  except (ImportError, IOError):
    return 'fake_token'
  def get(self):
    access_token, _ = app_identity.get_access_token(
        'https://www.googleapis.com/auth/datastore')
    app_id = app_identity.get_application_id()
    timestamp = datetime.datetime.now().strftime('%Y%m%d-%H%M%S')

    output_url_prefix = self.request.get('output_url_prefix')
    assert output_url_prefix and output_url_prefix.startswith('gs://')
    if '/' not in output_url_prefix[5:]:
      # Only a bucket name has been provided - no prefix or trailing slash
      output_url_prefix += '/' + timestamp
    else:
      output_url_prefix += timestamp

    entity_filter = {
        'kinds': self.request.get_all('kind'),
        'namespace_ids': self.request.get_all('namespace_id')
    }
    request = {
        'project_id': app_id,
        'output_url_prefix': output_url_prefix,
        'entity_filter': entity_filter
    }
    headers = {
        'Content-Type': 'application/json',
        'Authorization': 'Bearer ' + access_token
    }
    url = 'https://datastore.googleapis.com/v1/projects/%s:export' % app_id
    try:
      result = urlfetch.fetch(
          url=url,
          payload=json.dumps(request),
          method=urlfetch.POST,
          deadline=60,
          headers=headers)
      if result.status_code == httplib.OK:
        logging.info(result.content)
      elif result.status_code >= 500:
        logging.error(result.content)
      else:
        logging.warning(result.content)
      self.response.status_int = result.status_code
    except urlfetch.Error:
      logging.exception('Failed to initiate export.')
      self.response.status_int = httplib.INTERNAL_SERVER_ERROR
Example #37
  def _refresh(self, http_request):
    """Refreshes the access_token.

    Since the underlying App Engine app_identity implementation does its own
    caching we can skip all the storage hoops and just do a refresh using the
    API.

    Args:
      http_request: callable, a callable that matches the method signature of
        httplib2.Http.request, used to make the refresh request.

    Raises:
      AccessTokenRefreshError: When the refresh fails.
    """
    try:
      (token, _) = app_identity.get_access_token(self.scope)
    except app_identity.Error as e:
      raise AccessTokenRefreshError(str(e))
    self.access_token = token
Example #38
    def _refresh(self, http_request):
        """Refreshes the access_token.

    Since the underlying App Engine app_identity implementation does its own
    caching we can skip all the storage hoops and just do a refresh using the
    API.

    Args:
      http_request: callable, a callable that matches the method signature of
        httplib2.Http.request, used to make the refresh request.

    Raises:
      AccessTokenRefreshError: When the refresh fails.
    """
        try:
            (token, _) = app_identity.get_access_token(self.scope)
        except app_identity.Error as e:
            raise AccessTokenRefreshError(str(e))
        self.access_token = token
Example #39
 def get(self):
   token, _ = app_identity.get_access_token('https://www.googleapis.com/auth/cloud-platform')
   response = urlfetch.fetch(
       'https://cloudbuild.googleapis.com/v1/projects/clementine-data/triggers/08f31055-68ed-4a66-a3f4-5edace8a1836:run',
       method=urlfetch.POST,
       payload=json.dumps({
           'projectId': 'clementine-data',
           'repoName': 'github-clementine-player-clementine',
           'branchName': 'master',
       }),
       headers={
         'Authorization': 'Bearer {}'.format(token),
         'Content-Type': 'application/json',
       })
   if response.status_code != 200:
     raise Exception('Triggering build failed: {}'.format(response.content))
   result = json.loads(response.content)
   self.response.headers['Content-Type'] = 'application/json'
   self.response.write(json.dumps(result, indent=2))
Example #40
def create_short_url(long_url):
    scope = 'https://www.googleapis.com/auth/urlshortener'
    authorization_token, _ = app_identity.get_access_token(scope)
    logging.debug(authorization_token)
    logging.info('Using token %s to represent identity %s', authorization_token, app_identity.get_service_account_name())
    payload = json.dumps({'longUrl': long_url})
    response = urlfetch.fetch(
        'https://www.googleapis.com/urlshortener/v1/url?pp=1',
        method=urlfetch.POST,
        payload=payload,
        headers={
            'Content-Type': 'application/json',
            'Authorization': 'OAuth ' + authorization_token
        }
    )
    if response.status_code == 200:
        result = json.loads(response.content)
        return result['id']
    raise Exception("Google URL Shortener call failed. Status code %s. Body %s", response.status_code, response.content)
    def get(self):
        auth_token, _ = app_identity.get_access_token(
            'https://www.googleapis.com/auth/cloud-platform')
        logging.info('Using token {} to represent identity {}'.format(
            auth_token, app_identity.get_service_account_name()))

        response = urlfetch.fetch(
            'https://www.googleapis.com/storage/v1/b?project={}'.format(
                app_identity.get_application_id()),
            method=urlfetch.GET,
            headers={'Authorization': 'Bearer {}'.format(auth_token)})

        if response.status_code != 200:
            raise Exception('Call failed. Status code {}. Body {}'.format(
                response.status_code, response.content))

        result = json.loads(response.content)
        self.response.headers['Content-Type'] = 'application/json'
        self.response.write(json.dumps(result, indent=2))
Example #42
    def get(self):
        access_token, _ = app_identity.get_access_token(
            'https://www.googleapis.com/auth/datastore')
        app_id = app_identity.get_application_id()
        timestamp = datetime.datetime.now().strftime('%Y%m%d-%H%S')

        # Bucket name must start with the 'gs://' prefix.
        output_url_prefix = 'gs://{}/{}'.format(BUCKET, PREFIX)
        if not output_url_prefix.endswith('/'):
            output_url_prefix += '/'
        output_url_prefix += timestamp

        # Include the filter describing which entities to export in the request body.
        # Here the kind names come from the model definitions inside this App Engine app.
        entity_filter = {'kinds': TARGET_KINDS, 'namespace_ids': []}
        # Below is same as the tutorial.
        request = {
            'project_id': app_id,
            'output_url_prefix': output_url_prefix,
            'entity_filter': entity_filter
        }
        headers = {
            'Content-Type': 'application/json',
            'Authorization': 'Bearer ' + access_token
        }
        url = 'https://datastore.googleapis.com/v1beta1/projects/{}:export'.format(
            app_id)
        try:
            result = urlfetch.fetch(url=url,
                                    payload=json.dumps(request),
                                    method=urlfetch.POST,
                                    deadline=60,
                                    headers=headers)
            if result.status_code == httplib.OK:
                logging.info(result.content)
            elif result.status_code >= 500:
                logging.error(result.content)
            else:
                logging.warning(result.content)
            self.response.status_int = result.status_code
        except urlfetch.Error:
            logging.exception('Failed to initiate export.')
            self.response.status_int = httplib.INTERNAL_SERVER_ERROR
  def get(self):
    bucket_name = config_model.Config.get('gcp_cloud_storage_bucket')
    if config_model.Config.get('enable_backups') and bucket_name:
      access_token, _ = app_identity.get_access_token(
          'https://www.googleapis.com/auth/datastore')

      # We strip the first 2 characters because os.environ.get returns the
      # application id with a partition separated by a tilde, e.g. `s~`, which is
      # not needed here.
      app_id = constants.APPLICATION_ID.split('~')[1]

      request = {
          'project_id': app_id,
          'output_url_prefix': _format_full_path(bucket_name),
      }
      headers = {
          'Content-Type': 'application/json',
          'Authorization': 'Bearer ' + access_token
      }

      logging.info(
          'Attempting to export cloud datastore to bucket %r.', bucket_name)
      try:
        result = urlfetch.fetch(
            url=_DATASTORE_API_URL % app_id,
            payload=json.dumps(request),
            method=urlfetch.POST,
            deadline=60,
            headers=headers)
        if result.status_code == httplib.OK:
          logging.info('Cloud Datastore export completed.')
          logging.info(result.content)
        elif result.status_code >= 500:
          logging.error(result.content)
        else:
          logging.warning(result.content)
        self.response.status_int = result.status_code
      except urlfetch.Error:
        logging.error('Failed to initiate datastore export.')
        self.response.status_int = httplib.INTERNAL_SERVER_ERROR
    else:
      logging.info('Backups are not enabled, skipping.')
Example #44
    def _refresh(self, http):
        """Refreshes the access token.

        Since the underlying App Engine app_identity implementation does its
        own caching we can skip all the storage hoops and just do a refresh
        using the API.

        Args:
            http: unused HTTP object

        Raises:
            AccessTokenRefreshError: When the refresh fails.
        """
        try:
            scopes = self.scope.split()
            (token, _) = app_identity.get_access_token(
                scopes, service_account_id=self.service_account_id)
        except app_identity.Error as e:
            raise client.AccessTokenRefreshError(str(e))
        self.access_token = token
Example #45
    def _refresh(self, http):
        """Refreshes the access token.

        Since the underlying App Engine app_identity implementation does its
        own caching we can skip all the storage hoops and just do a refresh
        using the API.

        Args:
            http: unused HTTP object

        Raises:
            AccessTokenRefreshError: When the refresh fails.
        """
        try:
            scopes = self.scope.split()
            (token, _) = app_identity.get_access_token(
                scopes, service_account_id=self.service_account_id)
        except app_identity.Error as e:
            raise client.AccessTokenRefreshError(str(e))
        self.access_token = token
def auth_check():
    credentials, project = google.auth.default()
    key_name, signature = app_identity.sign_blob(b'abc')
    scope = 'https://www.googleapis.com/auth/userinfo.email'
    token, expiry = app_identity.get_access_token(scope)
    return code_block(
        '>>> import google.auth',
        '>>> credentials, project = google.auth.default()',
        '>>> credentials',
        repr(credentials),
        '>>> project',
        repr(project),
        '>>> credentials.__dict__',
        repr(credentials.__dict__),
        '>>> from google.appengine.api import app_identity',
        '>>> app_identity',
        repr(app_identity),
        # ALSO: get_access_token_uncached
        # (scopes, service_account_id=None)
        '>>> scope = \'https://www.googleapis.com/auth/userinfo.email\'',
        '>>> token, expiry = app_identity.get_access_token(scope)',
        '>>> token',
        repr(token[:6] + b'...'),
        '>>> expiry',
        repr(expiry),
        '>>> app_identity.get_application_id()',
        repr(app_identity.get_application_id()),
        '>>> app_identity.get_default_gcs_bucket_name()',
        repr(app_identity.get_default_gcs_bucket_name()),
        '>>> app_identity.get_default_version_hostname()',
        repr(app_identity.get_default_version_hostname()),
        '>>> app_identity.get_public_certificates()',
        repr(app_identity.get_public_certificates()),
        '>>> app_identity.get_service_account_name()',
        repr(app_identity.get_service_account_name()),
        '>>> key_name, signature = app_identity.sign_blob(b\'abc\')',
        '>>> key_name',
        repr(key_name),
        '>>> signature',
        repr(signature[:16] + b'...'),
    )
Example #47
    def get(self):
        # https://cloud.google.com/appengine/docs/standard/python/ndb/admin#Metadata_queries
        kinds = [k for k in metadata.get_kinds() if not k.startswith('_')]
        kinds.remove('Response')
        kinds.remove('SyndicatedPost')
        logging.info('Backing up %s', kinds)

        access_token, _ = app_identity.get_access_token(
            'https://www.googleapis.com/auth/datastore')
        app_id = app_identity.get_application_id()

        request = {
            'project_id':
            app_id,
            'output_url_prefix': ('gs://brid-gy.appspot.com/weekly/' +
                                  datetime.datetime.now().strftime('%Y%m%d')),
            'entity_filter': {
                'kinds': kinds,
                # 'namespace_ids': self.request.get_all('namespace_id'),
            },
        }
        headers = {
            'Content-Type': 'application/json',
            'Authorization': 'Bearer ' + access_token,
        }

        try:
            result = urlfetch.fetch(
                url='https://datastore.googleapis.com/v1/projects/%s:export' %
                app_id,
                payload=json_dumps(request),
                method=urlfetch.POST,
                headers=headers)
            if result.status_code == http.client.OK:
                logging.info(result.content)
            else:
                logging.error(result.content)
                self.abort(result.status_code)
        except urlfetch.Error as e:
            util.interpret_http_exception(e)
            raise
def _GetAuthenticatedClient(scope):
  """Creates authenticated client for docs or spreadsheets gdata API.

  It uses Appengine's robot account. Spreadsheets must be shared with user
  [email protected]

  Args:
    scope: OAuth2 scope

  Returns:
    Authenticated client
  """

  if scope == SCOPE_DOCUMENTS:
    client = docs_client.DocsClient()
  else:
    client = spreadsheets_client.SpreadsheetsClient()
  token = app_identity.get_access_token(scope)[0]
  client.auth_token = gauth.OAuth2Token(None, None, None, None, None,
                                        access_token=token)
  return client
Example #49
 def get(self):
     token, _ = app_identity.get_access_token(
         'https://www.googleapis.com/auth/cloud-platform')
     response = urlfetch.fetch(
         'https://cloudbuild.googleapis.com/v1/projects/clementine-web/triggers/e19d2c38-5478-4282-a475-ee54d6d5363a:run',
         method=urlfetch.POST,
         payload=json.dumps({
             'projectId': 'clementine-web',
             'repoName': 'github-clementine-player-website',
             'branchName': 'master',
         }),
         headers={
             'Authorization': 'Bearer {}'.format(token),
             'Content-Type': 'application/json',
         })
     if response.status_code != 200:
         raise Exception('Triggering build failed: {}'.format(
             response.content))
     result = json.loads(response.content)
     self.response.headers['Content-Type'] = 'application/json'
     self.response.write(json.dumps(result, indent=2))
Example #50
def _Fetch(reason, url, method='GET', payload=None):
  if shared.IsDevMode():
    authorization_value = GetDevModeAccessToken()
  else:
    authorization_token, _ = app_identity.get_access_token(settings.COMPUTE_SCOPE)
    authorization_value = 'OAuth {}'.format(authorization_token)
  assert authorization_value
  response = urlfetch.fetch(url=url,
                            method=method,
                            payload=payload,
                            follow_redirects=False,
                            headers = {
                              'Content-Type': settings.JSON_MIME_TYPE,
                              'Authorization': authorization_value,
                            })
  shared.i('COMPUTE: {} -> {}'.format(reason, httplib.responses[response.status_code]))

  if response.status_code != httplib.OK:
    Abort(response.status_code, 'UrlFetch() {} {}\nWith Payload: {}\nResulted in:\n{}'
                                .format(method, url, payload, response.content))
  return json.loads(response.content)
Example #51
def get_access_token(scopes, service_account_key=None):
    """Returns an OAuth2 access token for a service account.

  If 'service_account_key' is specified, will use it to generate access token
  for corresponding @developer.gserviceaccount.com account. Otherwise will
  invoke app_identity.get_access_token(...) to use app's
  @appspot.gserviceaccount.com account.

  On dev server (if service_account_key is not passed or empty) reads the token
  from 'access_token' DevServerAccessToken entity.

  Args:
    scopes: the requested API scope string, or a list of strings.
    service_account_key: optional instance of ServiceAccountKey.

  Returns:
    Tuple (access token, expiration time in seconds since the epoch). The token
    should be valid for at least 5 minutes. It will be cached across multiple
    calls using memcache (e.g. get_access_token call can be considered cheap).

  Raises:
    AccessTokenError on errors.
  """
    if service_account_key:
        # Empty private_key_id probably means that the app is not configured yet.
        if not service_account_key.private_key_id:
            # On dev server fallback to reading hardcoded token from the datastore.
            if utils.is_local_dev_server():
                return _get_dev_server_token()
            raise AccessTokenError(
                'Service account secret key is not initialized')
        return _get_jwt_based_token(scopes, service_account_key)

    # app_identity.get_access_token returns nonsense on dev server.
    if utils.is_local_dev_server():
        return _get_dev_server_token()

    # Running on real GAE, and no secret key is passed -> app_identity API.
    return app_identity.get_access_token(scopes)
Example #52
  def get(self):
    # https://cloud.google.com/appengine/docs/standard/python/ndb/admin#Metadata_queries
    kinds = [k for k in metadata.get_kinds() if not k.startswith('_')]
    kinds.remove('Response')
    kinds.remove('SyndicatedPost')
    logging.info('Backing up %s', kinds)

    access_token, _ = app_identity.get_access_token(
      'https://www.googleapis.com/auth/datastore')
    app_id = app_identity.get_application_id()

    request = {
        'project_id': app_id,
        'output_url_prefix': ('gs://brid-gy.appspot.com/weekly/' +
                              datetime.datetime.now().strftime('%Y%m%d')),
        'entity_filter': {
          'kinds': kinds,
          # 'namespace_ids': self.request.get_all('namespace_id'),
        },
    }
    headers = {
        'Content-Type': 'application/json',
        'Authorization': 'Bearer ' + access_token,
    }

    try:
      result = urlfetch.fetch(
          url='https://datastore.googleapis.com/v1/projects/%s:export' % app_id,
          payload=json.dumps(request),
          method=urlfetch.POST,
          headers=headers)
      if result.status_code == httplib.OK:
        logging.info(result.content)
      else:
        logging.error(result.content)
        self.abort(result.status_code)
    except urlfetch.Error as e:
      util.interpret_http_exception(e)
      raise
Example #53
def SignUrl(bucket, object_id):
  """Get a signed URL to download a GCS object.

  Args:
    bucket: string name of the GCS bucket.
    object_id: string object ID of the file within that bucket.

  Returns:
    A signed URL, or '/missing-gcs-url' if signing failed.
  """
  try:
    cache_key = 'gcs-object-url-%s' % object_id
    cached = memcache.get(key=cache_key)
    if cached is not None:
      return cached

    if IS_DEV_APPSERVER:
      attachment_url = '/_ah/gcs/%s%s' % (bucket, object_id)
    else:
      result = ('https://www.googleapis.com/storage/v1/b/'
          '{bucket}/o/{object_id}?access_token={token}&alt=media')
      scopes = ['https://www.googleapis.com/auth/devstorage.read_only']
      if object_id[0] == '/':
        object_id = object_id[1:]
      url = result.format(
          bucket=bucket,
          object_id=urllib.quote_plus(object_id),
          token=app_identity.get_access_token(scopes)[0])
      attachment_url = _FetchSignedURL(url)

    if not memcache.set(key=cache_key, value=attachment_url, time=GCS_SIG_TTL):
      logging.error('Could not cache gcs url %s for %s', attachment_url,
          object_id)

    return attachment_url

  except Exception as e:
    logging.exception(e)
    return '/missing-gcs-url'
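A small, hypothetical usage sketch for SignUrl; the bucket and object names are placeholders:

def attachment_link(bucket, object_id):
    # SignUrl already falls back to '/missing-gcs-url' on failure, so the
    # result can be embedded directly.
    url = SignUrl(bucket, '/' + object_id.lstrip('/'))
    return '<a href="%s">download</a>' % url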
Example #54
0
    def get(self, *args):
        """HTTP 'GET' method handler for incoming requests."""

        if self.request.path.startswith('/_ah/'):
            # Ignore /_ah/start, /_ah/stop requests.
            self.response.status = '200 OK'
            return

        del args
        url = 'https://us-central1-{}.cloudfunctions.net{}'.format(
            app_identity.get_application_id(), self.request.path_qs)
        auth_token, _ = app_identity.get_access_token(
            'https://www.googleapis.com/auth/cloud-platform')

        # "result" is of type _URLFetchResult,
        # https://cloud.google.com/appengine/docs/standard/python/refdocs/modules/google/appengine/api/urlfetch
        result = urlfetch.fetch(url,
                                headers={
                                    'Authorization':
                                    'Bearer {}'.format(auth_token),
                                },
                                deadline=180,
                                validate_certificate=True)

        logging.info('Got status %s with text: %s', result.status_code,
                     result.content)
        if result.status_code != 200:
            logging.error('Request failed: %s', result.status_code)

        try:
            self.response.status = result.status_code
        except KeyError:
            # response.status setter tries to look up a status message in its
            # predefined dictionary (which is very small). When the message
            # is not found, and status is not of '429 Quota Exceeded' form,
            # it raises an exception.
            # https://github.com/GoogleCloudPlatform/webapp2/blob/deb34447ef8927c940bed2d80c7eec75f9f01be8/webapp2.py#L451
            self.response.status = '{} Unknown Error'.format(
                result.status_code)
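For context, a sketch of how a handler like this might be wired up; the class name and catch-all route are assumptions, not shown in the original snippet:

import webapp2

class ProxyHandler(webapp2.RequestHandler):
    # The get() method above would live on this class; every incoming path is
    # proxied to the Cloud Function with the same path.
    pass

app = webapp2.WSGIApplication([('/.*', ProxyHandler)], debug=False)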
Example #56
0
@ndb.tasklet
def fetch_file_async(url, oauth_scopes):
  """Fetches a file optionally using OAuth2 for authentication.

  Args:
    url: url to a file to fetch.
    oauth_scopes: list of OAuth scopes to use when generating the access token
        for accessing |url|; if not set or empty, OAuth is not used.

  Returns:
    Byte buffer with file's body.

  Raises:
    BundleImportError on fetch errors.
  """
  if utils.is_local_dev_server():
    protocols = ('http://', 'https://')
  else:
    protocols = ('https://',)
  assert url.startswith(protocols), url

  headers = {}
  if oauth_scopes:
    headers['Authorization'] = 'OAuth %s' % (
        app_identity.get_access_token(oauth_scopes)[0])

  ctx = ndb.get_context()
  result = yield ctx.urlfetch(
      url=url,
      method='GET',
      headers=headers,
      follow_redirects=False,
      deadline=5*60,
      validate_certificate=True)
  if result.status_code != 200:
    raise BundleFetchError(url, result.status_code, result.content)
  raise ndb.Return(result.content)
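fetch_file_async follows ndb's tasklet protocol and hands back a Future; a minimal, hypothetical synchronous caller (the scope below is a placeholder):

def fetch_file(url):
    # Block on the Future; inside another tasklet one would 'yield' it instead.
    future = fetch_file_async(
        url, ['https://www.googleapis.com/auth/userinfo.email'])
    return future.get_result()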
Example #57
0
def createShortUrl(long_url):
    scope = "https://www.googleapis.com/auth/urlshortener"
    authorization_token, _ = app_identity.get_access_token(scope)
    payload = json.dumps({"longUrl": long_url})

    try:
        response = urlfetch.fetch(
            "https://www.googleapis.com/urlshortener/v1/url?pp=1",
            method=urlfetch.POST,
            payload=payload,
            deadline=4,
            headers={
                "Content-Type": "application/json",
                "Authorization": "OAuth " + authorization_token
            })
        if response.status_code == 200:
            result = json.loads(response.content)
            return result["id"]
        logging.error('Urlshortener call failed. Status code %s. Body %s',
                      response.status_code, response.content)
    except Exception:
        # Catch any failure (fetch error, bad JSON, missing key) and fall back
        # to the original URL; logging.exception records the traceback.
        logging.exception('Urlshortener call failed')

    return long_url
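Usage is straightforward, and the function already falls back to the original URL on failure; the long URL below is only a placeholder:

short = createShortUrl('https://example.com/some/very/long/path?with=params')
logging.info('Shortened to %s', short)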
Example #58
0
    def get(self):
        auth_token, _ = app_identity.get_access_token(
            'https://www.googleapis.com/auth/cloud-platform')
        logging.info(
            'Using token {} to represent identity {}'.format(
                auth_token, app_identity.get_service_account_name()))

        response = urlfetch.fetch(
            'https://www.googleapis.com/storage/v1/b?project={}'.format(
                app_identity.get_application_id()),
            method=urlfetch.GET,
            headers={
                'Authorization': 'Bearer {}'.format(auth_token)
            }
        )

        if response.status_code != 200:
            raise Exception(
                'Call failed. Status code {}. Body {}'.format(
                    response.status_code, response.content))

        result = json.loads(response.content)
        self.response.headers['Content-Type'] = 'application/json'
        self.response.write(json.dumps(result, indent=2))
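The JSON returned by the bucket-list endpoint contains an 'items' array; a hedged sketch of pulling out just the bucket names (field names follow the GCS JSON API, the helper itself is hypothetical):

def bucket_names(listing):
    # 'listing' is the parsed JSON from storage/v1/b?project=...; each entry
    # in 'items' describes one bucket, including its 'name'.
    return [b['name'] for b in listing.get('items', [])]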
def modify_object(obj,
                  content_encoding=None,
                  content_type=None,
                  content_disposition=None,
                  acl=None,
                  copy_source=None,
                  copy_source_if_match=None,
                  copy_source_if_none_match=None,
                  copy_source_if_modified_since=None,
                  copy_source_if_unmodified_since=None,
                  copy_metadata=True,
                  metadata={}):
    """Modifies or copies a cloud storage object.

    Most arguments are identical to the form fields listed in
    https://developers.google.com/storage/docs/reference-methods#putobject, but
    there are a few differences:

    * The copy_metadata argument can be True, indicating that the metadata
      should be copied, or False, indicating that it should be replaced.
    * The metadata argument is a dictionary of metadata header names to values.
      Each one is transformed into an x-goog-meta- field. The keys should not
      include "x-goog-meta-". Null values are ignored.
    """

    if not handlers.is_production():
        # The only way to modify an existing object using only the Python API
        # seems to be to copy it over itself. It's not a big deal since this is
        # only for development.
        if copy_source is None: copy_source = obj
        contents = None
        with files.open(_appengine_object_path(copy_source), 'r') as f:
            contents = f.read()

        if content_type is None: content_type = 'application/octet-stream'
        write_path = files.gs.create(_appengine_object_path(obj),
                                     mime_type=content_type,
                                     acl=acl,
                                     content_encoding=content_encoding,
                                     content_disposition=content_disposition,
                                     user_metadata=metadata)
        with files.open(write_path, 'a') as f: f.write(contents)
        files.finalize(write_path)
        return

    auth = "OAuth " + app_identity.get_access_token(_FULL_CONTROL_SCOPE)[0]
    headers = {
        "Authorization": auth,
        "Content-Encoding": content_encoding,
        "Content-Type": content_type,
        "Content-Disposition": content_disposition,
        "x-goog-api-version": "2",
        "x-goog-acl": acl,
        "x-goog-copy-source": _object_path(copy_source),
        "x-goog-copy-source-if-match": copy_source_if_match,
        "x-goog-copy-source-if-none-match": copy_source_if_none_match,
        "x-goog-copy-source-if-modified-since": copy_source_if_modified_since,
        "x-goog-copy-source-if-unmodified-since":
            copy_source_if_unmodified_since,
        "x-goog-copy-metadata-directive":
            "COPY" if copy_metadata else "REPLACE"
    }
    for (key, value) in metadata.iteritems():
        headers["x-goog-meta-" + key] = value
    headers = {key: value for key, value in headers.iteritems()
               if value is not None}

    return urlfetch.fetch("https://storage.googleapis.com/" +
                            urllib.quote(_object_path(obj)),
                          method="PUT", headers=headers)