def create(self, scopes): # pragma: no cover
    """Build AppAssertionCredentials for the given OAuth scopes.

    Args:
        scopes: OAuth2 scope string or list of scopes to request.

    Returns:
        An appengine.AppAssertionCredentials bound to this app's
        service account.
    """
    # This import doesn't work outside appengine, so delay it until it's used.
    from oauth2client import appengine
    from google.appengine.api import app_identity
    logging.info('Initializing with service account %s',
                 app_identity.get_service_account_name())
    return appengine.AppAssertionCredentials(scopes)
def create_custom_token(uid, valid_minutes=60):
    """Create a secure token for the given id.

    This method is used to create secure custom JWT tokens to be passed to
    clients. It takes a unique id (uid) that will be used by Firebase's
    security rules to prevent unauthorized access. In this case, the uid will
    be the channel id which is a combination of user_id and game_key.

    Args:
        uid: unique id used as the channel id.
        valid_minutes: token lifetime in minutes.

    Returns:
        The signed JWT string 'header.payload.signature'.
    """
    # use the app_identity service from google.appengine.api to get the
    # project's service account email automatically
    client_email = app_identity.get_service_account_name()

    now = int(time.time())

    def _b64url(data):
        # JWT segments must be base64url-encoded with padding stripped
        # (RFC 7515 section 2); plain base64.b64encode can emit '+', '/'
        # and '=', which breaks token verification.
        return base64.urlsafe_b64encode(data).rstrip('=')

    # encode the required claims
    # per https://firebase.google.com/docs/auth/server/create-custom-tokens
    payload = _b64url(json.dumps({
        'iss': client_email,
        'sub': client_email,
        'aud': _IDENTITY_ENDPOINT,
        'uid': uid,  # the important parameter, as it will be the channel id
        'iat': now,
        'exp': now + (valid_minutes * 60),
    }))
    # add standard header to identify this as a JWT
    header = _b64url(json.dumps({'typ': 'JWT', 'alg': 'RS256'}))
    to_sign = '{}.{}'.format(header, payload)
    # Sign the jwt using the built in app_identity service
    return '{}.{}'.format(to_sign,
                          _b64url(app_identity.sign_blob(to_sign)[1]))
def receive(self, mail_message):
    """Forward an inbound email to the RipRunner email-trigger webhook."""
    logging.info("Received a message from: " + mail_message.sender)
    plain_bodies = list(mail_message.bodies(content_type='text/plain'))
    first_body = plain_bodies[0]
    logging.info("Body of message: " + first_body[1].decode())

    # Point to the Main Email trigger URL which will poll for all Firehalls
    # Example replace: http://soft-haus.com/svvfd/riprunner/ with the root of you installation
    url = "http://soft-haus.com/svvfd/riprunner/webhooks/email_trigger_webhook.php"

    form_data = urllib.urlencode({
        "sender": mail_message.sender,
        "subject": mail_message.subject,
        "to": mail_message.to,
        "date": mail_message.date,
        "body": first_body[1].decode(),
    })

    app_id = app_identity.get_application_id()
    account_name = app_identity.get_service_account_name()
    logging.info("AppID: " + app_id + " SAM: " + account_name)

    # Identify ourselves to the webhook via custom auth headers.
    response = urlfetch.fetch(
        url=url,
        payload=form_data,
        method=urlfetch.POST,
        headers={
            'Content-Type': 'application/x-www-form-urlencoded',
            'X-RipRunner-Auth-APPID': app_id,
            'X-RipRunner-Auth-ACCOUNTNAME': account_name,
        })
    logging.info(response.status_code)
    logging.info(response.content)
def _get_storage_api(retry_params, account_id=None):
    """Returns storage_api instance for API methods.

    Args:
      retry_params: An instance of api_utils.RetryParams. If none, thread's
        default will be used.
      account_id: Internal-use only.

    Returns:
      A storage_api instance to handle urlfetch work to GCS.
      On dev appserver, this instance will talk to a local stub by default.
      However, if you pass the arguments --appidentity_email_address and
      --appidentity_private_key_path to dev_appserver.py it will attempt to use
      the real GCS with these credentials.  Alternatively, you can set a
      specific access token with common.set_access_token.  You can also pass
      --default_gcs_bucket_name to set the default bucket.
    """
    api = _StorageApi(_StorageApi.full_control_scope,
                      service_account_id=account_id,
                      retry_params=retry_params)

    # when running local unit tests, the service account is test@localhost
    # from google.appengine.api.app_identity.app_identity_stub.APP_SERVICE_ACCOUNT_NAME
    service_account = app_identity.get_service_account_name()
    token = common.get_access_token()
    using_stub_account = (not service_account
                          or service_account.endswith('@localhost'))
    if common.local_run() and not token and using_stub_account:
        api.api_url = common.local_api_url()
    if token:
        api.token = token
    return api
def GenerateStorageSignedUrl(self, request):
    """Generates signed url for Cloud Storage."""
    GetEndpointsAuthUser()
    # Both fields are mandatory for building the upload policy.
    for field in ('filename', 'owner'):
        if not getattr(request, field):
            raise endpoints.BadRequestException(
                'Missing request field "%s".' % field)

    # Policy expires one hour from now; truncate to whole seconds.
    expires = '%sZ' % (datetime.utcnow() + timedelta(hours=1)).isoformat()[:19]
    policy_doc = {
        'expiration': expires,
        'conditions': [
            ['eq', '$bucket', GCS_BUCKET],
            ['eq', '$key', request.filename],
            ['eq', '$x-goog-meta-owner', request.owner],
        ],
    }
    policy = base64.b64encode(json.dumps(policy_doc))
    signature = base64.b64encode(app_identity.sign_blob(policy)[1])
    return StorageSignedUrlResponse(
        form_action=GCS_API_URL % GCS_BUCKET,
        bucket=GCS_BUCKET,
        policy=policy,
        signature=signature,
        google_access_id=app_identity.get_service_account_name(),
        filename=request.filename)
def SignUrl(gcs_filename):
    """Return a signed download URL for the given GCS object path."""
    expiration_dt = _Now() + ATTACHMENT_TTL
    expiration = int(time.mktime(expiration_dt.timetuple()))
    # GCS signed-URL string-to-sign: verb, MD5, content-type, expiry, path.
    parts = [
        'GET',
        '',  # Optional MD5, which we don't have.
        '',  # Optional content-type, which only applies to uploads.
        str(expiration),
        gcs_filename,
    ]
    signature_string = '\n'.join(parts).encode('utf-8')
    try:
        _, signature_bytes = app_identity.sign_blob(signature_string)
        query_params = {
            'GoogleAccessId': app_identity.get_service_account_name(),
            'Expires': str(expiration),
            'Signature': base64.b64encode(signature_bytes),
        }
        if IS_DEV_APPSERVER:
            template = '/_ah/gcs{resource}?{querystring}'
        else:
            template = 'https://storage.googleapis.com{resource}?{querystring}'
        return template.format(
            resource=gcs_filename,
            querystring=urllib.urlencode(query_params))
    except Exception as e:
        # Best-effort: a signing failure falls back to a placeholder path.
        logging.exception(e)
        return '/missing-gcs-url'
def _get_service_account_name(credentials):
    """Determines service account name from a credentials object.

    :type credentials: :class:`client.SignedJwtAssertionCredentials`,
                       :class:`service_account._ServiceAccountCredentials`,
                       :class:`_GAECreds`
    :param credentials: The credentials used to determine the service
                        account name.

    :rtype: string
    :returns: Service account name associated with the credentials.
    :raises: :class:`ValueError` if the credentials are not a valid service
             account type.
    """
    # First matching credential type wins, mirroring an if/elif chain.
    extractors = [
        (client.SignedJwtAssertionCredentials,
         lambda c: c.service_account_name),
        (service_account._ServiceAccountCredentials,
         lambda c: c._service_account_email),
        (_GAECreds,
         lambda c: app_identity.get_service_account_name()),
    ]
    service_account_name = None
    for cred_type, extract in extractors:
        if isinstance(credentials, cred_type):
            service_account_name = extract(credentials)
            break

    if service_account_name is None:
        raise ValueError('Service account name could not be determined '
                         'from credentials')
    return service_account_name
def create_custom_token(uid, is_premium_account):
    """Create a Firebase custom JWT for *uid* with a premium_account claim.

    Generates the token with the jwt library, then re-signs it with the
    App Engine service account key via app_identity.sign_blob.

    Args:
        uid: unique user id placed in the token's 'uid' claim.
        is_premium_account: value for the custom 'premium_account' claim.

    Returns:
        The signed JWT string 'header.body.signature'.
    """
    service_account_email = app_identity.get_service_account_name()
    payload = {
        'iss': service_account_email,
        'sub': service_account_email,
        'aud': 'https://identitytoolkit.googleapis.com/google.identity.identitytoolkit.v1.IdentityToolkit',
        'uid': uid,
        'claims': {
            'premium_account': is_premium_account
        }
    }
    exp = datetime.timedelta(minutes=60)
    # jwt.generate_jwt is called with key=None, so it produces an unsigned
    # token; only its header/body encoding is reused below.
    token = jwt.generate_jwt(payload, None, 'RS256', exp)
    header, body, secret = token.split('.')
    # The returned header says 'alg': 'none', so replace it with RS256.
    header = jws.utils.encode({
        'typ': 'JWT',
        'alg': 'RS256'
    }).decode('utf-8')
    # Create the signature using app_identity.sign_blob; strip base64url
    # padding as JWT segments carry no '=' characters.
    sign = base64.urlsafe_b64encode(
        app_identity.sign_blob(str(header + '.' + body))[1]).strip('=')
    return header + '.' + body + '.' + sign
def gcs_upload(acl='bucket-owner-read'):
    """
    return GCS upload form context
    more info : https://cloud.google.com/storage/docs/xml-api/post-object
    """
    import json  # stdlib; local import keeps this fix self-contained

    user_id = users.get_current_user().email().lower()
    google_access_id = app_identity.get_service_account_name()
    success_redirect = webapp2.uri_for('gcs_upload_ok', _full=True)

    # GCS signed upload url expires
    expiration_dt = datetime.now() + timedelta(seconds=60)

    # The security json policy document that describes what can and cannot be
    # uploaded in the form. Built with json.dumps: the previous hand-written
    # template left a trailing comma in 'conditions', which is invalid JSON
    # and is rejected by GCS policy validation.
    policy_string = json.dumps({
        'expiration': expiration_dt.replace(microsecond=0).isoformat() + 'Z',
        'conditions': [
            ['starts-with', '$key', ''],
            {'acl': acl},
            {'success_action_redirect': success_redirect},
            {'success_action_status': '201'},
            {'x-goog-meta-user-id': user_id},
        ],
    })

    # sign the policy document
    policy = base64.b64encode(policy_string)
    _, signature_bytes = app_identity.sign_blob(policy)
    signature = base64.b64encode(signature_bytes)
    logging.debug('GCS upload policy : ' + policy_string)
    return dict(form_bucket=default_bucket,
                form_access_id=google_access_id,
                form_policy=policy,
                form_signature=signature,
                form_succes_redirect=success_redirect,
                form_user_id=user_id,
                form_folders=bucket_folders)
def image_send(img = None, path = None, width = None, height = None):
    """Transform *img* to JPEG and store it publicly in the 'onarena' bucket.

    Args:
        img: an images.Image to transform — assumed non-None; TODO confirm
             callers never pass the default.
        path: object path appended to '/gs/onarena/'.
        width, height: optional bounding box; the image is resized only when
             it exceeds these dimensions.

    Returns:
        None on all paths (the commented-out code used to return *path*).
    """
    if width and height:
        if img.width > width or img.height > height:
            img.resize(width, height)
    # im_feeling_lucky guarantees at least one transform is queued before
    # execute_transforms runs.
    img.im_feeling_lucky()
    content = img.execute_transforms(output_encoding=images.JPEG)
    logging.info("app_identity: %s", app_identity.get_service_account_name())
    filename = '/gs/onarena/' + path
    # NOTE(review): the Files API used here is deprecated in favor of the
    # cloudstorage library; 'a' appends to the newly created GCS file.
    write_path = files.gs.create(filename=filename,
                                 acl='public-read',
                                 cache_control='public,max-age=29030400',
                                 mime_type='image/jpeg')
    with files.open(write_path, 'a') as fp:
        fp.write(content)
    files.finalize(write_path)
    '''
    try:
        uri.new_key().set_contents_from_string(content, headers=header_new, policy="public-read")
        return path
    except:
        logging.info("Size is too large")
    '''
    return None
def get(self):
    """Respond with basic identity and runtime info for this app."""
    info = {
        'app_id': app_identity.get_application_id(),
        'app_runtime': 'python27',
        'app_version': utils.get_app_version(),
        'service_account_name': app_identity.get_service_account_name(),
    }
    self.send_response(info)
def get_url(path, ttl=15):
    """Returns a signed URL for accessing a resource in the provided path.

    Args:
        path - path to the resource
        ttl - signed URL expiry time in minutes

    Returns:
        Signed URL to the resource
    """
    expiry = int(round(time.time() + ttl * 60))
    bucket = app_identity.get_default_gcs_bucket_name()
    cpath = '/' + bucket + '/' + path
    # Signature input per the GCS signed-URL v2 format:
    # method, MD5, content-type, expiry, resource path — newline separated.
    data = []
    data.append('GET')          # Method
    data.append('')             # MD5 digest value
    data.append('')             # Content-type
    data.append(str(expiry))    # Expiry date
    data.append(cpath)          # Path to the resource
    data_str = "\n".join(data)
    # (removed leftover debug statement: print(type(data_str)))
    signing_key_name, signature = app_identity.sign_blob(str(data_str))
    url = 'https://storage.googleapis.com'
    url += cpath
    url += '?GoogleAccessId=' + app_identity.get_service_account_name()
    url += '&Expires=' + str(expiry)
    url += '&Signature=' + urllib.quote_plus(base64.b64encode(signature))
    return url
def get_service_account_name():
    """Same as app_identity.get_service_account_name(), but caches the result.

    app_identity.get_service_account_name() does an RPC on each call, yet the
    result is always the same, so memoize it on the function object.
    """
    # The docstring promised caching but none was implemented; cache on a
    # function attribute so only the first call pays the RPC cost.
    cached = getattr(get_service_account_name, '_cached_name', None)
    if cached is None:
        cached = app_identity.get_service_account_name()
        get_service_account_name._cached_name = cached
    return cached
def prepare_upload(bucket_name, path, expiry=DEFAULT_URL_VALID_SECONDS):
    """Prepare a signed GCS upload."""
    expiration_time = (datetime.datetime.utcnow() +
                       datetime.timedelta(seconds=expiry))
    # POST-object policy: pin the key/bucket and bound the upload size.
    policy_doc = {
        'expiration': expiration_time.isoformat() + 'Z',
        'conditions': [
            {'key': path},
            {'bucket': bucket_name},
            ['content-length-range', 0, MAX_UPLOAD_SIZE],
            ['starts-with', '$x-goog-meta-filename', ''],
        ],
    }
    policy = base64.b64encode(json.dumps(policy_doc))

    local_server = environment.get_value('LOCAL_GCS_SERVER_HOST')
    if local_server:
        # Local dev server: no real signing available, use placeholders.
        url = local_server
        signature = 'SIGNATURE'
        service_account_name = 'service_account'
    else:
        url = STORAGE_URL % bucket_name
        signature = base64.b64encode(app_identity.sign_blob(policy)[1])
        service_account_name = app_identity.get_service_account_name()

    return GcsUpload(url, bucket_name, path, service_account_name, policy,
                     signature)
def get_signed_url(bucket_name,
                   path,
                   method='GET',
                   expiry=DEFAULT_URL_VALID_SECONDS):
    """Return a signed url."""
    timestamp = _get_expiration_time(expiry)
    # String-to-sign per the GCS signed URL v2 format.
    blob = '%s\n\n\n%d\n/%s/%s' % (method, timestamp, bucket_name, path)

    local_server = environment.get_value('LOCAL_GCS_SERVER_HOST')
    if local_server:
        # Local dev server: no real signing available, use placeholders.
        base_url = local_server + '/' + bucket_name
        signed_blob = 'SIGNATURE'
        account = 'service_account'
    else:
        base_url = STORAGE_URL % bucket_name
        signed_blob = app_identity.sign_blob(str(blob))[1]
        account = app_identity.get_service_account_name()

    query = urllib.urlencode({
        'GoogleAccessId': account,
        'Expires': timestamp,
        'Signature': base64.b64encode(signed_blob),
    })
    return str(base_url + '/' + path + '?' + query)
def receive(self, mail_message):
    """Relay an inbound email to the RipRunner email-trigger webhook."""
    logging.info("Received a message from: " + mail_message.sender)
    text_bodies = list(mail_message.bodies(content_type='text/plain'))
    body = text_bodies[0]
    logging.info("Body of message: " + body[1].decode())

    # Point to the Main Email trigger URL which will poll for all Firehalls
    # Example replace: https://svvfd.soft-haus.com/ with the root of you installation
    url = "https://svvfd.soft-haus.com/webhooks/email_trigger_webhook.php"

    form_data = urllib.urlencode({
        "sender": mail_message.sender,
        "subject": mail_message.subject,
        "to": mail_message.to,
        "date": mail_message.date,
        "body": body[1].decode(),
    })

    gae_app_id = app_identity.get_application_id()
    gae_account_name = app_identity.get_service_account_name()
    logging.info("AppID: " + gae_app_id + " SAM: " + gae_account_name)

    # Custom auth headers identify this GAE app to the webhook.
    result = urlfetch.fetch(
        url=url,
        payload=form_data,
        method=urlfetch.POST,
        headers={
            'Content-Type': 'application/x-www-form-urlencoded',
            'X-RipRunner-Auth-APPID': gae_app_id,
            'X-RipRunner-Auth-ACCOUNTNAME': gae_account_name,
        })
    logging.info(result.status_code)
    logging.info(result.content)
def sign_url(bucket_object, expires_after_seconds=300):
    """Return a signed GCS download URL for *bucket_object*."""
    gcs_filename = urllib.quote('/%s%s' % (settings.FILE_BUCKET, bucket_object))
    expiration_dt = (datetime.datetime.utcnow() +
                     timedelta(seconds=expires_after_seconds))
    expiration = int(time.mktime(expiration_dt.timetuple()))

    # Generate the string to sign: verb, MD5 (none), content-type (none),
    # expiry and resource path, newline separated.
    signature_string = '\n'.join(
        ['GET', '', '', str(expiration), gcs_filename])
    _, signature_bytes = app_identity.sign_blob(str(signature_string))
    signature = base64.b64encode(signature_bytes)

    # Set the right query parameters.
    query_params = {
        'GoogleAccessId': app_identity.get_service_account_name(),
        'Expires': str(expiration),
        'Signature': signature,
    }

    # Return the download URL.
    return '{endpoint}{resource}?{querystring}'.format(
        endpoint=GCS_ACCESS_ENDPOINT,
        resource=gcs_filename,
        querystring=urllib.urlencode(query_params))
def create_custom_token(uid, valid_minutes=60):
    """Create a secure token for the given id.

    This method is used to create secure custom JWT tokens to be passed to
    clients. It takes a unique id (uid) that will be used by Firebase's
    security rules to prevent unauthorized access. In this case, the uid will
    be the channel id which is a combination of user_id and game_key.

    Args:
        uid: unique id used as the channel id.
        valid_minutes: token lifetime in minutes.

    Returns:
        The signed JWT string 'header.payload.signature'.
    """
    # use the app_identity service from google.appengine.api to get the
    # project's service account email automatically
    client_email = app_identity.get_service_account_name()

    now = int(time.time())

    def _b64url(data):
        # JWT segments must use base64url without padding (RFC 7515
        # section 2); plain base64 can emit '+', '/' and '=', which
        # breaks token verification.
        return base64.urlsafe_b64encode(data).rstrip('=')

    # encode the required claims
    # per https://firebase.google.com/docs/auth/server/create-custom-tokens
    payload = _b64url(json.dumps({
        'iss': client_email,
        'sub': client_email,
        'aud': _IDENTITY_ENDPOINT,
        'uid': uid,  # the important parameter, as it will be the channel id
        'iat': now,
        'exp': now + (valid_minutes * 60),
    }))
    # add standard header to identify this as a JWT
    header = _b64url(json.dumps({'typ': 'JWT', 'alg': 'RS256'}))
    to_sign = '{}.{}'.format(header, payload)
    # Sign the jwt using the built in app_identity service
    return '{}.{}'.format(
        to_sign, _b64url(app_identity.sign_blob(to_sign)[1]))
def GenerateStorageSignedUrl(self, request):
    """Generates signed url for Cloud Storage."""
    GetEndpointsAuthUser()
    filename = request.filename
    owner = request.owner
    if not filename:
        raise endpoints.BadRequestException(
            'Missing request field "filename".')
    if not owner:
        raise endpoints.BadRequestException(
            'Missing request field "owner".')

    # One-hour policy lifetime, truncated to whole seconds.
    expires = '%sZ' % (datetime.utcnow() + timedelta(hours=1)).isoformat()[:19]
    conditions = [
        ['eq', '$bucket', GCS_BUCKET],
        ['eq', '$key', filename],
        ['eq', '$x-goog-meta-owner', owner],
    ]
    policy = base64.b64encode(
        json.dumps({'expiration': expires, 'conditions': conditions}))
    signature = base64.b64encode(app_identity.sign_blob(policy)[1])
    return StorageSignedUrlResponse(
        form_action=GCS_API_URL % GCS_BUCKET,
        bucket=GCS_BUCKET,
        policy=policy,
        signature=signature,
        google_access_id=app_identity.get_service_account_name(),
        filename=filename)
def create_custom_token(uid, valid_minutes=60):
    """Create a secure token for the given id.

    This method is used to create secure custom JWT tokens to be passed to
    clients. It takes a unique id (uid) that will be used by Firebase's
    security rules to prevent unauthorized access. In this case, the uid will
    be the channel id which is a combination of user_id and game_key.

    Args:
        uid: unique id used as the channel id.
        valid_minutes: token lifetime in minutes.

    Returns:
        The signed JWT string 'header.payload.signature'.
    """
    def _b64url(data):
        # JWT segments must use base64url without padding (RFC 7515
        # section 2); plain base64 can emit '+', '/' and '=', which
        # breaks token verification.
        return base64.urlsafe_b64encode(data).rstrip('=')

    header = _b64url(json.dumps({'typ': 'JWT', 'alg': 'RS256'}))
    client_email = app_identity.get_service_account_name()
    now = int(time.time())
    payload = _b64url(json.dumps({
        'iss': client_email,
        'sub': client_email,
        'aud': _IDENTITY_ENDPOINT,
        'uid': uid,
        'iat': now,
        'exp': now + (valid_minutes * 60),
    }))
    to_sign = '{}.{}'.format(header, payload)
    # Sign the jwt
    return '{}.{}'.format(
        to_sign, _b64url(app_identity.sign_blob(to_sign)[1]))
def _get_base_context(self):
    """Returns context common to GET and POST."""
    service_account = app_identity.get_service_account_name()
    if not service_account:
        # Empty on dev_appserver, where no real account exists.
        service_account = '[unknown service account on dev_appserver]'
    return {
        'service_account': service_account,
        'mapping_kind': config.config.MAPPING_DATASTORE_KIND_NAME,
        'config': config.config,
        'config_keys': config.CONFIGURATION_KEYS_FOR_INDEX,
    }
def service_account_email(self):
    """Get the email for the current service account.

    Returns:
        string, The email associated with the Google App Engine
        service account.
    """
    # Resolve lazily and memoize on the instance.
    cached = self._service_account_email
    if cached is None:
        cached = app_identity.get_service_account_name()
        self._service_account_email = cached
    return cached
def HandleGet(self):
    """Update the repo_to_dep_path in config from the lastest DEPS."""
    # Update repo_to_dep_path to the latest information.
    repo_factory = CachedGitilesRepository.Factory(HttpClientAppengine())
    dep_fetcher = ChromeDependencyFetcher(repo_factory)

    repo_to_dep_path = GetRepoToDepPath(dep_fetcher)
    if not repo_to_dep_path:  # pragma: no cover.
        return self.CreateError('Fail to update repo_to_dep_path config.', 400)

    # Record the service account as the updating user.
    updater = users.User(app_identity.get_service_account_name())
    CrashConfig.Get().Update(updater, True, repo_to_dep_path=repo_to_dep_path)
def _MakeUrl(self, verb, path, content_type='', content_md5=''):
    """Forms and returns the full signed URL to access GCS."""
    quoted_path = urllib.quote(path)
    signature_signed = self._Base64Sign(
        self._MakeSignatureString(verb, quoted_path, content_md5,
                                  content_type))
    query_params = {
        'GoogleAccessId': app_identity.get_service_account_name(),
        'Expires': str(self.expiration),
        'Signature': signature_signed,
    }
    base_url = '%s%s' % (self.gcs_api_endpoint, quoted_path)
    return base_url, query_params
def SendEmail(context, recipients):
    """Send alert/daily summary email."""
    emailbody = EMAIL_TEMPLATE.render(context)
    if not recipients:
        logging.info('no recipients for email, using configured default: ' +
                     config.default_to_email)
        recipients = [config.default_to_email]

    sender = app_identity.get_service_account_name()
    subject = 'Billing Summary For ' + context['project']
    mail.send_mail(sender=sender,
                   subject=subject,
                   body=emailbody,
                   html=emailbody,
                   to=recipients)
    logging.info('sending email to ' + ','.join(recipients) + emailbody)
def get(self):
    """Redirect to a signed GCS URL for the latest delta matching the query."""
    schema_ver = int(self.request.get('schema'))
    raw_since = self.request.get('since')
    if not raw_since:
        # Default to the beginning of time when no cutoff was supplied.
        raw_since = "0001-01-01 00:00:00"
    since = datetime.datetime.strptime(raw_since, "%Y-%m-%d %H:%M:%S")

    dbversion = models.DBUpdate.query(
        models.DBUpdate.schema_version == schema_ver,
        models.DBUpdate.source_time <= since).order(
            -models.DBUpdate.source_time).get()

    expiry = int(time.time()) + 30
    obj = dbversion.delta_gs_object_name
    if obj[:4] == "/gs/":
        # Drop the '/gs' prefix, keeping the leading '/bucket/...' path.
        obj = obj[3:]

    string_to_sign = "GET\n\n\n%d\n%s" % (expiry, obj)
    signature = app_identity.sign_blob(str(string_to_sign))[1]
    querystring = urllib.urlencode({
        'GoogleAccessId': app_identity.get_service_account_name(),
        'Expires': str(expiry),
        'Signature': base64.b64encode(signature),
    })
    return self.redirect(str('%s%s?%s' % (api_url, obj, querystring)))
def create_custom_token(uid, claims, mobile=False):
    """Create a secure token for the given ids.

    This method is used to create secure custom JWT tokens to be passed to
    clients. It takes a unique id (uid) and a session id (sid) that will
    be used by Firebase's security rules to prevent unauthorized access.

    Args:
        uid (str): a unique id (between 1-36 characters long)
        claims (dict): Additional claims
        mobile (bool): if the mobile service account should be used instead
            of default service account
    """
    if mobile:
        # Mobile path: sign with an explicitly configured service account.
        credentials = json.loads(get_server_settings().mobileFirebaseCredentials)
        client_email = credentials['client_email']
    else:
        # use the app_identity service from google.appengine.api to get the
        # project's service account email automatically
        client_email = app_identity.get_service_account_name()
    now = int(time.time())
    payload = {
        'iss': client_email,
        'sub': client_email,
        'aud': _IDENTITY_ENDPOINT,
        'uid': uid,
        'iat': now,
        'exp': now + 3600,  # fixed one-hour lifetime
        'claims': claims
    }
    if mobile:
        # PyJWT signs locally with the mobile account's private key.
        return jwt.encode(payload, credentials['private_key'],
                          algorithm=Algorithms.RS256)
    else:
        if DEBUG:
            from google.appengine.api.app_identity.app_identity_stub import APP_SERVICE_ACCOUNT_NAME
            if client_email == APP_SERVICE_ACCOUNT_NAME:
                raise Exception('Cannot create firebase token with default development service account.'
                                ' Set the GOOGLE_APPLICATION_CREDENTIALS environment variable with as value the path '
                                'to a json file containing the credentials for a service account.'
                                ' See https://developers.google.com/identity/protocols/application-default-credentials')
        # encode the required claims
        # per https://firebase.google.com/docs/auth/server/create-custom-tokens
        # uid and sid will be used as channel ids, sid is added to *claims*
        header = b64encode(json.dumps({'typ': 'JWT', 'alg': 'RS256'}))
        encoded_payload = b64encode(json.dumps(payload))
        to_sign = '%s.%s' % (header, encoded_payload)
        # Sign with the App Engine service account's key.
        return '{}.{}'.format(to_sign, b64encode(app_identity.sign_blob(to_sign)[1]))
def create_short_url(long_url):
    """Shorten *long_url* via the Google URL Shortener API."""
    scope = "https://www.googleapis.com/auth/urlshortener"
    authorization_token, _ = app_identity.get_access_token(scope)
    logging.info("Using token %s to represent identity %s",
                 authorization_token,
                 app_identity.get_service_account_name())

    response = urlfetch.fetch(
        "https://www.googleapis.com/urlshortener/v1/url?pp=1",
        method=urlfetch.POST,
        payload=json.dumps({"longUrl": long_url}),
        headers={"Content-Type": "application/json",
                 "Authorization": "OAuth " + authorization_token})

    if response.status_code != 200:
        raise Exception("Call failed. Status code %s. Body %s",
                        response.status_code, response.content)
    return json.loads(response.content)["id"]
def sign_url(self, object_name, url_lifetime):
    """ Generates Cloud Storage signed URL to download Google Cloud Storage
    object without sign in.

    See: https://cloud.google.com/storage/docs/access-control/signed-urls

    This only works on a real App Engine app, not in a dev app server.

    Args:
        object_name (str): The name of the object which is signed.
        url_lifetime (datetime.timedelta): Lifetime of the signed URL. The
            server rejects any requests received after this time from now.
    """
    if utils.is_dev_app_server():
        # app_identity.sign_blob() is not available on a dev app server,
        # so bail out early with a clear message.
        raise Exception(
            'sign_url only works on a real App Engine app, not on a dev '
            'app server.')

    expiration_time = utils.get_utcnow() + url_lifetime
    expiration_sec = int(time.mktime(expiration_time.timetuple()))
    path = '/%s/%s' % (self.bucket_name, object_name)

    # String-to-sign: verb, MD5 (unused), content-type (unused),
    # expiry and resource path, newline separated.
    signed_text = 'GET\n\n\n%s\n%s' % (expiration_sec, path)
    _, signature = app_identity.sign_blob(signed_text.encode('utf-8'))

    querystring = urllib.urlencode({
        'GoogleAccessId': app_identity.get_service_account_name(),
        'Expires': str(expiration_sec),
        'Signature': base64.b64encode(signature),
    })
    return 'https://storage.googleapis.com%s?%s' % (path, querystring)
def sign_url(self, object_name, url_lifetime):
    """ Generates Cloud Storage signed URL to download Google Cloud Storage
    object without sign in.

    See: https://cloud.google.com/storage/docs/access-control/signed-urls

    This only works on a real App Engine app, not in a dev app server.

    Args:
        object_name (str): The name of the object which is signed.
        url_lifetime (datetime.timedelta): Lifetime of the signed URL. The
            server rejects any requests received after this time from now.
    """
    if utils.is_dev_app_server():
        # Not working on a dev app server because it doesn't support
        # app_identity.sign_blob(). An alternative implementation would
        # be needed to make it work on a dev app server.
        raise Exception(
            'sign_url only works on a real App Engine app, not on a dev '
            'app server.')

    deadline = utils.get_utcnow() + url_lifetime
    expiration_sec = int(time.mktime(deadline.timetuple()))
    resource_path = '/%s/%s' % (self.bucket_name, object_name)

    # Signature input: verb, optional MD5, optional content-type, expiry,
    # resource path — newline separated (MD5/content-type unused here).
    lines = ['GET', '', '', str(expiration_sec), resource_path]
    _, signature = app_identity.sign_blob('\n'.join(lines).encode('utf-8'))

    params = urllib.urlencode({
        'GoogleAccessId': app_identity.get_service_account_name(),
        'Expires': str(expiration_sec),
        'Signature': base64.b64encode(signature),
    })
    return 'https://storage.googleapis.com%s?%s' % (resource_path, params)
def _load_credentials(self, credentials_file_path):
    """Load OAuth2 credentials from one of several supported sources."""
    if credentials_file_path == GCE_CREDENTIALS:
        return gce.AppAssertionCredentials(self._SCOPES)

    if credentials_file_path == APPENGINE_CREDENTIALS:  # pragma: no cover
        # This import doesn't work outside appengine, so delay it until it's used.
        from oauth2client import appengine
        from google.appengine.api import app_identity
        logging.info('Initializing with service account %s',
                     app_identity.get_service_account_name())
        return appengine.AppAssertionCredentials(self._SCOPES)

    # A real file path: inspect it to decide between a service-account
    # JSON key ('type' key present) and an oauth2client Storage file.
    with open(credentials_file_path, 'r') as credentials_file:
        credentials_json = json.load(credentials_file)
    if credentials_json.get('type', None):
        return GoogleCredentials.from_stream(
            credentials_file_path).create_scoped(self._SCOPES)
    return Storage(credentials_file_path).get()
def _load_credentials(self, credentials_file_path):
    """Resolve credentials for GCE, App Engine, or a credentials file."""
    if credentials_file_path == GCE_CREDENTIALS:
        return gce.AppAssertionCredentials(self._SCOPES)

    if credentials_file_path == APPENGINE_CREDENTIALS:  # pragma: no cover
        # This import doesn't work outside appengine, so delay it until it's used.
        from oauth2client import appengine
        from google.appengine.api import app_identity
        logging.info("Initializing with service account %s",
                     app_identity.get_service_account_name())
        return appengine.AppAssertionCredentials(self._SCOPES)

    # Otherwise the path names a file: a service-account JSON key carries
    # a "type" field; anything else is an oauth2client Storage file.
    with open(credentials_file_path, "r") as credentials_file:
        credentials_json = json.load(credentials_file)
    if credentials_json.get("type", None):
        scoped = GoogleCredentials.from_stream(
            credentials_file_path).create_scoped(self._SCOPES)
        return scoped
    return Storage(credentials_file_path).get()
def create_short_url(long_url):
    """Shorten a URL using the Google URL Shortener API."""
    scope = 'https://www.googleapis.com/auth/urlshortener'
    authorization_token, _ = app_identity.get_access_token(scope)
    # NOTE(review): these log lines emit a live OAuth access token —
    # consider redacting in production.
    logging.debug(authorization_token)
    logging.info('Using token %s to represent identity %s',
                 authorization_token,
                 app_identity.get_service_account_name())

    response = urlfetch.fetch(
        'https://www.googleapis.com/urlshortener/v1/url?pp=1',
        method=urlfetch.POST,
        payload=json.dumps({'longUrl': long_url}),
        headers={
            'Content-Type': 'application/json',
            'Authorization': 'OAuth ' + authorization_token,
        })

    if response.status_code != 200:
        raise Exception("Google URL Shortener call failed. Status code %s. Body %s",
                        response.status_code, response.content)
    return json.loads(response.content)['id']
def get(self):
    """List the project's GCS buckets using an app-identity access token."""
    auth_token, _ = app_identity.get_access_token(
        'https://www.googleapis.com/auth/cloud-platform')
    logging.info('Using token {} to represent identity {}'.format(
        auth_token, app_identity.get_service_account_name()))

    list_url = 'https://www.googleapis.com/storage/v1/b?project={}'.format(
        app_identity.get_application_id())
    response = urlfetch.fetch(
        list_url,
        method=urlfetch.GET,
        headers={'Authorization': 'Bearer {}'.format(auth_token)})
    if response.status_code != 200:
        raise Exception('Call failed. Status code {}. Body {}'.format(
            response.status_code, response.content))

    result = json.loads(response.content)
    self.response.headers['Content-Type'] = 'application/json'
    self.response.write(json.dumps(result, indent=2))
def _mint_delegation_token_async():
    """Generates an access token to impersonate the current user, if any.

    Memcaches the token.

    Returns (via ndb.Return, this is an ndb tasklet):
        The delegation token string, or None when no user is signed in.

    Raises:
        BuildBucketError: if the token service responds without a token.
    """
    account = models.Account.current_user_account
    if account is None:
        raise ndb.Return(None)

    ctx = ndb.get_context()

    # Get from cache.
    cache_key = IMPERSONATION_TOKEN_CACHE_KEY_FORMAT % account.email
    token = yield ctx.memcache_get(cache_key)
    if token:
        raise ndb.Return(token)

    # Request a new one.
    logging.debug('Minting a delegation token for %s', account.email)
    req = {
        'audience': ['user:%s' % app_identity.get_service_account_name()],
        'services': ['service:%s' % BUILDBUCKET_APP_ID],
        'impersonate': 'user:%s' % account.email,
    }
    resp = yield net.json_request_async(
        IMPERSONATION_TOKEN_MINT_URL,
        method='POST',
        payload=req,
        scopes=net.EMAIL_SCOPE)
    token = resp.get('delegation_token')
    if not token:
        raise BuildBucketError(
            'Could not mint a delegation token. Response: %s' % resp)

    # Put to cache, expiring slightly before the token itself does.
    validity_duration_sec = resp.get('validity_duration')
    assert isinstance(validity_duration_sec, int)
    if validity_duration_sec >= 10:
        validity_duration_sec -= 10  # Refresh the token 10 sec in advance.
    yield ctx.memcache_add(cache_key, token, time=validity_duration_sec)

    raise ndb.Return(token)
def auth_check():
    """Exercise google.auth and app_identity and render a diagnostic listing.

    Calls each identity API and returns a code_block of REPL-style '>>>'
    lines showing the live results; the access token and signature are
    truncated before display so full secrets are not rendered.
    """
    credentials, project = google.auth.default()
    key_name, signature = app_identity.sign_blob(b'abc')
    scope = 'https://www.googleapis.com/auth/userinfo.email'
    token, expiry = app_identity.get_access_token(scope)
    return code_block(
        '>>> import google.auth',
        '>>> credentials, project = google.auth.default()',
        '>>> credentials',
        repr(credentials),
        '>>> project',
        repr(project),
        '>>> credentials.__dict__',
        repr(credentials.__dict__),
        '>>> from google.appengine.api import app_identity',
        '>>> app_identity',
        repr(app_identity),
        # ALSO: get_access_token_uncached
        # (scopes, service_account_id=None)
        '>>> scope = \'https://www.googleapis.com/auth/userinfo.email\'',
        '>>> token, expiry = app_identity.get_access_token(scope)',
        '>>> token',
        repr(token[:6] + b'...'),
        '>>> expiry',
        repr(expiry),
        '>>> app_identity.get_application_id()',
        repr(app_identity.get_application_id()),
        '>>> app_identity.get_default_gcs_bucket_name()',
        repr(app_identity.get_default_gcs_bucket_name()),
        '>>> app_identity.get_default_version_hostname()',
        repr(app_identity.get_default_version_hostname()),
        '>>> app_identity.get_public_certificates()',
        repr(app_identity.get_public_certificates()),
        '>>> app_identity.get_service_account_name()',
        repr(app_identity.get_service_account_name()),
        '>>> key_name, signature = app_identity.sign_blob(b\'abc\')',
        '>>> key_name',
        repr(key_name),
        '>>> signature',
        repr(signature[:16] + b'...'),
    )
def GetTemplateVariables(self):
    """Return the values that can be used as jinja variables in templates."""
    today = datetime.date.today()
    options = copy.deepcopy(self.config.get('options', {}))

    # Fill in storage defaults and a derived gs:// URL.
    storage = options.get('storage', {})
    UpdateNestedDict(storage, {'bucket': '', 'prefix': ''})
    storage['url'] = 'gs://%s/%s' % (storage['bucket'], storage['prefix'])

    app_info = {
        'id': app_identity.get_application_id(),
        'hostname': app_identity.get_default_version_hostname(),
        'serviceAccountName': app_identity.get_service_account_name(),
    }
    date_info = {
        'y-m-d': today.strftime('%Y-%m-%d'),
        'ymd': today.strftime('%Y%m%d'),
    }
    UpdateNestedDict(options, {
        'app': app_info,
        'storage': storage,
        'date': date_info,
    })
    return options
def create_custom_token(uid, sid, valid_minutes=60):
    """Create a secure token for the given ids.

    Builds a custom JWT (signed with the app's service account via the
    app_identity service) to be passed to clients. The unique id (uid)
    and session id (sid) are consumed by Firebase's security rules to
    prevent unauthorized access; sid travels inside the 'claims' map.

    Args:
        uid (str): a unique id (between 1-36 characters long)
        sid (str): session id, exposed to rules as claims.sid
        valid_minutes (int): token lifetime, default 60
    """
    # The project's service account email serves as both issuer and subject.
    client_email = app_identity.get_service_account_name()
    if DEBUG:
        from google.appengine.api.app_identity.app_identity_stub import APP_SERVICE_ACCOUNT_NAME
        if client_email == APP_SERVICE_ACCOUNT_NAME:
            raise Exception('Cannot create firebase token with default development service account.'
                            ' Set the GOOGLE_APPLICATION_CREDENTIALS environment variable with as value the path to a '
                            'json file containing the credentials for a service account.'
                            ' See https://developers.google.com/identity/protocols/application-default-credentials')

    now = int(time.time())
    # Required claims per
    # https://firebase.google.com/docs/auth/server/create-custom-tokens
    # uid and sid will be used as channel ids; sid rides in 'claims'.
    claims = {
        'iss': client_email,
        'sub': client_email,
        'aud': _IDENTITY_ENDPOINT,
        'uid': uid,
        'iat': now,
        'exp': now + (valid_minutes * 60),
        'claims': {
            'sid': sid
        }
    }
    payload = base64.b64encode(json.dumps(claims))

    # Standard JWT header identifying the signing algorithm.
    header = base64.b64encode(json.dumps({'typ': 'JWT', 'alg': 'RS256'}))

    to_sign = '{}.{}'.format(header, payload)
    # sign_blob returns (key_name, signature); only the signature is needed.
    signature = app_identity.sign_blob(to_sign)[1]
    return '{}.{}'.format(to_sign, base64.b64encode(signature))
def upload_to_gs(replication_state, auth_db_blob, key_name, sig):
    """Updates Google Storage files to contain the latest AuthDB.

    Args:
        replication_state: AuthReplicationState that correspond to auth_db_blob.
        auth_db_blob: serialized ReplicationPushRequest message (has AuthDB inside).
        key_name: name of the signing key.
        sig: a binary blob with RS256(SHA512(auth_db_blob)) signature.

    Raises:
        net.Error if Google Storage writes fail.
    """
    # Revision metadata identifying which primary produced this AuthDB rev.
    revision = replication_pb2.AuthDBRevision(
        primary_id=app_identity.get_application_id(),
        auth_db_rev=replication_state.auth_db_rev,
        modified_ts=utils.datetime_to_timestamp(replication_state.modified_ts))

    # The blob itself, wrapped with the signature and signer identity so
    # consumers can verify it.
    signed_db = replication_pb2.SignedAuthDB(
        auth_db_blob=auth_db_blob,
        signer_id=app_identity.get_service_account_name(),
        signing_key_id=key_name,
        signature=sig)

    gcs.upload_auth_db(
        signed_db.SerializeToString(),
        json_format.MessageToJson(revision))
def sign_gcs_url(gcs_filename, expires_after_seconds=6):
    """cloudstorage signed url to download cloudstorage object without login

    Signs with the app's service account via app_identity.sign_blob.

    Docs : https://cloud.google.com/storage/docs/access-control?hl=bg#Signed-URLs
    API : https://cloud.google.com/storage/docs/reference-methods?hl=bg#getobject

    Args:
        gcs_filename: object resource path, e.g. '/bucket/object'.
        expires_after_seconds: URL validity window in seconds.

    Returns:
        str: a signed GET URL for the object.
    """
    import calendar  # local import so the module import block is untouched

    GCS_API_ACCESS_ENDPOINT = 'https://storage.googleapis.com'
    google_access_id = app_identity.get_service_account_name()
    method = 'GET'
    # TODO: decide whether to support content_md5 and content_type as params
    content_md5, content_type = None, None

    # expiration : number of seconds since epoch
    expiration_dt = datetime.utcnow() + timedelta(
        seconds=expires_after_seconds)
    # BUG FIX: time.mktime() interprets a struct_time in the server's *local*
    # timezone, so combined with utcnow() the expiry was skewed by the UTC
    # offset on any host where TZ != UTC. calendar.timegm() treats the
    # struct_time as UTC, matching utcnow() above.
    expiration = calendar.timegm(expiration_dt.timetuple())

    # Generate the string to sign (canonical GCS signed-URL string).
    signature_string = '\n'.join([
        method,
        content_md5 or '',
        content_type or '',
        str(expiration),
        gcs_filename
    ])

    # sign_blob returns (key_name, signature); keep only the signature.
    signature_bytes = app_identity.sign_blob(str(signature_string))[1]

    # Set the right query parameters. we use a gae service account for the id
    query_params = {
        'GoogleAccessId': google_access_id,
        'Expires': str(expiration),
        'Signature': base64.b64encode(signature_bytes)
    }

    # Return the built URL.
    result = '{endpoint}{resource}?{querystring}'.format(
        endpoint=GCS_API_ACCESS_ENDPOINT,
        resource=gcs_filename,
        querystring=urllib.urlencode(query_params))
    return str(result)
def GetTemplateVariables(self):
    """Return the values that can be used as jinja variables in templates."""
    current_day = datetime.date.today()

    # Deep-copy so callers of this method never mutate self.config.
    template_vars = copy.deepcopy(self.config.get('options', {}))

    storage_opts = template_vars.get('storage', {})
    UpdateNestedDict(storage_opts, {'bucket': '', 'prefix': ''})
    storage_opts['url'] = 'gs://%s/%s' % (
        storage_opts['bucket'], storage_opts['prefix'])

    UpdateNestedDict(template_vars, {
        'app': {
            'id': app_identity.get_application_id(),
            'hostname': app_identity.get_default_version_hostname(),
            'serviceAccountName': app_identity.get_service_account_name(),
        },
        'storage': storage_opts,
        'date': {
            'y-m-d': current_day.strftime('%Y-%m-%d'),
            'ymd': current_day.strftime('%Y%m%d'),
        },
    })
    return template_vars
def sign_gcs_url(gcs_filename, expires_after_seconds=6):
    """cloudstorage signed url to download cloudstorage object without login

    Signs with the app's service account via app_identity.sign_blob.

    Docs : https://cloud.google.com/storage/docs/access-control?hl=bg#Signed-URLs
    API : https://cloud.google.com/storage/docs/reference-methods?hl=bg#getobject

    Args:
        gcs_filename: object resource path, e.g. '/bucket/object'.
        expires_after_seconds: URL validity window in seconds.

    Returns:
        str: a signed GET URL for the object.
    """
    import calendar  # local import so the module import block is untouched

    GCS_API_ACCESS_ENDPOINT = 'https://storage.googleapis.com'
    google_access_id = app_identity.get_service_account_name()
    method = 'GET'
    content_md5, content_type = None, None

    # expiration : number of seconds since epoch
    expiration_dt = datetime.utcnow() + timedelta(
        seconds=expires_after_seconds)
    # BUG FIX: time.mktime() interprets a struct_time in the server's *local*
    # timezone, so combined with utcnow() the expiry was skewed by the UTC
    # offset on any host where TZ != UTC. calendar.timegm() treats the
    # struct_time as UTC, matching utcnow() above.
    expiration = calendar.timegm(expiration_dt.timetuple())

    # Generate the string to sign (canonical GCS signed-URL string).
    signature_string = '\n'.join([
        method,
        content_md5 or '',
        content_type or '',
        str(expiration),
        gcs_filename])

    # sign_blob returns (key_name, signature); keep only the signature.
    signature_bytes = app_identity.sign_blob(str(signature_string))[1]

    # Set the right query parameters. we use a gae service account for the id
    query_params = {'GoogleAccessId': google_access_id,
                    'Expires': str(expiration),
                    'Signature': base64.b64encode(signature_bytes)}

    # Return the built URL.
    result = '{endpoint}{resource}?{querystring}'.format(
        endpoint=GCS_API_ACCESS_ENDPOINT,
        resource=gcs_filename,
        querystring=urllib.urlencode(query_params))
    return str(result)
def get(self):
    """List the project's GCS buckets using the app's own identity.

    Obtains an OAuth access token for the cloud-platform scope via
    app_identity, calls the Storage JSON API, and writes the parsed
    bucket listing back as indented JSON. Raises on any non-200 reply.
    """
    scope = 'https://www.googleapis.com/auth/cloud-platform'
    auth_token, _ = app_identity.get_access_token(scope)
    logging.info(
        'Using token {} to represent identity {}'.format(
            auth_token, app_identity.get_service_account_name()))

    bucket_list_url = 'https://www.googleapis.com/storage/v1/b?project={}'.format(
        app_identity.get_application_id())
    auth_header = {'Authorization': 'Bearer {}'.format(auth_token)}
    response = urlfetch.fetch(
        bucket_list_url,
        method=urlfetch.GET,
        headers=auth_header)

    if response.status_code != 200:
        raise Exception(
            'Call failed. Status code {}. Body {}'.format(
                response.status_code, response.content))

    result = json.loads(response.content)
    self.response.headers['Content-Type'] = 'application/json'
    self.response.write(json.dumps(result, indent=2))
def test_get_service_account_name():
    """The app_identity API must report a non-empty service account name."""
    service_account = app_identity.get_service_account_name()
    assert service_account
def test_app_identity_api_returns_service_account_name(self):
    """The local stub must report the dev-server service account name."""
    # Renamed local: the value is a service-account email, not a hostname.
    account_name = app_identity.get_service_account_name()
    self.assertEqual(account_name, 'test@localhost')
from oauth2client.client import GoogleCredentials

# Firebase Realtime Database root for this project.
_FIREBASE_DB_URL = 'https://netskrafl.firebaseio.com'
# Audience value required in custom-token JWTs
# (Google Identity Toolkit endpoint).
_IDENTITY_ENDPOINT = (
    'https://identitytoolkit.googleapis.com/'
    'google.identity.identitytoolkit.v1.IdentityToolkit'
)
# OAuth scopes needed to read/write the Firebase DB as the app.
_FIREBASE_SCOPES = [
    'https://www.googleapis.com/auth/firebase.database',
    'https://www.googleapis.com/auth/userinfo.email'
]
_TIMEOUT = 15  # Seconds

# Use the app_identity service from google.appengine.api to get the
# project's service account email automatically
_CLIENT_EMAIL = app_identity.get_service_account_name(deadline = _TIMEOUT)

# Default headers sent with every Firebase request.
_HEADERS = { "Connection": "keep-alive" }

# Initialize thread-local storage (one HTTP client cached per thread).
_tls = threading.local()

def _get_http():
    """ Provides an authorized HTTP object, one per thread """
    if not hasattr(_tls, "_HTTP"):
        # No client cached on this thread yet: build a fresh one.
        http = httplib2.Http(timeout = _TIMEOUT)
        # Use application default credentials to make the Firebase calls
        # https://firebase.google.com/docs/reference/rest/database/user-auth
import json
import logging

import webapp2

# BUG FIX: app_identity is used below (SERVICE_ACCOUNT_EMAIL) but did not
# appear among this module's imports, which would raise NameError at import
# time. Importing it here is harmless even if it is also imported elsewhere.
from google.appengine.api import app_identity

# OAuth scopes for the Compute Engine and Cloud Storage APIs.
COMPUTE_SCOPE = 'https://www.googleapis.com/auth/compute'
STORAGE_SCOPE = 'https://www.googleapis.com/auth/devstorage.full_control'

# Target project/zone and the Minecraft instance's resources.
PROJECT = 'sauer-cloud'
ZONE = 'us-central1-a'
INSTANCE = 'mc'
DISK = 'mc'
NETWORK = 'mc'
DEFAULT_MACHINE_TYPE = 'g1-small'
STARTUP_SCRIPT_URL = 'gs://sauer-cloud/mc-startup-script.sh'

# The app's own service account, used when talking to the Compute API.
SERVICE_ACCOUNT_EMAIL = app_identity.get_service_account_name()

# Pre-built Compute Engine v1 REST URLs for the resources above.
API_V1_URL = 'https://www.googleapis.com/compute/v1'
PROJECT_URL = API_V1_URL + '/projects/' + PROJECT
PROJECT_ZONE_URL = PROJECT_URL + '/zones/' + ZONE
PROJECT_GLOBAL_URL = PROJECT_URL + '/global'
DISK_URL = PROJECT_ZONE_URL + '/disks/' + DISK
INSTANCES_URL = PROJECT_ZONE_URL + '/instances'
NETWORK_URL = PROJECT_GLOBAL_URL + '/networks/' + NETWORK


class PingHandler(webapp2.RequestHandler):
    """Trivial health-check endpoint."""

    def get(self):
        """Respond 200 with a plain-text OK marker."""
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.write('OK.')
def get(self):
    """Write the app's service account name into the response body."""
    service_account = app_identity.get_service_account_name()
    self.response.write('accountname: ' + service_account)
def service_account_email(self):
    """The service account email (resolved lazily, then cached)."""
    cached = self._service_account_id
    if cached is not None:
        return cached
    # First access: ask the App Engine runtime for the app's account name
    # and remember it for subsequent calls.
    self._service_account_id = app_identity.get_service_account_name()
    return self._service_account_id
def _is_local_service_account():
    """True when running with the dev-server's local service account.

    The dev appserver reports either an empty account name or one ending
    in '@localhost'; production accounts never match either.
    """
    account_name = app_identity.get_service_account_name()
    if account_name == '':
        return True
    return account_name.endswith('@localhost')
def get_appengine_default_system_email_address():
    """Return the app's default no-reply sender in 'Name <addr>' form."""
    display_name = app_identity.get_service_account_name()
    app_id = app_identity.get_application_id()
    return "%s <noreply@%s.appspotmail.com>" % (display_name, app_id)