def test_ok(self):
    """Lightweight end-to-end flow test of backup_datastore."""
    # Stub out credential acquisition and HTTP execution so no real
    # network calls happen; only the request wiring is exercised.
    with sleuth.switch(
        'djangae.contrib.backup.tasks._get_authentication_credentials',
        lambda: app_engine.Credentials(scopes=AUTH_SCOPES)
    ), sleuth.switch(
        'googleapiclient.http.HttpRequest.execute', lambda x: True
    ) as mock_fn:
        backup_datastore(kinds=['backup_mockuser'])
        self.assertTrue(mock_fn.called)
def test_default_state(self, app_identity):
    """A freshly constructed Credentials object is unrefreshed and unscoped."""
    credentials = app_engine.Credentials()

    # No token has been acquired yet, so the credentials are neither
    # valid nor (meaningfully) expired.
    assert not credentials.valid
    assert not credentials.expired

    # No scopes were supplied, so scoping is still required.
    assert not credentials.scopes
    assert not credentials.default_scopes
    assert credentials.requires_scopes

    # No quota project is configured by default.
    assert not credentials.quota_project_id
def test_with_default_scopes(self, app_identity):
    """with_scopes(default_scopes=...) satisfies the scope requirement."""
    base = app_engine.Credentials()

    # The base credentials start out unscoped.
    assert not base.scopes
    assert not base.default_scopes
    assert base.requires_scopes

    scoped = base.with_scopes(scopes=None, default_scopes=["email"])

    # Default scopes alone are enough to make the credentials scoped.
    assert scoped.has_scopes(["email"])
    assert not scoped.requires_scopes
def test_sign_bytes(self, app_identity):
    """sign_bytes delegates to app_identity.sign_blob, returning the signature."""
    # sign_blob returns a (key_id, signature) pair; only the signature
    # should be surfaced by sign_bytes.
    app_identity.sign_blob.return_value = (
        mock.sentinel.key_id,
        mock.sentinel.signature,
    )
    payload = b"123"

    result = app_engine.Credentials().sign_bytes(payload)

    assert result == mock.sentinel.signature
    app_identity.sign_blob.assert_called_with(payload)
def _get_authentication_credentials():
    """
    Returns authentication credentials depending on environment.

    See https://developers.google.com/api-client-library/python/auth/service-accounts
    """
    if is_production_environment():
        # On App Engine, use the built-in App Identity service account.
        return app_engine.Credentials(scopes=AUTH_SCOPES)

    # Locally, load a service-account key file named by the standard
    # GOOGLE_APPLICATION_CREDENTIALS environment variable.
    service_account_path = os.environ['GOOGLE_APPLICATION_CREDENTIALS']
    return service_account.Credentials.from_service_account_file(
        service_account_path, scopes=AUTH_SCOPES)
def test_refresh_with_default_scopes(self, utcnow, app_identity):
    """refresh() requests a token using the default scopes."""
    token, ttl = "token", 643942923
    app_identity.get_access_token.return_value = token, ttl

    credentials = app_engine.Credentials(default_scopes=["email"])
    credentials.refresh(None)

    # The default scopes (not explicit scopes) are passed to App Identity.
    app_identity.get_access_token.assert_called_with(
        credentials.default_scopes, credentials._service_account_id)
    assert credentials.token == token
    # Expiry is computed from the mocked clock plus the returned TTL.
    assert credentials.expiry == datetime.datetime(1990, 5, 29, 1, 2, 3)
    assert credentials.valid
    assert not credentials.expired
def get_authenticated_services():
    """Build authenticated YouTube and YouTube Partner API clients.

    Returns:
        tuple: (youtube, youtube_partner) discovery service objects.
    """
    # Check runtime environment and use the appropriate mechanism for
    # retrieving credentials.
    on_app_engine = os.getenv('SERVER_SOFTWARE', '').startswith(
        'Google App Engine/')
    if on_app_engine:
        # Production: use the App Engine service account.
        credentials = app_engine.Credentials(scopes=YOUTUBE_SCOPES)
    else:
        # Local development server: use a key file.
        credentials = service_account.Credentials.from_service_account_file(
            SERVICE_ACCOUNT_FILE, scopes=YOUTUBE_SCOPES)

    build = googleapiclient.discovery.build
    youtube = build("youtube", "v3", credentials=credentials)
    youtube_partner = build("youtubePartner", "v1", credentials=credentials)
    return (youtube, youtube_partner)
def _get_gae_credentials(): """Gets Google App Engine App Identity credentials and project ID.""" # While this library is normally bundled with app_engine, there are # some cases where it's not available, so we tolerate ImportError. try: import google.auth.app_engine as app_engine except ImportError: return None, None try: credentials = app_engine.Credentials() project_id = app_engine.get_project_id() return credentials, project_id except EnvironmentError: return None, None
def test_refresh(self, now_mock, app_identity_mock):
    """refresh() stores the token and derives expiry from the TTL."""
    token, ttl = 'token', 100
    app_identity_mock.get_access_token.return_value = token, ttl

    credentials = app_engine.Credentials(scopes=['email'])
    credentials.refresh(None)

    app_identity_mock.get_access_token.assert_called_with(
        credentials.scopes, credentials._service_account_id)
    assert credentials.token == token
    # The mocked clock makes expiry deterministic: epoch-min plus TTL.
    expected_expiry = datetime.datetime.min + datetime.timedelta(seconds=ttl)
    assert credentials.expiry == expected_expiry
    assert credentials.valid
    assert not credentials.expired
def test_refresh(self, utcnow, app_identity):
    """refresh() computes expiry relative to the (mocked) current time."""
    token = 'token'
    # TTL must exceed the clock-skew allowance for the token to be valid.
    ttl = _helpers.CLOCK_SKEW_SECS + 100
    app_identity.get_access_token.return_value = token, ttl

    credentials = app_engine.Credentials(scopes=['email'])
    credentials.refresh(None)

    app_identity.get_access_token.assert_called_with(
        credentials.scopes, credentials._service_account_id)
    assert credentials.token == token
    assert credentials.expiry == (utcnow() + datetime.timedelta(seconds=ttl))
    assert credentials.valid
    assert not credentials.expired
def test_credentials():
    """Scoped App Engine credentials can fetch a token with the right scope."""
    scoped_credentials = app_engine.Credentials().with_scopes([EMAIL_SCOPE])
    scoped_credentials.refresh(None)

    assert scoped_credentials.valid
    assert scoped_credentials.token is not None

    # Ask the token-info endpoint what scope the token actually carries.
    url = _helpers.update_query(
        TOKEN_INFO_URL, {"access_token": scoped_credentials.token})
    response = HTTP_REQUEST(url=url, method="GET")
    token_info = json.loads(response.data.decode("utf-8"))
    assert token_info["scope"] == EMAIL_SCOPE
def explicit_app_engine(project):
    """List and print a project's storage buckets using App Engine credentials."""
    from google.auth import app_engine
    import googleapiclient.discovery

    # Explicitly use App Engine credentials. These credentials are
    # only available when running on App Engine Standard.
    credentials = app_engine.Credentials()

    # Explicitly pass the credentials to the client library.
    storage_client = googleapiclient.discovery.build(
        'storage', 'v1', credentials=credentials)

    # Make an authenticated API request and show the result.
    print(storage_client.buckets().list(project=project).execute())
def auth_sheets_api():
    """Authenticate with the Google Sheets API.

    Uses App Engine credentials in production and a service-account key
    file on the local development server.

    :return: an authorized Sheets v4 service object
    """
    # Check environment
    if os.getenv('SERVER_SOFTWARE', '').startswith('Google App Engine/'):
        # Production
        credentials = app_engine.Credentials(scopes=SCOPES)
    else:
        # Local development server.
        # Fix: pass scopes here too — without them the service-account
        # credentials are unscoped and Sheets API calls are rejected
        # (the production branch above already passes SCOPES).
        credentials = service_account.Credentials.from_service_account_file(
            SERVICE_ACCOUNT_PATH_JSON, scopes=SCOPES)
    # Call the Sheets API
    service = build('sheets', 'v4', credentials=credentials)
    return service
def __init__(self, credentials=None):
    """Builds a connector to interact with Google Cloud tools.

    :type credentials: `google.auth.credentials.Credentials` or str
    :param credentials: certificates to connect to GCP, can be either a
        Credentials class or a path to the json key file.
    :raises: TypeError if credentials is not of type
        google.auth.credentials
    """
    wrong_type = (
        credentials is not None
        and not isinstance(credentials,
                           google.auth.credentials.Credentials))
    if wrong_type:
        raise TypeError("credentials must be of type "
                        "google.auth.credentials")
    # if no ``credentials`` is sent then assume we are running this
    # code in AppEngine environment
    if credentials:
        self._credentials = credentials
    else:
        self._credentials = app_engine.Credentials()
def _get_gae_credentials():
    """Gets Google App Engine App Identity credentials and project ID.

    Returns:
        tuple: (credentials, project_id), or (None, None) when the App
        Engine auth library or the App Engine APIs are unavailable.
    """
    # While this library is normally bundled with app_engine, there are
    # some cases where it's not available, so we tolerate ImportError.
    try:
        _LOGGER.debug("Checking for App Engine runtime as part of auth process...")
        import google.auth.app_engine as app_engine
    except ImportError:
        _LOGGER.warning("Import of App Engine auth library failed.")
        return None, None
    try:
        credentials = app_engine.Credentials()
        project_id = app_engine.get_project_id()
        return credentials, project_id
    except EnvironmentError:
        # Fix: the previous message claimed the *library* was missing and
        # read "cannot authentication via"; at this point the import
        # succeeded — it is the App Engine APIs that are unavailable.
        _LOGGER.debug(
            "App Engine APIs are not available, so cannot authenticate via "
            "App Engine Identity Credentials."
        )
        return None, None
def create_directory_service(user_email):
    """Build and returns an Admin SDK Directory service object authorized
    with the service accounts that act on behalf of the given user.

    Args:
        user_email: The email of the user. Needs permissions to access
            the Admin APIs.
    Returns:
        Admin SDK directory service object.
    """
    if config.USE_APP_ENGINE_SERVICE_ACCOUNT:
        # NOTE(review): google.auth's app_engine.Credentials does not
        # appear to expose create_delegated (that is an oauth2client
        # API) — confirm this branch is exercised in production.
        credentials = app_engine.Credentials()
    else:
        credentials = ServiceAccountCredentials.from_json_keyfile_name(
            "credentials.json",
            scopes=[
                'https://www.googleapis.com/auth/admin.directory.group.member.readonly',
                'https://www.googleapis.com/auth/admin.directory.group.readonly'
            ])
    credentials = credentials.create_delegated(user_email)
    return build('admin', 'directory_v1', credentials=credentials)
def test_signer_email(self, app_identity):
    """signer_email mirrors service_account_email."""
    creds = app_engine.Credentials()
    assert creds.signer_email == creds.service_account_email
def test_signer(self, app_identity):
    """The signer property is an app_engine.Signer instance."""
    creds = app_engine.Credentials()
    assert isinstance(creds.signer, app_engine.Signer)
def test_service_account_email_explicit(self, app_identity):
    """An explicit service_account_id is used verbatim, without an API call."""
    explicit_email = mock.sentinel.service_account_email
    credentials = app_engine.Credentials(service_account_id=explicit_email)

    assert credentials.service_account_email == explicit_email
    # No lookup of the default account name should have happened.
    assert not app_identity.get_service_account_name.called
import logging

from google.auth import app_engine
from googleapiclient import discovery, errors

from pluginbase import Plugin
from utils import gcp, utils

# Module-wide App Engine credentials scoped for Bigtable administration.
SCOPES = ['https://www.googleapis.com/auth/bigtable.admin']
CREDENTIALS = app_engine.Credentials(scopes=SCOPES)


class BigTable(Plugin):
    """Plugin wrapping the Bigtable Admin API with batched requests."""

    def __init__(self):
        Plugin.__init__(self)
        self.bigtable = discovery.build(
            'bigtableadmin', 'v2', credentials=CREDENTIALS)
        # Batch requests are dispatched through batch_callback.
        self.batch = self.bigtable.new_batch_http_request(
            callback=self.batch_callback)

    def register_signals(self):
        """ Register with the plugin manager. """
        logging.debug("BigTable class created and registering signals")

    def api_name(self):
        """Return the fully-qualified API service name."""
        return "bigtableadmin.googleapis.com"
def get_oauth2_token():
    """Return the current App Engine credentials' access token.

    NOTE(review): a freshly constructed Credentials object has not been
    refreshed, so ``.token`` is None here unless a refresh happens
    elsewhere (e.g. by the transport layer) — confirm callers expect that.
    """
    return app_engine.Credentials().token
# Module-level GCS client setup: choose credentials based on settings.
project_id = settings.GCP_PROJECT_ID
bucket_name = settings.GCP_STORAGE_BUCKET_NAME
service_account_json = settings.GCP_SERVICE_ACCOUNT_JSON

if not settings.GCP_USE_SERVICE_ACCOUNT_JSON:
    try:
        # Try local development environment
        # NOTE(review): bare except below swallows every exception,
        # including KeyboardInterrupt — consider narrowing.
        credentials, project_id = google.auth.default()
    except:
        # Try production environment
        try:
            credentials = compute_engine.Credentials()
        except:
            credentials = app_engine.Credentials()
    client = storage.Client(credentials=credentials, project=project_id)
else:
    client = storage.Client.from_service_account_json(service_account_json)

bucket = client.get_bucket(bucket_name)


class GCSObjectStreamUpload(object):
    """Streamed upload helper for a GCS object.

    NOTE(review): the rest of this class is not visible in this chunk.

    Example)
        client = storage.Client()
        with GCSObjectStreamUpload(client=client, bucket_name='test-bucket', blob_name='test-blob') as s:
            for _ in range(1024):
                s.write(b'x' * 1024)
    """
import os

import googleapiclient.discovery

# Scopes needed to read the target spreadsheet.
SCOPES = [
    'https://www.googleapis.com/auth/drive',
    'https://www.googleapis.com/auth/spreadsheets'
]

spreadsheetId = '1qbAvTxUP-0I8eiYQVkXiLJo_3bT39JlW2uX2sVECvq8'
rangeName = 'A1:A2'

# Pick credentials by runtime environment.
if os.getenv('SERVER_SOFTWARE', '').startswith('Google App Engine/'):
    # Production
    from google.auth import app_engine
    credentials = app_engine.Credentials(scopes=SCOPES)
else:
    # Development
    from google.oauth2 import service_account
    SERVICE_ACCOUNT_FILE = './credentials/sheets-cred.json'
    credentials = service_account.Credentials.from_service_account_file(
        SERVICE_ACCOUNT_FILE, scopes=SCOPES)

service = googleapiclient.discovery.build(
    'sheets', 'v4', credentials=credentials)
response = service.spreadsheets().values().get(
    spreadsheetId=spreadsheetId, range=rangeName).execute()
def test_missing_apis(self):
    """Constructing Credentials outside App Engine raises EnvironmentError."""
    with pytest.raises(EnvironmentError) as exc_info:
        app_engine.Credentials()

    assert exc_info.match(r"App Engine APIs are not available")
def get(self):
    """Create next month's rota sheet in the spreadsheet and hide last month's.

    NOTE(review): this handler was reconstructed from a collapsed source;
    the nesting of the request-building loops was inferred from which
    loop indices each request uses — confirm against the original file.
    NOTE(review): the use of xrange below implies Python 2 — confirm the
    runtime before assuming Python 3 semantics anywhere in this method.
    """
    #Google API Setup
    credentials = app_engine.Credentials()
    service = build('sheets', 'v4', credentials=credentials)
    spreadsheet_id = '1Mlw-vHaiMcAN7OJZpFVzw0vd1lbv05zC_4mgzhUP64Q'  #Replace this

    #Collect list of users for next month
    RANGE_NAME = 'Staff List!A2:11'
    result = service.spreadsheets().values().get(
        spreadsheetId=spreadsheet_id, range=RANGE_NAME).execute()
    values = result.get('values', [])
    users = []
    for row in values:
        users.append(str(row[0]))

    #Sheet Name Details — compute previous/next month and year, wrapping
    #around the year boundary.
    today = datetime.datetime.now()
    month = today.month
    year = today.year
    if (month == 12):
        nextMonth = 1
        nextYear = year + 1
    else:
        nextMonth = month + 1
        nextYear = year
    if (month == 1):
        lastMonth = 12
        lastYear = year - 1
    else:
        lastMonth = month - 1
        lastYear = year
    nextCreation = datetime.date(nextYear, nextMonth, 1)
    lastCreation = datetime.date(lastYear, lastMonth, 1)
    nextSheetName = nextCreation.strftime("%B") + ' ' + str(nextYear)
    lastSheetName = lastCreation.strftime("%B") + ' ' + str(lastYear)

    #Generate all Mondays in the next month (weekday() == 0 is Monday)
    mondays = []
    while (nextCreation.month == nextMonth):
        if nextCreation.weekday() == 0:
            mondays.append(nextCreation.day)
        nextCreation = nextCreation + datetime.timedelta(days=1)

    #Get Last month's Sheet ID to hide it
    #NOTE(review): lastSheetId / baseSheetId remain unbound if the
    #expected sheet titles are missing — verify both sheets always exist.
    spreadsheet = service.spreadsheets().get(
        spreadsheetId=spreadsheet_id).execute()
    sheets = spreadsheet.get('sheets')
    for sheet in sheets:
        properties = sheet.get('properties')
        if (properties.get('title') == lastSheetName):
            lastSheetId = properties.get('sheetId')
        if (properties.get('title') == 'Base Sheet'):
            baseSheetId = properties.get('sheetId')

    #Create Requests object
    #Two objectives for this request, hide last month's sheet and create
    #next month's sheet
    #Example: Program runs in June
    #Hides: May. Creates: July
    requests = []

    #Add a new Sheet sized to hold one block of (header + users) per
    #Monday, plus 8 rows for the legend.
    requests.append({
        "addSheet": {
            "properties": {
                "title": nextSheetName,
                "gridProperties": {
                    "rowCount": len(mondays) * (4 + len(users)) + 8,
                    "columnCount": 7
                }
            }
        }
    })

    #Hide last month's sheet
    requests.append({
        "updateSheetProperties": {
            "properties": {
                "sheetId": lastSheetId,
                "hidden": 'true',
            },
            "fields": 'hidden'
        }
    })

    body = {'requests': requests}
    #Send the Request to the server
    response = service.spreadsheets().batchUpdate(
        spreadsheetId=spreadsheet_id, body=body).execute()

    #Get the Id of the new sheet, we need this to put all the information
    #in later
    newSheetId = response['replies'][0]['addSheet']['properties'][
        'sheetId']

    #Empty the Requests, we need to use it again
    #Objective: Populate next month's sheet with information
    requests = []

    #Set Column Sizes
    requests.append([{
        "updateDimensionProperties": {
            "range": {
                "sheetId": newSheetId,
                "dimension": "COLUMNS",
                "startIndex": 0,
                "endIndex": 1
            },
            "properties": {
                "pixelSize": 20
            },
            "fields": "pixelSize"
        }
    }, {
        "updateDimensionProperties": {
            "range": {
                "sheetId": newSheetId,
                "dimension": "COLUMNS",
                "startIndex": 2,
                "endIndex": 7
            },
            "properties": {
                "pixelSize": 159
            },
            "fields": "pixelSize"
        }
    }, {
        "updateDimensionProperties": {
            "range": {
                "sheetId": newSheetId,
                "dimension": "COLUMNS",
                "startIndex": 1,
                "endIndex": 2
            },
            "properties": {
                "pixelSize": 166
            },
            "fields": "pixelSize"
        }
    }])

    #Add the Header — one copied header block per Monday.
    for i in range(len(mondays)):
        requests.append({
            "copyPaste": {
                "source": {
                    "sheetId": baseSheetId,
                    "startRowIndex": 0,
                    "endRowIndex": 4,
                    "startColumnIndex": 0,
                    "endColumnIndex": 7
                },
                "destination": {
                    "sheetId": newSheetId,
                    "startRowIndex": i * (4 + len(users)),
                    "endRowIndex": i * (4 + len(users)) + 4,
                    "startColumnIndex": 0,
                    "endColumnIndex": 7
                },
                "pasteType": "PASTE_NORMAL",
            }
        })

        #Add Row Formatting
        requests.append([
            {
                #Green Header Row
                "updateDimensionProperties": {
                    "range": {
                        "sheetId": newSheetId,
                        "dimension": "ROWS",
                        "startIndex": i * (4 + len(users)),
                        "endIndex": i * (4 + len(users)) + 1
                    },
                    "properties": {
                        "pixelSize": 8
                    },
                    "fields": "pixelSize"
                }
            },
            {
                "updateDimensionProperties": {
                    "range": {
                        "sheetId": newSheetId,
                        "dimension": "ROWS",
                        "startIndex": i * (4 + len(users)) + 1,
                        "endIndex": i * (4 + len(users)) + 4 + len(users)
                    },
                    "properties": {
                        "pixelSize": 30
                    },
                    "fields": "pixelSize"
                }
            },
            {
                "updateDimensionProperties": {
                    "range": {
                        "sheetId": newSheetId,
                        "dimension": "ROWS",
                        "startIndex": 1,
                        "endIndex": 2
                    },
                    "properties": {
                        "pixelSize": 48
                    },
                    "fields": "pixelSize"
                }
            },
            {
                "updateDimensionProperties": {
                    "range": {
                        "sheetId": newSheetId,
                        "dimension": "ROWS",
                        "startIndex": len(mondays) * (4 + len(users)),
                        "endIndex": len(mondays) * (4 + len(users)) + 8,
                    },
                    "properties": {
                        "pixelSize": 30
                    },
                    "fields": "pixelSize"
                }
            }
        ])

        #Add the User Rows — one copied row per user under this header.
        for j in range(len(users)):
            requests.append([{
                "copyPaste": {
                    "source": {
                        "sheetId": baseSheetId,
                        "startRowIndex": 4,
                        "endRowIndex": 5,
                        "startColumnIndex": 0,
                        "endColumnIndex": 7
                    },
                    "destination": {
                        "sheetId": newSheetId,
                        "startRowIndex": i * (4 + len(users)) + 4 + j,
                        "endRowIndex": i * (4 + len(users)) + 4 + j + 1,
                        "startColumnIndex": 0,
                        "endColumnIndex": 7
                    },
                    "pasteType": "PASTE_NORMAL",
                }
            }])

    #Addition of Comments / Legend (placed after all weekly blocks).
    requests.append({
        "copyPaste": {
            "source": {
                "sheetId": baseSheetId,
                "startRowIndex": 5,
                "endRowIndex": 13,
                "startColumnIndex": 0,
                "endColumnIndex": 7
            },
            "destination": {
                "sheetId": newSheetId,
                "startRowIndex": len(mondays) * (4 + len(users)),
                "endRowIndex": len(mondays) * (4 + len(users)) + 8,
                "startColumnIndex": 0,
                "endColumnIndex": 7
            },
            "pasteType": "PASTE_NORMAL",
        }
    })

    #Moving of sheet to front (and hide gridlines).
    requests.append({
        "updateSheetProperties": {
            "properties": {
                "sheetId": newSheetId,
                "index": 0,
                "gridProperties": {
                    "hideGridlines": 'true'
                }
            },
            "fields": 'index, gridProperties.hideGridlines'
        }
    })

    body = {'requests': requests}
    response = service.spreadsheets().batchUpdate(
        spreadsheetId=spreadsheet_id, body=body).execute()

    #Filling in Date and Names via the values API.
    requests = []
    for i in xrange(len(mondays)):
        #Date cell for this week's header.
        requests.append([{
            "range": nextSheetName + "!C" + str(i * (4 + len(users)) + 1),
            "majorDimension": "ROWS",
            "values": [[
                str(nextMonth) + "/" + str(mondays[i]) + "/" + str(nextYear)
            ]],
        }])
        #Number and name for each user row in this week's block.
        for j in xrange(len(users)):
            requests.append([{
                "range": nextSheetName + "!A" + str(i * (4 + len(users)) + 5 + j)
                         + ":B" + str(i * (4 + len(users)) + 5 + j),
                "majorDimension": "ROWS",
                "values": [[str(j + 1), users[j]]],
            }])
    body = {"valueInputOption": "USER_ENTERED", 'data': requests}
    response = service.spreadsheets().values().batchUpdate(
        spreadsheetId=spreadsheet_id, body=body).execute()
def create_email():
    """Add an email address to the mailing-list spreadsheet.

    Expects a JSON body with an ``email`` string. Returns 400 on missing
    or invalid input, 409 if the address is already listed, 201 on success.
    """
    # Request should contain:
    # email <str>
    data = request.get_json()

    # Return error if request is missing data
    if (not data or 'email' not in data):
        return make_response('Request must contain email address', 400)

    # Return error if email address is not a string
    if not isinstance(data['email'], str):
        return make_response('Email address must be a string', 400)

    # Remove all whitespace from email address
    email = re.sub(r"\s+", "", data['email'], flags=re.UNICODE)

    # Validate email address format
    pattern = re.compile(r'^(([^<>()\[\]\.,;:\s@\"]+(\.[^<>()\[\]\.,;:\s@\"]+)'
                         r'*)|(\".+\"))@(([^<>()[\]\.,;:\s@\"]+\.)+[^<>()[\]\.,;:\s@\"]{2,})$')
    if not pattern.match(email):
        return make_response('Invalid email address', 400)

    # Get Google Sheets API credentials
    scope = ['https://www.googleapis.com/auth/spreadsheets',
             'https://www.googleapis.com/auth/drive']
    credentials = app_engine.Credentials(scopes=scope)

    # Initiate Google Sheets service
    service = discovery.build('sheets', 'v4', credentials=credentials)

    # The ID of the spreadsheet to update
    spreadsheet_id = os.environ['SPREADSHEET']

    # The A1 notation of a range to search for data in the spreadsheet
    range_ = os.environ['RANGE']

    values_request = service.spreadsheets().values().get(
        spreadsheetId=spreadsheet_id, range=range_
    )
    values_response = values_request.execute()
    # NOTE(review): KeyError here if the range is empty ('values' absent) —
    # confirm the sheet always has at least one row.
    values = values_response['values']

    # Each row is a one-element list, so compare against [email].
    if [email] in values:
        return make_response('Email address already on mailing list', 409)

    # How the input data should be interpreted (as though a user entered it)
    value_input_option = 'USER_ENTERED'

    # How the input data should be inserted (at the end of the data range)
    insert_data_option = 'INSERT_ROWS'

    # Value to be added to spreadsheet (email address)
    value_range_body = {
        "values": [
            [email]
        ]
    }

    append_request = service.spreadsheets().values().append(
        spreadsheetId=spreadsheet_id,
        range=range_,
        valueInputOption=value_input_option,
        insertDataOption=insert_data_option,
        body=value_range_body
    )
    # The append response is not inspected; errors surface as exceptions.
    append_response = append_request.execute()

    return make_response(email, 201)
def setup_catalog_bucket():
    """Point config.CATALOG_BUCKET at the earthengine-catalog GCS bucket."""
    client = storage.Client(credentials=app_engine.Credentials())
    config.CATALOG_BUCKET = client.get_bucket("earthengine-catalog")
def explicit_app_engine(project):
    """Construct App Engine credentials explicitly.

    NOTE(review): this snippet appears truncated — neither ``project``
    nor ``credentials`` is used before the function ends; confirm the
    remainder of the sample exists elsewhere.
    """
    from google.auth import app_engine
    import googleapiclient.discovery

    # Explicitly use App Engine credentials. These credentials are
    # only available when running on App Engine Standard.
    credentials = app_engine.Credentials()