def test_fetch_new_client_credentials_envvar_set(mocker, tmpdir):
    """A secrets file pointed at by DATASHEETS_SECRETS_PATH should yield an
    InstalledAppFlow configured from that file's contents.
    """
    # Deliberately pick an unusual filename and extension to prove neither matters
    secrets_file = tmpdir.join('my_client_secrets_file.foo')
    # Fixture built from a real secrets file with identifying values smudged
    secrets_file.write(r"""
    {
        "installed": {
            "client_id":"562803761647-1lj6fdt4rk27qde3f61slphbqcr9mieh.apps.googleusercontent.com",
            "project_id":"gsheets-etl",
            "auth_uri":"https://accounts.google.com/o/oauth2/auth",
            "token_uri":"https://www.googleapis.com/oauth2/v3/token",
            "auth_provider_x509_cert_url":"https://www.googleapis.com/oauth2/v1/certs",
            "client_secret":"yMWIX9SijX-nUgvFGqkzoSBb",
            "redirect_uris":["urn:ietf:wg:oauth:2.0:oob","http://localhost"]
        }
    }
    """)
    os.environ['DATASHEETS_SECRETS_PATH'] = secrets_file.strpath

    # Skip the real __init__ (it would try to authenticate); make the local-server
    # OAuth dance simply hand the flow object back so we can inspect it
    mocker.patch.object(datasheets.Client, '__init__', return_value=None)
    mocker.patch.object(
        InstalledAppFlow, 'run_local_server', autospec=True,
        side_effect=lambda flow_self, port: flow_self)

    client = datasheets.Client()
    client.user_agent = "Test"
    returned_flow = client._fetch_new_client_credentials()

    assert isinstance(returned_flow, InstalledAppFlow)
    expected_client_id = (
        '562803761647-1lj6fdt4rk27qde3f61slphbqcr9mieh.apps.googleusercontent.com')
    assert returned_flow.client_config["client_id"] == expected_client_id
def test_get_service_credentials_envvar_set(mocker, tmpdir):
    """
    Only the envvar-based version of running Client()._get_service_credentials() is tested
    as the non-envvar version simply uses a different path
    """
    # Use a non-standard filename and file ending to ensure they work
    file_path = tmpdir.join('my_service_key_file.foo')
    # Credentials were built by taking an existing key and manually smudging it,
    # so the private key below is not a real secret
    file_path.write(r"""
    {
        "type": "service_account",
        "project_id": "datasheets-etl",
        "private_key_id": "199689b78c435a8d2416d166dd3c8f816dbe9837",
        "private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCJsDzi0xK3dJza\nsT/sx3Bu+3kXhhpld0BDfQnngU948JjXWlco+svezVXL9fjvaaA5eIhKhZvtAr+2\nJROykKfrER899zPcZxTrUkZQi+T8I5NpufXU68Sx0/mUXpZMx+RZtU12O+YkUqKu\n4694P98vTnY7IkGmp8roAxHo9qe6wn9l6/bNprfuhvp3NAyUK5abcQGHHGJaY7PW\nP129j/fKfXaaB/b7QhdfsbRr4VOMt5eJh2qt8baEJbPbasDfA0h1Q4IMYzPP0UHf\nf9ORsdBklPFpCO56L3rVFd4zbj2JOv2gG63gIPWgAS34FfL1G+s0nN18OrSmg9Yv\nJIjo4WQ7AgMBAAECggEAHt/Jb1VEK6x11cU1wBrij8hIhV8LmRQcDjPq/HqEuoiI\nxTVpOPO1Vpqw2iOxuakkScOfT3A0T088mjI08wEPlmfyYV0MaH4m3ZpFN75+qLkQ\n6ZyuixpbpMJzAv0q3/7sq0321MowK3HqL8vr7TfBpqCWG2Dz0zuQbG3MTqmlaIiR\ndBB0s+qYAJtjXxxRBI7/h1Lkky0DXgw47xI2gdA1COIC+URX49xiamyXcU2M67AY\n1ehdE+98X/I6TpIxm34OFHFLijxi3YAP1Ro0EpDN+xG0CJxbqRh+whdgZ30pu7fQ\nYhtmBaKpaBSALkKsM0nTX/hHtgx1MLQFy/0nGZJ49QKBgQC9YOJVUaUwTObhIimA\nJo0KmquneE/uO0TZf0Abn10aABfKFk5vkJyYqX0vFbm1VA/w7uY579gx3xnFntNL\n3b6WlT87Ffm2PqIPM9v9Lyw2hcRme0AaFalHf0pAYN1civ0Apzd/+z3eRp7JL/8M\n4kppgth1d0rmuGVKKZ1BA8WEVQKBgQC6IDvs2naERYUHRF9tKr0rGKnJSJUxz2UV\naG/Y/xCsSm0pqaAPkY8UQfZp/4iOaXBA5310pGlVG9qBnBAjJl04ivXNphwLMCSe\nInkEVV08eUASR0f0sniP1RR8VSsQSx8aIaxtLyrE0YRQsOSMXYCUK8GdGTbRRrHh\nDv9L45VWTwKBgEFyjyW/PqBvooBx1IjKRl8Lc+W9g9xqlLpY6lNgLnM87jkBl75g\ns1RsSlllrIz+DzhLX12xEbo/MpjgQoskdjbnaxldwudHGYhJetiICfaZRuhHr1jA\nyKzp1+Jwk18bhsAK3L1PsbDmrcYH0VQpjFOw91Jsk58Ih/DWauNCI2u1AoGBAInE\nJDb7uS/Mulsb1kwFngGRPtsjb01BzFEOcW5/1VBdIXDRfwkbh1pwl083v58Rwkn4\nTAbRgza102mnK62MzwF0Md6nLijGk5Ud6Q91hBxk2GnfH8e5r08zvWeOS1LDF+Fi\nvVsQBSBjk82A1881wl9qR+Q8OiFxvO5GCIemi7oXAoGAV/7SZexPjcskjb1jPrHX\nT+cbxVpnoJ7T2m9x5u/53MEKu0JafjMtn84t/2HS5cak5f256bmpFTF9KgvF0daz\nQATAgphZsjbUoMWfTC6Bsaj1kbqQC41mhdzMjz23mIYQbrpxlB11rVu+u6NmMNIB\nzgL2IXskmK19pKzia4FlKVw=\n-----END PRIVATE KEY-----\n",
        "client_email": "*****@*****.**",
        "client_id": "109046144667258184477",
        "auth_uri": "https://accounts.google.com/o/oauth2/auth",
        "token_uri": "https://accounts.google.com/o/oauth2/token",
        "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
        "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/datasheets-service%40datasheets-etl.iam.gserviceaccount.com"
    }
    """)
    # Point the client at the key file via the environment variable under test
    os.environ['DATASHEETS_SERVICE_PATH'] = file_path.strpath
    # Skip the real __init__ so no network authentication is attempted
    mocker.patch.object(datasheets.Client, '__init__', return_value=None)
    client = datasheets.Client()
    credentials = client._get_service_credentials()
    # Service-account path should produce oauth2client ServiceAccountCredentials
    # and record the service account's email on the client
    assert isinstance(credentials, oauth2client.service_account.ServiceAccountCredentials)
    assert client.email == '*****@*****.**'
def run_pipeline(conf='conf.yml'):
    """Pull inputs from google sheets, solve the allocation problem, and write the
    solution back to the sheet.
    """
    workbook_name, num_screens, empty_screen_cost, budget = parse_conf(conf)

    # Fetch the inputs from the configured Google Sheet
    client = datasheets.Client(service=True)
    workbook = client.fetch_workbook(workbook_name)
    input_data = load_data(workbook)

    # Append a synthetic "empty" screen option so the optimizer may leave screens unused
    empty_screen = pd.DataFrame({'movie': ['empty'],
                                 'revenue': [0],
                                 'cost': [empty_screen_cost]})
    input_data = pd.concat([input_data, empty_screen], axis=0)

    # Solve the allocation problem
    optimizer = Optimizer(input_data, num_screens, budget)
    solved = optimizer.run()
    solution_msg = optimizer.get_solution(solved)
    print(solution_msg)

    # Only a solved problem produces an allocation worth writing back
    if solved:
        allocation = optimizer.build_allocation()
        outputs_tab = workbook.fetch_tab('outputs')
        outputs_tab.insert_data(allocation)

    return solution_msg
def import_googlesheets(customers):
    """Import into Google Sheets.

    Uploads one row per customer (DisplayName, Balance) to the 'QuickBooks-Export'
    tab of the 'QuickBooksSample' workbook, creating the workbook/tab as needed.

    Args:
        customers: iterable of objects exposing .DisplayName and .Balance
    Returns:
        The string "Worked" (unconditionally; failures are printed, not raised).
    """
    import datasheets
    import pandas as pd

    # Build the upload frame: one row per customer
    # (comprehension replaces the original manual append loop)
    rows = [[customer.DisplayName, customer.Balance] for customer in customers]
    df = pd.DataFrame(rows, columns=['DisplayName', 'Balance'])

    try:
        client = datasheets.Client()
        workbook = client.fetch_workbook('QuickBooksSample')
        if workbook is None:
            workbook = client.create_workbook('QuickBooksSample')
        tab_names = workbook.fetch_tab_names()
        # Fixed: `'x' not in y` instead of the non-idiomatic `not 'x' in y`
        if 'QuickBooks-Export' not in tab_names.values:
            tab = workbook.create_tab('QuickBooks-Export')
        else:
            # Re-use the existing tab, but wipe stale rows first
            tab = workbook.fetch_tab('QuickBooks-Export')
            tab.clear_data()
        # Upload data to sheet
        tab.insert_data(df, index=False)
    except Exception as ex:
        # Fixed: original used Python 2 `print str(ex)`, a SyntaxError on Python 3.
        # Best-effort behavior is preserved: report the failure and fall through.
        print(str(ex))
    return "Worked"
def test_retrieve_client_credentials_no_storage(mocker):
    """With storage disabled, _retrieve_client_credentials must delegate directly
    to _fetch_new_client_credentials."""
    # Bypass __init__ so no real authentication happens
    mocker.patch.object(datasheets.Client, '__init__', return_value=None)
    client = datasheets.Client()
    client.use_storage = False

    fetch_new = mocker.patch.object(client, '_fetch_new_client_credentials')

    # Whatever the fetch mock produces is exactly what we should get back
    assert client._retrieve_client_credentials() == fetch_new()
def mock_client(mocker, drive_svc, sheets_svc):
    """Construct a datasheets.Client with authentication and the Google API
    service objects mocked out, returning the provided drive/sheets stubs."""
    mocker.patch('datasheets.Client._authenticate')
    mocker.patch('datasheets.Client.credentials', create=True)
    # First discovery.build() call yields the drive service, the second the sheets service
    mocker.patch('apiclient.discovery.build', autospec=True,
                 side_effect=[drive_svc, sheets_svc])
    mocker.patch('datasheets.Client._refresh_token_if_needed')

    stub_client = datasheets.Client()
    stub_client.email = '*****@*****.**'
    return stub_client
def test_retrieve_client_credentials_use_storage_and_envvar_set(mocker, tmpdir):
    """With storage enabled and DATASHEETS_CREDENTIALS_PATH pointing at a valid
    stored-credentials file, _retrieve_client_credentials should load the stored
    OAuth2Credentials and must NOT fall back to fetching new credentials.
    """
    # Use a non-standard filename and file ending to ensure they work
    file_path = tmpdir.join('my_client_credentials_file.foo')
    # Credentials were built by taking an existing credentials file and manually
    # smudging it; the tokens/JWTs below are not real secrets
    file_path.write(r"""{
    "access_token": "ya29.GlycBQBd0i9bxu2F1DZ4kPhk4ahwcayAVNEzo1aLFcLVIRFevIJXCvG7WtKDT7jX3nnSTdI69nprY6W27AfEgBHlDRKOGI1VkyDgtV8OidAP5wutTduMoVd8pqrpmw",
    "client_id": "561903281647-9kt18bal218sblb1b4b76uj0b5vq7e0o.apps.googleusercontent.com",
    "client_secret": "acGIZrpwf18djbk1EUVyzpjq",
    "refresh_token": null,
    "token_expiry": "2018-04-13T05:47:47Z",
    "token_uri": "https://accounts.google.com/o/oauth2/token",
    "user_agent": "Python datasheets library",
    "revoke_uri": "https://accounts.google.com/o/oauth2/revoke",
    "id_token": {
        "azp": "561903281647-9kt18bal218sblb1b4b76uj0b5vq7e0o.apps.googleusercontent.com",
        "aud": "561903281647-9kt18bal218sblb1b4b76uj0b5vq7e0o.apps.googleusercontent.com",
        "sub": "101102810221472303039",
        "hd": "squarespace.com",
        "email": "*****@*****.**",
        "email_verified": true,
        "at_hash": "PZQvzsWx-wOvZGgCAQaJeQ",
        "exp": 1523599737,
        "iss": "accounts.google.com",
        "iat": 1523494867},
    "id_token_jwt": "eyJhbGciOiJSUz19283jbj1kdk1jbN1dNTQ3ODg2ZmY4NWEzNDI4ZGY0ZjYxZGI3M2MxYzIzOTgyYTkyOGUifQ.eyJhenAsdkLS18dBpwQ28BjsNDctOWt0MGNmdXZiOGgwOWxiMWI0Yjc2dWowYjV2cTdlMG8uYXBwcy5nb29nbGV1c2VyY29udGVudC5jb20iLCsp1PRlbj4KNjI4MDM3NjE2NDctOWt0MGNmdXZiOGgwOWxiMWI0Yjc2dWowYjV2cTdlMG8uYXBwcy5nb29nbGV1c2VyY29udGVudC5jb20iLCJzdWIiOiIxMDEzNzgyMzQzODczMjMzMDMwMzkiLCJoZCI6InNxdWFyZXNwYWNlLmNvbSIsImVtYWlsIjoiem1hcmluZUBzcXVhcmVzcGFjZS5jb20iLCJlbWFpbF92ZXJpZ18GPfp1jbL1ZSwiYXRfaGFzaCI6IlBPWnN2eld4LXdPWnZnR0FDcUFKZVEiLCJleHAiOjE1MjM1OTg0NjcsImlzcyI6ImFjY291bnRzLmdvb2dsZS5jb20iLCJpYXQiOjE1MjM1OTQ4Njd9.VP-LSRBnBx87YcvWi5kV1SEZlg3AKky7o_qIBo8Q9KT7nPwihBDdE0uBk5GraKFwGIKu-Xx95AisUEJdnWnJQZZg-RXyINCVHiEzutskPL3jBKlL0EJWnre2IISJxmIqrz6yAJcQD-buWTk1J7zf4Sbhk7EzVvpI1kQJO_pSWgRCdglgFQXJ4ozdBmIQbd76WUXA8-juElea9NkRjCKW8t_dXKvbj-1okR-YOczgmYAoQOfnJ19jplGK7qrQ9sP06ALon993yhbW4Ah37wMEEX3EcHoxcjciH6Z_373ZyVyjf2ZHOZKgqkHZqzefUteEMMdG3phiNd0h6ro12DrGMw",
    "token_response": {
        "access_token": "ya29.GlycBQBd0i9bxu2F1DZ4kPpwl284J3klLNEzo1aLFcLVIRFevIJXCvG7WtKDT7jX3npwlbHJknprY6W27AfEgBHlDRKOGI1VkyDgtV8OidAP5wutTduMoVd8pqrpmw",
        "expires_in": 3600,
        "id_token": "eyJhbGciOiJSUzILPL1jk294MvI6IjNiNTQ3ODg2ZmY4NWEzNDI4ZGY0ZjYxZGI3M2MxYzIzOTgyYTkyOGUifQ.eyJhenAiOiI1NjI4MDM3NjE2NDctOWt0MGNmdXZiOGgwOWxiMWI0Yjc2dWowYjV2cTdlMG8uYXBwcy5nb29nbGV1c2VyY29udGVudC5jb20iLCJhdWQiOiI1NjI4MDM3NjE2NDctOWt0MGNmdXZiOkshb1KLvWI0Yjc2dWowYjV2cTdlMG8uYXBwcy5nb29nbGV1c2VyY29udGplwMN4820iLCJzdWIiOiIxMDEzNzgyMzQzODczMjMzMDMwMzkiLCJoZCI6InNxdWFyZXNwYWNlLmNvbSIsImVtYWlsk1MNl83hcmluZUBzcXVhcmVzcGFjZS5jb20iLCJlbWFpbF92ZXJpZmllZCI6dHJ1ZSwiYXRfaGFzaCI6IlBPWnN2eld4LXdPWnZnR0FDcUFKZVEiLCJleHAiOjE1MjM1OTg0NjcsImlzcyI6ImFjY291bnRzLmdvb2dsZS5jb20iLCJpYXQiOjE1MjM1OTQ4Njd9.VP-LSRBnBx87YcvWi5kV1SEZlg3AKky7o_qIBplKm28FbmsihBDdE0uBk5GraKFwGIKu-Xx95AisUEJdnWnJQZZg-RXyINCVHiEzutuBg2kKl84pmEJWnre2IISJxmIqrz6yAJcQD-buWTk1J7zf4Sbhk7EzVvpI1kQJO_pSWgRCdglgFQXJ4ozdBmIQbd76WUXA8-juElea9NkRjCKW8t_dXKvbj-1okR-YOczgmYkmpvinJ8HTSNFl7qrQ9sP06ALon993yhbW4Ah37wMEEX3EcHoxcjciH6Z_373ZyVyjf2ZHbj1kKplZqzefUteEMMdG3phiNd0h6ro12DrGMw",
        "token_type": "Bearer"},
    "scopes": ["https://www.googleapis.com/auth/drive", "https://www.googleapis.com/auth/userinfo.email"],
    "token_info_uri": "https://www.googleapis.com/oauth2/v3/tokeninfo",
    "invalid": false,
    "_class": "OAuth2Credentials",
    "_module": "oauth2client.client"
    }""")
    # Point credential lookup at the fixture file
    os.environ['DATASHEETS_CREDENTIALS_PATH'] = file_path.strpath
    # Skip __init__ so no real authentication happens
    mocker.patch.object(datasheets.Client, '__init__', return_value=None)
    # If stored credentials are found, this mock must never be invoked
    mocked_fetch_new = mocker.patch.object(datasheets.Client,
                                           '_fetch_new_client_credentials',
                                           return_value='test_return',
                                           autospec=True)
    client = datasheets.Client()
    client.use_storage = True
    credentials = client._retrieve_client_credentials()
    # Stored credentials were used: no fallback, email populated from the file,
    # and the loaded object is a real oauth2client OAuth2Credentials instance
    assert mocked_fetch_new.call_count == 0
    assert client.email == '*****@*****.**'
    assert isinstance(credentials, oauth2client.client.OAuth2Credentials)
def test_stores_credentials_when_not_found(mocker, tmpdir):
    """When no stored credentials exist, freshly fetched credentials should be
    written to the DATASHEETS_CREDENTIALS_PATH file."""
    fresh_credentials = base_credentials("token",
                                         refresh_token="refresh_token",
                                         client_id="client_id",
                                         client_secret="client_secret")

    target = tmpdir.join("test_stores_credentials_when_not_found.json")
    os.environ['DATASHEETS_CREDENTIALS_PATH'] = target.strpath

    # Skip __init__ and force the "fetch new" path to return our fixture credentials
    mocker.patch.object(datasheets.Client, '__init__', return_value=None)
    mocker.patch.object(datasheets.Client, '_fetch_new_client_credentials',
                        return_value=fresh_credentials, autospec=True)

    client = datasheets.Client()
    client.use_storage = True
    client._retrieve_client_credentials()

    # The secrets should have been persisted to disk as JSON
    with open(os.environ['DATASHEETS_CREDENTIALS_PATH']) as stored:
        written = json.loads(stored.read())
    assert written == {"refresh_token": "refresh_token",
                       "client_id": "client_id",
                       "client_secret": "client_secret"}
def LoadGoogleSheets(self, strSheetName, strTabName):
    """
    Note that the folder ~/.datasheets contains the Google Oauth files required

    Populates self.lstMailingList with a list of dictionary items representing
    the rows of the specified google sheet (strSheetName)'s tab (strTabName).

    Args:
        strSheetName: name of the Google Sheets workbook to fetch
        strTabName: name of the tab within that workbook
    """
    self.lstMailingList = list()
    client = datasheets.Client(service=True)
    workbook = client.fetch_workbook(strSheetName)
    tab = workbook.fetch_tab(strTabName)
    df = tab.fetch_data()
    # Fixed: orient='record' was a deprecated abbreviation that raises
    # ValueError in pandas >= 1.0; the supported spelling is 'records'
    # (one dict per row). The explicit `del` statements were dropped —
    # everything here goes out of scope when the method returns anyway.
    self.lstMailingList = df.to_dict(orient='records')
def test_fetch_new_client_credentials_envvar_set(mocker, tmpdir):
    """A secrets file referenced by DATASHEETS_SECRETS_PATH should drive the
    OAuth2WebServerFlow handed to oauth2client's run_flow."""
    mocked_run_flow = mocker.patch('oauth2client.tools.run_flow', autospec=True)

    # Deliberately pick an unusual filename and extension to prove neither matters
    secrets_file = tmpdir.join('my_client_secrets_file.foo')
    # Fixture built from a real secrets file with identifying values smudged
    secrets_file.write(r"""
    {
        "web": {
            "client_id":"562138271922-9kt0cvbh78fv0p24b4b76uj0b5vq7e0o.apps.googleusercontent.com",
            "project_id":"datasheets-etl",
            "auth_uri":"https://accounts.google.com/o/oauth2/auth",
            "token_uri":"https://accounts.google.com/o/oauth2/token",
            "auth_provider_x509_cert_url":"https://www.googleapis.com/oauth2/v1/certs",
            "client_secret":"GIacz91bpsk2j1hIEUVyzpjq",
            "redirect_uris":["http://localhost:8080/","http://localhost:8888/"],
            "javascript_origins":["http://localhost:8080","http://localhost:8888"]
        }
    }
    """)
    os.environ['DATASHEETS_SECRETS_PATH'] = secrets_file.strpath

    mocker.patch.object(datasheets.Client, '__init__', return_value=None)
    client = datasheets.Client()
    client.user_agent = 'Python datasheets library'  # Manually set this since we skipped __init__
    store = datasheets.helpers._MockStorage()

    # run_flow is mocked, so the return value is a Mock; the interesting artifact
    # is the `flow` argument that was passed into run_flow
    _ = client._fetch_new_client_credentials(store)

    assert mocked_run_flow.call_count == 1
    _, call_args, _ = mocked_run_flow.mock_calls[0]
    captured_flow = call_args[0]
    assert isinstance(captured_flow, oauth2client.client.OAuth2WebServerFlow)
    assert captured_flow.client_id == (
        '562138271922-9kt0cvbh78fv0p24b4b76uj0b5vq7e0o.apps.googleusercontent.com')
    assert captured_flow.user_agent == 'Python datasheets library'
    # 'offline' access is required so a refresh token is issued
    assert captured_flow.params.get('access_type') == 'offline'
main executable part below. As a practice I normally bound the main program in a few lines that track how long it ran (i.e. Start/End of Process) """ if __name__ == '__main__': print("hello from module %s. Python version: %s" % (sys.argv[0], sys.version)) sys.stdout.write( "--------------------------------------------------------------\n") sys.stdout.write( "Start of %s Process: %s\n\n" % (sys.argv[0], time.strftime("%H:%M:%S", time.localtime()))) strPathFileName = "/home/pi/astro-pi-flight-data.csv" # or whatever folder you've placed this code strSheetName = "RaspberryPiSenseHatReadings" # The name of the sheet you wish to create / populate goes here client = datasheets.Client( service=True ) # this line returns the client object that we'll use from here on workbook = client.fetch_workbook(strSheetName) try: tab = workbook.create_tab( "AstroPi" ) # let's create a new tab in our GoogleSheet to hold the data from our CSV file except: workbook.delete_tab( "AstroPi" ) # ... if there's already an AstroPi tab, delete it and replace it with a new one. tab = workbook.create_tab("AstroPi") df = pd.read_csv( strPathFileName ) # pandas provides a method to import data from a CSV into a dataframe in one easy step