def CredentialsServiceWrapper(service):
    """Return service-account credentials from a dict, JSON string, or path.

    :param service: either a mapping of service-account info, a string of
        JSON-encoded service-account info, or a filesystem path to a
        service-account key file.
    """
    # Already-parsed service account info.
    if isinstance(service, dict):
        return CredentialsService.from_service_account_info(service)
    # A string that looks like inline JSON credentials.
    if RE_CREDENTIALS_JSON.match(service):
        return CredentialsService.from_service_account_info(json.loads(service))
    # Anything else is treated as a path to a key file.
    return CredentialsService.from_service_account_file(service)
def run_gbq_query(query, project_id='places-clickstream', credentials=None):
    """BigQuery-specific query function.

    Args:
        query (str): The query to submit to BigQuery
        project_id (str): The GCP project to run the query under. Required
            even if a dataset is accessible from multiple projects
        credentials (dict, str, or google.oauth2.service_account.Credentials):
            The credentials to use for accessing BigQuery
    """
    # Deferred import: google-auth is only needed for this module.
    try:
        from google.oauth2.service_account import Credentials
    except ModuleNotFoundError:
        raise ImportError('Package "google-auth" is required to use '
                          'honeycomb\'s "bigquery" module.')

    if credentials is not None:
        # Normalize dict / JSON-string inputs into a Credentials object.
        if isinstance(credentials, dict):
            credentials = Credentials.from_service_account_info(credentials)
        elif isinstance(credentials, str):
            credentials = Credentials.from_service_account_info(
                json.loads(credentials))
        if not isinstance(credentials, Credentials):
            raise TypeError(
                'Credentials passed to "run_gbq_query" must be a JSON string, '
                'a dictionary, or google.oauth2.service_account.Credentials.')

    return pd.read_gbq(query, project_id=project_id, credentials=credentials)
def register_holiday_to_calendar(date_list: List[datetime.date]):
    """Insert an all-day event into the primary Google Calendar for every
    date in ``date_list``, acting as the configured delegated user.

    Args:
        date_list: dates (Asia/Tokyo) to register.
    """
    credentials_path = Path(__file__).parent.parent.joinpath(
        'credentials.json')
    # Fix: use a context manager so the key-file handle is closed promptly
    # (the original left the file object from ``.open()`` unclosed).
    with credentials_path.open() as fp:
        credentials_obj = json.load(fp)
    credentials = Credentials.from_service_account_info(credentials_obj,
                                                        scopes=SCOPES)
    # Domain-wide delegation: impersonate the configured account.
    delegated_credentials = credentials.with_subject(
        settings.GOOGLE_EMAIL_ADDRESS)
    service = build('calendar', 'v3', credentials=delegated_credentials)
    for date in date_list:
        date_text = date.strftime("%Y-%m-%d")
        # All-day events use an exclusive end date, hence the +1 day.
        end_date = date + datetime.timedelta(days=1)
        end_date_text = end_date.strftime("%Y-%m-%d")
        body = {
            "summary": f"{settings.CALENDAR_SUMMARY}",
            "start": {
                "date": f"{date_text}",
                "timeZone": "Asia/Tokyo",
            },
            "end": {
                "date": f"{end_date_text}",
                "timeZone": "Asia/Tokyo",
            },
        }
        event = service.events().insert(calendarId='primary',
                                        body=body).execute()
        print('Event created: %s' % (event.get('htmlLink')))
def client(self):
    """
    Set up Google API lazily

    :rtype: googleapiclient.discovery.Resource
    """
    if not self._client:
        self.logger.info('Setting up Google API client')
        try:
            service_account_info = json.loads(self.credentials)
        except json.JSONDecodeError as ex:
            raise MycroftSourceError('Failed to load Google\'s service account JSON file') \
                from ex
        # Simple validation of provided JSON credentials. An explicit raise
        # replaces the original ``assert``, which is silently stripped when
        # Python runs with -O and would skip this check entirely.
        if 'client_email' not in service_account_info:
            raise MycroftSourceError(
                "'client_email' entry not found in service account JSON")
        self.logger.info('Using service account for %s',
                         service_account_info.get('client_email'))
        self._client = build(
            'analyticsreporting', 'v4',
            credentials=Credentials.from_service_account_info(
                info=service_account_info),
            # file_cache is unavailable when using oauth2client >= 4.0.0 or google-auth
            cache_discovery=False)
        self.logger.info(
            'Connected with Google API for Analytics view #%d', self.view_id)
    return self._client
def __init__(self, service_file, scopes, manualScopes=None, domainWide=True,
             *args, **kwargs):
    """Acquire Google credentials covering the requested scopes.

    Args:
        service_file: path to a service-account JSON key file, or a dict of
            already-loaded service-account info; ``None`` falls back to
            application default credentials.
        scopes: API names whose associated scopes should be unlocked.
        manualScopes: extra scope URLs to request in addition to those
            implied by ``scopes``. Defaults to none. (Fix: the original
            used a mutable default ``[]``, which is shared across calls.)
        domainWide: whether domain-wide delegation is intended.
    """
    # Load valid APIs unlocked with the scopes
    self._loadApiNames(scopes)
    # Save all scopes results (deduplicated via set)
    self.SCOPES = list(
        set([x['scope'] for x in self.apis.values()] + (manualScopes or [])))
    # Set domain wide delegation flag
    self.__domWide = domainWide
    # Acquire credentials from JSON keyfile
    if service_file is not None:
        if isinstance(service_file, six.string_types):
            # A string is a path to the key file on disk.
            self._credentials = Credentials.from_service_account_file(
                service_file,
                scopes=self.SCOPES,
            )
        else:
            # Otherwise assume already-parsed service-account info.
            self._credentials = Credentials.from_service_account_info(
                service_file,
                scopes=self.SCOPES,
            )
        self.projectId = self._credentials.project_id
    else:
        # No keyfile: use application default credentials.
        self._credentials, self.projectId = google.auth.default()
        self._credentials = self._credentials.with_scopes(self.SCOPES)
    logger.debug("Credentials acquired")
def BlobUploadThread(blobUploadQueue: queue.SimpleQueue,
                     blobUploadQueueBytes: ThreadValueLock,
                     credentials: str, bucketName: str,
                     threadId: int) -> None:
    """Worker thread: drain ``blobUploadQueue`` and upload each blob to GCS.

    A ``None`` task is the shutdown sentinel.
    """
    # Build the bucket handle from the serialized JSON credentials.
    creds = Credentials.from_service_account_info(json.loads(credentials))
    gcs_client = storage.Client(project=creds.project_id, credentials=creds)
    bucket = gcs_client.get_bucket(bucketName)

    while True:
        task = blobUploadQueue.get()
        if task is None:
            break  # shutdown sentinel
        name, blob = task[0], task[1]
        # Return the blob's size to the byte-budget lock before the
        # (potentially slow) upload so producers can keep going.
        blobUploadQueueBytes.release(len(blob))
        bucket.blob(name).upload_from_string(blob)
def BlobRemoveProcess(blobRemoveQueue: multiprocessing.SimpleQueue,
                      credentials: str, bucketName: str) -> None:
    """Worker process: drain ``blobRemoveQueue`` and delete each named blob
    from the GCS bucket.

    A ``None`` task is the shutdown sentinel.
    """
    # Get GCS bucket from the serialized JSON credentials.
    credentials = Credentials.from_service_account_info(
        json.loads(credentials))
    client = storage.Client(project=credentials.project_id,
                            credentials=credentials)
    bucket = client.get_bucket(bucketName)
    # Process tasks until received None
    while True:
        # Reset process title between tasks so `ps` shows idle state.
        setproctitle.setproctitle('BlobRemoveProcess')
        task = blobRemoveQueue.get()
        if task is None:
            break
        name: str = task[0]
        # Show the blob being removed in the process title.
        setproctitle.setproctitle('BlobRemoveProcess {}'.format(name))
        # Best-effort removal. Fix: catch Exception rather than the
        # original bare ``except:``, which also swallowed SystemExit and
        # KeyboardInterrupt and could make the worker unkillable.
        try:
            bucket.delete_blob(name)
        except Exception:
            print('Exception while deleting blob {}'.format(name))
def load_credentials(cls):
    """Build service-account credentials from the ``GCE_*`` environment
    variables.

    The private key is stored with escaped newlines in the environment, so
    literal ``\\n`` sequences are converted back into real newlines before
    the info dict is handed to google-auth.
    """
    env = os.environ.get
    info = {
        'type': env('GCE_TYPE'),
        'project_id': env('GCE_PROJECT_ID'),
        'private_key_id': env('GCE_PRIVATE_KEY_ID'),
        'private_key': env('GCE_PRIVATE_KEY').replace('\\n', '\n'),
        'client_email': env('GCE_CLIENT_EMAIL'),
        'client_id': env('GCE_CLIENT_ID'),
        'auth_uri': env('GCE_AUTH_URI'),
        'token_uri': env('GCE_TOKEN_URI'),
        'auth_provider_x509_cert_url': env('GCE_AUTH_PROVIDER_X509_CERT_URL'),
        'client_x509_cert_url': env('GCE_CLIENT_X509_CERT_URL'),
    }
    return Credentials.from_service_account_info(info)
def obtain_pvme_spreadsheet_data(worksheet: str) -> dict:
    """Obtain a worksheet from the PVME-guides price spreadsheet.

    This function is only called once for every worksheet (function caching).

    :param worksheet: Worksheet to obtain (e.g. Perks/Consumables)
    :return: all the worksheet contents as a dictionary or None
        (cannot obtain the worksheet)
    """
    try:
        # Read-only service-account credentials built from config constants.
        credentials = ServiceAccountCredentials.from_service_account_info(
            {
                'private_key': GS_PRIVATE_KEY,
                'client_email': GS_CLIENT_EMAIL,
                'token_uri': GS_TOKEN_URI
            },
            scopes=gspread.auth.READONLY_SCOPES)
        # Authenticate and open the pvme-guides spreadsheet by URL.
        spreadsheet = gspread.client.Client(auth=credentials).open_by_url(
            GS_URL)
        sheet = spreadsheet.worksheet(worksheet)
    except ValueError as e:
        logger.warning(f"PVME-spreadsheet ValueError: {e}")
    except gspread.exceptions.GSpreadException as e:
        logger.warning(f"PVME-spreadsheet GSpreadException: {e}")
    except Exception as e:
        logger.warning(f"PVME-spreadsheet Exception: {e}")
    else:
        # Only reached when no exception fired; failures fall through and
        # implicitly return None.
        return sheet.get_all_values()
def _load_client(self, project, credentials_secret):
    """Creates and returns a GCS Client instance"""
    info = Secret(credentials_secret).get()
    credentials = Credentials.from_service_account_info(info)
    # Fall back to the project embedded in the service account when no
    # explicit project was supplied.
    return storage.Client(project=project or credentials.project_id,
                          credentials=credentials)
def __init__(self, bucket_name):
    """Bind this helper to ``bucket_name``, authenticating a GCS client
    from the GOOGLE_CREDENTIALS environment variable (a JSON string)."""
    self.bucket_name = bucket_name
    creds_info = json.loads(os.environ["GOOGLE_CREDENTIALS"])
    credentials = Credentials.from_service_account_info(creds_info)
    self.client = storage.Client(project=PROJECT_ID,
                                 credentials=credentials)
    # Public base URL for objects stored in the bucket.
    self.root_path = f"https://storage.googleapis.com/{bucket_name}/"
def tick(self):
    """Refresh the "last price" column of the "Personal Finance" sheet.

    Reads every data row, fetches the latest price for each distinct symbol
    (cached so each symbol is quoted at most once per tick), and writes the
    price back into the corresponding cell.
    """
    creds_dict = json.loads(GOOGLE_SHEETS_CREDS_JSON)
    scopes = [
        'https://www.googleapis.com/auth/spreadsheets',
        'https://www.googleapis.com/auth/drive'
    ]
    creds = Credentials.from_service_account_info(creds_dict, scopes=scopes)
    gc = gspread.authorize(creds)
    worksheet = gc.open("Personal Finance").sheet1
    list_of_lists = worksheet.get_all_values()
    columns = list_of_lists[0]
    rows = list_of_lists[1:]
    symbol_index = columns.index('symbol')
    last_price_index = columns.index('last price')
    price_cache = {}
    for index, row in enumerate(rows):
        symbol = row[symbol_index]
        # Fix: membership test instead of truthiness — the original
        # ``price_cache.get(symbol)`` re-fetched whenever the cached price
        # was falsy (e.g. 0 or 0.0), defeating the cache.
        if symbol in price_cache:
            last_price = price_cache[symbol]
        else:
            last_price = self.get_last_price(symbol)
            price_cache[symbol] = last_price
        # Sheet rows are 1-based and row 1 is the header, hence 2 + index.
        range_name = chr(ord('A') + last_price_index) + str(2 + index)
        worksheet.update(range_name, last_price)
def service_account_auth(client_secrets, oauth2_scopes,
                         delegated_email_address):
    """
    Creates a Credentials instance from a service account json file.

    Args:
        client_secrets (str): The path to the credentials json file or
            credentials information in json format.
        oauth2_scopes (list of str): Scopes to request during the
            authorization grant.
        delegated_email_address (str): For domain-wide delegation, the email
            address of the user for which to request delegated access.

    Returns:
        google.auth.service_account.Credentials: Service account credentials
    """
    # https://google-auth.readthedocs.io/en/latest/reference/google.oauth2.service_account.html
    try:
        data = json.loads(client_secrets)
        return Credentials.from_service_account_info(
            data,
            scopes=oauth2_scopes,
            subject=delegated_email_address)
    except JSONDecodeError:
        # Not inline JSON: treat the argument as a path to a key file.
        # (Fix: removed the original's dead ``data = client_secrets``
        # assignment, which was never read.)
        return Credentials.from_service_account_file(
            client_secrets,
            scopes=oauth2_scopes,
            subject=delegated_email_address)
def __init__(self):
    """Initialise the streaming STT backend against Google Cloud Speech.

    The google imports are deferred (and published as module globals) so
    the dependency is only required when this backend is actually used.
    """
    global SpeechClient, types, enums, Credentials
    from google.cloud.speech import SpeechClient, types, enums
    from google.oauth2.service_account import Credentials
    super(GoogleCloudStreamingSTT, self).__init__()
    # Module-specific language selection overrides the global setting.
    self.language = self.config.get('lang') or self.lang
    credentials = Credentials.from_service_account_info(
        self.credential.get('json'))
    self.client = SpeechClient(credentials=credentials)
    # 16 kHz LINEAR16 audio, single best hypothesis, command-oriented model.
    base_config = types.RecognitionConfig(
        encoding=enums.RecognitionConfig.AudioEncoding.LINEAR16,
        sample_rate_hertz=16000,
        language_code=self.language,
        model='command_and_search',
        max_alternatives=1,
    )
    self.streaming_config = types.StreamingRecognitionConfig(
        config=base_config,
        interim_results=True,
        single_utterance=True,
    )
def build_credentials(credentials_file, admin):
    """Build authenticated credentials headers."""
    # Bail out early when the key file is missing or unusable.
    if not os.path.isfile(credentials_file):
        print(f'ERROR: {credentials_file} does not exist')
        sys.exit(2)
    with open(credentials_file, 'rb') as fpointer:
        credentials_data = json.load(fpointer)
    client_id = credentials_data.get('client_id')
    if not client_id:
        print(f'ERROR: {credentials_file} is not valid, no client_id present')
        sys.exit(3)
    headers = {
        'Accept': 'application/json',
        'User-Agent': f'Break19 {__version__} ' \
                      'https://github.com/jay0lee/break19'
    }
    # Scoped, delegated credentials built in one fluent chain.
    creds = (Credentials.from_service_account_info(credentials_data)
             .with_scopes(SCOPES)
             .with_subject(admin))
    request = google.auth.transport.requests.Request()
    try:
        creds.refresh(request)
    except google.auth.exceptions.RefreshError as err:
        # Delegation not yet authorized: print the admin-console deep link.
        print(err)
        admin_url = f'https://admin.google.com/ac/owl/domainwidedelegation' \
                    f'?clientIdToAdd={client_id}' \
                    f'&clientScopeToAdd={",".join(SCOPES)}' \
                    f'&overwriteClientId=true'
        print(f'Please go to:\n\n{admin_url}\n\nto authorize access.')
        sys.exit(1)
    creds.apply(headers)
    return headers
def get_google_client(submodule, credentials: dict = None, project: str = None):
    """
    Utility function for loading Google Client objects from a given set of
    credentials.

    Args:
        - submodule: a Python submodule with a Client attribute
        - credentials (dict, optional): a dictionary of Google credentials
            used to initialize the Client; if not provided, will attempt to
            load the Client using ambient environment settings
        - project (str, optional): the Google project to point the Client
            to; if not provided, Client defaults will be used

    Returns:
        - Client: an initialized and authenticated Google Client
    """
    Client = getattr(submodule, "Client")
    # Fall back to the GCP_CREDENTIALS secret when none were passed
    # (``or`` keeps the original falsy-dict semantics).
    creds_info = credentials or prefect.context.get("secrets", {}).get(
        "GCP_CREDENTIALS")
    if creds_info is None:
        # No credentials anywhere: rely on ambient environment settings.
        return Client(project=project)
    creds = Credentials.from_service_account_info(creds_info)
    return Client(project=project or creds.project_id, credentials=creds)
def create_connection(self, *args, **kwargs):
    """Open a DB-API connection to BigQuery using the stored account info."""
    info = self.account_info_dict
    # The Client is kept on self so other methods can reuse it.
    self.client = bigquery.Client(
        project=info['project_id'],
        credentials=Credentials.from_service_account_info(info))
    return dbapi.Connection(self.client)
def test_bigquery(self):
    """End-to-end smoke test: create a fixture table in BigQuery, run a
    CTE-based aggregate query against it, and close the connection."""
    database = 'sodalite'
    # Service-account JSON is injected through the environment.
    account_info_json_str = os.getenv('BIGQUERY_ACCOUNT_INFO_JSON')
    account_info_json_dict = json.loads(account_info_json_str)
    credentials = Credentials.from_service_account_info(
        account_info_json_dict)
    project_id = account_info_json_dict['project_id']
    client = bigquery.Client(project=project_id, credentials=credentials)
    connection = dbapi.Connection(client)
    try:
        # Start from a clean slate, then create the fixture table.
        sql_update(
            connection,
            (f'DROP TABLE IF EXISTS `{database}`.`{self.table_name}`'))
        sql_update(connection,
                   (f'CREATE TABLE `{database}`.`{self.table_name}` (\n'
                    f' `id` STRING, \n'
                    f' `size` INT64 );'))
        # Exercise a WITH-clause query with filtering, ordering, and
        # aggregate functions; only the first row is fetched.
        sql_fetchone(
            connection,
            (f'WITH `{self.cte_table_name}` as ( \n'
             f' SELECT "id" as `v`, "size" as `s`, LENGTH("id") as `l` \n'
             f' FROM `{database}`.`{self.table_name}` \n'
             f' WHERE `size` = 1 \n'
             f' ORDER BY `size` ASC ) \n'
             f'SELECT COUNT(DISTINCT("v")), COUNT("s") \n'
             f'FROM `{self.cte_table_name}`;'))
    finally:
        # Always release the connection, even if a statement failed.
        connection.close()
def __init__(self, credentials_list, resource_keys):
    """Initialise the service.

    :param credentials_list: A list of dicts of credentials info as provided
        by Google console's JSON format.
    :param resource_keys: A dict of file ids to resource keys, to fill out
        any missing resource keys.
    :raises ConfigurationError: If the credentials are not accepted by Google
    """
    primary = credentials_list[0]
    if primary.get("disable"):
        # Explicitly disabled: leave the service unconfigured.
        LOG.error("Google Drive credentials have been disabled")
        return
    self._resource_keys = resource_keys
    try:
        credentials = Credentials.from_service_account_info(
            primary, scopes=self.SCOPES)
    except ValueError as exc:
        raise ConfigurationError(
            "The Google Drive service account information is invalid"
        ) from exc
    session = AuthorizedSession(credentials, refresh_timeout=self.TIMEOUT)
    self._http_service = HTTPService(
        session=session,
        error_translator=translate_google_error,
    )
def service_account_from_dict(info, scopes=DEFAULT_SCOPES):
    """Authenticate with a service account given as a mapping.

    ``scopes`` defaults to ``gspread.auth.DEFAULT_SCOPES``, the read/write
    scope set covering both the Sheets and Drive APIs::

        DEFAULT_SCOPES =[
            'https://www.googleapis.com/auth/spreadsheets',
            'https://www.googleapis.com/auth/drive'
        ]

    Pass ``gspread.auth.READONLY_SCOPES`` for read-only access instead;
    any ``gspread`` method that updates a spreadsheet **will not work**
    under the read-only scopes.

    :param info (Mapping[str, str]): The service account info in Google format
    :param list scopes: The scopes used to obtain authorization.
    :rtype: :class:`gspread.Client`
    """
    credentials = ServiceAccountCredentials.from_service_account_info(
        info=info,
        scopes=scopes,
    )
    return Client(auth=credentials)
def authorize_google_api(service_account_info, scopes):
    """Return scoped Google credentials built from service-account info.

    :param service_account_info: mapping of service-account JSON fields.
    :param scopes: scope URLs to request.
    """
    return Credentials.from_service_account_info(
        service_account_info, scopes=scopes
    )
def detect_intent_texts(session_id, text):
    '''Connects to DialogFlow using the environment variables
    'DF_LANGUAGE_CODE' to determine the language code, 'DF_CREDENTIALS'
    for authentication.

    Credentials need to be stored as an environment variable. Before doing
    so, line-breaks need to be removed, and double-quotes turned into
    single-quotes.
    '''
    try:
        language_code = os.environ['DF_LANGUAGE_CODE']
        # DF_CREDENTIALS holds a Python-literal dict (single quotes), so it
        # is parsed with ast.literal_eval rather than json.loads.
        creds_info = ast.literal_eval(os.environ['DF_CREDENTIALS'])
        project_id = creds_info['project_id']
        cr = Credentials.from_service_account_info(creds_info)
        session_client = dialogflow.SessionsClient(credentials=cr)
        session = session_client.session_path(project_id, session_id)
        print('Session path: {}\n'.format(session))
        text_input = dialogflow.TextInput(text=text,
                                          language_code=language_code)
        query_input = dialogflow.QueryInput(text=text_input)
        result = session_client.detect_intent(request={
            'session': session,
            'query_input': query_input
        })
        response = proto_message_to_dict(result.query_result)
    except Exception as e:
        # Any failure is reported back to the caller as an error string.
        response = 'DialogFlow error: ' + str(e)
    return response
def build_credentials_with_key_from(self, bucket, path):
    """Download a service-account key file stored in GCS and build
    credentials from it, authenticating the download with this object's
    default service-account credentials."""
    storage_client = build_api_resource_from(
        self.default_service_account_credentials(), u'storage', u'v1')
    key_data = storage_client.objects().get_media(
        bucket=bucket, object=path).execute()
    return Credentials.from_service_account_info(loads(key_data))
def connect_to_bigquery():
    """Create a BigQuery client from the GOOGLE_CREDENTIALS (JSON string)
    and GOOGLE_PROJECT_ID environment variables."""
    creds_json = os.getenv("GOOGLE_CREDENTIALS")
    project_id = os.getenv("GOOGLE_PROJECT_ID")
    creds = Credentials.from_service_account_info(json.loads(creds_json))
    return bigquery.Client(credentials=creds, project=project_id)
def get_api():
    """Return an authorized gspread client with the Sheets scope."""
    credentials = Credentials.from_service_account_info(
        get_secret("GOOGLE_CREDENTIALS", is_json=True),
        scopes=["https://www.googleapis.com/auth/spreadsheets"])
    return gspread.authorize(credentials)
def _credentials_storage_service():
    """Build a Storage v1 API client from UI_SERVICE, which may hold either
    an inline JSON credentials string or a path to a key file."""
    if RE_CREDENTIALS_JSON.match(UI_SERVICE):
        creds = Credentials.from_service_account_info(json.loads(UI_SERVICE))
    else:
        creds = Credentials.from_service_account_file(UI_SERVICE)
    return discovery.build('storage', 'v1', credentials=creds)
def _credentials(self):
    """Lazily resolve credentials and cache them on the instance: built
    from ``credentials_dict`` when present, otherwise via application
    default credentials."""
    if self.credentials_obj:
        return self.credentials_obj
    if self.credentials_dict:
        self.credentials_obj = Credentials.from_service_account_info(
            self.credentials_dict)
    else:
        # No explicit info: fall back to ambient default credentials.
        self.credentials_obj, _ = google.auth.default()
    return self.credentials_obj
def get_credentials(event):
    """Build delegated cloud-platform credentials from the event payload.

    ``credential_key`` carries the service-account JSON string;
    ``credential_id`` is the subject impersonated via domain-wide
    delegation.
    """
    info = json.loads(event['credentials']['credential_key'])
    return (
        Credentials.from_service_account_info(info)
        .with_scopes(['https://www.googleapis.com/auth/cloud-platform'])
        .with_subject(event['credentials']['credential_id'])
    )
def _connect_to_cloud_storage_bucket():
    """Return the "<project>-tweets-requested" GCS bucket, authenticated
    from the GOOGLE_CREDENTIALS / GOOGLE_PROJECT_ID environment variables."""
    creds_json = os.getenv("GOOGLE_CREDENTIALS")
    project_id = os.getenv("GOOGLE_PROJECT_ID")
    creds = Credentials.from_service_account_info(json.loads(creds_json))
    client = storage.Client(credentials=creds, project=project_id)
    return client.get_bucket(f"{project_id}-tweets-requested")
def _connect_to_google_queue():
    """Return a Cloud Tasks client together with the path of the
    tweet-request queue in southamerica-east1."""
    creds_json = os.getenv("GOOGLE_CREDENTIALS")
    project_id = os.getenv("GOOGLE_PROJECT_ID")
    creds = Credentials.from_service_account_info(json.loads(creds_json))
    client = tasks_v2.CloudTasksClient(credentials=creds)
    queue_path = client.queue_path(project_id, "southamerica-east1",
                                   "tweet-request-queue")
    return client, queue_path
def dummy_service_account():
    """Return a process-wide cached Credentials object built from the
    in-module dummy service-account JSON; constructed lazily on first use."""
    global _DUMMY_SERVICE_ACCOUNT
    from google.oauth2.service_account import Credentials
    if _DUMMY_SERVICE_ACCOUNT is None:
        _DUMMY_SERVICE_ACCOUNT = Credentials.from_service_account_info(
            _SERVICE_ACCOUNT_JSON)
    return _DUMMY_SERVICE_ACCOUNT
'GCLOUD_BUCKET', ] if os.path.exists('settings.json'): with open('settings.json') as f: json_dict = json.load(f) settings = {k: v for k, v in json_dict.items() if k in key_filter} else: settings = {k: os.environ[k] for k in key_filter} return settings _settings = _load_settings() _credentials = Credentials.from_service_account_info({ 'token_uri': 'https://accounts.google.com/o/oauth2/token', 'client_email': _settings['GCLOUD_CLIENT_EMAIL'], 'private_key': _settings['GCLOUD_PRIVATE_KEY'], }) _gcs_client = gcs.Client( project=_settings['GCLOUD_PROJECT_ID'], credentials=_credentials) _loop = asyncio.get_event_loop() _max_workers = 1 _loop.set_default_executor(ProcessPoolExecutor(max_workers=_max_workers)) _manager = multiprocessing.Manager() _jobs_list = _manager.list() _app = web.Application(logger=_logger) _app.router.add_route('POST', '/jobs', _add_job) web.run_app(_app, port=10000)