def main():
    """Walk through the three gbq helpers against a demo dataset."""
    # Preamble: load service-account credentials and build a BigQuery client.
    key_path = Path("~/.gbq-key.json").expanduser()
    client = Client.from_service_account_json(key_path)

    # Example of df_to_table(): push a random 10x4 frame into a table.
    schema = [
        SchemaField(f'field{i}', 'float64', 'REQUIRED')
        for i in range(1, 5)
    ]
    table = client.dataset('my_dataset').table('example_table_1', schema)
    gbq.df_to_table(pd.DataFrame(np.random.rand(10, 4)), table)

    # Example of table_to_df(): read the same table back into a DataFrame.
    dataset = client.dataset('my_dataset')
    table = dataset.table('example_table_1')
    round_trip = gbq.table_to_df(table)
    round_trip.info()

    # Example of query_to_df(): run a SQL query and materialize the result.
    query = 'select field1, field2 from my_dataset.example_table_1;'
    result = gbq.query_to_df(query, client)
    result.info()
def create_client(creds_file=None):
    """Build a BigQuery Client from a service-account JSON key file.

    :param creds_file: path to the key file; falls back to the
        GOOGLE_APPLICATION_CREDENTIALS environment variable when omitted.
    :returns: a ``Client`` bound to the key file's project.
    :raises SystemError: when no credentials path can be resolved.
    """
    creds_file = creds_file or os.environ.get('GOOGLE_APPLICATION_CREDENTIALS')
    if creds_file is None:
        raise SystemError('Credentials could not be found.')
    # Use a context manager so the key-file handle is closed promptly;
    # the original `json.load(open(...))` leaked the file object.
    with open(creds_file, encoding='utf-8') as fh:
        project = json.load(fh)['project_id']
    return Client.from_service_account_json(creds_file, project=project)
def __init__(self, credentials_path=None, location=None):
    """Wrap a BigQuery Client, optionally built from a key file.

    :param credentials_path: service-account JSON path; when omitted the
        Client is built from ambient default credentials instead.
    :param location: default location forwarded to the Client.
    """
    self.credentials_path = credentials_path
    self.location = location
    if not self.credentials_path:
        self.client = Client(location=self.location)
    else:
        self.client = Client.from_service_account_json(
            self.credentials_path, location=self.location)
def __init__(self):
    """Create a BigQuery client, locating credentials via env var first.

    Looks for the key-file path in the AUGUR_BLACK_CREDS environment
    variable, then falls back to a local secrets directory (the layout
    used for deployed cloud functions).
    """
    # `.get()` with a default replaces the original try/bare-except,
    # which would also have swallowed unrelated errors.
    self.bq_cred_path = os.environ.get(
        "AUGUR_BLACK_CREDS", "./secrets/db_creds.json")
    self.bq_client = Client.from_service_account_json(self.bq_cred_path)
def create_client(creds_file: Optional[str] = None) -> Client:
    """Return a BigQuery Client built from a service-account key file.

    Falls back to the GOOGLE_APPLICATION_CREDENTIALS environment
    variable when *creds_file* is not supplied.

    :raises SystemError: when neither source yields a credentials path.
    """
    creds_file = creds_file or os.environ.get('GOOGLE_APPLICATION_CREDENTIALS')
    if creds_file is None:
        raise SystemError('Credentials could not be found.')
    # Read the project id straight out of the key file.
    with open(creds_file, encoding="UTF-8") as file:
        project = json.load(file)['project_id']
    return Client.from_service_account_json(creds_file, project=project)
def __init__(self, client=None, credentials_file=None):
    """Initialize from either a ready-made client or a key file.

    An existing BigQuery *client* takes precedence; otherwise a client
    is built from *credentials_file*.

    :raises ValueError: when neither argument is supplied.
    """
    # Guard clause: fail fast when no client source was provided.
    if client is None and credentials_file is None:
        raise ValueError(
            "BigQueryDatabaseClient requires a client or a credentials_file."
        )
    resolved = client
    if resolved is None:
        resolved = Client.from_service_account_json(credentials_file)
    super(BigQueryDatabaseClient, self).__init__(resolved)
def __init__(self, **values: Any):
    """Build a BigQuery client from an in-memory service-account secret.

    The secret JSON is written to a short-lived temporary file because
    ``Client.from_service_account_json`` only accepts a file path.
    """
    super().__init__(**values)
    # Fetch the secret once instead of twice.
    secret_json = self.service_account_json.get_secret_value()
    project_name = json.loads(secret_json)['project_id']
    with tempfile.NamedTemporaryFile(mode="w", suffix=".json",
                                     prefix="google_cred") as tmp:
        tmp.write(secret_json)
        # seek() flushes the write buffer so the client reads the
        # complete key file — presumably why the original seeked here.
        tmp.seek(0)
        self._client = Client.from_service_account_json(
            tmp.name, project=project_name)
    # The original's explicit tmp.close() was redundant: the context
    # manager closes (and deletes) the temp file on exit.
def __init__(self,
             service_file,
             dataset,
             export_as="csv.gz",
             verbose=True,
             temp_path=None,
             use_legacy_sql=False):
    """Set up BigQuery/GCS clients and a per-instance temp table.

    :param service_file: service-account JSON key used by both clients.
    :param dataset: BigQuery dataset that hosts the temp table.
    :param export_as: export format, one of "csv.gz", "csv", "avro".
    :param verbose: enable progress output.
    :param temp_path: scratch directory; a fresh one is created when omitted.
    :param use_legacy_sql: run queries with legacy SQL dialect.
    :raises ValueError: on an unsupported *export_as* value.
    """
    self.bq_client = BQclient.from_service_account_json(service_file)
    self.gcs_client = GCSclient.from_service_account_json(service_file)
    # `use_legacy_sql` was read but never accepted as a parameter in the
    # original (a NameError at call time); exposed with a safe default.
    self._use_legacy_sql = use_legacy_sql
    self._verbose = verbose
    # Create the scratch directory lazily per instance; calling
    # create_temp_directory() in the default-argument position would run
    # once at import time and share one directory across all instances.
    if temp_path is None:
        temp_path = create_temp_directory()[1]
    self._temp_path = temp_path
    self._base_name = self._temp_path.replace("/tmp", "")
    self.temp_table = self.bq_client.dataset(dataset).table(
        self._base_name + "_temp_table")
    # Raise instead of assert: asserts are stripped under `python -O`.
    if export_as not in ("csv.gz", "csv", "avro"):
        raise ValueError("export_as must be one of csv.gz, csv, avro")
    self._export_as = export_as
def __init__(self, product_ids, service_file=None, brand_new=False, **kwargs):
    """Register per-product dataset/table placeholders and connect.

    :param product_ids: one product id or a list; dashes are normalized
        to underscores for BigQuery naming.
    :param service_file: service-account key; when given, datasets are
        created (``brand_new``) or attached immediately.
    :param brand_new: create fresh datasets instead of attaching.
    :param kwargs: forwarded to the superclass initializer.
    """
    self.tables = {
        'trades': self.TABLE_TRADES,
        'quotes': self.TABLE_QUOTES,
        'orderbook': self.TABLE_ORDERBOOK
    }
    self.schemas = {
        self.TABLE_TRADES: _SCHEMA_TRADES,
        self.TABLE_QUOTES: _SCHEMA_QUOTES,
        self.TABLE_ORDERBOOK: _SCHEMA_ORDERBOOK
    }
    if not isinstance(product_ids, list):
        product_ids = [product_ids]
    self.product_ids = [
        product_id.replace("-", "_") for product_id in product_ids
    ]
    # Placeholder registry: one entry per product, tracking its dataset
    # handle and a table-name -> table-handle map. dict.fromkeys replaces
    # the original `[None] * 3`, which silently broke if the number of
    # tables ever changed.
    self.datasets = {
        product_id: {
            'datasets': None,
            'tables': dict.fromkeys(self.tables.values()),
        }
        for product_id in self.product_ids
    }
    if service_file is not None:
        self.client = Client.from_service_account_json(service_file)
        for product_id in self.product_ids:
            if brand_new:
                self._create_brand_new_datasets(product_id=product_id)
            else:
                self._connect_to_existing_datasets(product_id=product_id)
    super().__init__(**kwargs)
def __init__(self, service_account_cred=BigQueryConfig.default_credential_file):
    """Open a BigQuery client from a service-account key file.

    :param service_account_cred: path to the JSON access key; defaults
        to the path configured on ``BigQueryConfig``.
    """
    key_path = service_account_cred
    self.client = Client.from_service_account_json(key_path)
from os import path

from google.cloud.bigquery import Client

# Resolve the service-account key that lives two directories above this
# module, then build a shared module-level client from it.
_json_file_path = path.abspath(
    path.join(path.dirname(__file__), '..', '..', 'pontoz-secret.json'))
client = Client.from_service_account_json(_json_file_path)