def from_api_repr(cls, resource):
    """Factory: construct a table given its API representation

    :type resource: dict
    :param resource: table resource representation returned from the API

    :rtype: :class:`google.cloud.bigquery.table.Table`
    :returns: Table parsed from ``resource``.

    :raises KeyError: if ``resource`` lacks the key ``'tableReference'``,
        or the mapping stored under it lacks ``'tableId'`` (and, via the
        lookups below, ``'projectId'`` or ``'datasetId'``).
    """
    # Local import avoids a circular dependency between table and dataset
    # modules at import time.
    from google.cloud.bigquery import dataset

    # Validate the only keys we guard explicitly; the remaining lookups
    # raise their own KeyError naturally if absent.
    if ('tableReference' not in resource or
            'tableId' not in resource['tableReference']):
        raise KeyError('Resource lacks required identity information:'
                       '["tableReference"]["tableId"]')
    project_id = resource['tableReference']['projectId']
    table_id = resource['tableReference']['tableId']
    dataset_id = resource['tableReference']['datasetId']
    dataset_ref = dataset.DatasetReference(project_id, dataset_id)

    table = cls(dataset_ref.table(table_id))
    table._set_properties(resource)
    return table
def from_api_repr(cls, resource):
    """Factory: construct a table given its API representation

    Args:
        resource (Dict[str, object]):
            Table resource representation from the API

    Returns:
        google.cloud.bigquery.table.Table: Table parsed from ``resource``.

    Raises:
        KeyError:
            If the ``resource`` lacks the key ``'tableReference'``, or if
            the ``dict`` stored within the key ``'tableReference'`` lacks
            the keys ``'tableId'``, ``'projectId'``, or ``'datasetId'``.
    """
    # Local import avoids a circular dependency between table and dataset
    # modules at import time.
    from google.cloud.bigquery import dataset

    # Only 'tableReference' / 'tableId' are checked explicitly; the other
    # required keys raise their own KeyError via the lookups below.
    if ('tableReference' not in resource or
            'tableId' not in resource['tableReference']):
        raise KeyError('Resource lacks required identity information:'
                       '["tableReference"]["tableId"]')
    project_id = resource['tableReference']['projectId']
    table_id = resource['tableReference']['tableId']
    dataset_id = resource['tableReference']['datasetId']
    dataset_ref = dataset.DatasetReference(project_id, dataset_id)

    table = cls(dataset_ref.table(table_id))
    table._properties = resource
    return table
def setUp(self):
    """Build the dataset/table references and the dummy query used by tests."""
    super(BigQueryClientTest, self).setUp()

    ds_ref = dataset.DatasetReference(PROJECT_ID, DATASET_ID)
    self.dataset_reference = ds_ref
    self.table_reference = table.TableReference(ds_ref, TABLE_ID)
    self.dummy_query = bigquery_client.generate_query_string(
        DUMMY_QUERY_FILEPATH, PROJECT_ID)
def _reference_getter(table):
    """A :class:`~google.cloud.bigquery.table.TableReference` pointing to
    this table.

    Returns:
        google.cloud.bigquery.table.TableReference: pointer to this table.
    """
    # Imported locally to avoid a circular import with the dataset module.
    from google.cloud.bigquery import dataset

    parent = dataset.DatasetReference(table.project, table.dataset_id)
    return TableReference(parent, table.table_id)
print( 'Unable to open/find the {} schema file'.format(schema_file_name)) return table_schema # Starts here create_table("table.schema") credentials = service_account.Credentials.from_service_account_file( SERVICE_ACCOUNT_KEY_FILE_PATH) bq_client = client.Client(project=PROJECT_ID, credentials=credentials) # get the list of all datasets in the intended project dataset_list = bq_client.list_datasets(project=PROJECT_ID) # Iterate over each dataset in the project for i_dataset in dataset_list: v_dataset = bq_dataset.DatasetReference(project=PROJECT_ID, dataset_id=i_dataset.dataset_id) print("Processing Dataset : {}".format(i_dataset.dataset_id)) table_list = bq_client.list_tables(dataset=v_dataset) # if table_list.page_number > 0: print("Processing Tables metadata") # table_metadata holds dictionary of table attributes one dict per table table_metadata = [] for i_table in table_list: table = bq_client.get_table(i_table.reference) created = None expires = None # Datetime is converted to String to avoid serialization errors. if table.created: created = (table.created).strftime("%Y-%m-%d %H:%M:%S") modified = (table.modified).strftime("%Y-%m-%d %H:%M:%S") if table.expires: