def test_initialize_tables(self, mock_schema, _):
  """Tests that initialize_tables creates the dataset and both tables."""
  mock_client = bigquery_client.BigQueryClient()
  mock_client._dataset = mock.Mock()
  mock_client.initialize_tables()
  mock_schema.assert_called()
  mock_client._dataset.create.assert_called()
  # Bug fix: the original used `.called_with(...)`, which is NOT a Mock
  # assertion method — attribute access on a Mock silently creates a child
  # mock, so those checks always "passed" without verifying anything.
  # assert_any_call actually verifies table() was invoked with each name.
  mock_client._dataset.table.assert_any_call(constants.BIGQUERY_DEVICE_TABLE)
  mock_client._dataset.table.assert_any_call(constants.BIGQUERY_SHELF_TABLE)
def bootstrap_bq_history(**kwargs):
  """Bootstraps BigQuery history tables for archival purposes.

  Args:
    **kwargs: keyword args including a user_email with which to run the
        Directory API client methods (required for BigQuery streaming).
  """
  del kwargs  # Unused, but comes by default.
  # Create the client and initialize tables in one step.
  bigquery_client.BigQueryClient().initialize_tables()
def test_initialize_tables__dataset_exists(self, mock_schema, unused):
  """Tests initialize_tables when the dataset already exists in BigQuery."""
  del unused
  client = bigquery_client.BigQueryClient()
  client._dataset = mock.Mock()
  # Simulate the dataset-already-created case; initialization must not raise.
  conflict = cloud.exceptions.Conflict('Already Exists: Dataset Loaner')
  client._dataset.create.side_effect = conflict
  client.initialize_tables()
  mock_schema.assert_called()
  client._dataset.create.assert_called()
def stream(self):
  """Streams the row to BigQuery."""
  logging.info('Streaming row to table %s', self.model_type)
  client = bigquery_client.BigQueryClient()
  try:
    client.stream_row(self.model_type, self._to_bq_format())
  except bigquery_client.InsertError:
    # Leave the row un-streamed so it can be retried later.
    logging.error('Unable to stream row, see logs.')
  else:
    # Only mark the row streamed on a successful insert.
    self._set_streamed()
def setUp(self):
  """Builds a BigQueryClient backed entirely by mocked bigquery objects."""
  super(BigQueryClientTest, self).setUp()
  patcher = mock.patch('__main__.bigquery_client.bigquery.Client')
  self.addCleanup(patcher.stop)
  self.bq_mock = patcher.start()
  # Fake table: reports that it exists and that inserts succeed.
  self.table = mock.Mock()
  self.table.schema = []
  self.table.exists.return_value = True
  self.table.insert_data.return_value = None
  # Fake dataset that hands back the fake table.
  self.dataset = mock.Mock()
  self.dataset.table.return_value = self.table
  self.client = bigquery_client.BigQueryClient()
  self.client._dataset = self.dataset