def test_fetchall_w_bqstorage_client_fetch_error_no_fallback(self):
    """A BQ Storage read error must propagate from fetchall().

    When a dedicated BQ Storage client is supplied, a Forbidden error
    raised while creating the read session is surfaced to the caller;
    the cursor must NOT silently fall back to the tabledata.list API.
    """
    from google.cloud.bigquery import dbapi
    from google.cloud.bigquery import table

    rows = [table.Row([1.1, 1.2], {"foo": 0, "bar": 1})]

    def passthrough_bqstorage_client(bqstorage_client=None, **kwargs):
        # Hand the supplied storage client straight back so the cursor
        # attempts the BQ Storage read path.
        return bqstorage_client

    client = self._mock_client(rows=rows)
    client._ensure_bqstorage_client.side_effect = passthrough_bqstorage_client

    bqstorage_client = self._mock_bqstorage_client(stream_count=1, rows=rows)
    bqstorage_client.create_read_session.side_effect = exceptions.Forbidden(
        "invalid credentials"
    )

    cursor = dbapi.connect(
        client=client,
        bqstorage_client=bqstorage_client,
    ).cursor()
    cursor.execute("SELECT foo, bar FROM some_table")

    with self.assertRaisesRegex(exceptions.Forbidden, "invalid credentials"):
        cursor.fetchall()

    # the default client was not used
    client.list_rows.assert_not_called()
def test_ensure_bucket_exists_bucket_already_exists_in_different_project(
        self):
    """A Forbidden from get_bucket means the default-named bucket is
    owned by a different project, so a new bucket with a numeric suffix
    is created in the caller's project instead.
    """
    created_bucket = mock.Mock()
    gcs_client = self.gcs_client(
        client_attrs={
            "get_bucket.side_effect": exceptions.Forbidden("err"),
            "bucket.return_value": created_bucket,
        })

    bucket_name = gcs_client.ensure_bucket_exists("my-project", "us-central1")

    # The default bucket name was probed first, then a fresh bucket
    # handle was requested under the returned (suffixed) name.
    gcs_client.client.get_bucket.assert_called_with(
        "my-project-automl-tables-staging")
    gcs_client.client.bucket.assert_called_with(bucket_name)
    created_bucket.create.assert_called_with(
        project="my-project", location="us-central1")

    # The fallback name is the default name plus a numeric suffix.
    assert re.match("^my-project-automl-tables-staging-[0-9]*$", bucket_name)