def test_partition_expiration_w_none_no_partition_set(self):
    """Assigning ``None`` to ``partition_expiration`` on an unpartitioned
    table leaves both partitioning properties unset."""
    from google.cloud.bigquery.table import SchemaField

    schema = [
        SchemaField('full_name', 'STRING', mode='REQUIRED'),
        SchemaField('age', 'INTEGER', mode='REQUIRED'),
    ]
    table_ref = DatasetReference(self.PROJECT, self.DS_ID).table(self.TABLE_NAME)
    table = self._make_one(table_ref, schema=schema)

    # Nothing is set yet.
    self.assertIsNone(table.partition_expiration)

    # Clearing an already-unset expiration is a no-op.
    table.partition_expiration = None
    self.assertIsNone(table.partitioning_type)
    self.assertIsNone(table.partition_expiration)
def test_partition_type_setter_w_none(self):
    """Assigning ``None`` to ``partitioning_type`` removes the
    ``timePartitioning`` entry from the resource properties."""
    from google.cloud.bigquery.table import SchemaField

    fields = [
        SchemaField('full_name', 'STRING', mode='REQUIRED'),
        SchemaField('age', 'INTEGER', mode='REQUIRED'),
    ]
    dataset_ref = DatasetReference(self.PROJECT, self.DS_ID)
    table = self._make_one(dataset_ref.table(self.TABLE_NAME), schema=fields)

    # Simulate a table that already carries day-based partitioning.
    table._properties['timePartitioning'] = {'type': 'DAY'}

    table.partitioning_type = None

    self.assertIsNone(table.partitioning_type)
    self.assertNotIn('timePartitioning', table._properties)
def test_encryption_configuration_setter(self):
    """The encryption configuration can be assigned and then cleared."""
    from google.cloud.bigquery.table import EncryptionConfiguration

    table_ref = DatasetReference(self.PROJECT, self.DS_ID).table(self.TABLE_NAME)
    table = self._make_one(table_ref)

    config = EncryptionConfiguration(kms_key_name=self.KMS_KEY_NAME)
    table.encryption_configuration = config
    self.assertEqual(
        table.encryption_configuration.kms_key_name, self.KMS_KEY_NAME
    )

    # Assigning None clears the configuration entirely.
    table.encryption_configuration = None
    self.assertIsNone(table.encryption_configuration)
def test_begin_w_bound_client(self):
    """``_begin`` POSTs the extract configuration to the jobs endpoint."""
    from google.cloud.bigquery.dataset import DatasetReference

    path = "/projects/%s/jobs" % (self.PROJECT,)
    resource = self._make_resource()
    # Strip server-generated properties so the resource-property check
    # expects None for values the server has not set yet.
    del resource["statistics"]["creationTime"]
    for key in ("etag", "selfLink", "user_email"):
        del resource[key]

    conn = _make_connection(resource)
    client = _make_client(project=self.PROJECT, connection=conn)
    source = DatasetReference(self.PROJECT, self.DS_ID).table(self.SOURCE_TABLE)
    job = self._make_one(self.JOB_ID, source, [self.DESTINATION_URI], client)

    with mock.patch(
        "google.cloud.bigquery.opentelemetry_tracing._get_final_span_attributes"
    ) as final_attributes:
        job._begin()

    final_attributes.assert_called_with({"path": path}, client, job)

    expected_payload = {
        "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID},
        "configuration": {
            "extract": {
                "sourceTable": {
                    "projectId": self.PROJECT,
                    "datasetId": self.DS_ID,
                    "tableId": self.SOURCE_TABLE,
                },
                "destinationUris": [self.DESTINATION_URI],
            }
        },
    }
    conn.api_request.assert_called_once_with(
        method="POST", path=path, data=expected_payload, timeout=None
    )
    self._verifyResourceProperties(job, resource)
def test__row_from_mapping_w_schema(self):
    """Values are emitted in schema order; unknown mapping keys are
    ignored and a missing NULLABLE field becomes None."""
    from google.cloud.bigquery.table import SchemaField, Table

    mapping = {
        'full_name': 'Phred Phlyntstone',
        'age': 32,
        'colors': ['red', 'green'],
        'extra': 'IGNORED',
    }
    schema = [
        SchemaField('full_name', 'STRING', mode='REQUIRED'),
        SchemaField('age', 'INTEGER', mode='REQUIRED'),
        SchemaField('colors', 'DATETIME', mode='REPEATED'),
        SchemaField('joined', 'STRING', mode='NULLABLE'),
    ]
    table_ref = DatasetReference(self.PROJECT, self.DS_ID).table(self.TABLE_NAME)
    table = Table(table_ref, schema=schema)

    expected = ('Phred Phlyntstone', 32, ['red', 'green'], None)
    self.assertEqual(self._call_fut(mapping, table.schema), expected)
def test__row_from_mapping_w_invalid_schema(self):
    """A schema field with an unrecognized mode raises ``ValueError``."""
    from google.cloud.bigquery.table import SchemaField, Table

    mapping = {
        'full_name': 'Phred Phlyntstone',
        'age': 32,
        'colors': ['red', 'green'],
        'bogus': 'WHATEVER',
    }
    schema = [
        SchemaField('full_name', 'STRING', mode='REQUIRED'),
        SchemaField('age', 'INTEGER', mode='REQUIRED'),
        SchemaField('colors', 'DATETIME', mode='REPEATED'),
        # Invalid mode: conversion must fail on this field.
        SchemaField('joined', 'STRING', mode='BOGUS'),
    ]
    table_ref = DatasetReference(self.PROJECT, self.DS_ID).table(self.TABLE_NAME)
    table = Table(table_ref, schema=schema)

    with self.assertRaises(ValueError) as exc:
        self._call_fut(mapping, table.schema)

    self.assertIn('Unknown field mode: BOGUS', str(exc.exception))
def test_num_rows_getter(self):
    """``num_rows`` coerces the stored property value to ``int`` and
    raises ``ValueError`` for a non-numeric string."""
    dataset = DatasetReference(self.PROJECT, self.DS_ID)
    table_ref = dataset.table(self.TABLE_NAME)
    table = self._make_one(table_ref)

    # Check with no value set.
    self.assertIsNone(table.num_rows)

    num_rows = 42
    # Check with integer value set.
    table._properties = {'numRows': num_rows}
    self.assertEqual(table.num_rows, num_rows)

    # Check with a string value set.
    table._properties = {'numRows': str(num_rows)}
    self.assertEqual(table.num_rows, num_rows)

    # Check with invalid int value.  Direct attribute access replaces the
    # original getattr(table, 'num_rows') with a constant name (B009);
    # assigning to a throwaway makes the intentional access explicit.
    table._properties = {'numRows': 'x'}
    with self.assertRaises(ValueError):
        _ = table.num_rows
def test_reload_w_bound_client(self):
    """``reload`` issues a GET for the job resource and updates the job."""
    from google.cloud.bigquery.dataset import DatasetReference

    path = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID)
    resource = self._make_resource()
    conn = _make_connection(resource)
    client = _make_client(project=self.PROJECT, connection=conn)
    source = DatasetReference(self.PROJECT, self.DS_ID).table(self.SOURCE_TABLE)
    job = self._make_one(self.JOB_ID, source, [self.DESTINATION_URI], client)

    with mock.patch(
        "google.cloud.bigquery.opentelemetry_tracing._get_final_span_attributes"
    ) as final_attributes:
        job.reload()

    final_attributes.assert_called_with({"path": path}, client, job)
    conn.api_request.assert_called_once_with(
        method="GET", path=path, query_params={}, timeout=None
    )
    self._verifyResourceProperties(job, resource)
def test_labels_setter_bad_value(self):
    """Assigning a non-mapping to ``labels`` raises ``ValueError``."""
    table_ref = DatasetReference(self.PROJECT, self.DS_ID).table(self.TABLE_NAME)
    table = self._make_one(table_ref)

    with self.assertRaises(ValueError):
        table.labels = 12345
def test_external_data_configuration_setter_bad_value(self):
    """Assigning an invalid value to ``external_data_configuration``
    raises ``ValueError``."""
    table_ref = DatasetReference(self.PROJECT, self.DS_ID).table(self.TABLE_NAME)
    table = self._make_one(table_ref)

    with self.assertRaises(ValueError):
        table.external_data_configuration = 12345
def test_view_use_legacy_sql_setter_bad_value(self):
    """Assigning a non-boolean to ``view_use_legacy_sql`` raises
    ``ValueError``."""
    table_ref = DatasetReference(self.PROJECT, self.DS_ID).table(self.TABLE_NAME)
    table = self._make_one(table_ref)

    with self.assertRaises(ValueError):
        table.view_use_legacy_sql = 12345
def test_location_setter(self):
    """``location`` round-trips the assigned value."""
    table_ref = DatasetReference(self.PROJECT, self.DS_ID).table(self.TABLE_NAME)
    table = self._make_one(table_ref)

    table.location = 'LOCATION'

    self.assertEqual(table.location, 'LOCATION')
def test_friendly_name_setter(self):
    """``friendly_name`` round-trips the assigned value."""
    table_ref = DatasetReference(self.PROJECT, self.DS_ID).table(self.TABLE_NAME)
    table = self._make_one(table_ref)

    table.friendly_name = 'FRIENDLY'

    self.assertEqual(table.friendly_name, 'FRIENDLY')
def test_description_setter(self):
    """``description`` round-trips the assigned value."""
    table_ref = DatasetReference(self.PROJECT, self.DS_ID).table(self.TABLE_NAME)
    table = self._make_one(table_ref)

    table.description = 'DESCRIPTION'

    self.assertEqual(table.description, 'DESCRIPTION')
def test_schema_setter_non_list(self):
    """Assigning a non-sequence to ``schema`` raises ``TypeError``."""
    table_ref = DatasetReference(self.PROJECT, self.DS_ID).table(self.TABLE_NAME)
    table = self._make_one(table_ref)

    with self.assertRaises(TypeError):
        table.schema = object()