def test_create_or_update_view_creates_view(self) -> None:
    """When the view is absent, create_or_update_view must take the create
    path and never call update."""
    # Simulate a missing view: the table lookup raises NotFound.
    self.mock_client.get_table.side_effect = exceptions.NotFound('!')

    self.bq_client.create_or_update_view(self.mock_dataset_ref, self.mock_view)

    # Creation happened; no update was attempted.
    self.mock_client.create_table.assert_called()
    self.mock_client.update_table.assert_not_called()
def test_create_table_with_schema(self) -> None:
    """Tests that the create_table_with_schema function calls the create_table function on the client."""
    # Table lookup fails with NotFound, so the wrapper should create the table.
    self.mock_client.get_table.side_effect = exceptions.NotFound('!')
    schema_fields = [bigquery.SchemaField('new_schema_field', 'STRING')]
    self.bq_client.create_table_with_schema(self.mock_dataset_id, self.mock_table_id, schema_fields)
    self.mock_client.create_table.assert_called()
def test_wait_for_table_load_fail(self) -> None:
    """Test wait_for_table_load logs and exits if there is an error."""
    # The load job's result() raising should be caught and logged, not raised.
    self.mock_load_job.result.side_effect = exceptions.NotFound('!')
    with self.assertLogs(level='ERROR'):
        success = bq_load.wait_for_table_load(self.mock_load_job, self.mock_table)
    self.assertFalse(success)
def test_wait_for_table_load_fail(self) -> None:
    """A failing load job (result() raises) is logged at ERROR and reported
    as an unsuccessful load."""
    self.mock_load_job.result.side_effect = exceptions.NotFound("!")
    with self.assertLogs(level="ERROR"):
        load_succeeded = bq_refresh.wait_for_table_load(
            self.mock_bq_client, self.mock_load_job)
    self.assertFalse(load_succeeded)
def test_create_dataset_if_necessary_table_expiration(self) -> None:
    """A missing dataset is created when a default table expiration is
    supplied via default_table_expiration_ms."""
    # Missing dataset triggers the creation branch.
    self.mock_client.get_dataset.side_effect = exceptions.NotFound('!')

    self.bq_client.create_dataset_if_necessary(
        self.mock_dataset_ref,
        default_table_expiration_ms=6000,
    )

    self.mock_client.create_dataset.assert_called()
def _build_mock_client_for_copying(self, table_exists: bool = True) -> "GoogleBigQuery":
    """Build a GoogleBigQuery wrapper whose underlying client is a MagicMock.

    Args:
        table_exists: When False, the mock client's get_table raises NotFound,
            simulating a missing destination table.

    Returns:
        A GoogleBigQuery instance with its private _client replaced by the mock.
    """
    bq_client = mock.MagicMock()
    if not table_exists:
        bq_client.get_table.side_effect = exceptions.NotFound('not found')
    bq = GoogleBigQuery()
    bq._client = bq_client
    return bq
def test_insert_from_frame_create_first_true_get_table_not_found(
        dummy_db_credentials, df):
    """With create_first=True and a missing table, insert_from_frame must run
    the provided create SQL exactly once."""
    from google.cloud import exceptions

    bq_cli = BigQueryClient(**dummy_db_credentials)
    bq_cli.credentials = True
    with patch(
            "google.cloud.bigquery.client.Client") as connection, patch.object(
                bq_cli, 'execute') as execute_mock:
        bq_cli.connection = connection
        # The table lookup fails, forcing the create-first branch.
        connection.get_table.side_effect = exceptions.NotFound('Not Found')

        create_sql_template = "CREATE TABLE {table_id}"
        sql_params = {'table_id': 'test'}
        bq_cli.insert_from_frame(
            df,
            table=sql_params['table_id'],
            create_first=True,
            create_sql=create_sql_template.format(**sql_params),
            connection=connection,
        )

        connection.get_table.assert_called_once_with(sql_params['table_id'])
        execute_mock.assert_called_once_with(
            create_sql_template.format(**sql_params),
            params=sql_params,
            connection=connection,
        )
def get(self, field_paths=None, transaction=None):
    """Retrieve a snapshot of the current document.

    See :meth:`~.firestore_v1beta1.client.Client.field_path` for more
    information on **field paths**.

    If a ``transaction`` is used and it already has write operations added,
    this method cannot be used (i.e. read-after-write is not allowed).

    Args:
        field_paths (Optional[Iterable[str, ...]]): An iterable of field
            paths (``.``-delimited list of field names) to use as a
            projection of document fields in the returned results. If no
            value is provided, all fields will be returned.
        transaction (Optional[~.firestore_v1beta1.transaction.\
            Transaction]): An existing transaction that this reference will
            be retrieved in.

    Returns:
        ~.firestore_v1beta1.document.DocumentSnapshot: A snapshot of the
        current document.

    Raises:
        ~google.cloud.exceptions.NotFound: If the document does not exist.
    """
    # get_all yields at most one snapshot for a single-reference request.
    snapshot_generator = self._client.get_all(
        [self], field_paths=field_paths, transaction=transaction)
    snapshot = _consume_single_get(snapshot_generator)
    if snapshot is not None:
        return snapshot
    raise exceptions.NotFound(self._document_path)
def test_table_exists_does_not_exist(self) -> None:
    """Check that table_exists returns False if the table does not exist."""
    # NotFound from get_table should be swallowed and logged as a warning.
    self.mock_client.get_table.side_effect = exceptions.NotFound('!')
    with self.assertLogs(level='WARNING'):
        table_exists = self.bq_client.table_exists(
            self.mock_dataset, self.mock_table_id)
        self.assertFalse(table_exists)
def testGetGCSBlob_noBucket(self):
    """GetGCSBlob wraps a bucket-lookup NotFound in a GCSError."""
    gcs_client = mock.MagicMock()
    gcs_client.get_bucket.side_effect = cloud_exceptions.NotFound('')

    with self.assertRaisesRegex(gcs_file_util.GCSError,
                                r'bucket doesn\'t exist.'):
        gcs_file_util.GetGCSBlob(gcs_client, 'gs://bucket/path/to/file')

    # The bucket name is parsed out of the gs:// URL before lookup.
    gcs_client.get_bucket.assert_called_once_with('bucket')
def test_create_or_update_table_from_view_no_view(self):
    """create_or_update_table_from_view does not run a query if the source
    view does not exist.

    NOTE: renamed from test_creat_or_update_table_from_view_no_view to fix
    the 'creat' typo; unittest/pytest still discover it via the test_ prefix.
    """
    # Missing source view: get_table raises NotFound and a warning is logged.
    self.mock_client.get_table.side_effect = exceptions.NotFound('!')
    with self.assertLogs(level='WARNING'):
        bq_utils.create_or_update_table_from_view(self.mock_dataset,
                                                  self.mock_view, 'US_ND')
    self.mock_client.query.assert_not_called()
def test_cloud_storage_get_bucket__not_found_error(self):
    """Test the get_bucket API method for an invalid project."""
    api = storage.CloudStorageAPI(self.config, mock.Mock())
    # The underlying client raising NotFound must be translated into the
    # wrapper's own NotFoundError.
    api._client.get_bucket.side_effect = exceptions.NotFound(
        'The bucket requested was not found.')
    with self.assertRaises(storage.NotFoundError):
        api.get_bucket()
def test_delete_task_not_found(self):
    """Deleting a task that no longer exists swallows the NotFound error."""
    self.mock_client.delete_task.side_effect = exceptions.NotFound(
        message="message")

    task = tasks_v2.types.task_pb2.Task(name="task_name")
    self.client_wrapper.delete_task(task)

    # The wrapper still issued the delete by task name.
    self.mock_client.delete_task.assert_called_with(name="task_name")
def test_that_raises_error_when_it_can_not_find_bucket(
        self, mock_storage_client: mock.MagicMock):
    """upload translates a storage NotFound into StorageError.BucketNotFound."""
    mock_storage_client.side_effect = exceptions.NotFound("")

    with self.assertRaises(StorageError.BucketNotFound):
        self.request.upload(self.remote_file_path, self.local_file_path)

    mock_storage_client.assert_called_once()
def test_that_raises_bucket_not_found_when_it_can_not_find_bucket(
        self, mock_bucket: mock.MagicMock):
    """Constructing a Bucket for a missing bucket raises BucketNotFound."""
    mock_bucket.side_effect = exceptions.NotFound("")

    with self.assertRaises(StorageError.BucketNotFound):
        Bucket(storage.Client(), self.bucket().name)

    # The lookup was attempted exactly once with the requested name.
    mock_bucket.assert_called_once_with(self.bucket().name)
def test_that_raises_can_not_delete_when_file_is_not_existed(
        self, mock_object: mock.MagicMock):
    """Deleting a missing remote file raises StorageError.FileNotFound."""
    mock_object.side_effect = exceptions.NotFound("")

    with self.assertRaises(StorageError.FileNotFound):
        self.blob.delete(self.remote_file_path)

    mock_object.assert_called_once()
def test_export_to_cloud_storage_no_table(self) -> None:
    """export_to_cloud_storage does not extract from a table if the table does not exist."""
    bucket = self.mock_project_id + '-bucket'
    # Missing source table: get_table raises NotFound, which is logged.
    self.mock_client.get_table.side_effect = exceptions.NotFound('!')
    with self.assertLogs(level='WARNING'):
        bq_utils.export_to_cloud_storage(self.mock_dataset, bucket,
                                         self.mock_view, 'US_MT')
    self.mock_client.extract_table.assert_not_called()
def testMissingTable_Created(self):
    """A NotFound from insert_rows triggers table creation and one retry.

    get_dataset succeeds (dataset already exists) while get_table raises
    NotFound, so only the table is created — never the dataset.
    """
    mock_client = mock.Mock(spec=tables.bigquery.Client)
    # First insert fails with NotFound; the retry returns an empty error list.
    mock_client.insert_rows.side_effect = [exceptions.NotFound('OMG'), []]
    mock_client.get_table.side_effect = exceptions.NotFound('OMG')
    self.Patch(tables.bigquery, 'Client', return_value=mock_client)
    tables._SendToBigQuery(TEST_TABLE, self.row_dict)
    insert_rows_call = mock.call(
        mock.ANY, [self.row_dict],
        selected_fields=TEST_TABLE.schema,
        row_ids=[self.row_id])
    # The identical insert call is issued twice: initial attempt + retry.
    mock_client.insert_rows.assert_has_calls([insert_rows_call] * 2)
    mock_client.get_dataset.assert_called_once()
    mock_client.create_dataset.assert_not_called()
    mock_client.get_table.assert_called_once()
    mock_client.create_table.assert_called_once()
def test_insert_into_table_from_table_invalid_destination(self):
    """A missing destination table makes insert_into_table_from_table_async
    raise ValueError before any query is issued."""
    self.mock_client.get_table.side_effect = exceptions.NotFound('!')

    with pytest.raises(ValueError):
        self.bq_client.insert_into_table_from_table_async(
            self.mock_dataset_id, self.mock_table_id,
            'fake_source_dataset_id', 'fake_table_id')

    # The destination was checked, but no query ever ran.
    self.mock_client.get_table.assert_called()
    self.mock_client.query.assert_not_called()
def test_add_missing_fields_to_schema_no_table(self):
    """add_missing_fields_to_schema raises ValueError and never updates the
    table when the table does not exist."""
    self.mock_client.get_table.side_effect = exceptions.NotFound('!')
    extra_fields = [bigquery.SchemaField('fake_schema_field', 'STRING')]

    with pytest.raises(ValueError):
        self.bq_client.add_missing_fields_to_schema(
            self.mock_dataset_id, self.mock_table_id, extra_fields)

    self.mock_client.update_table.assert_not_called()
def test_delete_nonexisting_object(self, mock_service):
    """gcs_hook.delete propagates the NotFound raised by the blob delete."""
    bucket_name = 'test_bucket'
    object_name = 'test_object'
    # Walk the mock chain: service -> bucket -> blob -> delete.
    blob_factory = mock_service.return_value.get_bucket.return_value.blob
    blob_factory.return_value.delete.side_effect = exceptions.NotFound(
        message="Not Found")

    with self.assertRaises(exceptions.NotFound):
        self.gcs_hook.delete(bucket=bucket_name, object=object_name)
def test_insert_into_table_from_cloud_storage_async(self) -> None:
    """A missing destination dataset is created before the GCS load job is
    started."""
    # Dataset lookup fails -> create_dataset should run before the load.
    self.mock_client.get_dataset.side_effect = exceptions.NotFound('!')
    self.bq_client.insert_into_table_from_cloud_storage_async(
        destination_dataset_ref=self.mock_dataset,
        destination_table_id=self.mock_table_id,
        destination_table_schema=[SchemaField('my_column', 'STRING', 'NULLABLE', None, ())],
        source_uri='gs://bucket/export-uri')
    self.mock_client.create_dataset.assert_called()
    self.mock_client.load_table_from_uri.assert_called()
def test_export_to_cloud_storage_no_table(self):
    """export_table_to_cloud_storage_async returns None and skips the
    extract when the source table is missing."""
    self.mock_client.get_table.side_effect = exceptions.NotFound('!')
    destination = f'gs://{self.mock_project_id}-bucket/destination_path.json'

    with self.assertLogs(level='WARNING'):
        result = self.bq_client.export_table_to_cloud_storage_async(
            source_table_dataset_ref=self.mock_dataset,
            source_table_id='source-table',
            destination_uri=destination,
            destination_format=bigquery.DestinationFormat.NEWLINE_DELIMITED_JSON)
        self.assertIsNone(result)

    self.mock_client.extract_table.assert_not_called()
def test_export_query_results_to_cloud_storage_no_table(self):
    """export_query_results_to_cloud_storage logs a warning and performs no
    extraction when the table lookup fails.

    Previously this test made no post-call assertion at all; it now mirrors
    the other no-table export tests and verifies nothing was extracted.
    """
    bucket = self.mock_project_id + '-bucket'
    # Missing table: get_table raises NotFound, which should be logged.
    self.mock_client.get_table.side_effect = exceptions.NotFound('!')
    with self.assertLogs(level='WARNING'):
        self.bq_client.export_query_results_to_cloud_storage([
            ExportQueryConfig.from_view_query(
                view=self.mock_view,
                view_filter_clause='WHERE x = y',
                intermediate_table_name=self.mock_table_id,
                output_uri=f'gs://{bucket}/view.json',
                output_format=bigquery.DestinationFormat.NEWLINE_DELIMITED_JSON)
        ])
    # No extract job should be started for a table that does not exist.
    self.mock_client.extract_table.assert_not_called()
def one(self):
    """Return exactly one result, or raise an exception.

    :raises:
        :exc:`NotFound`: If there are no results.
        :exc:`ValueError`: If there are multiple results.
        :exc:`RuntimeError`: If consumption has already occurred,
            in whole or in part.
    """
    # Delegate multiplicity checking to one_or_none; only the empty case
    # needs extra handling here.
    row = self.one_or_none()
    if row is None:
        raise exceptions.NotFound("No rows matched the given query.")
    return row
def test_upload_to_google_cloud_storage_handles_exceptions(
        self, mock_get_bucket, mock_err_report):
    """
    Test the function handles an exception thrown while uploading the
    object to Google Cloud Storage.
    """
    mock_get_bucket.side_effect = exceptions.NotFound(
        '404 GET https://storage.googleapis.com/storage/v1/b/some-bucket-name: Not Found'
    )
    mock_err_report.message = 'test error msg'

    upload_ok = main.upload_to_google_cloud_storage(self.json_data_to_upload)

    # The failure is reported via the return value, not an exception.
    self.assertFalse(upload_ok)
def test_verify_gcs_bucket_exceptions(klio_config, mock_storage, not_found):
    """_verify_gcs_bucket returns False for both NotFound and generic errors."""
    gcs_path = "gs://bucket/blob"
    # Parametrized: either a NotFound instance or a bare Exception class.
    mock_storage.get_bucket.side_effect = (
        exceptions.NotFound("test") if not_found else Exception
    )

    job = verify.VerifyJob(klio_config, False)
    job._storage_client = mock_storage

    assert job._verify_gcs_bucket(gcs_path) is False
def test_verify_pub_topic_exceptions(klio_config, mock_publisher, not_found):
    """_verify_pub_topic returns False for both NotFound and generic errors."""
    test_topic = "test"
    # Parametrized: either a NotFound instance or a bare Exception class.
    mock_publisher.get_topic.side_effect = (
        exceptions.NotFound("test") if not_found else Exception
    )

    job = verify.VerifyJob(klio_config, False)
    job._publisher_client = mock_publisher

    assert job._verify_pub_topic(test_topic, input) is False
def test_insert_into_table_from_cloud_storage_async(self) -> None:
    """A missing destination dataset is created before the load job starts."""
    # Dataset lookup fails, forcing the create-dataset branch first.
    self.mock_client.get_dataset.side_effect = exceptions.NotFound("!")

    destination_schema = [SchemaField("my_column", "STRING", "NULLABLE", None, ())]
    self.bq_client.insert_into_table_from_cloud_storage_async(
        destination_dataset_ref=self.mock_dataset_ref,
        destination_table_id=self.mock_table_id,
        destination_table_schema=destination_schema,
        source_uri="gs://bucket/export-uri",
    )

    self.mock_client.create_dataset.assert_called()
    self.mock_client.load_table_from_uri.assert_called()
def test_get_not_in_cache_not_found(self, mock_project_id: MagicMock) -> None:
    """get_secret returns None when Secret Manager reports the version missing."""
    mock_project_id.return_value = "test-project"

    secret_client = Mock()
    secret_client.secret_version_path.return_value = "test-project.top_track.latest"
    secret_client.access_secret_version.side_effect = exceptions.NotFound(
        "Could not find it")

    with patch(
            "google.cloud.secretmanager_v1beta1.SecretManagerServiceClient",
            return_value=secret_client,
    ):
        assert secrets.get_secret("top_track") is None