def test_list_transfer_runs(self):
    """list_transfer_runs should page through the stubbed response."""
    # Response the fake channel will return: one empty run, no next page.
    expected = datatransfer_pb2.ListTransferRunsResponse(
        next_page_token='', transfer_runs=[{}])

    # Route client construction through our stub gRPC channel.
    stub_channel = ChannelStub(responses=[expected])
    with mock.patch('google.api_core.grpc_helpers.create_channel') as create_channel:
        create_channel.return_value = stub_channel
        client = bigquery_datatransfer_v1.DataTransferServiceClient()

    parent = client.project_transfer_config_path('[PROJECT]',
                                                 '[TRANSFER_CONFIG]')

    items = list(client.list_transfer_runs(parent))
    assert len(items) == 1
    assert expected.transfer_runs[0] == items[0]

    # Exactly one RPC should have been issued, with the expected request.
    assert len(stub_channel.requests) == 1
    wanted_request = datatransfer_pb2.ListTransferRunsRequest(parent=parent)
    assert wanted_request == stub_channel.requests[0][1]
def test_schedule_transfer_runs(self):
    """schedule_transfer_runs should forward parent/start/end to the API."""
    # Empty response is enough to verify the round trip.
    expected = datatransfer_pb2.ScheduleTransferRunsResponse()

    stub_channel = ChannelStub(responses=[expected])
    client = bigquery_datatransfer_v1.DataTransferServiceClient(
        channel=stub_channel)

    parent = client.project_transfer_config_path('[PROJECT]',
                                                 '[TRANSFER_CONFIG]')
    start_time = {}
    end_time = {}

    response = client.schedule_transfer_runs(parent, start_time, end_time)
    assert expected == response

    # The stub must have seen exactly one, correctly-built request.
    assert len(stub_channel.requests) == 1
    wanted_request = datatransfer_pb2.ScheduleTransferRunsRequest(
        parent=parent, start_time=start_time, end_time=end_time)
    assert wanted_request == stub_channel.requests[0][1]
def test_list_data_sources(self):
    """list_data_sources should yield the elements of the stubbed page."""
    # One empty data source, no continuation token.
    expected = datatransfer_pb2.ListDataSourcesResponse(
        next_page_token='', data_sources=[{}])

    stub_channel = ChannelStub(responses=[expected])
    client = bigquery_datatransfer_v1.DataTransferServiceClient(
        channel=stub_channel)

    parent = client.project_path('[PROJECT]')

    items = list(client.list_data_sources(parent))
    assert len(items) == 1
    assert expected.data_sources[0] == items[0]

    # Verify the request that reached the channel.
    assert len(stub_channel.requests) == 1
    wanted_request = datatransfer_pb2.ListDataSourcesRequest(parent=parent)
    assert wanted_request == stub_channel.requests[0][1]
def test_get_transfer_run(self):
    """get_transfer_run should return the TransferRun the channel yields."""
    # Canned TransferRun the stub will return.
    expected = transfer_pb2.TransferRun(
        name='name2-1052831874',
        destination_dataset_id='destinationDatasetId1541564179',
        data_source_id='dataSourceId-1015796374',
        user_id=147132913,
        schedule='schedule-697920873')

    stub_channel = ChannelStub(responses=[expected])
    client = bigquery_datatransfer_v1.DataTransferServiceClient(
        channel=stub_channel)

    name = client.project_run_path('[PROJECT]', '[TRANSFER_CONFIG]', '[RUN]')

    response = client.get_transfer_run(name)
    assert expected == response

    # One request only, carrying the resource name we asked for.
    assert len(stub_channel.requests) == 1
    wanted_request = datatransfer_pb2.GetTransferRunRequest(name=name)
    assert wanted_request == stub_channel.requests[0][1]
def test_list_transfer_logs(self):
    """list_transfer_logs should page through the stubbed messages."""
    # One empty transfer message, no next page.
    expected = datatransfer_pb2.ListTransferLogsResponse(
        next_page_token="", transfer_messages=[{}]
    )

    stub_channel = ChannelStub(responses=[expected])
    with mock.patch("google.api_core.grpc_helpers.create_channel") as create_channel:
        create_channel.return_value = stub_channel
        client = bigquery_datatransfer_v1.DataTransferServiceClient()

    parent = client.project_run_path("[PROJECT]", "[TRANSFER_CONFIG]", "[RUN]")

    items = list(client.list_transfer_logs(parent))
    assert len(items) == 1
    assert expected.transfer_messages[0] == items[0]

    # Exactly one RPC with the expected request proto.
    assert len(stub_channel.requests) == 1
    wanted_request = datatransfer_pb2.ListTransferLogsRequest(parent=parent)
    assert wanted_request == stub_channel.requests[0][1]
def test_list_transfer_logs(self):
    """list_transfer_logs should yield the stubbed transfer messages."""
    # Single-element page with no continuation token.
    expected = datatransfer_pb2.ListTransferLogsResponse(
        next_page_token='', transfer_messages=[{}])

    stub_channel = ChannelStub(responses=[expected])
    client = bigquery_datatransfer_v1.DataTransferServiceClient(
        channel=stub_channel)

    parent = client.project_run_path('[PROJECT]', '[TRANSFER_CONFIG]',
                                     '[RUN]')

    items = list(client.list_transfer_logs(parent))
    assert len(items) == 1
    assert expected.transfer_messages[0] == items[0]

    # Confirm the single outgoing request matches what we expect.
    assert len(stub_channel.requests) == 1
    wanted_request = datatransfer_pb2.ListTransferLogsRequest(parent=parent)
    assert wanted_request == stub_channel.requests[0][1]
def test_get_transfer_run(self):
    """get_transfer_run should return the stubbed TransferRun unchanged."""
    # Canned response, including the pubsub topic field of this API version.
    expected = transfer_pb2.TransferRun(
        name="name2-1052831874",
        destination_dataset_id="destinationDatasetId1541564179",
        data_source_id="dataSourceId-1015796374",
        user_id=147132913,
        schedule="schedule-697920873",
        notification_pubsub_topic="notificationPubsubTopic1794281191",
    )

    stub_channel = ChannelStub(responses=[expected])
    with mock.patch("google.api_core.grpc_helpers.create_channel") as create_channel:
        create_channel.return_value = stub_channel
        client = bigquery_datatransfer_v1.DataTransferServiceClient()

    name = client.project_run_path("[PROJECT]", "[TRANSFER_CONFIG]", "[RUN]")

    response = client.get_transfer_run(name)
    assert expected == response

    # One request only, carrying the resource name.
    assert len(stub_channel.requests) == 1
    wanted_request = datatransfer_pb2.GetTransferRunRequest(name=name)
    assert wanted_request == stub_channel.requests[0][1]
def test_check_valid_creds(self):
    """check_valid_creds should relay the stubbed credentials answer."""
    expected = datatransfer_pb2.CheckValidCredsResponse(has_valid_creds=False)

    stub_channel = ChannelStub(responses=[expected])
    with mock.patch("google.api_core.grpc_helpers.create_channel") as create_channel:
        create_channel.return_value = stub_channel
        client = bigquery_datatransfer_v1.DataTransferServiceClient()

    name = client.project_data_source_path("[PROJECT]", "[DATA_SOURCE]")

    response = client.check_valid_creds(name)
    assert expected == response

    # A single request naming the data source must have been sent.
    assert len(stub_channel.requests) == 1
    wanted_request = datatransfer_pb2.CheckValidCredsRequest(name=name)
    assert wanted_request == stub_channel.requests[0][1]
def test_schedule_transfer_runs(self):
    """schedule_transfer_runs should send parent/start/end in one request."""
    expected = datatransfer_pb2.ScheduleTransferRunsResponse()

    stub_channel = ChannelStub(responses=[expected])
    with mock.patch('google.api_core.grpc_helpers.create_channel') as create_channel:
        create_channel.return_value = stub_channel
        client = bigquery_datatransfer_v1.DataTransferServiceClient()

    parent = client.project_transfer_config_path('[PROJECT]',
                                                 '[TRANSFER_CONFIG]')
    start_time = {}
    end_time = {}

    response = client.schedule_transfer_runs(parent, start_time, end_time)
    assert expected == response

    # Validate the single outgoing request.
    assert len(stub_channel.requests) == 1
    wanted_request = datatransfer_pb2.ScheduleTransferRunsRequest(
        parent=parent, start_time=start_time, end_time=end_time)
    assert wanted_request == stub_channel.requests[0][1]
def __init__(self, project_id: str):
    """Initializes a CloudDataTransferUtils instance.

    Args:
      project_id: GCP project id.
    """
    # Client is created eagerly; credentials come from the environment.
    self.client = bigquery_datatransfer_v1.DataTransferServiceClient()
    self.project_id = project_id
def test_start_manual_transfer_runs_exception(self):
    """An error queued on the channel should surface as CustomException."""
    stub_channel = ChannelStub(responses=[CustomException()])
    with mock.patch("google.api_core.grpc_helpers.create_channel") as create_channel:
        create_channel.return_value = stub_channel
        client = bigquery_datatransfer_v1.DataTransferServiceClient()

    with pytest.raises(CustomException):
        client.start_manual_transfer_runs()
def test_list_data_sources_exception(self):
    """Iterating a page whose RPC fails should raise CustomException."""
    stub_channel = ChannelStub(responses=[CustomException()])
    client = bigquery_datatransfer_v1.DataTransferServiceClient(
        channel=stub_channel)

    parent = client.location_path('[PROJECT]', '[LOCATION]')

    # The error fires lazily, when the first page is actually fetched.
    pages = client.list_data_sources(parent)
    with pytest.raises(CustomException):
        list(pages)
def test_check_valid_creds_exception(self):
    """A channel error during check_valid_creds should propagate."""
    stub_channel = ChannelStub(responses=[CustomException()])
    client = bigquery_datatransfer_v1.DataTransferServiceClient(
        channel=stub_channel)

    name = client.project_data_source_path('[PROJECT]', '[DATA_SOURCE]')

    with pytest.raises(CustomException):
        client.check_valid_creds(name)
def test_list_transfer_configs_exception(self):
    """A failing list_transfer_configs page should raise on iteration."""
    stub_channel = ChannelStub(responses=[CustomException()])
    client = bigquery_datatransfer_v1.DataTransferServiceClient(
        channel=stub_channel)

    parent = client.project_path('[PROJECT]')

    pages = client.list_transfer_configs(parent)
    with pytest.raises(CustomException):
        list(pages)
def test_get_transfer_config_exception(self):
    """A channel error during get_transfer_config should propagate."""
    stub_channel = ChannelStub(responses=[CustomException()])
    client = bigquery_datatransfer_v1.DataTransferServiceClient(
        channel=stub_channel)

    name = client.location_transfer_config_path('[PROJECT]', '[LOCATION]',
                                                '[TRANSFER_CONFIG]')

    with pytest.raises(CustomException):
        client.get_transfer_config(name)
def test_get_data_source_exception(self):
    """A channel error during get_data_source should propagate."""
    stub_channel = ChannelStub(responses=[CustomException()])
    client = bigquery_datatransfer_v1.DataTransferServiceClient(
        channel=stub_channel)

    name = client.location_data_source_path('[PROJECT]', '[LOCATION]',
                                            '[DATA_SOURCE]')

    with pytest.raises(CustomException):
        client.get_data_source(name)
def test_delete_transfer_run_exception(self):
    """A channel error during delete_transfer_run should propagate."""
    stub_channel = ChannelStub(responses=[CustomException()])
    client = bigquery_datatransfer_v1.DataTransferServiceClient(
        channel=stub_channel)

    name = client.project_run_path('[PROJECT]', '[TRANSFER_CONFIG]', '[RUN]')

    with pytest.raises(CustomException):
        client.delete_transfer_run(name)
def test_create_transfer_config_exception(self):
    """A channel error during create_transfer_config should propagate."""
    stub_channel = ChannelStub(responses=[CustomException()])
    client = bigquery_datatransfer_v1.DataTransferServiceClient(
        channel=stub_channel)

    parent = client.project_path('[PROJECT]')
    transfer_config = {}

    with pytest.raises(CustomException):
        client.create_transfer_config(parent, transfer_config)
def test_update_transfer_config_exception(self):
    """A channel error during update_transfer_config should propagate."""
    stub_channel = ChannelStub(responses=[CustomException()])
    client = bigquery_datatransfer_v1.DataTransferServiceClient(
        channel=stub_channel)

    transfer_config = {}
    update_mask = {}

    with pytest.raises(CustomException):
        client.update_transfer_config(transfer_config, update_mask)
def test_check_valid_creds_exception(self):
    """A channel error during check_valid_creds should propagate."""
    stub_channel = ChannelStub(responses=[CustomException()])
    with mock.patch('google.api_core.grpc_helpers.create_channel') as create_channel:
        create_channel.return_value = stub_channel
        client = bigquery_datatransfer_v1.DataTransferServiceClient()

    name = client.project_data_source_path('[PROJECT]', '[DATA_SOURCE]')

    with pytest.raises(CustomException):
        client.check_valid_creds(name)
def test_list_transfer_configs_exception(self):
    """A failing list_transfer_configs page should raise on iteration."""
    stub_channel = ChannelStub(responses=[CustomException()])
    with mock.patch('google.api_core.grpc_helpers.create_channel') as create_channel:
        create_channel.return_value = stub_channel
        client = bigquery_datatransfer_v1.DataTransferServiceClient()

    parent = client.project_path('[PROJECT]')

    # The error fires when the first page is consumed, not at call time.
    pages = client.list_transfer_configs(parent)
    with pytest.raises(CustomException):
        list(pages)
def test_delete_transfer_config_exception(self):
    """A channel error during delete_transfer_config should propagate."""
    stub_channel = ChannelStub(responses=[CustomException()])
    with mock.patch("google.api_core.grpc_helpers.create_channel") as create_channel:
        create_channel.return_value = stub_channel
        client = bigquery_datatransfer_v1.DataTransferServiceClient()

    name = client.project_transfer_config_path("[PROJECT]", "[TRANSFER_CONFIG]")

    with pytest.raises(CustomException):
        client.delete_transfer_config(name)
def test_get_transfer_run_exception(self):
    """A channel error during get_transfer_run should propagate."""
    stub_channel = ChannelStub(responses=[CustomException()])
    with mock.patch('google.api_core.grpc_helpers.create_channel') as create_channel:
        create_channel.return_value = stub_channel
        client = bigquery_datatransfer_v1.DataTransferServiceClient()

    name = client.project_run_path('[PROJECT]', '[TRANSFER_CONFIG]', '[RUN]')

    with pytest.raises(CustomException):
        client.get_transfer_run(name)
def test_update_transfer_config_exception(self):
    """A channel error during update_transfer_config should propagate."""
    stub_channel = ChannelStub(responses=[CustomException()])
    with mock.patch('google.api_core.grpc_helpers.create_channel') as create_channel:
        create_channel.return_value = stub_channel
        client = bigquery_datatransfer_v1.DataTransferServiceClient()

    transfer_config = {}
    update_mask = {}

    with pytest.raises(CustomException):
        client.update_transfer_config(transfer_config, update_mask)
def test_schedule_transfer_runs_exception(self):
    """A channel error during schedule_transfer_runs should propagate."""
    stub_channel = ChannelStub(responses=[CustomException()])
    client = bigquery_datatransfer_v1.DataTransferServiceClient(
        channel=stub_channel)

    parent = client.project_transfer_config_path('[PROJECT]',
                                                 '[TRANSFER_CONFIG]')
    start_time = {}
    end_time = {}

    with pytest.raises(CustomException):
        client.schedule_transfer_runs(parent, start_time, end_time)
def test_delete_transfer_run(self):
    """delete_transfer_run should issue one request naming the run."""
    # Delete returns nothing, so the stub needs no canned responses.
    stub_channel = ChannelStub()
    client = bigquery_datatransfer_v1.DataTransferServiceClient(
        channel=stub_channel)

    name = client.project_run_path('[PROJECT]', '[TRANSFER_CONFIG]', '[RUN]')
    client.delete_transfer_run(name)

    assert len(stub_channel.requests) == 1
    wanted_request = datatransfer_pb2.DeleteTransferRunRequest(name=name)
    assert wanted_request == stub_channel.requests[0][1]
def test_get_data_source(self):
    """get_data_source should return the stubbed DataSource unchanged."""
    # Fully-populated canned DataSource the stub will hand back.
    expected = datatransfer_pb2.DataSource(
        name="name2-1052831874",
        data_source_id="dataSourceId-1015796374",
        display_name="displayName1615086568",
        description="description-1724546052",
        client_id="clientId-1904089585",
        supports_multiple_transfers=True,
        update_deadline_seconds=991471694,
        default_schedule="defaultSchedule-800168235",
        supports_custom_schedule=True,
        help_url="helpUrl-789431439",
        default_data_refresh_window_days=1804935157,
        manual_runs_disabled=True,
    )

    stub_channel = ChannelStub(responses=[expected])
    with mock.patch("google.api_core.grpc_helpers.create_channel") as create_channel:
        create_channel.return_value = stub_channel
        client = bigquery_datatransfer_v1.DataTransferServiceClient()

    name = client.project_data_source_path("[PROJECT]", "[DATA_SOURCE]")

    response = client.get_data_source(name)
    assert expected == response

    # One request only, carrying the data-source resource name.
    assert len(stub_channel.requests) == 1
    wanted_request = datatransfer_pb2.GetDataSourceRequest(name=name)
    assert wanted_request == stub_channel.requests[0][1]
def test_get_data_source(self):
    """get_data_source should return the stubbed DataSource unchanged."""
    # Canned DataSource covering every scalar field.
    expected = datatransfer_pb2.DataSource(
        name='name2-1052831874',
        data_source_id='dataSourceId-1015796374',
        display_name='displayName1615086568',
        description='description-1724546052',
        client_id='clientId-1904089585',
        supports_multiple_transfers=True,
        update_deadline_seconds=991471694,
        default_schedule='defaultSchedule-800168235',
        supports_custom_schedule=True,
        help_url='helpUrl-789431439',
        default_data_refresh_window_days=1804935157,
        manual_runs_disabled=True)

    stub_channel = ChannelStub(responses=[expected])
    with mock.patch('google.api_core.grpc_helpers.create_channel') as create_channel:
        create_channel.return_value = stub_channel
        client = bigquery_datatransfer_v1.DataTransferServiceClient()

    name = client.project_data_source_path('[PROJECT]', '[DATA_SOURCE]')

    response = client.get_data_source(name)
    assert expected == response

    # Confirm the single outgoing request.
    assert len(stub_channel.requests) == 1
    wanted_request = datatransfer_pb2.GetDataSourceRequest(name=name)
    assert wanted_request == stub_channel.requests[0][1]
def test_delete_transfer_config(self):
    """delete_transfer_config should issue one request naming the config."""
    # Delete returns nothing, so no canned responses are needed.
    stub_channel = ChannelStub()
    client = bigquery_datatransfer_v1.DataTransferServiceClient(
        channel=stub_channel)

    name = client.location_transfer_config_path('[PROJECT]', '[LOCATION]',
                                                '[TRANSFER_CONFIG]')
    client.delete_transfer_config(name)

    assert len(stub_channel.requests) == 1
    wanted_request = datatransfer_pb2.DeleteTransferConfigRequest(name=name)
    assert wanted_request == stub_channel.requests[0][1]
def test_schedule_transfer_runs_exception(self):
    """A channel error during schedule_transfer_runs should propagate."""
    stub_channel = ChannelStub(responses=[CustomException()])
    with mock.patch('google.api_core.grpc_helpers.create_channel') as create_channel:
        create_channel.return_value = stub_channel
        client = bigquery_datatransfer_v1.DataTransferServiceClient()

    parent = client.project_transfer_config_path('[PROJECT]',
                                                 '[TRANSFER_CONFIG]')
    start_time = {}
    end_time = {}

    with pytest.raises(CustomException):
        client.schedule_transfer_runs(parent, start_time, end_time)