def test_create_transfer_config(self):
    # Setup Expected Response
    name = "name3373707"
    destination_dataset_id = "destinationDatasetId1541564179"
    display_name = "displayName1615086568"
    data_source_id = "dataSourceId-1015796374"
    schedule = "schedule-697920873"
    data_refresh_window_days = 327632845
    disabled = True
    user_id = 147132913
    dataset_region = "datasetRegion959248539"
    notification_pubsub_topic = "notificationPubsubTopic1794281191"
    expected_response = {
        "name": name,
        "destination_dataset_id": destination_dataset_id,
        "display_name": display_name,
        "data_source_id": data_source_id,
        "schedule": schedule,
        "data_refresh_window_days": data_refresh_window_days,
        "disabled": disabled,
        "user_id": user_id,
        "dataset_region": dataset_region,
        "notification_pubsub_topic": notification_pubsub_topic,
    }
    expected_response = transfer_pb2.TransferConfig(**expected_response)

    # Mock the API response
    channel = ChannelStub(responses=[expected_response])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = bigquery_datatransfer_v1.DataTransferServiceClient()

    # Setup Request
    parent = client.project_path("[PROJECT]")
    transfer_config = {}

    response = client.create_transfer_config(parent, transfer_config)
    assert expected_response == response

    assert len(channel.requests) == 1
    expected_request = datatransfer_pb2.CreateTransferConfigRequest(
        parent=parent, transfer_config=transfer_config
    )
    actual_request = channel.requests[0][1]
    assert expected_request == actual_request
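# A minimal companion sketch, not part of the generated suite above: the same
# ChannelStub pattern can cover the error path by queueing an exception in
# place of a response. It assumes the test harness's ChannelStub raises queued
# exceptions and that a CustomException error type and pytest are available,
# as in typical GAPIC-generated test modules.
def test_create_transfer_config_exception(self):
    # Mock the API response: the stub raises instead of returning a message
    channel = ChannelStub(responses=[CustomException()])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = bigquery_datatransfer_v1.DataTransferServiceClient()

    # Setup request
    parent = client.project_path("[PROJECT]")
    transfer_config = {}

    # The client should surface the stubbed exception to the caller
    with pytest.raises(CustomException):
        client.create_transfer_config(parent, transfer_config)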
def create_transfer_config(
    self,
    parent,
    transfer_config,
    authorization_code=None,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
):
    """
    Creates a new data transfer configuration.

    Example:
        >>> from google.cloud import bigquery_datatransfer_v1
        >>>
        >>> client = bigquery_datatransfer_v1.DataTransferServiceClient()
        >>>
        >>> parent = client.location_path('[PROJECT]', '[LOCATION]')
        >>> transfer_config = {}
        >>>
        >>> response = client.create_transfer_config(parent, transfer_config)

    Args:
        parent (str): The BigQuery project id where the transfer configuration
            should be created. Must be in the format
            /projects/{project_id}/locations/{location_id} or
            /projects/{project_id}/locations/-. When '-' is specified as the
            location_id, the location is inferred from the destination dataset
            region.
        transfer_config (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.TransferConfig]):
            Data transfer configuration to create.
            If a dict is provided, it must be of the same form as the protobuf
            message :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferConfig`
        authorization_code (str): Optional OAuth2 authorization code to use
            with this transfer configuration. This is required if new
            credentials are needed, as indicated by ``CheckValidCreds``. To
            obtain an authorization_code, make a request to
            https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=<datatransferapiclientid>&scope=<data_source_scopes>&redirect_uri=<redirect_uri>

            * client_id should be the OAuth client_id of the BigQuery DTS API
              for the given data source returned by the ListDataSources method.
            * data_source_scopes are the scopes returned by the ListDataSources
              method.
            * redirect_uri is an optional parameter. If not specified, the
              authorization code is posted to the opener of the authorization
              flow window. Otherwise it will be sent to the redirect uri. A
              special value of urn:ietf:wg:oauth:2.0:oob means that the
              authorization code should be returned in the title bar of the
              browser, with the page text prompting the user to copy the code
              and paste it in the application.
        retry (Optional[google.api_core.retry.Retry]): A retry object used to
            retry requests. If ``None`` is specified, requests will not be
            retried.
        timeout (Optional[float]): The amount of time, in seconds, to wait for
            the request to complete. Note that if ``retry`` is specified, the
            timeout applies to each individual attempt.

    Returns:
        A :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferConfig` instance.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request
            failed for any reason.
        google.api_core.exceptions.RetryError: If the request failed due
            to a retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    request = datatransfer_pb2.CreateTransferConfigRequest(
        parent=parent,
        transfer_config=transfer_config,
        authorization_code=authorization_code,
    )
    return self._create_transfer_config(request, retry=retry, timeout=timeout)
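# Usage sketch for the method above, assuming default application credentials
# are configured. The dataset id, display name, data source id, and schedule
# below are illustrative placeholders, not values the API mandates; as the
# docstring notes, a dict is accepted wherever a TransferConfig message is
# expected, and its keys must mirror the TransferConfig proto fields.
from google.cloud import bigquery_datatransfer_v1

client = bigquery_datatransfer_v1.DataTransferServiceClient()
parent = client.location_path("[PROJECT]", "[LOCATION]")

transfer_config = {
    "destination_dataset_id": "my_dataset",  # placeholder dataset id
    "display_name": "nightly copy",          # placeholder display name
    "data_source_id": "scheduled_query",     # placeholder data source id
    "schedule": "every 24 hours",            # DTS schedule grammar
}

response = client.create_transfer_config(parent, transfer_config)
print(response.name)  # server-assigned resource name of the new config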