def test_update_transfer_config(self):
    # Setup Expected Response
    name = "name3373707"
    destination_dataset_id = "destinationDatasetId1541564179"
    display_name = "displayName1615086568"
    data_source_id = "dataSourceId-1015796374"
    schedule = "schedule-697920873"
    data_refresh_window_days = 327632845
    disabled = True
    user_id = 147132913
    dataset_region = "datasetRegion959248539"
    notification_pubsub_topic = "notificationPubsubTopic1794281191"
    expected_response = {
        "name": name,
        "destination_dataset_id": destination_dataset_id,
        "display_name": display_name,
        "data_source_id": data_source_id,
        "schedule": schedule,
        "data_refresh_window_days": data_refresh_window_days,
        "disabled": disabled,
        "user_id": user_id,
        "dataset_region": dataset_region,
        "notification_pubsub_topic": notification_pubsub_topic,
    }
    expected_response = transfer_pb2.TransferConfig(**expected_response)

    # Mock the API response
    channel = ChannelStub(responses=[expected_response])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = bigquery_datatransfer_v1.DataTransferServiceClient()

    # Setup Request
    transfer_config = {}
    update_mask = {}

    response = client.update_transfer_config(transfer_config, update_mask)
    assert expected_response == response

    assert len(channel.requests) == 1
    expected_request = datatransfer_pb2.UpdateTransferConfigRequest(
        transfer_config=transfer_config, update_mask=update_mask
    )
    actual_request = channel.requests[0][1]
    assert expected_request == actual_request
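The test above only exercises the happy path, where the stubbed channel returns a valid response. Suites like this one usually pair it with an error-path test; a minimal sketch follows, assuming the same ChannelStub helper and a CustomException class defined elsewhere in this test module (both are assumptions, not shown here), plus pytest for the raises assertion.

import pytest  # assumed available at module level

def test_update_transfer_config_exception(self):
    # Mock the API response: the stubbed channel raises instead of
    # returning a TransferConfig. CustomException and ChannelStub are
    # assumed test-module helpers, mirroring the passing test above.
    channel = ChannelStub(responses=[CustomException()])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = bigquery_datatransfer_v1.DataTransferServiceClient()

    # Setup Request
    transfer_config = {}
    update_mask = {}

    # The client should surface the stubbed error to the caller.
    with pytest.raises(CustomException):
        client.update_transfer_config(transfer_config, update_mask)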
def update_transfer_config(
    self,
    transfer_config,
    update_mask,
    authorization_code=None,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
):
    """
    Updates a data transfer configuration. All fields must be set, even if
    they are not updated.

    Example:
        >>> from google.cloud import bigquery_datatransfer_v1
        >>>
        >>> client = bigquery_datatransfer_v1.DataTransferServiceClient()
        >>>
        >>> transfer_config = {}
        >>> update_mask = {}
        >>>
        >>> response = client.update_transfer_config(transfer_config, update_mask)

    Args:
        transfer_config (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.TransferConfig]):
            Data transfer configuration to update. If a dict is provided, it
            must be of the same form as the protobuf message
            :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferConfig`
        update_mask (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.FieldMask]):
            Required list of fields to be updated in this request. If a dict
            is provided, it must be of the same form as the protobuf message
            :class:`~google.cloud.bigquery_datatransfer_v1.types.FieldMask`
        authorization_code (str): Optional OAuth2 authorization code to use with
            this transfer configuration. If it is provided, the transfer
            configuration will be associated with the authorizing user. To
            obtain the ``authorization_code``, make a request to
            https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=<datatransferapiclientid>&scope=<data_source_scopes>&redirect_uri=<redirect_uri>

            * ``client_id`` should be the OAuth ``client_id`` of the BigQuery DTS
              API for the given data source, as returned by the ``ListDataSources``
              method.
            * ``data_source_scopes`` are the scopes returned by the
              ``ListDataSources`` method.
            * ``redirect_uri`` is an optional parameter. If not specified, the
              authorization code is posted to the opener of the authorization
              flow window. Otherwise it is sent to the redirect URI. The special
              value ``urn:ietf:wg:oauth:2.0:oob`` means the authorization code
              should be returned in the title bar of the browser, with the page
              text prompting the user to copy the code and paste it in the
              application.
        retry (Optional[google.api_core.retry.Retry]): A retry object used to
            retry requests. If ``None`` is specified, requests will not be
            retried.
        timeout (Optional[float]): The amount of time, in seconds, to wait for
            the request to complete. Note that if ``retry`` is specified, the
            timeout applies to each individual attempt.

    Returns:
        A :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferConfig`
        instance.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request failed for
            any reason.
        google.api_core.exceptions.RetryError: If the request failed due to a
            retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    request = datatransfer_pb2.UpdateTransferConfigRequest(
        transfer_config=transfer_config,
        update_mask=update_mask,
        authorization_code=authorization_code,
    )
    return self._update_transfer_config(request, retry=retry, timeout=timeout)
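The docstring example passes empty messages, which exercises the call path but does not update anything. The sketch below illustrates an update of a single field; it is a minimal sketch, assuming an already-created config whose resource name here is a placeholder. The dict forms map onto the TransferConfig and FieldMask protobuf messages documented above.

from google.cloud import bigquery_datatransfer_v1

client = bigquery_datatransfer_v1.DataTransferServiceClient()

# Hypothetical resource name; substitute a real
# projects/{project_id}/transferConfigs/{config_id} path.
transfer_config = {
    "name": "projects/my-project/transferConfigs/my-config",
    "display_name": "Nightly load",
}

# The mask lists the fields being updated in this request
# (FieldMask's repeated `paths` field, given here in dict form).
update_mask = {"paths": ["display_name"]}

response = client.update_transfer_config(transfer_config, update_mask)
print(response.display_name)

Passing dicts relies on the protobuf coercion the docstring describes; constructing TransferConfig and FieldMask messages explicitly works the same way.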