def test_execute(self, mock_name, mock_hook):
    mock_name.return_value = TRANSFER_CONFIG_ID
    mock_xcom = mock.MagicMock()
    op = BigQueryCreateDataTransferOperator(
        transfer_config=TRANSFER_CONFIG, project_id=PROJECT_ID, task_id="id"
    )
    op.xcom_push = mock_xcom
    op.execute(None)

    mock_hook.return_value.create_transfer_config.assert_called_once_with(
        authorization_code=None,
        metadata=None,
        transfer_config=TRANSFER_CONFIG,
        project_id=PROJECT_ID,
        retry=None,
        timeout=None,
    )
def test_execute(self, mock_hook):
    op = BigQueryCreateDataTransferOperator(
        transfer_config=TRANSFER_CONFIG, project_id=PROJECT_ID, task_id="id"
    )
    ti = mock.MagicMock()

    op.execute({'ti': ti})

    mock_hook.return_value.create_transfer_config.assert_called_once_with(
        authorization_code=None,
        metadata=None,
        transfer_config=TRANSFER_CONFIG,
        project_id=PROJECT_ID,
        retry=None,
        timeout=None,
    )
    ti.xcom_push.assert_called_once_with(
        execution_date=None, key='transfer_config_id', value='1a2b3c'
    )
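Both test variants rely on module-level fixtures and a patched hook that are not shown in this excerpt. A minimal sketch of that scaffolding is given below; the constants, the created-config dict, and the patch target path are assumptions for illustration (the provider's own test suite builds its fixtures from protobuf messages, and the hook's module path varies across provider releases), with the test body as in the second variant above.

from unittest import mock

from airflow.providers.google.cloud.operators.bigquery_dts import (
    BigQueryCreateDataTransferOperator,
)

# Hypothetical fixture values -- not the provider's real test data.
PROJECT_ID = "example-project"
TRANSFER_CONFIG = {"display_name": "example-transfer"}
# A created config whose resource name ends in the id the test expects
# to see pushed to XCom ('1a2b3c').
CREATED_CONFIG = {"name": "projects/123/locations/us/transferConfigs/1a2b3c"}

# Assumed patch target; check the installed provider for the actual path.
HOOK_PATH = (
    "airflow.providers.google.cloud.operators.bigquery_dts"
    ".BiqQueryDataTransferServiceHook"
)


class TestBigQueryCreateDataTransferOperator:
    @mock.patch(
        HOOK_PATH,
        # Configure the mocked hook so create_transfer_config returns the
        # hypothetical created config instead of a bare MagicMock.
        **{"return_value.create_transfer_config.return_value": CREATED_CONFIG},
    )
    def test_execute(self, mock_hook):
        ...  # body as in the second test variant above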
"schedule_options": schedule_options, "params": PARAMS, } # [END howto_bigquery_dts_create_args] with models.DAG( "example_gcp_bigquery_dts", schedule_interval=None, # Override to match your needs start_date=days_ago(1), tags=['example'], ) as dag: # [START howto_bigquery_create_data_transfer] gcp_bigquery_create_transfer = BigQueryCreateDataTransferOperator( transfer_config=TRANSFER_CONFIG, project_id=GCP_PROJECT_ID, task_id="gcp_bigquery_create_transfer", ) transfer_config_id = ( "{{ task_instance.xcom_pull('gcp_bigquery_create_transfer', key='transfer_config_id') }}" ) # [END howto_bigquery_create_data_transfer] # [START howto_bigquery_start_transfer] gcp_bigquery_start_transfer = BigQueryDataTransferServiceStartTransferRunsOperator( task_id="gcp_bigquery_start_transfer", transfer_config_id=transfer_config_id, requested_run_time={"seconds": int(time.time() + 60)}, ) run_id = "{{ task_instance.xcom_pull('gcp_bigquery_start_transfer', key='run_id') }}"