Code example #1
    def test_templates(self, _):
        dag_id = 'test_dag_id'
        # pylint: disable=attribute-defined-outside-init
        self.dag = DAG(dag_id, default_args={'start_date': DEFAULT_DATE})
        # body, gcp_conn_id and aws_conn_id are templated fields, so the
        # Jinja expression "{{ dag.dag_id }}" should render to the dag_id.
        op = CloudDataTransferServiceCreateJobOperator(
            body={"description": "{{ dag.dag_id }}"},
            gcp_conn_id='{{ dag.dag_id }}',
            aws_conn_id='{{ dag.dag_id }}',
            task_id='task-id',
            dag=self.dag,
        )
        ti = TaskInstance(op, DEFAULT_DATE)
        ti.render_templates()
        self.assertEqual(dag_id, getattr(op, 'body')[DESCRIPTION])
        self.assertEqual(dag_id, getattr(op, 'gcp_conn_id'))
        self.assertEqual(dag_id, getattr(op, 'aws_conn_id'))
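
The unused `_` parameter implies a mock.patch decorator that the snippet cuts off. A minimal sketch of the surrounding test class, assuming a hypothetical patch target, class name, and DEFAULT_DATE value (none of which is shown above):

import unittest
from unittest import mock

from airflow.models import DAG, TaskInstance
from airflow.providers.google.cloud.operators.cloud_storage_transfer_service import (
    CloudDataTransferServiceCreateJobOperator,
)
from airflow.utils import timezone

DEFAULT_DATE = timezone.datetime(2017, 1, 1)  # assumed value
DESCRIPTION = 'description'  # inferred from the body key used in the test


class TestTransferJobCreateOperator(unittest.TestCase):  # hypothetical name
    # Assumed patch target: the hook as imported by the operators module, so
    # no real GCP call is made; the injected mock is ignored (hence `_`).
    @mock.patch(
        'airflow.providers.google.cloud.operators.cloud_storage_transfer_service'
        '.CloudDataTransferServiceHook'
    )
    def test_templates(self, _):
        ...  # body as in the example above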
Code example #2
    def test_job_create_multiple(self, aws_hook, gcp_hook):
        # The mocked AWS hook returns static credentials; the mocked GCP hook
        # returns the raw transfer job unchanged.
        aws_hook.return_value.get_credentials.return_value = Credentials(
            TEST_AWS_ACCESS_KEY_ID, TEST_AWS_ACCESS_SECRET, None
        )
        gcp_hook.return_value.create_transfer_job.return_value = VALID_TRANSFER_JOB_AWS_RAW
        body = deepcopy(VALID_TRANSFER_JOB_AWS)

        # Executing two operators against the same body must succeed both
        # times, i.e. the first execution may not mutate the shared body.
        op = CloudDataTransferServiceCreateJobOperator(body=body, task_id=TASK_ID)
        result = op.execute(None)
        self.assertEqual(result, VALID_TRANSFER_JOB_AWS_RAW)

        op = CloudDataTransferServiceCreateJobOperator(body=body, task_id=TASK_ID)
        result = op.execute(None)
        self.assertEqual(result, VALID_TRANSFER_JOB_AWS_RAW)
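
The aws_hook and gcp_hook parameters imply two stacked mock.patch decorators above the method, also cut off by the extraction. A sketch with hypothetical patch targets; with stacked decorators the bottom one is applied first and therefore binds the first mock parameter (aws_hook):

import unittest
from unittest import mock


class TestTransferJobCreateOperator(unittest.TestCase):  # hypothetical name
    # Both patch targets are assumptions, not shown in the snippet.
    @mock.patch(
        'airflow.providers.google.cloud.operators.cloud_storage_transfer_service'
        '.CloudDataTransferServiceHook'
    )
    @mock.patch('airflow.providers.amazon.aws.hooks.base_aws.AwsBaseHook')
    def test_job_create_multiple(self, aws_hook, gcp_hook):
        ...  # body as in the example above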
Code example #3
}
# [END howto_operator_gcp_transfer_create_job_body_aws]

# [START howto_operator_gcp_transfer_default_args]
default_args = {'owner': 'airflow'}
# [END howto_operator_gcp_transfer_default_args]

with models.DAG(
        'example_gcp_transfer_aws',
        schedule_interval=None,  # Override to match your needs
        start_date=days_ago(1),
        tags=['example'],
) as dag:

    # [START howto_operator_gcp_transfer_create_job]
    create_transfer_job_from_aws = CloudDataTransferServiceCreateJobOperator(
        task_id="create_transfer_job_from_aws", body=aws_to_gcs_transfer_body)
    # [END howto_operator_gcp_transfer_create_job]

    wait_for_operation_to_start = CloudDataTransferServiceJobStatusSensor(
        task_id="wait_for_operation_to_start",
        job_name="{{task_instance.xcom_pull('create_transfer_job_from_aws')['name']}}",
        project_id=GCP_PROJECT_ID,
        expected_statuses={GcpTransferOperationStatus.IN_PROGRESS},
        poke_interval=WAIT_FOR_OPERATION_POKE_INTERVAL,
    )

    # [START howto_operator_gcp_transfer_pause_operation]
    pause_operation = CloudDataTransferServicePauseOperationOperator(
        task_id="pause_operation",
        # Assumed completion: the job-status sensor pushes the operations it
        # sensed to XCom under the 'sensed_operations' key.
        operation_name="{{task_instance.xcom_pull('wait_for_operation_to_start', "
        "key='sensed_operations')[0]['name']}}",
    )
    # [END howto_operator_gcp_transfer_pause_operation]
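
For completeness, a sketch of the imports this DAG file relies on and a plausible dependency chain at the end of the file; the chain is an assumption, since the snippet ends before any dependencies are set:

from airflow import models
from airflow.providers.google.cloud.hooks.cloud_storage_transfer_service import (
    GcpTransferOperationStatus,
)
from airflow.providers.google.cloud.operators.cloud_storage_transfer_service import (
    CloudDataTransferServiceCreateJobOperator,
    CloudDataTransferServicePauseOperationOperator,
)
from airflow.providers.google.cloud.sensors.cloud_storage_transfer_service import (
    CloudDataTransferServiceJobStatusSensor,
)
from airflow.utils.dates import days_ago

# Assumed ordering: create the job, wait until an operation is running,
# then pause that operation.
create_transfer_job_from_aws >> wait_for_operation_to_start >> pause_operation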
Code example #4
# [START howto_operator_gcp_transfer_update_job_body]
update_body = {
    PROJECT_ID: GCP_PROJECT_ID,
    TRANSFER_JOB: {DESCRIPTION: "description_updated"},
    TRANSFER_JOB_FIELD_MASK: "description",
}
# [END howto_operator_gcp_transfer_update_job_body]

with models.DAG(
        "example_gcp_transfer",
        schedule_interval='@once',  # Override to match your needs
        start_date=days_ago(1),
        tags=["example"],
) as dag:

    create_transfer = CloudDataTransferServiceCreateJobOperator(
        task_id="create_transfer", body=gcs_to_gcs_transfer_body)

    # [START howto_operator_gcp_transfer_update_job]
    update_transfer = CloudDataTransferServiceUpdateJobOperator(
        task_id="update_transfer",
        job_name="{{task_instance.xcom_pull('create_transfer')['name']}}",
        body=update_body,
    )
    # [END howto_operator_gcp_transfer_update_job]

    wait_for_transfer = CloudDataTransferServiceJobStatusSensor(
        task_id="wait_for_transfer",
        job_name="{{task_instance.xcom_pull('create_transfer')['name']}}",
        project_id=GCP_PROJECT_ID,
        expected_statuses={GcpTransferOperationStatus.SUCCESS},
    )