Example #1
    # assumed opener for this fragment: assign the explanatory doc to the DAG
    # (`dedent` comes from textwrap; see the preamble sketch after this example)
    dag.doc_md = dedent("""\
        This is only an example DAG to highlight usage of QuboleSensor in various scenarios;
        some of these tasks may or may not work, depending on your QDS account setup.

        To trigger the DAG manually, run the following shell command from Qubole Analyze
        against your Airflow cluster: `airflow dags trigger example_qubole_sensor`.

        *Note: Make sure that the connection `qubole_default` is properly configured before
        running this example.*
        """)

    check_s3_file = QuboleFileSensor(
        task_id='check_s3_file',
        poke_interval=60,
        timeout=600,
        data={
            "files": [
                "s3://paid-qubole/HadoopAPIExamples/jars/hadoop-0.20.1-dev-streaming.jar",
                "s3://paid-qubole/HadoopAPITests/data/{{ ds.split('-')[2] }}.tsv",
            ]  # will check for availability of all the files in the array
        },
    )

    check_hive_partition = QubolePartitionSensor(
        task_id='check_hive_partition',
        poke_interval=10,
        timeout=60,
        data={
            "schema": "default",
            "table": "my_partitioned_table",
            # the original example also lists the partition columns/values to wait for
            # (a "columns" entry); the dict is closed here so the fragment parses
        },
    )
Example #2
    # `patched_poke` is injected by a @mock.patch(...) decorator on the sensor's poke
    # method, which this fragment omits (see the self-contained sketch below)
    def test_file_sensore(self, patched_poke):
        patched_poke.return_value = True  # pretend the S3 file exists
        sensor = QuboleFileSensor(
            task_id='test_qubole_file_sensor',
            data={"files": ["s3://some_bucket/some_file"]},
        )
        self.assertTrue(sensor.poke({}))
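As noted above, the test method relies on a `@mock.patch` decorator and a `unittest.TestCase` class that the fragment omits. A self-contained sketch follows; the patch target, class name, and import path are assumptions and must match wherever `QuboleFileSensor` actually lives in your installation.

import unittest
from unittest import mock

# import path assumes apache-airflow-providers-qubole
from airflow.providers.qubole.sensors.qubole import QuboleFileSensor


class TestQuboleFileSensor(unittest.TestCase):
    # the patch target must match the module the sensor is imported from
    @mock.patch('airflow.providers.qubole.sensors.qubole.QuboleFileSensor.poke')
    def test_file_sensore(self, patched_poke):
        patched_poke.return_value = True  # mocked poke() reports the file as present
        sensor = QuboleFileSensor(
            task_id='test_qubole_file_sensor',
            data={"files": ["s3://some_bucket/some_file"]},
        )
        self.assertTrue(sensor.poke({}))


if __name__ == '__main__':
    unittest.main()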