Example #1
    def test_partition_sensor(self, patched_poke):
        patched_poke.return_value = True

        sensor = QubolePartitionSensor(
            task_id='test_qubole_partition_sensor',
            data={
                "schema": "default",
                "table": "my_partitioned_table",
                "columns": [{"column": "month", "values": ["1", "2"]}],
            },
        )

        assert sensor.poke({})
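In the original source, `patched_poke` is injected by a mock.patch decorator on the test method, so the snippet above will not run on its own. A minimal, self-contained harness might look like the following sketch; the patch target and the test class name are assumptions, not part of the original excerpt:

from unittest.mock import patch

from airflow.providers.qubole.sensors.qubole import QubolePartitionSensor


class TestQubolePartitionSensor:
    # Assumed patch target: replacing the base sensor's poke() keeps the test
    # from contacting the Qubole API; the mocked result is controlled through
    # patched_poke.return_value.
    @patch("airflow.providers.qubole.sensors.qubole.QuboleSensor.poke")
    def test_partition_sensor(self, patched_poke):
        patched_poke.return_value = True

        sensor = QubolePartitionSensor(
            task_id='test_qubole_partition_sensor',
            data={
                "schema": "default",
                "table": "my_partitioned_table",
                "columns": [{"column": "month", "values": ["1", "2"]}],
            },
        )

        assert sensor.poke({})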
Example #2
    def test_partition_sensor_error(self, patched_poke):
        patched_poke.return_value = True

        dag = DAG(DAG_ID, start_date=DEFAULT_DATE)

        with pytest.raises(AirflowException):
            QubolePartitionSensor(
                task_id='test_qubole_partition_sensor',
                poke_interval=1,
                data={
                    "schema": "default",
                    "table": "my_partitioned_table",
                    "columns": [{"column": "month", "values": ["1", "2"]}],
                },
                dag=dag,
            )
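Note that the expected AirflowException is raised at construction time, not while poking: the Qubole sensors reject a poke_interval shorter than 5 seconds, so passing poke_interval=1 fails validation immediately and the patched return value is never used.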
Example #3
    check_hive_partition = QubolePartitionSensor(
        task_id='check_hive_partition',
        poke_interval=10,
        timeout=60,
        data={
            "schema": "default",
            "table": "my_partitioned_table",
            "columns": [
                {"column": "month", "values": ["{{ ds.split('-')[1] }}"]},
                {
                    "column": "day",
                    "values": [
                        "{{ ds.split('-')[2] }}",
                        "{{ yesterday_ds.split('-')[2] }}",
                    ],
                },
            ],  # will check for partitions like [month=12/day=12,month=12/day=13]
        },
    )

    check_s3_file >> check_hive_partition
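The check_s3_file task chained in the last line is not defined in this excerpt. A minimal DAG wrapper that would make the snippet runnable might look like the sketch below; the QuboleFileSensor arguments, the DAG id, schedule, and S3 path are assumptions, not values taken from the original example:

from datetime import datetime

from airflow import DAG
from airflow.providers.qubole.sensors.qubole import (
    QuboleFileSensor,
    QubolePartitionSensor,
)

with DAG(
    dag_id="example_qubole_sensor",   # assumed DAG id
    start_date=datetime(2021, 1, 1),  # assumed start date
    schedule_interval=None,
    catchup=False,
) as dag:
    # Assumed upstream task: waits (via Qubole) for a file to appear in S3.
    check_s3_file = QuboleFileSensor(
        task_id='check_s3_file',
        poke_interval=60,
        timeout=600,
        data={"files": ["s3://my-bucket/incoming/data.csv"]},  # assumed path
    )

    check_hive_partition = QubolePartitionSensor(
        task_id='check_hive_partition',
        poke_interval=10,
        timeout=60,
        data={
            "schema": "default",
            "table": "my_partitioned_table",
            "columns": [
                {"column": "month", "values": ["{{ ds.split('-')[1] }}"]},
                {
                    "column": "day",
                    "values": [
                        "{{ ds.split('-')[2] }}",
                        "{{ yesterday_ds.split('-')[2] }}",
                    ],
                },
            ],
        },
    )

    check_s3_file >> check_hive_partition

At run time, {{ ds }} renders to the logical date in YYYY-MM-DD form and {{ yesterday_ds }} to the day before it, so for a run dated 2021-12-13 the sensor checks the partitions month=12/day=13 and month=12/day=12, as the inline comment in the example notes.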