def test_named_hive_partition_sensor(self):
    # Sensor succeeds when the named partition exists in the metastore.
    op = NamedHivePartitionSensor(
        task_id='hive_partition_check',
        partition_names=[
            "airflow.static_babynames_partitioned/ds={{ds}}"
        ],
        dag=self.dag)
    op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE,
           ignore_ti_state=True)
def test_named_hive_partition_sensor_times_out_on_nonexistent_partition(self):
    # Sensor raises AirflowSensorTimeout when one of the named partitions
    # never appears within the configured timeout.
    with self.assertRaises(AirflowSensorTimeout):
        op = NamedHivePartitionSensor(
            task_id='hive_partition_check',
            partition_names=[
                "airflow.static_babynames_partitioned/ds={{ds}}",
                "airflow.static_babynames_partitioned/ds=nonexistent"
            ],
            poke_interval=0.1,
            timeout=1,
            dag=self.dag)
        op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE,
               ignore_ti_state=True)
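# The two tests below exercise the sensor against a mocked metastore. They use
# a MockHiveMetastoreHook helper that is assumed to be defined elsewhere in
# this test module; under that assumption, a minimal sketch of such a stub
# would look roughly like:
#
#     class MockHiveMetastoreHook(HiveMetastoreHook):
#         def __init__(self, *args, **kwargs):
#             # Skip the real HiveMetastoreHook.__init__ so no metastore
#             # connection is attempted; each test patches
#             # check_for_named_partition with mock.MagicMock before running
#             # the sensor.
#             pass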
def test_succeeds_on_one_partition(self):
    # Mock the metastore hook so the sensor sees the partition as present,
    # then verify the partition name was parsed and checked exactly once.
    mock_hive_metastore_hook = MockHiveMetastoreHook()
    mock_hive_metastore_hook.check_for_named_partition = mock.MagicMock(
        return_value=True)
    op = NamedHivePartitionSensor(
        task_id='hive_partition_check',
        partition_names=["airflow.static_babynames_partitioned/ds={{ds}}"],
        dag=self.dag,
        hook=mock_hive_metastore_hook)
    op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE,
           ignore_ti_state=True)
    mock_hive_metastore_hook.check_for_named_partition.assert_called_once_with(
        'airflow', 'static_babynames_partitioned', 'ds=2015-01-01')
def test_times_out_on_nonexistent_partition(self):
    # With the mocked hook always reporting the partition as missing,
    # the sensor must time out and raise AirflowSensorTimeout.
    with self.assertRaises(AirflowSensorTimeout):
        mock_hive_metastore_hook = MockHiveMetastoreHook()
        mock_hive_metastore_hook.check_for_named_partition = mock.MagicMock(
            return_value=False)
        op = NamedHivePartitionSensor(
            task_id='hive_partition_check',
            partition_names=[
                "airflow.static_babynames_partitioned/ds={{ds}}",
                "airflow.static_babynames_partitioned/ds=nonexistent"
            ],
            poke_interval=0.1,
            timeout=1,
            dag=self.dag,
            hook=mock_hive_metastore_hook)
        op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE,
               ignore_ti_state=True)
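# A minimal sketch showing how the "schema.table/partition" strings used above
# are split by the sensor, matching the arguments asserted in
# test_succeeds_on_one_partition. It assumes the parse_partition_name static
# method is available on NamedHivePartitionSensor in this Airflow version;
# adjust if the sensor exposes a different helper.
def test_parse_partition_name_sketch(self):
    schema, table, partition = NamedHivePartitionSensor.parse_partition_name(
        "airflow.static_babynames_partitioned/ds=2015-01-01")
    self.assertEqual(schema, 'airflow')
    self.assertEqual(table, 'static_babynames_partitioned')
    self.assertEqual(partition, 'ds=2015-01-01')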