def test_hdfs_sensor(self):
    """Run an HdfsSensor against a known-present warehouse path end to end."""
    sensor = HdfsSensor(
        task_id='hdfs_sensor_check',
        filepath='hdfs://user/hive/warehouse/airflow.db/static_babynames',
        dag=self.dag,
    )
    sensor.run(
        start_date=DEFAULT_DATE,
        end_date=DEFAULT_DATE,
        ignore_ti_state=True,
    )
def test_legacy_file_exist(self):
    """
    Test the legacy behaviour
    :return:
    """
    # Given: a sensor pointed at a file the mocked hook reports as present
    sensor = HdfsSensor(
        task_id='Should_be_file_legacy',
        filepath='/datadirectory/datafile',
        timeout=1,
        retry_delay=timedelta(seconds=1),
        poke_interval=1,
        hook=self.hook,
    )
    # When / Then: execution completes without timing out
    sensor.execute(None)
def test_legacy_file_does_not_exists(self):
    """
    Test the legacy behaviour
    :return:
    """
    # Given: a sensor pointed at a path the mocked hook reports as absent
    sensor = HdfsSensor(
        task_id='Should_not_be_file_legacy',
        filepath='/datadirectory/not_existing_file_or_directory',
        timeout=1,
        retry_delay=timedelta(seconds=1),
        poke_interval=1,
        hook=self.hook,
    )
    # When / Then: the sensor never succeeds and times out
    with self.assertRaises(AirflowSensorTimeout):
        sensor.execute(None)
def test_legacy_file_exist_but_filesize(self):
    """
    Test the legacy behaviour with the filesize
    :return:
    """
    # Given: the file exists but is smaller than the required file_size (20)
    sensor = HdfsSensor(
        task_id='Should_be_file_legacy',
        filepath='/datadirectory/datafile',
        timeout=1,
        file_size=20,
        retry_delay=timedelta(seconds=1),
        poke_interval=1,
        hook=self.hook,
    )
    # When / Then: the size check keeps failing until the sensor times out
    with self.assertRaises(AirflowSensorTimeout):
        sensor.execute(None)