def test_fractional_seconds(self):
    """
    Tests if fractional seconds are stored in the database
    """
    dag = DAG(TEST_DAG_ID + 'test_fractional_seconds')
    dag.schedule_interval = '@once'
    dag.add_task(BaseOperator(
        task_id="faketastic",
        owner='Also fake',
        start_date=datetime(2015, 1, 2, 0, 0)))

    start_date = timezone.utcnow()

    run = dag.create_dagrun(
        run_id='test_' + start_date.isoformat(),
        execution_date=start_date,
        start_date=start_date,
        state=State.RUNNING,
        external_trigger=False
    )

    run.refresh_from_db()

    self.assertEqual(start_date, run.execution_date,
                     "dag run execution_date loses precision")
    self.assertEqual(start_date, run.start_date,
                     "dag run start_date loses precision")
def setUp(self):
    from airflow.contrib.hooks.fs_hook import FSHook
    hook = FSHook()
    args = {'owner': 'airflow', 'start_date': DEFAULT_DATE}
    dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
    dag.schedule_interval = '@once'
    self.hook = hook
    self.dag = dag
def setUp(self):
    configuration.load_test_config()
    from airflow.contrib.hooks.fs_hook import FSHook
    hook = FSHook()
    args = {"owner": "airflow", "start_date": DEFAULT_DATE, "provide_context": True}
    dag = DAG(TEST_DAG_ID + "test_schedule_dag_once", default_args=args)
    dag.schedule_interval = "@once"
    self.hook = hook
    self.dag = dag
def setUp(self):
    configuration.load_test_config()
    from airflow.contrib.hooks.fs_hook import FSHook
    hook = FSHook()
    args = {
        'owner': 'airflow',
        'start_date': DEFAULT_DATE,
        'provide_context': True
    }
    dag = DAG(TEST_DAG_ID+'test_schedule_dag_once', default_args=args)
    dag.schedule_interval = '@once'
    self.hook = hook
    self.dag = dag
def setUp(self):
    configuration.load_test_config()
    from airflow.contrib.hooks.fs_hook import FSHook
    hook = FSHook()
    args = {
        'owner': 'airflow',
        'start_date': DEFAULT_DATE,
        'provide_context': True
    }
    dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
    dag.schedule_interval = '@once'
    self.hook = hook
    self.dag = dag
def test_schedule_dag_once(self):
    """
    Tests scheduling a dag scheduled for @once - should be scheduled the
    first time it is called, and not scheduled the second.
    """
    dag = DAG(TEST_DAG_ID+'test_schedule_dag_once')
    dag.schedule_interval = '@once'
    dag.tasks = [models.BaseOperator(
        task_id="faketastic",
        owner='Also fake',
        start_date=datetime(2015, 1, 2, 0, 0))]
    dag_run = jobs.SchedulerJob(test_mode=True).schedule_dag(dag)
    dag_run2 = jobs.SchedulerJob(test_mode=True).schedule_dag(dag)

    assert dag_run is not None
    assert dag_run2 is None
def setUp(self):
    configuration.test_mode()
    from airflow.contrib.hooks.ssh_hook import SSHHook
    hook = SSHHook()
    hook.no_host_key_check = True
    args = {
        'owner': 'airflow',
        'start_date': DEFAULT_DATE,
        'provide_context': True
    }
    dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
    dag.schedule_interval = '@once'
    self.hook = hook
    self.dag = dag
def setUp(self):
    configuration.test_mode()
    from airflow.contrib.hooks.ssh_hook import SSHHook
    hook = SSHHook()
    hook.no_host_key_check = True
    args = {
        'owner': 'airflow',
        'start_date': DEFAULT_DATE,
        'provide_context': True
    }
    dag = DAG(TEST_DAG_ID+'test_schedule_dag_once', default_args=args)
    dag.schedule_interval = '@once'
    self.hook = hook
    self.dag = dag
def test_schedule_dag_once(self):
    """
    Tests scheduling a dag scheduled for @once - should be scheduled the
    first time it is called, and not scheduled the second.
    """
    dag = DAG(self.TEST_SCHEDULE_ONCE_DAG_ID)
    dag.schedule_interval = '@once'
    dag.add_task(BaseOperator(
        task_id="faketastic",
        owner='Also fake',
        start_date=datetime(2015, 1, 2, 0, 0)))
    dag_run = DagFileProcessor(dag_ids=[], log=mock.MagicMock()).create_dag_run(dag)
    dag_run2 = DagFileProcessor(dag_ids=[], log=mock.MagicMock()).create_dag_run(dag)

    self.assertIsNotNone(dag_run)
    self.assertIsNone(dag_run2)
    dag.clear()
def test_schedule_dag_once(self):
    """
    Tests scheduling a dag scheduled for @once - should be scheduled the
    first time it is called, and not scheduled the second.
    """
    dag = DAG(self.TEST_SCHEDULE_ONCE_DAG_ID)
    dag.schedule_interval = '@once'
    dag.add_task(BaseOperator(
        task_id="faketastic",
        owner='Also fake',
        start_date=datetime(2015, 1, 2, 0, 0)))
    dag_run = jobs.SchedulerJob(**self.default_scheduler_args).create_dag_run(dag)
    dag_run2 = jobs.SchedulerJob(**self.default_scheduler_args).create_dag_run(dag)

    self.assertIsNotNone(dag_run)
    self.assertIsNone(dag_run2)
    dag.clear()
def setUp(self):
    args = {
        'owner': 'airflow',
        'start_date': DEFAULT_DATE,
    }
    dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
    dag.schedule_interval = '@once'
    self.dag = dag
    self.sensor = GCSUploadSessionCompleteSensor(
        task_id='sensor',
        bucket='test-bucket',
        prefix='test-prefix/path',
        inactivity_period=12,
        poke_interval=10,
        min_objects=1,
        allow_delete=False,
        previous_num_objects=0,
        dag=self.dag)
    self.last_mocked_date = datetime(2019, 4, 24, 0, 0, 0)
def setUp(self):
    if sys.version_info[0] == 3:
        raise unittest.SkipTest('SSHExecuteOperatorTest won\'t work with '
                                'python3. No need to test anything here')
    configuration.load_test_config()
    from airflow.contrib.hooks.ssh_hook import SSHHook
    hook = mock.MagicMock(spec=SSHHook)
    hook.no_host_key_check = True
    hook.Popen.return_value.stdout = StringIO(u'stdout')
    hook.Popen.return_value.returncode = False
    args = {
        'owner': 'airflow',
        'start_date': DEFAULT_DATE,
        'provide_context': True
    }
    dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
    dag.schedule_interval = '@once'
    self.hook = hook
    self.dag = dag
def setUp(self):
    if sys.version_info[0] == 3:
        raise unittest.SkipTest('SSHExecuteOperatorTest won\'t work with '
                                'python3. No need to test anything here')
    configuration.load_test_config()
    from airflow.contrib.hooks.ssh_hook import SSHHook
    hook = mock.MagicMock(spec=SSHHook)
    hook.no_host_key_check = True
    hook.Popen.return_value.stdout = StringIO(u'stdout')
    hook.Popen.return_value.returncode = False
    args = {
        'owner': 'airflow',
        'start_date': DEFAULT_DATE,
        'provide_context': True
    }
    dag = DAG(TEST_DAG_ID+'test_schedule_dag_once', default_args=args)
    dag.schedule_interval = '@once'
    self.hook = hook
    self.dag = dag
def setUp(self):
    configuration.load_test_config()
    args = {
        'owner': 'airflow',
        'start_date': DEFAULT_DATE,
        'provide_context': True
    }
    dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
    dag.schedule_interval = '@once'
    self.dag = dag
    self.sensor = gcs_sensor.GoogleCloudStorageUploadSessionCompleteSensor(
        task_id='sensor',
        bucket='test-bucket',
        prefix='test-prefix/path',
        inactivity_period=12,
        poke_interval=10,
        min_objects=1,
        allow_delete=False,
        previous_num_objects=0,
        dag=self.dag)
    self.last_mocked_date = datetime(2019, 4, 24, 0, 0, 0)
def setUp(self):
    configuration.load_test_config()
    args = {
        'owner': 'airflow',
        'start_date': DEFAULT_DATE,
        'provide_context': True
    }
    dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
    dag.schedule_interval = '@once'
    self.dag = dag
    self.sensor = gcs_sensor.GoogleCloudStorageUploadSessionCompleteSensor(
        task_id='sensor',
        bucket='test-bucket',
        prefix='test-prefix/path',
        inactivity_period=12,
        poke_interval=10,
        min_objects=1,
        allow_delete=False,
        previous_num_objects=0,
        dag=self.dag
    )
    self.last_mocked_date = datetime(2019, 4, 24, 0, 0, 0)
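
The snippets above are method bodies only; below is a minimal sketch of the module-level context they assume. Import paths and the TEST_DAG_ID / DEFAULT_DATE values follow the conventions of Airflow's own 1.x-era test modules, but they vary across the Airflow versions these tests target, so treat every name here as an assumption rather than an exact source.

# Sketch of assumed module-level test context (hypothetical; paths vary by
# Airflow version, and the mock/StringIO imports are only needed by the
# SSH snippets above).
import sys
import unittest
from datetime import datetime

import mock
from six import StringIO

from airflow import configuration, jobs, models
from airflow.models import DAG, BaseOperator
from airflow.utils import timezone
from airflow.utils.state import State

TEST_DAG_ID = 'unit_tests'           # assumed prefix shared by the test DAG ids
DEFAULT_DATE = datetime(2015, 1, 1)  # assumed default start_date for default_args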