def test_parse_once(self):
    clear_db_serialized_dags()
    clear_db_dags()

    test_dag_path = os.path.join(TEST_DAG_FOLDER, 'test_scheduler_dags.py')
    async_mode = 'sqlite' not in conf.get('core', 'sql_alchemy_conn')
    processor_agent = DagFileProcessorAgent(
        test_dag_path, 1, type(self)._processor_factory, timedelta.max, [], False, async_mode
    )
    processor_agent.start()
    if not async_mode:
        processor_agent.run_single_parsing_loop()
    while not processor_agent.done:
        if not async_mode:
            processor_agent.wait_until_finished()
        processor_agent.heartbeat()

    assert processor_agent.all_files_processed
    assert processor_agent.done

    with create_session() as session:
        dag_ids = session.query(DagModel.dag_id).order_by("dag_id").all()
        assert dag_ids == [('test_start_date_scheduling',), ('test_task_start_date_scheduling',)]

        dag_ids = session.query(SerializedDagModel.dag_id).order_by("dag_id").all()
        assert dag_ids == [('test_start_date_scheduling',), ('test_task_start_date_scheduling',)]
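
# For context: the clear_db_* helpers used throughout these fixtures simply
# truncate the relevant metadata tables. A minimal sketch, assuming Airflow's
# create_session helper and standard models (import paths vary by Airflow
# version; the real helpers live in tests/test_utils/db.py):
from airflow.models import DagModel
from airflow.models.serialized_dag import SerializedDagModel
from airflow.utils.session import create_session


def clear_db_dags():
    with create_session() as session:
        session.query(DagModel).delete()


def clear_db_serialized_dags():
    with create_session() as session:
        session.query(SerializedDagModel).delete()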

def setUp(self) -> None:
    db.clear_db_runs()
    db.clear_db_task_state()
    db.clear_db_serialized_dags()
    self._serialized_dag, self._dag_run = self.init_dag_and_dag_run(
        '../../dags/test_task_event_handler_dag.py',
        'test_event_handler',
        timezone.datetime(2017, 1, 1),
    )

def setUp(self):
    db.clear_db_jobs()
    db.clear_db_dags()
    db.clear_db_serialized_dags()
    db.clear_db_runs()
    db.clear_db_task_execution()
    db.clear_db_message()
    self.scheduler = None
    self.port = 50102
    self.storage = MemoryEventStorage()
    self.master = NotificationMaster(NotificationService(self.storage), self.port)
    self.master.run()
    self.client = NotificationClient(
        server_uri="localhost:{}".format(self.port),
        default_namespace="test_namespace",
    )
    # Give the notification master a moment to start serving before the tests
    # begin talking to it.
    time.sleep(1)
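
# The setUp above starts a notification master, but no matching tearDown is
# shown. A minimal sketch, assuming NotificationMaster exposes a stop() method
# (an assumption; verify against your notification_service version):
def tearDown(self):
    if self.master is not None:
        self.master.stop()  # assumed shutdown hook; frees port 50102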

def test_dag_with_system_exit(self):
    """
    Test to check that a DAG with a sys.exit() doesn't break the scheduler.
    """
    # We need to _actually_ parse the files here to test the behaviour.
    # Right now the parsing code lives in SchedulerJob, even though it's
    # called via utils.dag_processing.
    from airflow.jobs.scheduler_job import SchedulerJob

    dag_id = 'exit_test_dag'
    dag_directory = TEST_DAG_FOLDER.parent / 'dags_with_system_exit'

    # Delete the one valid DAG/SerializedDAG, and check that it gets re-created
    clear_db_dags()
    clear_db_serialized_dags()

    child_pipe, parent_pipe = multiprocessing.Pipe()

    manager = DagFileProcessorManager(
        dag_directory=dag_directory,
        dag_ids=[],
        max_runs=1,
        processor_factory=SchedulerJob._create_dag_file_processor,
        processor_timeout=timedelta(seconds=5),
        signal_conn=child_pipe,
        pickle_dags=False,
        async_mode=True,
    )

    manager._run_parsing_loop()

    result = None
    while parent_pipe.poll(timeout=None):
        result = parent_pipe.recv()
        if isinstance(result, DagParsingStat) and result.done:
            break

    # Three files in folder should be processed
    assert sum(stat.run_count for stat in manager._file_stats.values()) == 3

    with create_session() as session:
        assert session.query(DagModel).get(dag_id) is not None
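
# To make the fixture concrete: dags_with_system_exit/ mixes files that exit
# at import time with one valid DAG. An illustrative sketch of the two kinds
# of files (contents are assumptions, not copied from the repo):

# a_system_exit.py -- exits during import; the manager must survive parsing it.
import sys

sys.exit(-1)

# b_test_scheduler_dags.py -- the valid DAG the test expects to be re-created;
# dag_id 'exit_test_dag' is taken from the test above. Operator import paths
# vary by Airflow version.
from datetime import datetime

from airflow.models import DAG
from airflow.operators.dummy import DummyOperator

with DAG(dag_id='exit_test_dag', start_date=datetime(2100, 1, 1)) as dag:
    DummyOperator(task_id='dummy', owner='airflow')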

def clean_db():
    clear_db_runs()
    clear_db_dags()
    clear_db_serialized_dags()

def tearDown(self) -> None:
    db.clear_db_dags()
    db.clear_db_serialized_dags()

def setUp(self) -> None:
    db.clear_db_dags()
    db.clear_db_serialized_dags()

def clear_db():
    clear_db_dags()
    clear_db_serialized_dags()
    clear_db_dag_code()

def setUp(self) -> None:
    db.clear_db_dags()
    db.clear_db_serialized_dags()
    db.clear_db_runs()
    db.clear_db_task_execution()
    db.clear_db_task_fail()