Example 1
    def test_parse_once(self):
        clear_db_serialized_dags()
        clear_db_dags()

        test_dag_path = os.path.join(TEST_DAG_FOLDER, 'test_scheduler_dags.py')
        async_mode = 'sqlite' not in conf.get('core', 'sql_alchemy_conn')
        processor_agent = DagFileProcessorAgent(
            test_dag_path, 1, type(self)._processor_factory, timedelta.max, [], False, async_mode
        )
        processor_agent.start()
        if not async_mode:
            processor_agent.run_single_parsing_loop()
        while not processor_agent.done:
            if not async_mode:
                processor_agent.wait_until_finished()
            processor_agent.heartbeat()

        assert processor_agent.all_files_processed
        assert processor_agent.done

        with create_session() as session:
            dag_ids = session.query(DagModel.dag_id).order_by("dag_id").all()
            assert dag_ids == [('test_start_date_scheduling',), ('test_task_start_date_scheduling',)]

            dag_ids = session.query(SerializedDagModel.dag_id).order_by("dag_id").all()
            assert dag_ids == [('test_start_date_scheduling',), ('test_task_start_date_scheduling',)]
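
The examples on this page all reset state through Airflow's clear_db_* test helpers. As a point of reference, here is a minimal sketch of what clear_db_dags and clear_db_serialized_dags typically boil down to, assuming Airflow 2.x import paths; the real helpers in tests.test_utils.db may clear additional tables.

# Hedged sketch of the clear_db_* helpers, not the verbatim Airflow source.
from airflow.models.dag import DagModel, DagTag
from airflow.models.serialized_dag import SerializedDagModel
from airflow.utils.session import create_session


def clear_db_dags():
    with create_session() as session:
        # Delete tag rows first, since they reference DagModel rows.
        session.query(DagTag).delete()
        session.query(DagModel).delete()


def clear_db_serialized_dags():
    with create_session() as session:
        session.query(SerializedDagModel).delete()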
Example 2
 def setUp(self):
     clear_db_runs()
     clear_db_pools()
     clear_db_dags()
     clear_db_sla_miss()
     clear_db_errors()
     clear_db_event_model()
Example 3
 def tearDown(self):
     clear_db_runs()
     clear_db_dags()
     self.appbuilder = None
     self.app = None
     self.db = None
     log.debug("Complete teardown!")
Example 4
    def tearDown(self):
        super(TestPythonVirtualenvOperator, self).tearDown()
        clear_db_runs()
        clear_db_dags()

        for var in TI_CONTEXT_ENV_VARS:
            if var in os.environ:
                del os.environ[var]
Example 5
    def setUp(self):
        clear_db_runs()
        clear_db_dags()
        self.db = SQLA(self.app)
        self.appbuilder.add_view(SomeBaseView, "SomeBaseView", category="BaseViews")
        self.appbuilder.add_view(SomeModelView, "SomeModelView", category="ModelViews")

        log.debug("Complete setup!")
Example 6
 def test_dagtag_repr(self):
     clear_db_dags()
     dag = DAG('dag-test-dagtag',
               start_date=DEFAULT_DATE,
               tags=['tag-1', 'tag-2'])
     dag.sync_to_db()
     with create_session() as session:
         self.assertEqual({'tag-1', 'tag-2'}, {
             repr(t)
             for t in session.query(DagTag).filter(
                 DagTag.dag_id == 'dag-test-dagtag').all()
         })
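
The set comparison above works only because repr() of a DagTag row collapses to the bare tag name. A plain-Python sketch of that behaviour follows; the real DagTag is a SQLAlchemy model, and DagTagSketch here is just an illustrative stand-in.

class DagTagSketch:
    """Illustrative stand-in mirroring the repr behaviour the test relies on."""

    def __init__(self, name, dag_id):
        self.name = name
        self.dag_id = dag_id

    def __repr__(self):
        # repr() returns just the tag name, so a set of reprs compares
        # directly against a set of plain tag strings.
        return self.name


tags = [DagTagSketch('tag-1', 'dag-test-dagtag'), DagTagSketch('tag-2', 'dag-test-dagtag')]
assert {repr(t) for t in tags} == {'tag-1', 'tag-2'}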
Example 7
 def tearDown(self):
     session = Session()
     session.query(DagRun).filter(DagRun.dag_id == TEST_DAG_ID).delete(
         synchronize_session=False)
     session.query(TaskInstance).filter(
         TaskInstance.dag_id == TEST_DAG_ID).delete(
             synchronize_session=False)
     session.query(TaskFail).filter(TaskFail.dag_id == TEST_DAG_ID).delete(
         synchronize_session=False)
     session.commit()
     session.close()
     clear_db_dags()
     clear_db_runs()
Example 8
 def setUp(self):
     db.clear_db_jobs()
     db.clear_db_dags()
     db.clear_db_serialized_dags()
     db.clear_db_runs()
     db.clear_db_task_execution()
     db.clear_db_message()
     self.scheduler = None
     self.port = 50102
     self.storage = MemoryEventStorage()
     self.master = NotificationMaster(NotificationService(self.storage),
                                      self.port)
     self.master.run()
     self.client = NotificationClient(server_uri="localhost:{}".format(
         self.port),
                                      default_namespace="test_namespace")
     time.sleep(1)
Example 9
    def test_dag_with_system_exit(self):
        """
        Test to check that a DAG with a sys.exit() doesn't break the scheduler.
        """

        # We need to _actually_ parse the files here to test the behaviour.
        # Right now the parsing code lives in SchedulerJob, even though it's
        # called via utils.dag_processing.
        from airflow.jobs.scheduler_job import SchedulerJob

        dag_id = 'exit_test_dag'
        dag_directory = TEST_DAG_FOLDER.parent / 'dags_with_system_exit'

        # Delete the one valid DAG/SerializedDAG, and check that it gets re-created
        clear_db_dags()
        clear_db_serialized_dags()

        child_pipe, parent_pipe = multiprocessing.Pipe()

        manager = DagFileProcessorManager(
            dag_directory=dag_directory,
            dag_ids=[],
            max_runs=1,
            processor_factory=SchedulerJob._create_dag_file_processor,
            processor_timeout=timedelta(seconds=5),
            signal_conn=child_pipe,
            pickle_dags=False,
            async_mode=True,
        )

        manager._run_parsing_loop()

        result = None
        while parent_pipe.poll(timeout=None):
            result = parent_pipe.recv()
            if isinstance(result, DagParsingStat) and result.done:
                break

        # Three files in folder should be processed
        assert sum(stat.run_count for stat in manager._file_stats.values()) == 3

        with create_session() as session:
            assert session.query(DagModel).get(dag_id) is not None
Example 10
 def setUp(self) -> None:
     db.clear_db_dags()
     db.clear_db_serialized_dags()
     db.clear_db_runs()
     db.clear_db_task_execution()
     db.clear_db_task_fail()
Example 11
 def setup_attrs(self, configured_app) -> None:
     self.app = configured_app
     self.client = self.app.test_client()  # type:ignore
     clear_db_runs()
     clear_db_dags()
Example 12
 def setUp(self):
     clear_db_event_model()
     clear_db_dag_pickle()
     clear_db_dags()
     clear_db_task_instance()
     clear_db_runs()
Example 13
 def clean_db():
     clear_db_runs()
     clear_db_dags()
     clear_db_serialized_dags()
Example 14
 def setUp(self) -> None:
     clear_db_dags()
Example 15
 def setUp(self) -> None:
     db.clear_db_dags()
     db.clear_db_serialized_dags()
Example 16
 def clean_db():
     clear_db_dags()
     clear_db_runs()
     clear_db_xcom()
Example 17
 def tearDown(self) -> None:
     clear_db_dags()
Example 18
 def tearDown(self) -> None:
     db.clear_db_dags()
     db.clear_db_serialized_dags()
Example 19
 def clear_db():
     clear_db_dags()
     clear_db_serialized_dags()
     clear_db_dag_code()
Example 20
 def setUp(self) -> None:
     self.client = self.app.test_client()  # type:ignore
     self.default_time = "2020-06-11T18:00:00+00:00"
     self.default_time_2 = "2020-06-12T18:00:00+00:00"
     clear_db_runs()
     clear_db_dags()
Example 21
    def test_bulk_sync_to_db(self):
        clear_db_dags()
        dags = [
            DAG(f'dag-bulk-sync-{i}', start_date=DEFAULT_DATE, tags=["test-dag"]) for i in range(0, 4)
        ]

        with assert_queries_count(3):
            DAG.bulk_sync_to_db(dags)
        with create_session() as session:
            self.assertEqual(
                {'dag-bulk-sync-0', 'dag-bulk-sync-1', 'dag-bulk-sync-2', 'dag-bulk-sync-3'},
                {row[0] for row in session.query(DagModel.dag_id).all()}
            )
            self.assertEqual(
                {
                    ('dag-bulk-sync-0', 'test-dag'),
                    ('dag-bulk-sync-1', 'test-dag'),
                    ('dag-bulk-sync-2', 'test-dag'),
                    ('dag-bulk-sync-3', 'test-dag'),
                },
                set(session.query(DagTag.dag_id, DagTag.name).all())
            )
        # Re-sync should do fewer queries
        with assert_queries_count(2):
            DAG.bulk_sync_to_db(dags)
        with assert_queries_count(2):
            DAG.bulk_sync_to_db(dags)
        # Adding tags
        for dag in dags:
            dag.tags.append("test-dag2")
        with assert_queries_count(3):
            DAG.bulk_sync_to_db(dags)
        with create_session() as session:
            self.assertEqual(
                {'dag-bulk-sync-0', 'dag-bulk-sync-1', 'dag-bulk-sync-2', 'dag-bulk-sync-3'},
                {row[0] for row in session.query(DagModel.dag_id).all()}
            )
            self.assertEqual(
                {
                    ('dag-bulk-sync-0', 'test-dag'),
                    ('dag-bulk-sync-0', 'test-dag2'),
                    ('dag-bulk-sync-1', 'test-dag'),
                    ('dag-bulk-sync-1', 'test-dag2'),
                    ('dag-bulk-sync-2', 'test-dag'),
                    ('dag-bulk-sync-2', 'test-dag2'),
                    ('dag-bulk-sync-3', 'test-dag'),
                    ('dag-bulk-sync-3', 'test-dag2'),
                },
                set(session.query(DagTag.dag_id, DagTag.name).all())
            )
        # Removing tags
        for dag in dags:
            dag.tags.remove("test-dag")
        with assert_queries_count(3):
            DAG.bulk_sync_to_db(dags)
        with create_session() as session:
            self.assertEqual(
                {'dag-bulk-sync-0', 'dag-bulk-sync-1', 'dag-bulk-sync-2', 'dag-bulk-sync-3'},
                {row[0] for row in session.query(DagModel.dag_id).all()}
            )
            self.assertEqual(
                {
                    ('dag-bulk-sync-0', 'test-dag2'),
                    ('dag-bulk-sync-1', 'test-dag2'),
                    ('dag-bulk-sync-2', 'test-dag2'),
                    ('dag-bulk-sync-3', 'test-dag2'),
                },
                set(session.query(DagTag.dag_id, DagTag.name).all())
            )
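
Each bulk_sync_to_db call above is wrapped in assert_queries_count to pin the number of statements the sync issues. A hedged sketch of how such a counter can be built on SQLAlchemy's cursor-execute events follows; the explicit engine parameter is an assumption of this sketch, not the signature of Airflow's own test helper.

from contextlib import contextmanager

from sqlalchemy import event


@contextmanager
def assert_queries_count(engine, expected_count):
    # Count every statement the engine sends while the block runs.
    counter = {'queries': 0}

    def _after_cursor_execute(conn, cursor, statement, parameters, context, executemany):
        counter['queries'] += 1

    event.listen(engine, 'after_cursor_execute', _after_cursor_execute)
    try:
        yield
    finally:
        event.remove(engine, 'after_cursor_execute', _after_cursor_execute)
    assert counter['queries'] == expected_count, (
        "expected {} queries, ran {}".format(expected_count, counter['queries'])
    )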
Example 22
 def tearDownClass(cls) -> None:
     clear_db_runs()
     clear_db_dags()
Example 23
def dagbag_to_db():
    dagbag = DagBag(include_examples=True)
    for dag in dagbag.dags.values():
        dag.sync_to_db()
    yield
    clear_db_dags()
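
The yield-then-cleanup shape of dagbag_to_db suggests it is meant to be registered as a pytest fixture. A hypothetical usage sketch, assuming the generator above is importable and that DagModel and create_session are available as in the earlier examples; all names here are illustrative.

import pytest

from airflow.models import DagModel
from airflow.utils.session import create_session


@pytest.fixture(name='dagbag_to_db')
def _dagbag_to_db_fixture():
    # Delegate to the generator above: sync example DAGs, yield, then clean up.
    yield from dagbag_to_db()


def test_example_dags_synced(dagbag_to_db):
    with create_session() as session:
        # After the fixture runs, the example DAGs should be present in the DB.
        assert session.query(DagModel).count() > 0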