Example #1
 def tearDown(self):
     session = Session()
     session.query(DagRun).delete()
     session.query(TaskInstance).delete()
     session.commit()
     session.close()
     super(TestApiExperimental, self).tearDown()
Example #2
 def setUpClass(cls):
     super(TestApiExperimental, cls).setUpClass()
     session = Session()
     session.query(DagRun).delete()
     session.query(TaskInstance).delete()
     session.commit()
     session.close()
Example #3
class TestChartModelView(unittest.TestCase):

    CREATE_ENDPOINT = '/admin/chart/new/?url=/admin/chart/'

    @classmethod
    def setUpClass(cls):
        super(TestChartModelView, cls).setUpClass()
        session = Session()
        session.query(models.Chart).delete()
        session.query(models.User).delete()
        session.commit()
        user = models.User(username='******')
        session.add(user)
        session.commit()
        session.close()

    def setUp(self):
        super(TestChartModelView, self).setUp()
        configuration.load_test_config()
        app = application.create_app(testing=True)
        app.config['WTF_CSRF_METHODS'] = []
        self.app = app.test_client()
        self.session = Session()
        self.chart = {
            'label': 'chart',
            'owner': 'airflow',
            'conn_id': 'airflow_ci',
        }

    def tearDown(self):
        self.session.query(models.Chart).delete()
        self.session.commit()
        self.session.close()
        super(TestChartModelView, self).tearDown()

    @classmethod
    def tearDownClass(cls):
        session = Session()
        session.query(models.User).delete()
        session.commit()
        session.close()
        super(TestChartModelView, cls).tearDownClass()

    def test_create_chart(self):
        response = self.app.post(
            self.CREATE_ENDPOINT,
            data=self.chart,
            follow_redirects=True,
        )
        self.assertEqual(response.status_code, 200)
        self.assertEqual(self.session.query(models.Chart).count(), 1)

    def test_get_chart(self):
        response = self.app.get(
            '/admin/chart?sort=3',
            follow_redirects=True,
        )
        print(response.data)
        self.assertEqual(response.status_code, 200)
        self.assertIn('Sort by Owner', response.data.decode('utf-8'))
Example #4
File: core.py  Project: moritzpein/airflow
 def tearDown(self):
     configuration.test_mode()
     session = Session()
     session.query(models.User).delete()
     session.commit()
     session.close()
     configuration.conf.set("webserver", "authenticate", "False")
Example #5
def clear_session():
    """Manage airflow database state for tests"""
    session = Session()
    session.query(DagRun).delete()
    session.query(TI).delete()
    session.commit()
    session.close()
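A helper like the one above is typically wired into a test's setUp/tearDown. A minimal usage sketch, assuming clear_session is importable next to the test module (the class name below is illustrative):

import unittest

class ExampleDagRunTest(unittest.TestCase):
    def setUp(self):
        # Start every test from empty DagRun / TaskInstance tables.
        clear_session()

    def tearDown(self):
        # Leave the metadata database clean for the next test.
        clear_session()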
Example #6
class TestTriggerDag(unittest.TestCase):

    def setUp(self):
        conf.load_test_config()
        app = application.create_app(testing=True)
        app.config['WTF_CSRF_METHODS'] = []
        self.app = app.test_client()
        self.session = Session()
        models.DagBag().get_dag("example_bash_operator").sync_to_db()

    def test_trigger_dag_button_normal_exist(self):
        resp = self.app.get('/', follow_redirects=True)
        self.assertIn('/trigger?dag_id=example_bash_operator', resp.data.decode('utf-8'))
        self.assertIn("return confirmDeleteDag('example_bash_operator')", resp.data.decode('utf-8'))

    def test_trigger_dag_button(self):

        test_dag_id = "example_bash_operator"

        DR = models.DagRun
        self.session.query(DR).delete()
        self.session.commit()

        self.app.get('/admin/airflow/trigger?dag_id={}'.format(test_dag_id))

        run = self.session.query(DR).filter(DR.dag_id == test_dag_id).first()
        self.assertIsNotNone(run)
        self.assertIn("manual__", run.run_id)
Example #7
 def tearDown(self):
     session = Session()
     session.query(models.TaskInstance).filter_by(
         dag_id=TEST_DAG_ID).delete()
     session.query(TaskFail).filter_by(
         dag_id=TEST_DAG_ID).delete()
     session.commit()
     session.close()
Example #8
    def setUpClass(cls):
        super(PythonOperatorTest, cls).setUpClass()

        session = Session()

        session.query(DagRun).delete()
        session.query(TI).delete()
        session.commit()
        session.close()
Example #9
    def tearDown(self):
        super(ShortCircuitOperatorTest, self).tearDown()

        session = Session()

        session.query(DagRun).delete()
        session.query(TI).delete()
        session.commit()
        session.close()
Example #10
 def setUpClass(cls):
     super(TestLogView, cls).setUpClass()
     session = Session()
     session.query(TaskInstance).filter(
         TaskInstance.dag_id == cls.DAG_ID,
         TaskInstance.task_id == cls.TASK_ID,
         TaskInstance.execution_date == cls.DEFAULT_DATE).delete()
     session.commit()
     session.close()
Example #11
 def setUpClass(cls):
     super(TestVarImportView, cls).setUpClass()
     session = Session()
     session.query(models.User).delete()
     session.commit()
     user = models.User(username='******')
     session.add(user)
     session.commit()
     session.close()
Example #12
class TestVariableView(unittest.TestCase):

    CREATE_ENDPOINT = '/admin/variable/new/?url=/admin/variable/'

    @classmethod
    def setUpClass(cls):
        super(TestVariableView, cls).setUpClass()
        session = Session()
        session.query(models.Variable).delete()
        session.commit()
        session.close()

    def setUp(self):
        super(TestVariableView, self).setUp()
        configuration.load_test_config()
        app = application.create_app(testing=True)
        app.config['WTF_CSRF_METHODS'] = []
        self.app = app.test_client()
        self.session = Session()
        self.variable = {
            'key': 'test_key',
            'val': 'text_val',
            'is_encrypted': True
        }

    def tearDown(self):
        self.session.query(models.Variable).delete()
        self.session.commit()
        self.session.close()
        super(TestVariableView, self).tearDown()

    def test_can_handle_error_on_decrypt(self):
        # create valid variable
        response = self.app.post(
            self.CREATE_ENDPOINT,
            data=self.variable,
            follow_redirects=True,
        )
        self.assertEqual(response.status_code, 200)

        # update the variable with a wrong value, given that is encrypted
        Var = models.Variable
        (self.session.query(Var)
            .filter(Var.key == self.variable['key'])
            .update({
                'val': 'failed_value_not_encrypted'
            }, synchronize_session=False))
        self.session.commit()

        # retrieve Variables page, should not fail and contain the Invalid
        # label for the variable
        response = self.app.get('/admin/variable', follow_redirects=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(self.session.query(models.Variable).count(), 1)
        self.assertIn('<span class="label label-danger">Invalid</span>',
                      response.data.decode('utf-8'))
Example #13
    def tearDown(self):
        super(BranchOperatorTest, self).tearDown()

        session = Session()

        session.query(DagRun).delete()
        session.query(TI).delete()
        print(len(session.query(DagRun).all()))
        session.commit()
        session.close()
Example #14
 def setUpClass(cls):
     super(TestKnownEventView, cls).setUpClass()
     session = Session()
     session.query(models.KnownEvent).delete()
     session.query(models.User).delete()
     session.commit()
     user = models.User(username='******')
     session.add(user)
     session.commit()
     cls.user_id = user.id
     session.close()
Example #15
File: app.py  Project: pombredanne/Airflow
    def log(self):
        BASE_LOG_FOLDER = conf.get('core', 'BASE_LOG_FOLDER')
        dag_id = request.args.get('dag_id')
        task_id = request.args.get('task_id')
        execution_date = request.args.get('execution_date')
        dag = dagbag.dags[dag_id]
        log_relative = "/{dag_id}/{task_id}/{execution_date}".format(
            **locals())
        loc = BASE_LOG_FOLDER + log_relative
        loc = loc.format(**locals())
        log = ""
        TI = models.TaskInstance
        session = Session()
        dttm = dateutil.parser.parse(execution_date)
        ti = session.query(TI).filter(
            TI.dag_id == dag_id, TI.task_id == task_id,
            TI.execution_date == dttm).first()
        if ti:
            host = ti.hostname
            if socket.gethostname() == host:
                try:
                    f = open(loc)
                    log += "".join(f.readlines())
                    f.close()
                except:
                    log = "Log file isn't where expected: {0}\n".format(loc)
            else:
                WORKER_LOG_SERVER_PORT = \
                    conf.get('celery', 'WORKER_LOG_SERVER_PORT')
                url = (
                    "http://{host}:{WORKER_LOG_SERVER_PORT}/log"
                    "{log_relative}").format(**locals())
                log += "Log file isn't local."
                log += "Fetching here: {url}\n".format(**locals())
                try:
                    import urllib2
                    w = urllib2.urlopen(url)
                    log += w.read()
                    w.close()
                except:
                    log += "Failed to fetch log file: {url}.".format(**locals())
            session.commit()
            session.close()

        log = "<pre><code>{0}</code></pre>".format(log)
        title = "Logs for {task_id} on {execution_date}".format(**locals())
        html_code = log

        return self.render(
            'airflow/dag_code.html', html_code=html_code, dag=dag, title=title)
Example #16
    def tearDown(self):
        super(PythonOperatorTest, self).tearDown()

        session = Session()

        session.query(DagRun).delete()
        session.query(TI).delete()
        print(len(session.query(DagRun).all()))
        session.commit()
        session.close()

        for var in TI_CONTEXT_ENV_VARS:
            if var in os.environ:
                del os.environ[var]
Example #17
class TestBase(unittest.TestCase):
    def setUp(self):
        conf.load_test_config()
        self.app, self.appbuilder = application.create_app(testing=True)
        self.app.config['WTF_CSRF_ENABLED'] = False
        self.client = self.app.test_client()
        self.session = Session()
        self.login()

    def login(self):
        sm_session = self.appbuilder.sm.get_session()
        self.user = sm_session.query(ab_user).first()
        if not self.user:
            role_admin = self.appbuilder.sm.find_role('Admin')
            self.appbuilder.sm.add_user(
                username='******',
                first_name='test',
                last_name='test',
                email='*****@*****.**',
                role=role_admin,
                password='******')
        return self.client.post('/login/', data=dict(
            username='******',
            password='******'
        ), follow_redirects=True)

    def logout(self):
        return self.client.get('/logout/')

    def clear_table(self, model):
        self.session.query(model).delete()
        self.session.commit()
        self.session.close()

    def check_content_in_response(self, text, resp, resp_code=200):
        resp_html = resp.data.decode('utf-8')
        self.assertEqual(resp_code, resp.status_code)
        if isinstance(text, list):
            for kw in text:
                self.assertIn(kw, resp_html)
        else:
            self.assertIn(text, resp_html)

    def percent_encode(self, obj):
        if PY2:
            return urllib.quote_plus(str(obj))
        else:
            return urllib.parse.quote_plus(str(obj))
Example #18
File: core.py  Project: moritzpein/airflow
    def setUp(self):
        configuration.conf.set("webserver", "authenticate", "True")
        configuration.conf.set("webserver", "auth_backend", "airflow.contrib.auth.backends.password_auth")

        app = application.create_app()
        app.config['TESTING'] = True
        self.app = app.test_client()
        from airflow.contrib.auth.backends.password_auth import PasswordUser

        session = Session()
        user = models.User()
        password_user = PasswordUser(user)
        password_user.username = '******'
        password_user.password = '******'
        print(password_user._password)
        session.add(password_user)
        session.commit()
        session.close()
Example #19
    def test_delete_dag_button_for_dag_on_scheduler_only(self):
        # Test for JIRA AIRFLOW-3233 (PR 4069):
        # The delete-dag URL should be generated correctly for DAGs
        # that exist on the scheduler (DB) but not the webserver DagBag

        test_dag_id = "non_existent_dag"

        session = Session()
        DM = models.DagModel
        session.query(DM).filter(DM.dag_id == 'example_bash_operator').update({'dag_id': test_dag_id})
        session.commit()

        resp = self.app.get('/', follow_redirects=True)
        self.assertIn('/delete?dag_id={}'.format(test_dag_id), resp.data.decode('utf-8'))
        self.assertIn("return confirmDeleteDag('{}')".format(test_dag_id), resp.data.decode('utf-8'))

        session.query(DM).filter(DM.dag_id == test_dag_id).update({'dag_id': 'example_bash_operator'})
        session.commit()
Example #20
    def registered(self, driver, frameworkId, masterInfo):
        logging.info("AirflowScheduler registered to mesos with framework ID %s", frameworkId.value)

        if configuration.getboolean('mesos', 'CHECKPOINT') and configuration.get('mesos', 'FAILOVER_TIMEOUT'):
            # Import here to work around a circular import error
            from airflow.models import Connection

            # Update the Framework ID in the database.
            session = Session()
            conn_id = FRAMEWORK_CONNID_PREFIX + get_framework_name()
            connection = Session.query(Connection).filter_by(conn_id=conn_id).first()
            if connection is None:
                connection = Connection(conn_id=conn_id, conn_type='mesos_framework-id',
                                        extra=frameworkId.value)
            else:
                connection.extra = frameworkId.value

            session.add(connection)
            session.commit()
            Session.remove()
Example #21
    def setUp(self):
        configuration.load_test_config()
        try:
            configuration.conf.add_section("api")
        except DuplicateSectionError:
            pass

        configuration.conf.set("api",
                               "auth_backend",
                               "airflow.contrib.auth.backends.password_auth")

        self.app = application.create_app(testing=True)

        session = Session()
        user = models.User()
        password_user = PasswordUser(user)
        password_user.username = '******'
        password_user.password = '******'
        session.add(password_user)
        session.commit()
        session.close()
Example #22
def reset(dag_id=TEST_DAG_ID):
    session = Session()
    tis = session.query(models.TaskInstance).filter_by(dag_id=dag_id)
    tis.delete()
    session.commit()
    session.close()
Example #23
 def setUpClass(cls):
     super(TestVariableView, cls).setUpClass()
     session = Session()
     session.query(models.Variable).delete()
     session.commit()
     session.close()
Example #24
class TestDecorators(unittest.TestCase):
    EXAMPLE_DAG_DEFAULT_DATE = dates.days_ago(2)
    run_id = "test_{}".format(DagRun.id_for_date(EXAMPLE_DAG_DEFAULT_DATE))

    @classmethod
    def setUpClass(cls):
        cls.dagbag = DagBag(include_examples=True)
        app = application.create_app(testing=True)
        app.config['WTF_CSRF_METHODS'] = []
        cls.app = app.test_client()

    def setUp(self):
        self.session = Session()
        self.cleanup_dagruns()
        self.prepare_dagruns()

    def cleanup_dagruns(self):
        DR = DagRun
        dag_ids = 'example_bash_operator'
        (self.session.query(DR).filter(DR.dag_id == dag_ids).filter(
            DR.run_id == self.run_id).delete(synchronize_session='fetch'))
        self.session.commit()

    def prepare_dagruns(self):
        self.bash_dag = self.dagbag.dags['example_bash_operator']
        self.bash_dag.sync_to_db()

        self.bash_dagrun = self.bash_dag.create_dagrun(
            run_id=self.run_id,
            execution_date=self.EXAMPLE_DAG_DEFAULT_DATE,
            start_date=timezone.utcnow(),
            state=State.RUNNING)

    def check_last_log(self, dag_id, event, execution_date=None):
        qry = self.session.query(Log.dag_id, Log.task_id, Log.event,
                                 Log.execution_date, Log.owner, Log.extra)
        qry = qry.filter(Log.dag_id == dag_id, Log.event == event)
        if execution_date:
            qry = qry.filter(Log.execution_date == execution_date)
        logs = qry.order_by(Log.dttm.desc()).limit(5).all()
        self.assertGreaterEqual(len(logs), 1)
        self.assertTrue(logs[0].extra)

    def test_action_logging_get(self):
        url = '/admin/airflow/graph?dag_id=example_bash_operator&execution_date={}'.format(
            quote_plus(
                self.EXAMPLE_DAG_DEFAULT_DATE.isoformat().encode('utf-8')))
        self.app.get(url, follow_redirects=True)

        # In mysql backend, this commit() is needed to write down the logs
        self.session.commit()
        self.check_last_log("example_bash_operator",
                            event="graph",
                            execution_date=self.EXAMPLE_DAG_DEFAULT_DATE)

    def test_action_logging_post(self):
        form = dict(
            task_id="runme_1",
            dag_id="example_bash_operator",
            execution_date=self.EXAMPLE_DAG_DEFAULT_DATE.isoformat().encode(
                'utf-8'),
            upstream="false",
            downstream="false",
            future="false",
            past="false",
            only_failed="false",
        )
        self.app.post("/admin/airflow/clear", data=form)
        # In mysql backend, this commit() is needed to write down the logs
        self.session.commit()
        self.check_last_log("example_bash_operator",
                            event="clear",
                            execution_date=self.EXAMPLE_DAG_DEFAULT_DATE)
Example #25
class TestMarkDAGRun(unittest.TestCase):
    def setUp(self):
        self.dagbag = models.DagBag(include_examples=True)
        self.dag1 = self.dagbag.dags['example_bash_operator']
        self.dag2 = self.dagbag.dags['example_subdag_operator']

        self.execution_dates = [days_ago(2), days_ago(1), days_ago(0)]

        self.session = Session()

    def _set_default_task_instance_states(self, dr):
        if dr.dag_id != 'example_bash_operator':
            return
        # success task
        dr.get_task_instance('runme_0').set_state(State.SUCCESS, self.session)
        # skipped task
        dr.get_task_instance('runme_1').set_state(State.SKIPPED, self.session)
        # retry task
        dr.get_task_instance('runme_2').set_state(State.UP_FOR_RETRY, self.session)
        # queued task
        dr.get_task_instance('also_run_this').set_state(State.QUEUED, self.session)
        # running task
        dr.get_task_instance('run_after_loop').set_state(State.RUNNING, self.session)
        # failed task
        dr.get_task_instance('run_this_last').set_state(State.FAILED, self.session)

    def _verify_task_instance_states_remain_default(self, dr):
        self.assertEqual(dr.get_task_instance('runme_0').state, State.SUCCESS)
        self.assertEqual(dr.get_task_instance('runme_1').state, State.SKIPPED)
        self.assertEqual(dr.get_task_instance('runme_2').state, State.UP_FOR_RETRY)
        self.assertEqual(dr.get_task_instance('also_run_this').state, State.QUEUED, )
        self.assertEqual(dr.get_task_instance('run_after_loop').state, State.RUNNING)
        self.assertEqual(dr.get_task_instance('run_this_last').state, State.FAILED)

    def _verify_task_instance_states(self, dag, date, state):
        TI = models.TaskInstance
        tis = self.session.query(TI).filter(TI.dag_id == dag.dag_id,
                                            TI.execution_date == date)
        for ti in tis:
            self.assertEqual(ti.state, state)

    def _create_test_dag_run(self, state, date):
        return self.dag1.create_dagrun(
            run_id='manual__' + datetime.now().isoformat(),
            state=state,
            execution_date=date,
            session=self.session
        )

    def _verify_dag_run_state(self, dag, date, state):
        drs = models.DagRun.find(dag_id=dag.dag_id, execution_date=date)
        dr = drs[0]

        self.assertEqual(dr.get_state(), state)

    def _verify_dag_run_dates(self, dag, date, state, middle_time):
        # When target state is RUNNING, we should set start_date,
        # otherwise we should set end_date.
        drs = models.DagRun.find(dag_id=dag.dag_id, execution_date=date)
        dr = drs[0]
        if state == State.RUNNING:
            self.assertGreater(dr.start_date, middle_time)
            self.assertIsNone(dr.end_date)
        else:
            self.assertLess(dr.start_date, middle_time)
            self.assertGreater(dr.end_date, middle_time)

    def test_set_running_dag_run_to_success(self):
        date = self.execution_dates[0]
        dr = self._create_test_dag_run(State.RUNNING, date)
        middle_time = timezone.utcnow()
        self._set_default_task_instance_states(dr)

        altered = set_dag_run_state_to_success(self.dag1, date, commit=True)

        # All except the SUCCESS task should be altered.
        self.assertEqual(len(altered), 5)
        self._verify_dag_run_state(self.dag1, date, State.SUCCESS)
        self._verify_task_instance_states(self.dag1, date, State.SUCCESS)
        self._verify_dag_run_dates(self.dag1, date, State.SUCCESS, middle_time)

    def test_set_running_dag_run_to_failed(self):
        date = self.execution_dates[0]
        dr = self._create_test_dag_run(State.RUNNING, date)
        middle_time = timezone.utcnow()
        self._set_default_task_instance_states(dr)

        altered = set_dag_run_state_to_failed(self.dag1, date, commit=True)

        # Only running task should be altered.
        self.assertEqual(len(altered), 1)
        self._verify_dag_run_state(self.dag1, date, State.FAILED)
        self.assertEqual(dr.get_task_instance('run_after_loop').state, State.FAILED)
        self._verify_dag_run_dates(self.dag1, date, State.FAILED, middle_time)

    def test_set_running_dag_run_to_running(self):
        date = self.execution_dates[0]
        dr = self._create_test_dag_run(State.RUNNING, date)
        middle_time = timezone.utcnow()
        self._set_default_task_instance_states(dr)

        altered = set_dag_run_state_to_running(self.dag1, date, commit=True)

        # None of the tasks should be altered.
        self.assertEqual(len(altered), 0)
        self._verify_dag_run_state(self.dag1, date, State.RUNNING)
        self._verify_task_instance_states_remain_default(dr)
        self._verify_dag_run_dates(self.dag1, date, State.RUNNING, middle_time)

    def test_set_success_dag_run_to_success(self):
        date = self.execution_dates[0]
        dr = self._create_test_dag_run(State.SUCCESS, date)
        middle_time = timezone.utcnow()
        self._set_default_task_instance_states(dr)

        altered = set_dag_run_state_to_success(self.dag1, date, commit=True)

        # All except the SUCCESS task should be altered.
        self.assertEqual(len(altered), 5)
        self._verify_dag_run_state(self.dag1, date, State.SUCCESS)
        self._verify_task_instance_states(self.dag1, date, State.SUCCESS)
        self._verify_dag_run_dates(self.dag1, date, State.SUCCESS, middle_time)

    def test_set_success_dag_run_to_failed(self):
        date = self.execution_dates[0]
        dr = self._create_test_dag_run(State.SUCCESS, date)
        middle_time = timezone.utcnow()
        self._set_default_task_instance_states(dr)

        altered = set_dag_run_state_to_failed(self.dag1, date, commit=True)

        # Only running task should be altered.
        self.assertEqual(len(altered), 1)
        self._verify_dag_run_state(self.dag1, date, State.FAILED)
        self.assertEqual(dr.get_task_instance('run_after_loop').state, State.FAILED)
        self._verify_dag_run_dates(self.dag1, date, State.FAILED, middle_time)

    def test_set_success_dag_run_to_running(self):
        date = self.execution_dates[0]
        dr = self._create_test_dag_run(State.SUCCESS, date)
        middle_time = timezone.utcnow()
        self._set_default_task_instance_states(dr)

        altered = set_dag_run_state_to_running(self.dag1, date, commit=True)

        # None of the tasks should be altered.
        self.assertEqual(len(altered), 0)
        self._verify_dag_run_state(self.dag1, date, State.RUNNING)
        self._verify_task_instance_states_remain_default(dr)
        self._verify_dag_run_dates(self.dag1, date, State.RUNNING, middle_time)

    def test_set_failed_dag_run_to_success(self):
        date = self.execution_dates[0]
        dr = self._create_test_dag_run(State.SUCCESS, date)
        middle_time = timezone.utcnow()
        self._set_default_task_instance_states(dr)

        altered = set_dag_run_state_to_success(self.dag1, date, commit=True)

        # All except the SUCCESS task should be altered.
        self.assertEqual(len(altered), 5)
        self._verify_dag_run_state(self.dag1, date, State.SUCCESS)
        self._verify_task_instance_states(self.dag1, date, State.SUCCESS)
        self._verify_dag_run_dates(self.dag1, date, State.SUCCESS, middle_time)

    def test_set_failed_dag_run_to_failed(self):
        date = self.execution_dates[0]
        dr = self._create_test_dag_run(State.SUCCESS, date)
        middle_time = timezone.utcnow()
        self._set_default_task_instance_states(dr)

        altered = set_dag_run_state_to_failed(self.dag1, date, commit=True)

        # Only running task should be altered.
        self.assertEqual(len(altered), 1)
        self._verify_dag_run_state(self.dag1, date, State.FAILED)
        self.assertEqual(dr.get_task_instance('run_after_loop').state, State.FAILED)
        self._verify_dag_run_dates(self.dag1, date, State.FAILED, middle_time)

    def test_set_failed_dag_run_to_running(self):
        date = self.execution_dates[0]
        dr = self._create_test_dag_run(State.SUCCESS, date)
        middle_time = timezone.utcnow()
        self._set_default_task_instance_states(dr)

        altered = set_dag_run_state_to_running(self.dag1, date, commit=True)

        # None of the tasks should be altered.
        self.assertEqual(len(altered), 0)
        self._verify_dag_run_state(self.dag1, date, State.RUNNING)
        self._verify_task_instance_states_remain_default(dr)
        self._verify_dag_run_dates(self.dag1, date, State.RUNNING, middle_time)

    def test_set_state_without_commit(self):
        date = self.execution_dates[0]
        dr = self._create_test_dag_run(State.RUNNING, date)
        self._set_default_task_instance_states(dr)

        will_be_altered = set_dag_run_state_to_running(self.dag1, date, commit=False)

        # None of the tasks will be altered.
        self.assertEqual(len(will_be_altered), 0)
        self._verify_dag_run_state(self.dag1, date, State.RUNNING)
        self._verify_task_instance_states_remain_default(dr)

        will_be_altered = set_dag_run_state_to_failed(self.dag1, date, commit=False)

        # Only the running task will be altered.
        self.assertEqual(len(will_be_altered), 1)
        self._verify_dag_run_state(self.dag1, date, State.RUNNING)
        self._verify_task_instance_states_remain_default(dr)

        will_be_altered = set_dag_run_state_to_success(self.dag1, date, commit=False)

        # All except the SUCCESS task should be altered.
        self.assertEqual(len(will_be_altered), 5)
        self._verify_dag_run_state(self.dag1, date, State.RUNNING)
        self._verify_task_instance_states_remain_default(dr)

    def test_set_state_with_multiple_dagruns(self):
        self.dag2.create_dagrun(
            run_id='manual__' + datetime.now().isoformat(),
            state=State.FAILED,
            execution_date=self.execution_dates[0],
            session=self.session
        )
        self.dag2.create_dagrun(
            run_id='manual__' + datetime.now().isoformat(),
            state=State.FAILED,
            execution_date=self.execution_dates[1],
            session=self.session
        )
        self.dag2.create_dagrun(
            run_id='manual__' + datetime.now().isoformat(),
            state=State.RUNNING,
            execution_date=self.execution_dates[2],
            session=self.session
        )

        altered = set_dag_run_state_to_success(self.dag2, self.execution_dates[1],
                                               commit=True)

        # Recursively count number of tasks in the dag
        def count_dag_tasks(dag):
            count = len(dag.tasks)
            subdag_counts = [count_dag_tasks(subdag) for subdag in dag.subdags]
            count += sum(subdag_counts)
            return count

        self.assertEqual(len(altered), count_dag_tasks(self.dag2))
        self._verify_dag_run_state(self.dag2, self.execution_dates[1], State.SUCCESS)

        # Make sure other dag status are not changed
        models.DagRun.find(dag_id=self.dag2.dag_id,
                           execution_date=self.execution_dates[0])
        self._verify_dag_run_state(self.dag2, self.execution_dates[0], State.FAILED)
        models.DagRun.find(dag_id=self.dag2.dag_id,
                           execution_date=self.execution_dates[2])
        self._verify_dag_run_state(self.dag2, self.execution_dates[2], State.RUNNING)

    def test_set_dag_run_state_edge_cases(self):
        # Dag does not exist
        altered = set_dag_run_state_to_success(None, self.execution_dates[0])
        self.assertEqual(len(altered), 0)
        altered = set_dag_run_state_to_failed(None, self.execution_dates[0])
        self.assertEqual(len(altered), 0)
        altered = set_dag_run_state_to_running(None, self.execution_dates[0])
        self.assertEqual(len(altered), 0)

        # Invalid execution date
        altered = set_dag_run_state_to_success(self.dag1, None)
        self.assertEqual(len(altered), 0)
        altered = set_dag_run_state_to_failed(self.dag1, None)
        self.assertEqual(len(altered), 0)
        altered = set_dag_run_state_to_running(self.dag1, None)
        self.assertEqual(len(altered), 0)

        # This will throw AssertionError since a timezone-naive execution
        # date is passed (these helpers require a localized execution_date).
        self.assertRaises(AssertionError, set_dag_run_state_to_success, self.dag1,
                          timezone.make_naive(self.execution_dates[0]))

        # altered = set_dag_run_state_to_success(self.dag1, self.execution_dates[0])
        # DagRun does not exist
        # This will throw AssertionError since dag.latest_execution_date does not exist
        self.assertRaises(AssertionError, set_dag_run_state_to_success,
                          self.dag1, self.execution_dates[0])

    def tearDown(self):
        self.dag1.clear()
        self.dag2.clear()

        self.session.query(models.DagRun).delete()
        self.session.query(models.TaskInstance).delete()
        self.session.query(models.DagStat).delete()
        self.session.commit()
        self.session.close()
Example #26
def deleteApiUser(apiUser):
    session = Session()
    session.query(models.User).filter(models.User.username == apiUser).delete()
    session.commit()
    session.close()
Example #27
class TestMarkDAGRun(unittest.TestCase):
    def setUp(self):
        self.dagbag = models.DagBag(include_examples=True)
        self.dag1 = self.dagbag.dags['test_example_bash_operator']
        self.dag2 = self.dagbag.dags['example_subdag_operator']

        self.execution_dates = [days_ago(3), days_ago(2), days_ago(1)]

        self.session = Session()

    def verify_dag_run_states(self, dag, date, state=State.SUCCESS):
        drs = models.DagRun.find(dag_id=dag.dag_id, execution_date=date)
        dr = drs[0]
        self.assertEqual(dr.get_state(), state)
        tis = dr.get_task_instances(session=self.session)
        for ti in tis:
            self.assertEqual(ti.state, state)

    def test_set_running_dag_run_state(self):
        date = self.execution_dates[0]
        dr = self.dag1.create_dagrun(
            run_id='manual__' + datetime.now().isoformat(),
            state=State.RUNNING,
            execution_date=date,
            session=self.session
        )
        for ti in dr.get_task_instances(session=self.session):
            ti.set_state(State.RUNNING, self.session)

        altered = set_dag_run_state(self.dag1, date, state=State.SUCCESS, commit=True)

        # All of the tasks should be altered
        self.assertEqual(len(altered), len(self.dag1.tasks))
        self.verify_dag_run_states(self.dag1, date)

    def test_set_success_dag_run_state(self):
        date = self.execution_dates[0]

        dr = self.dag1.create_dagrun(
            run_id='manual__' + datetime.now().isoformat(),
            state=State.SUCCESS,
            execution_date=date,
            session=self.session
        )
        for ti in dr.get_task_instances(session=self.session):
            ti.set_state(State.SUCCESS, self.session)

        altered = set_dag_run_state(self.dag1, date, state=State.SUCCESS, commit=True)

        # None of the tasks should be altered
        self.assertEqual(len(altered), 0)
        self.verify_dag_run_states(self.dag1, date)

    def test_set_failed_dag_run_state(self):
        date = self.execution_dates[0]
        dr = self.dag1.create_dagrun(
            run_id='manual__' + datetime.now().isoformat(),
            state=State.FAILED,
            execution_date=date,
            session=self.session
        )
        dr.get_task_instance('runme_0').set_state(State.FAILED, self.session)

        altered = set_dag_run_state(self.dag1, date, state=State.SUCCESS, commit=True)

        # All of the tasks should be altered
        self.assertEqual(len(altered), len(self.dag1.tasks))
        self.verify_dag_run_states(self.dag1, date)

    def test_set_mixed_dag_run_state(self):
        """
        This test checks function set_dag_run_state with mixed task instance
        state.
        """
        date = self.execution_dates[0]
        dr = self.dag1.create_dagrun(
            run_id='manual__' + datetime.now().isoformat(),
            state=State.FAILED,
            execution_date=date,
            session=self.session
        )
        # success task
        dr.get_task_instance('runme_0').set_state(State.SUCCESS, self.session)
        # skipped task
        dr.get_task_instance('runme_1').set_state(State.SKIPPED, self.session)
        # retry task
        dr.get_task_instance('runme_2').set_state(State.UP_FOR_RETRY, self.session)
        # queued task
        dr.get_task_instance('also_run_this').set_state(State.QUEUED, self.session)
        # running task
        dr.get_task_instance('run_after_loop').set_state(State.RUNNING, self.session)
        # failed task
        dr.get_task_instance('run_this_last').set_state(State.FAILED, self.session)

        altered = set_dag_run_state(self.dag1, date, state=State.SUCCESS, commit=True)

        self.assertEqual(len(altered), len(self.dag1.tasks) - 1) # only 1 task succeeded
        self.verify_dag_run_states(self.dag1, date)

    def test_set_state_without_commit(self):
        date = self.execution_dates[0]

        # Running dag run and task instances
        dr = self.dag1.create_dagrun(
            run_id='manual__' + datetime.now().isoformat(),
            state=State.RUNNING,
            execution_date=date,
            session=self.session
        )
        for ti in dr.get_task_instances(session=self.session):
            ti.set_state(State.RUNNING, self.session)

        altered = set_dag_run_state(self.dag1, date, state=State.SUCCESS, commit=False)

        # All of the tasks should be altered
        self.assertEqual(len(altered), len(self.dag1.tasks))

        # Both dag run and task instances' states should remain the same
        self.verify_dag_run_states(self.dag1, date, State.RUNNING)

    def test_set_state_with_multiple_dagruns(self):
        dr1 = self.dag2.create_dagrun(
            run_id='manual__' + datetime.now().isoformat(),
            state=State.FAILED,
            execution_date=self.execution_dates[0],
            session=self.session
        )
        dr2 = self.dag2.create_dagrun(
            run_id='manual__' + datetime.now().isoformat(),
            state=State.FAILED,
            execution_date=self.execution_dates[1],
            session=self.session
        )
        dr3 = self.dag2.create_dagrun(
            run_id='manual__' + datetime.now().isoformat(),
            state=State.RUNNING,
            execution_date=self.execution_dates[2],
            session=self.session
        )

        altered = set_dag_run_state(self.dag2, self.execution_dates[1],
                                state=State.SUCCESS, commit=True)

        # Recursively count number of tasks in the dag
        def count_dag_tasks(dag):
            count = len(dag.tasks)
            subdag_counts = [count_dag_tasks(subdag) for subdag in dag.subdags]
            count += sum(subdag_counts)
            return count

        self.assertEqual(len(altered), count_dag_tasks(self.dag2))
        self.verify_dag_run_states(self.dag2, self.execution_dates[1])

        # Make sure other dag status are not changed
        dr1 = models.DagRun.find(dag_id=self.dag2.dag_id, execution_date=self.execution_dates[0])
        dr1 = dr1[0]
        self.assertEqual(dr1.get_state(), State.FAILED)
        dr3 = models.DagRun.find(dag_id=self.dag2.dag_id, execution_date=self.execution_dates[2])
        dr3 = dr3[0]
        self.assertEqual(dr3.get_state(), State.RUNNING)

    def test_set_dag_run_state_edge_cases(self):
        # Dag does not exist
        altered = set_dag_run_state(None, self.execution_dates[0])
        self.assertEqual(len(altered), 0)

        # Invalid execution date
        altered = set_dag_run_state(self.dag1, None)
        self.assertEqual(len(altered), 0)
        self.assertRaises(AssertionError, set_dag_run_state, self.dag1, timedelta(microseconds=-1))

        # DagRun does not exist
        # This will throw AssertionError since dag.latest_execution_date does not exist
        self.assertRaises(AssertionError, set_dag_run_state, self.dag1, self.execution_dates[0])

    def tearDown(self):
        self.dag1.clear()
        self.dag2.clear()

        self.session.query(models.DagRun).delete()
        self.session.query(models.TaskInstance).delete()
        self.session.query(models.DagStat).delete()
        self.session.commit()
Example #28
 def tearDown(self):
     session = Session()
     session.query(TaskInstance).filter_by(dag_id=TEST_DAG_ID).delete()
     session.query(TaskFail).filter_by(dag_id=TEST_DAG_ID).delete()
     session.commit()
     session.close()
Example #29
def set_state(task,
              execution_date,
              upstream=False,
              downstream=False,
              future=False,
              past=False,
              state=State.SUCCESS,
              commit=False):
    """
    Set the state of a task instance and if needed its relatives. Can set state
    for future tasks (calculated from execution_date) and retroactively
    for past tasks. Will verify integrity of past dag runs in order to create
    tasks that did not exist. It will not create dag runs that are missing
    on the schedule (but it will for subdag dag runs if needed).
    :param task: the task from which to work. task.task.dag needs to be set
    :param execution_date: the execution date from which to start looking
    :param upstream: Mark all parents (upstream tasks)
    :param downstream: Mark all siblings (downstream tasks) of task_id, including SubDags
    :param future: Mark all future tasks on the interval of the dag up until
        last execution date.
    :param past: Retroactively mark all tasks starting from start_date of the DAG
    :param state: State to which the tasks need to be set
    :param commit: Commit tasks to be altered to the database
    :return: list of tasks that have been created and updated
    """
    assert timezone.is_localized(execution_date)

    # microseconds are supported by the database, but are not handled
    # correctly by airflow on e.g. the filesystem and in other places
    execution_date = execution_date.replace(microsecond=0)

    assert task.dag is not None
    dag = task.dag

    latest_execution_date = dag.latest_execution_date
    assert latest_execution_date is not None

    # determine date range of dag runs and tasks to consider
    end_date = latest_execution_date if future else execution_date

    if 'start_date' in dag.default_args:
        start_date = dag.default_args['start_date']
    elif dag.start_date:
        start_date = dag.start_date
    else:
        start_date = execution_date

    start_date = execution_date if not past else start_date

    if dag.schedule_interval == '@once':
        dates = [start_date]
    else:
        dates = dag.date_range(start_date=start_date, end_date=end_date)

    # find relatives (siblings = downstream, parents = upstream) if needed
    task_ids = [task.task_id]
    if downstream:
        relatives = task.get_flat_relatives(upstream=False)
        task_ids += [t.task_id for t in relatives]
    if upstream:
        relatives = task.get_flat_relatives(upstream=True)
        task_ids += [t.task_id for t in relatives]

    # verify the integrity of the dag runs in case a task was added or removed
    # set the confirmed execution dates as they might be different
    # from what was provided
    confirmed_dates = []
    drs = DagRun.find(dag_id=dag.dag_id, execution_date=dates)
    for dr in drs:
        dr.dag = dag
        dr.verify_integrity()
        confirmed_dates.append(dr.execution_date)

    # go through subdagoperators and create dag runs. We will only work
    # within the scope of the subdag. We won't propagate to the parent dag,
    # but we will propagate from parent to subdag.
    session = Session()
    dags = [dag]
    sub_dag_ids = []
    while len(dags) > 0:
        current_dag = dags.pop()
        for task_id in task_ids:
            if not current_dag.has_task(task_id):
                continue

            current_task = current_dag.get_task(task_id)
            if isinstance(current_task, SubDagOperator):
                # this works as a kind of integrity check
                # it creates missing dag runs for subdagoperators,
                # maybe this should be moved to dagrun.verify_integrity
                drs = _create_dagruns(
                    current_task.subdag,
                    execution_dates=confirmed_dates,
                    state=State.RUNNING,
                    run_id_template=BackfillJob.ID_FORMAT_PREFIX)

                for dr in drs:
                    dr.dag = current_task.subdag
                    dr.verify_integrity()
                    if commit:
                        dr.state = state
                        session.merge(dr)

                dags.append(current_task.subdag)
                sub_dag_ids.append(current_task.subdag.dag_id)

    # now look for the task instances that are affected
    TI = TaskInstance

    # get all tasks of the main dag that will be affected by a state change
    qry_dag = session.query(TI).filter(TI.dag_id == dag.dag_id,
                                       TI.execution_date.in_(confirmed_dates),
                                       TI.task_id.in_(task_ids)).filter(
                                           or_(TI.state.is_(None),
                                               TI.state != state))

    # get *all* tasks of the sub dags
    if len(sub_dag_ids) > 0:
        qry_sub_dag = session.query(TI).filter(
            TI.dag_id.in_(sub_dag_ids),
            TI.execution_date.in_(confirmed_dates)).filter(
                or_(TI.state.is_(None), TI.state != state))

    if commit:
        tis_altered = qry_dag.with_for_update().all()
        if len(sub_dag_ids) > 0:
            tis_altered += qry_sub_dag.with_for_update().all()
        for ti in tis_altered:
            ti.state = state
        session.commit()
    else:
        tis_altered = qry_dag.all()
        if len(sub_dag_ids) > 0:
            tis_altered += qry_sub_dag.all()

    session.expunge_all()
    session.close()

    return tis_altered
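The docstring above explains how set_state expands the selection to relatives and subdag runs; the following is a minimal, hedged calling sketch. It assumes an Airflow 1.x test environment with the example DAGs loaded and at least one existing dag run for the DAG (the helper asserts that dag.latest_execution_date is set); the timezone.datetime helper and the task id used here are assumptions for illustration:

from airflow import models
from airflow.utils import timezone
from airflow.utils.state import State

dagbag = models.DagBag(include_examples=True)
dag = dagbag.dags['example_bash_operator']
task = dag.get_task('runme_0')

# Dry run: report which task instances would be marked SUCCESS for this
# execution date and everything downstream of the task, without committing.
altered = set_state(task=task,
                    execution_date=timezone.datetime(2018, 1, 1),
                    downstream=True,
                    state=State.SUCCESS,
                    commit=False)
print(len(altered))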
Example #30
 def setUpClass(cls):
     super(TestPoolApiExperimental, cls).setUpClass()
     session = Session()
     session.query(Pool).delete()
     session.commit()
     session.close()
Example #31
 def setUpClass(cls):
     super(TestPoolModelView, cls).setUpClass()
     session = Session()
     session.query(models.Pool).delete()
     session.commit()
     session.close()
Example #32
class TestPoolApiExperimental(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        super(TestPoolApiExperimental, cls).setUpClass()
        session = Session()
        session.query(Pool).delete()
        session.commit()
        session.close()

    def setUp(self):
        super(TestPoolApiExperimental, self).setUp()
        configuration.load_test_config()
        app, _ = application.create_app(testing=True)
        self.app = app.test_client()
        self.session = Session()
        self.pools = []
        for i in range(2):
            name = 'experimental_%s' % (i + 1)
            pool = Pool(
                pool=name,
                slots=i,
                description=name,
            )
            self.session.add(pool)
            self.pools.append(pool)
        self.session.commit()
        self.pool = self.pools[0]

    def tearDown(self):
        self.session.query(Pool).delete()
        self.session.commit()
        self.session.close()
        super(TestPoolApiExperimental, self).tearDown()

    def _get_pool_count(self):
        response = self.app.get('/api/experimental/pools')
        self.assertEqual(response.status_code, 200)
        return len(json.loads(response.data.decode('utf-8')))

    def test_get_pool(self):
        response = self.app.get(
            '/api/experimental/pools/{}'.format(self.pool.pool), )
        self.assertEqual(response.status_code, 200)
        self.assertEqual(json.loads(response.data.decode('utf-8')),
                         self.pool.to_json())

    def test_get_pool_non_existing(self):
        response = self.app.get('/api/experimental/pools/foo')
        self.assertEqual(response.status_code, 404)
        self.assertEqual(
            json.loads(response.data.decode('utf-8'))['error'],
            "Pool 'foo' doesn't exist")

    def test_get_pools(self):
        response = self.app.get('/api/experimental/pools')
        self.assertEqual(response.status_code, 200)
        pools = json.loads(response.data.decode('utf-8'))
        self.assertEqual(len(pools), 2)
        for i, pool in enumerate(sorted(pools, key=lambda p: p['pool'])):
            self.assertDictEqual(pool, self.pools[i].to_json())

    def test_create_pool(self):
        response = self.app.post(
            '/api/experimental/pools',
            data=json.dumps({
                'name': 'foo',
                'slots': 1,
                'description': '',
            }),
            content_type='application/json',
        )
        self.assertEqual(response.status_code, 200)
        pool = json.loads(response.data.decode('utf-8'))
        self.assertEqual(pool['pool'], 'foo')
        self.assertEqual(pool['slots'], 1)
        self.assertEqual(pool['description'], '')
        self.assertEqual(self._get_pool_count(), 3)

    def test_create_pool_with_bad_name(self):
        for name in ('', '    '):
            response = self.app.post(
                '/api/experimental/pools',
                data=json.dumps({
                    'name': name,
                    'slots': 1,
                    'description': '',
                }),
                content_type='application/json',
            )
            self.assertEqual(response.status_code, 400)
            self.assertEqual(
                json.loads(response.data.decode('utf-8'))['error'],
                "Pool name shouldn't be empty",
            )
        self.assertEqual(self._get_pool_count(), 2)

    def test_delete_pool(self):
        response = self.app.delete(
            '/api/experimental/pools/{}'.format(self.pool.pool), )
        self.assertEqual(response.status_code, 200)
        self.assertEqual(json.loads(response.data.decode('utf-8')),
                         self.pool.to_json())
        self.assertEqual(self._get_pool_count(), 1)

    def test_delete_pool_non_existing(self):
        response = self.app.delete('/api/experimental/pools/foo', )
        self.assertEqual(response.status_code, 404)
        self.assertEqual(
            json.loads(response.data.decode('utf-8'))['error'],
            "Pool 'foo' doesn't exist")
Example #33
class TestConnectionModelView(unittest.TestCase):

    CREATE_ENDPOINT = '/admin/connection/new/?url=/admin/connection/'
    CONN_ID = "new_conn"

    CONN = {
        "conn_id": CONN_ID,
        "conn_type": "http",
        "host": "https://example.com",
    }

    @classmethod
    def setUpClass(cls):
        super(TestConnectionModelView, cls).setUpClass()
        app = application.create_app(testing=True)
        app.config['WTF_CSRF_METHODS'] = []
        cls.app = app.test_client()

    def setUp(self):
        self.session = Session()

    def tearDown(self):
        self.session.query(models.Connection) \
                    .filter(models.Connection.conn_id == self.CONN_ID).delete()
        self.session.commit()
        self.session.close()
        super(TestConnectionModelView, self).tearDown()

    def test_create(self):
        response = self.app.post(
            self.CREATE_ENDPOINT,
            data=self.CONN,
            follow_redirects=True,
        )
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            self.session.query(models.Connection).filter(
                models.Connection.conn_id == self.CONN_ID).count(), 1)

    def test_create_error(self):
        response = self.app.post(
            self.CREATE_ENDPOINT,
            data={"conn_type": "http"},
            follow_redirects=True,
        )
        self.assertEqual(response.status_code, 200)
        self.assertIn(b'has-error', response.data)
        self.assertEqual(
            self.session.query(models.Connection).filter(
                models.Connection.conn_id == self.CONN_ID).count(), 0)

    def test_create_extras(self):
        data = self.CONN.copy()
        data.update({
            "conn_type": "google_cloud_platform",
            "extra__google_cloud_platform__num_retries": "2",
        })
        response = self.app.post(
            self.CREATE_ENDPOINT,
            data=data,
            follow_redirects=True,
        )
        self.assertEqual(response.status_code, 200)
        conn = self.session.query(models.Connection).filter(
            models.Connection.conn_id == self.CONN_ID).one()

        self.assertEqual(
            conn.extra_dejson['extra__google_cloud_platform__num_retries'], 2)

    def test_create_extras_empty_field(self):
        data = self.CONN.copy()
        data.update({
            "conn_type": "google_cloud_platform",
            "extra__google_cloud_platform__num_retries": "",
        })
        response = self.app.post(
            self.CREATE_ENDPOINT,
            data=data,
            follow_redirects=True,
        )
        self.assertEqual(response.status_code, 200)
        conn = self.session.query(models.Connection).filter(
            models.Connection.conn_id == self.CONN_ID).one()

        self.assertIsNone(
            conn.extra_dejson['extra__google_cloud_platform__num_retries'])
Example #34
class TestLogView(unittest.TestCase):
    DAG_ID = 'dag_for_testing_log_view'
    TASK_ID = 'task_for_testing_log_view'
    DEFAULT_DATE = datetime(2017, 9, 1)
    ENDPOINT = '/admin/airflow/log?dag_id={dag_id}&task_id={task_id}&execution_date={execution_date}'.format(
        dag_id=DAG_ID,
        task_id=TASK_ID,
        execution_date=DEFAULT_DATE,
    )

    @classmethod
    def setUpClass(cls):
        super(TestLogView, cls).setUpClass()
        session = Session()
        session.query(TaskInstance).filter(
            TaskInstance.dag_id == cls.DAG_ID,
            TaskInstance.task_id == cls.TASK_ID,
            TaskInstance.execution_date == cls.DEFAULT_DATE).delete()
        session.commit()
        session.close()

    def setUp(self):
        super(TestLogView, self).setUp()
        # Make sure that the configure_logging is not cached
        self.old_modules = dict(sys.modules)

        conf.load_test_config()

        # Create a custom logging configuration
        configuration.load_test_config()
        logging_config = copy.deepcopy(DEFAULT_LOGGING_CONFIG)
        current_dir = os.path.dirname(os.path.abspath(__file__))
        logging_config['handlers']['task'][
            'base_log_folder'] = os.path.normpath(
                os.path.join(current_dir, 'test_logs'))
        logging_config['handlers']['task']['filename_template'] = \
            '{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts | replace(":", ".") }}/{{ try_number }}.log'

        # Write the custom logging configuration to a file
        self.settings_folder = tempfile.mkdtemp()
        settings_file = os.path.join(self.settings_folder,
                                     "airflow_local_settings.py")
        new_logging_file = "LOGGING_CONFIG = {}".format(logging_config)
        with open(settings_file, 'w') as handle:
            handle.writelines(new_logging_file)
        sys.path.append(self.settings_folder)
        conf.set('core', 'logging_config_class',
                 'airflow_local_settings.LOGGING_CONFIG')

        app = application.create_app(testing=True)
        self.app = app.test_client()
        self.session = Session()
        from airflow.www.views import dagbag
        dag = DAG(self.DAG_ID, start_date=self.DEFAULT_DATE)
        task = DummyOperator(task_id=self.TASK_ID, dag=dag)
        dagbag.bag_dag(dag, parent_dag=dag, root_dag=dag)
        ti = TaskInstance(task=task, execution_date=self.DEFAULT_DATE)
        ti.try_number = 1
        self.session.merge(ti)
        self.session.commit()

    def tearDown(self):
        logging.config.dictConfig(DEFAULT_LOGGING_CONFIG)
        self.session.query(TaskInstance).filter(
            TaskInstance.dag_id == self.DAG_ID,
            TaskInstance.task_id == self.TASK_ID,
            TaskInstance.execution_date == self.DEFAULT_DATE).delete()
        self.session.commit()
        self.session.close()

        # Remove any new modules imported during the test run. This lets us
        # import the same source files for more than one test.
        for m in [m for m in sys.modules if m not in self.old_modules]:
            del sys.modules[m]

        sys.path.remove(self.settings_folder)
        shutil.rmtree(self.settings_folder)
        conf.set('core', 'logging_config_class', '')

        super(TestLogView, self).tearDown()

    def test_get_file_task_log(self):
        response = self.app.get(
            TestLogView.ENDPOINT,
            follow_redirects=True,
        )
        self.assertEqual(response.status_code, 200)
        self.assertIn('Log by attempts', response.data.decode('utf-8'))

    def test_get_logs_with_metadata_as_download_file(self):
        url_template = "/admin/airflow/get_logs_with_metadata?dag_id={}&" \
                       "task_id={}&execution_date={}&" \
                       "try_number={}&metadata={}&format=file"
        try_number = 1
        url = url_template.format(self.DAG_ID, self.TASK_ID,
                                  quote_plus(self.DEFAULT_DATE.isoformat()),
                                  try_number, json.dumps({}))
        response = self.app.get(url)
        expected_filename = '{}/{}/{}/{}.log'.format(
            self.DAG_ID, self.TASK_ID, self.DEFAULT_DATE.isoformat(),
            try_number)

        content_disposition = response.headers.get('Content-Disposition')
        self.assertTrue(content_disposition.startswith('attachment'))
        self.assertTrue(expected_filename in content_disposition)
        self.assertEqual(200, response.status_code)
        self.assertIn('Log for testing.', response.data.decode('utf-8'))

    def test_get_logs_with_metadata_as_download_large_file(self):
        with mock.patch(
                "airflow.utils.log.file_task_handler.FileTaskHandler.read"
        ) as read_mock:
            first_return = (['1st line'], [{}])
            second_return = (['2nd line'], [{'end_of_log': False}])
            third_return = (['3rd line'], [{'end_of_log': True}])
            fourth_return = (['should never be read'], [{'end_of_log': True}])
            read_mock.side_effect = [
                first_return, second_return, third_return, fourth_return
            ]
            url_template = "/admin/airflow/get_logs_with_metadata?dag_id={}&" \
                           "task_id={}&execution_date={}&" \
                           "try_number={}&metadata={}&format=file"
            try_number = 1
            url = url_template.format(
                self.DAG_ID, self.TASK_ID,
                quote_plus(self.DEFAULT_DATE.isoformat()), try_number,
                json.dumps({}))
            response = self.app.get(url)

            self.assertIn('1st line', response.data.decode('utf-8'))
            self.assertIn('2nd line', response.data.decode('utf-8'))
            self.assertIn('3rd line', response.data.decode('utf-8'))
            self.assertNotIn('should never be read',
                             response.data.decode('utf-8'))

    def test_get_logs_with_metadata(self):
        url_template = "/admin/airflow/get_logs_with_metadata?dag_id={}&" \
                       "task_id={}&execution_date={}&" \
                       "try_number={}&metadata={}"
        response = \
            self.app.get(url_template.format(self.DAG_ID,
                                             self.TASK_ID,
                                             quote_plus(self.DEFAULT_DATE.isoformat()),
                                             1,
                                             json.dumps({})))

        self.assertIn('"message":', response.data.decode('utf-8'))
        self.assertIn('"metadata":', response.data.decode('utf-8'))
        self.assertIn('Log for testing.', response.data.decode('utf-8'))
        self.assertEqual(200, response.status_code)

    def test_get_logs_with_null_metadata(self):
        url_template = "/admin/airflow/get_logs_with_metadata?dag_id={}&" \
                       "task_id={}&execution_date={}&" \
                       "try_number={}&metadata=null"
        response = \
            self.app.get(url_template.format(self.DAG_ID,
                                             self.TASK_ID,
                                             quote_plus(self.DEFAULT_DATE.isoformat()),
                                             1))

        self.assertIn('"message":', response.data.decode('utf-8'))
        self.assertIn('"metadata":', response.data.decode('utf-8'))
        self.assertIn('Log for testing.', response.data.decode('utf-8'))
        self.assertEqual(200, response.status_code)
Example #35
 def _reset_db(self):
     session = Session()
     session.query(DagRun).delete()
     session.query(TaskInstance).delete()
     session.commit()
     session.close()
Example #36
class ViewWithDateTimeAndNumRunsAndDagRunsFormTester:
    DAG_ID = 'dag_for_testing_dt_nr_dr_form'
    DEFAULT_DATE = datetime(2017, 9, 1)
    RUNS_DATA = [
        ('dag_run_for_testing_dt_nr_dr_form_4', datetime(2018, 4, 4)),
        ('dag_run_for_testing_dt_nr_dr_form_3', datetime(2018, 3, 3)),
        ('dag_run_for_testing_dt_nr_dr_form_2', datetime(2018, 2, 2)),
        ('dag_run_for_testing_dt_nr_dr_form_1', datetime(2018, 1, 1)),
    ]

    def __init__(self, test, endpoint):
        self.test = test
        self.endpoint = endpoint

    def setUp(self):
        configuration.load_test_config()
        app = application.create_app(testing=True)
        app.config['WTF_CSRF_METHODS'] = []
        self.app = app.test_client()
        self.session = Session()
        from airflow.www.views import dagbag
        from airflow.utils.state import State
        dag = DAG(self.DAG_ID, start_date=self.DEFAULT_DATE)
        dagbag.bag_dag(dag, parent_dag=dag, root_dag=dag)
        self.runs = []
        for rd in self.RUNS_DATA:
            run = dag.create_dagrun(
                run_id=rd[0],
                execution_date=rd[1],
                state=State.SUCCESS,
                external_trigger=True
            )
            self.runs.append(run)

    def tearDown(self):
        self.session.query(DagRun).filter(
            DagRun.dag_id == self.DAG_ID).delete()
        self.session.commit()
        self.session.close()

    def assertBaseDateAndNumRuns(self, base_date, num_runs, data):
        self.test.assertNotIn('name="base_date" value="{}"'.format(base_date), data)
        self.test.assertNotIn('<option selected="" value="{}">{}</option>'.format(
            num_runs, num_runs), data)

    def assertRunIsNotInDropdown(self, run, data):
        self.test.assertNotIn(run.execution_date.isoformat(), data)
        self.test.assertNotIn(run.run_id, data)

    def assertRunIsInDropdownNotSelected(self, run, data):
        self.test.assertIn('<option value="{}">{}</option>'.format(
            run.execution_date.isoformat(), run.run_id), data)

    def assertRunIsSelected(self, run, data):
        self.test.assertIn('<option selected value="{}">{}</option>'.format(
            run.execution_date.isoformat(), run.run_id), data)

    def test_with_default_parameters(self):
        """
        Tests graph view with no URL parameter.
        Should show all dag runs in the drop down.
        Should select the latest dag run.
        Should set base date to current date (not asserted)
        """
        response = self.app.get(
            self.endpoint
        )
        self.test.assertEqual(response.status_code, 200)
        data = response.data.decode('utf-8')
        self.test.assertIn('Base date:', data)
        self.test.assertIn('Number of runs:', data)
        self.assertRunIsSelected(self.runs[0], data)
        self.assertRunIsInDropdownNotSelected(self.runs[1], data)
        self.assertRunIsInDropdownNotSelected(self.runs[2], data)
        self.assertRunIsInDropdownNotSelected(self.runs[3], data)

    def test_with_execution_date_parameter_only(self):
        """
        Tests graph view with execution_date URL parameter.
        Scenario: click link from dag runs view.
        Should only show dag runs older than execution_date in the drop down.
        Should select the particular dag run.
        Should set base date to execution date.
        """
        response = self.app.get(
            self.endpoint + '&execution_date={}'.format(
                self.runs[1].execution_date.isoformat())
        )
        self.test.assertEqual(response.status_code, 200)
        data = response.data.decode('utf-8')
        self.assertBaseDateAndNumRuns(
            self.runs[1].execution_date,
            configuration.getint('webserver', 'default_dag_run_display_number'),
            data)
        self.assertRunIsNotInDropdown(self.runs[0], data)
        self.assertRunIsSelected(self.runs[1], data)
        self.assertRunIsInDropdownNotSelected(self.runs[2], data)
        self.assertRunIsInDropdownNotSelected(self.runs[3], data)

    def test_with_base_date_and_num_runs_parmeters_only(self):
        """
        Tests graph view with base_date and num_runs URL parameters.
        Should only show dag runs older than base_date in the drop down,
        limited to num_runs.
        Should select the latest dag run.
        Should set base date and num runs to submitted values.
        """
        response = self.app.get(
            self.endpoint + '&base_date={}&num_runs=2'.format(
                self.runs[1].execution_date.isoformat())
        )
        self.test.assertEqual(response.status_code, 200)
        data = response.data.decode('utf-8')
        self.assertBaseDateAndNumRuns(self.runs[1].execution_date, 2, data)
        self.assertRunIsNotInDropdown(self.runs[0], data)
        self.assertRunIsSelected(self.runs[1], data)
        self.assertRunIsInDropdownNotSelected(self.runs[2], data)
        self.assertRunIsNotInDropdown(self.runs[3], data)

    def test_with_base_date_and_num_runs_and_execution_date_outside(self):
        """
        Tests graph view with base_date and num_runs and execution-date URL parameters.
        Scenario: change the base date and num runs and press "Go",
        the selected execution date is outside the new range.
        Should only show dag runs older than base_date in the drop down.
        Should select the latest dag run within the range.
        Should set base date and num runs to submitted values.
        """
        response = self.app.get(
            self.endpoint + '&base_date={}&num_runs=42&execution_date={}'.format(
                self.runs[1].execution_date.isoformat(),
                self.runs[0].execution_date.isoformat())
        )
        self.test.assertEqual(response.status_code, 200)
        data = response.data.decode('utf-8')
        self.assertBaseDateAndNumRuns(self.runs[1].execution_date, 42, data)
        self.assertRunIsNotInDropdown(self.runs[0], data)
        self.assertRunIsSelected(self.runs[1], data)
        self.assertRunIsInDropdownNotSelected(self.runs[2], data)
        self.assertRunIsInDropdownNotSelected(self.runs[3], data)

    def test_with_base_date_and_num_runs_and_execution_date_within(self):
        """
        Tests graph view with base_date and num_runs and execution-date URL parameters.
        Scenario: change the base date and num runs and press "Go",
        the selected execution date is within the new range.
        Should only show dag runs older than base_date in the drop down.
        Should select the dag run with the execution date.
        Should set base date and num runs to submitted values.
        """
        response = self.app.get(
            self.endpoint + '&base_date={}&num_runs=5&execution_date={}'.format(
                self.runs[2].execution_date.isoformat(),
                self.runs[3].execution_date.isoformat())
        )
        self.test.assertEqual(response.status_code, 200)
        data = response.data.decode('utf-8')
        self.assertBaseDateAndNumRuns(self.runs[2].execution_date, 5, data)
        self.assertRunIsNotInDropdown(self.runs[0], data)
        self.assertRunIsNotInDropdown(self.runs[1], data)
        self.assertRunIsInDropdownNotSelected(self.runs[2], data)
        self.assertRunIsSelected(self.runs[3], data)
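
ViewWithDateTimeAndNumRunsAndDagRunsFormTester is built for composition rather than inheritance: it takes the real unittest.TestCase plus the endpoint under test, and that test case forwards setUp/tearDown and the test methods to it. A minimal sketch of the wiring is below; the wrapper class name and the graph-view endpoint are assumptions for illustration, not taken from this listing.

class TestGraphViewWithDateTimeAndNumRunsAndDagRunsForm(unittest.TestCase):
    # Hypothetical wrapper: delegates everything to the tester above.
    def setUp(self):
        endpoint = '/admin/airflow/graph?dag_id=dag_for_testing_dt_nr_dr_form'
        self.tester = ViewWithDateTimeAndNumRunsAndDagRunsFormTester(
            self, endpoint)
        self.tester.setUp()

    def tearDown(self):
        self.tester.tearDown()

    def test_with_default_parameters(self):
        self.tester.test_with_default_parameters()

    def test_with_execution_date_parameter_only(self):
        self.tester.test_with_execution_date_parameter_only()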
Example #37
 def tearDown(self):
     session = Session()
     session.query(DagRun).delete()
     session.commit()
     session.close()
     super(TestDagRunsEndpoint, self).tearDown()
 def tearDown(self):
     session = Session()
     session.query(DagRun).delete()
     session.commit()
     session.close()
     super().tearDown()
Example #39
class TestTriggerDag(unittest.TestCase):
    def setUp(self):
        app = application.create_app(testing=True)
        app.config['WTF_CSRF_METHODS'] = []
        self.app = app.test_client()
        self.session = Session()
        models.DagBag().get_dag("example_bash_operator").sync_to_db()

    def test_trigger_dag_button_normal_exist(self):
        resp = self.app.get('/', follow_redirects=True)
        self.assertIn('/trigger?dag_id=example_bash_operator',
                      resp.data.decode('utf-8'))
        self.assertIn("return confirmDeleteDag(this, 'example_bash_operator')",
                      resp.data.decode('utf-8'))

    @pytest.mark.xfail(condition=True,
                       reason="This test might be flaky on mysql")
    def test_trigger_dag_button(self):

        test_dag_id = "example_bash_operator"

        DR = models.DagRun
        self.session.query(DR).delete()
        self.session.commit()

        self.app.post('/admin/airflow/trigger?dag_id={}'.format(test_dag_id))

        run = self.session.query(DR).filter(DR.dag_id == test_dag_id).first()
        self.assertIsNotNone(run)
        self.assertIn("manual__", run.run_id)

    @pytest.mark.xfail(condition=True,
                       reason="This test might be flaky on mysql")
    def test_trigger_dag_conf(self):

        test_dag_id = "example_bash_operator"
        conf_dict = {'string': 'Hello, World!'}

        DR = models.DagRun
        self.session.query(DR).delete()
        self.session.commit()

        self.app.post('/admin/airflow/trigger?dag_id={}'.format(test_dag_id),
                      data={'conf': json.dumps(conf_dict)})

        run = self.session.query(DR).filter(DR.dag_id == test_dag_id).first()
        self.assertIsNotNone(run)
        self.assertIn("manual__", run.run_id)
        self.assertEqual(run.conf, conf_dict)

    @pytest.mark.xfail(condition=True,
                       reason="This test might be flaky on mysql")
    def test_trigger_dag_conf_malformed(self):
        test_dag_id = "example_bash_operator"

        DR = models.DagRun
        self.session.query(DR).delete()
        self.session.commit()

        response = self.app.post(
            '/admin/airflow/trigger?dag_id={}'.format(test_dag_id),
            data={'conf': '{"a": "b"'})
        self.assertIn('Invalid JSON configuration',
                      response.data.decode('utf-8'))

        run = self.session.query(DR).filter(DR.dag_id == test_dag_id).first()
        self.assertIsNone(run)

    def test_trigger_dag_form(self):
        test_dag_id = "example_bash_operator"

        resp = self.app.get(
            '/admin/airflow/trigger?dag_id={}'.format(test_dag_id))

        self.assertEqual(resp.status_code, 200)
        self.assertIn('Trigger DAG: {}'.format(test_dag_id),
                      resp.data.decode('utf-8'))
Example #40
File: test_views.py Project: thbeh/airflow
class TestPoolModelView(unittest.TestCase):

    CREATE_ENDPOINT = '/admin/pool/new/?url=/admin/pool/'

    @classmethod
    def setUpClass(cls):
        super(TestPoolModelView, cls).setUpClass()
        session = Session()
        session.query(models.Pool).delete()
        session.commit()
        session.close()

    def setUp(self):
        super(TestPoolModelView, self).setUp()
        configuration.load_test_config()
        app = application.create_app(testing=True)
        app.config['WTF_CSRF_METHODS'] = []
        self.app = app.test_client()
        self.session = Session()
        self.pool = {
            'pool': 'test-pool',
            'slots': 777,
            'description': 'test-pool-description',
        }

    def tearDown(self):
        self.session.query(models.Pool).delete()
        self.session.commit()
        self.session.close()
        super(TestPoolModelView, self).tearDown()

    def test_create_pool(self):
        response = self.app.post(
            self.CREATE_ENDPOINT,
            data=self.pool,
            follow_redirects=True,
        )
        self.assertEqual(response.status_code, 200)
        self.assertEqual(self.session.query(models.Pool).count(), 1)

    def test_create_pool_with_same_name(self):
        # create test pool
        self.app.post(
            self.CREATE_ENDPOINT,
            data=self.pool,
            follow_redirects=True,
        )
        # create pool with the same name
        response = self.app.post(
            self.CREATE_ENDPOINT,
            data=self.pool,
            follow_redirects=True,
        )
        self.assertIn('Already exists.', response.data.decode('utf-8'))
        self.assertEqual(self.session.query(models.Pool).count(), 1)

    def test_create_pool_with_empty_name(self):
        self.pool['pool'] = ''
        response = self.app.post(
            self.CREATE_ENDPOINT,
            data=self.pool,
            follow_redirects=True,
        )
        self.assertIn('This field is required.', response.data.decode('utf-8'))
        self.assertEqual(self.session.query(models.Pool).count(), 0)
Example #41
class TestMarkTasks(unittest.TestCase):
    def setUp(self):
        self.dagbag = models.DagBag(include_examples=True)
        self.dag1 = self.dagbag.dags['test_example_bash_operator']
        self.dag2 = self.dagbag.dags['example_subdag_operator']

        self.execution_dates = [days_ago(2), days_ago(1)]

        drs = _create_dagruns(self.dag1, self.execution_dates,
                              state=State.RUNNING,
                              run_id_template="scheduled__{}")
        for dr in drs:
            dr.dag = self.dag1
            dr.verify_integrity()

        drs = _create_dagruns(self.dag2,
                              [self.dag2.default_args['start_date']],
                              state=State.RUNNING,
                              run_id_template="scheduled__{}")

        for dr in drs:
            dr.dag = self.dag2
            dr.verify_integrity()

        self.session = Session()

    def snapshot_state(self, dag, execution_dates):
        TI = models.TaskInstance
        tis = self.session.query(TI).filter(
            TI.dag_id == dag.dag_id,
            TI.execution_date.in_(execution_dates)
        ).all()

        self.session.expunge_all()

        return tis

    def verify_state(self, dag, task_ids, execution_dates, state, old_tis):
        TI = models.TaskInstance

        tis = self.session.query(TI).filter(
            TI.dag_id == dag.dag_id,
            TI.execution_date.in_(execution_dates)
        ).all()

        self.assertTrue(len(tis) > 0)

        for ti in tis:
            if ti.task_id in task_ids and ti.execution_date in execution_dates:
                self.assertEqual(ti.state, state)
            else:
                for old_ti in old_tis:
                    if (old_ti.task_id == ti.task_id
                            and old_ti.execution_date == ti.execution_date):
                        self.assertEqual(ti.state, old_ti.state)

    def test_mark_tasks_now(self):
        # set one task to success but do not commit
        snapshot = self.snapshot_state(self.dag1, self.execution_dates)
        task = self.dag1.get_task("runme_1")
        altered = set_state(task=task, execution_date=self.execution_dates[0],
                            upstream=False, downstream=False, future=False,
                            past=False, state=State.SUCCESS, commit=False)
        self.assertEqual(len(altered), 1)
        self.verify_state(self.dag1, [task.task_id], [self.execution_dates[0]],
                          None, snapshot)

        # set one and only one task to success
        altered = set_state(task=task, execution_date=self.execution_dates[0],
                            upstream=False, downstream=False, future=False,
                            past=False, state=State.SUCCESS, commit=True)
        self.assertEqual(len(altered), 1)
        self.verify_state(self.dag1, [task.task_id], [self.execution_dates[0]],
                          State.SUCCESS, snapshot)

        # set no tasks
        altered = set_state(task=task, execution_date=self.execution_dates[0],
                            upstream=False, downstream=False, future=False,
                            past=False, state=State.SUCCESS, commit=True)
        self.assertEqual(len(altered), 0)
        self.verify_state(self.dag1, [task.task_id], [self.execution_dates[0]],
                          State.SUCCESS, snapshot)

        # set task to other than success
        altered = set_state(task=task, execution_date=self.execution_dates[0],
                            upstream=False, downstream=False, future=False,
                            past=False, state=State.FAILED, commit=True)
        self.assertEqual(len(altered), 1)
        self.verify_state(self.dag1, [task.task_id], [self.execution_dates[0]],
                          State.FAILED, snapshot)

        # dont alter other tasks
        snapshot = self.snapshot_state(self.dag1, self.execution_dates)
        task = self.dag1.get_task("runme_0")
        altered = set_state(task=task, execution_date=self.execution_dates[0],
                            upstream=False, downstream=False, future=False,
                            past=False, state=State.SUCCESS, commit=True)
        self.assertEqual(len(altered), 1)
        self.verify_state(self.dag1, [task.task_id], [self.execution_dates[0]],
                          State.SUCCESS, snapshot)

    def test_mark_downstream(self):
        # test downstream
        snapshot = self.snapshot_state(self.dag1, self.execution_dates)
        task = self.dag1.get_task("runme_1")
        relatives = task.get_flat_relatives(upstream=False)
        task_ids = [t.task_id for t in relatives]
        task_ids.append(task.task_id)

        altered = set_state(task=task, execution_date=self.execution_dates[0],
                            upstream=False, downstream=True, future=False,
                            past=False, state=State.SUCCESS, commit=True)
        self.assertEqual(len(altered), 3)
        self.verify_state(self.dag1, task_ids, [self.execution_dates[0]],
                          State.SUCCESS, snapshot)

    def test_mark_upstream(self):
        # test upstream
        snapshot = self.snapshot_state(self.dag1, self.execution_dates)
        task = self.dag1.get_task("run_after_loop")
        relatives = task.get_flat_relatives(upstream=True)
        task_ids = [t.task_id for t in relatives]
        task_ids.append(task.task_id)

        altered = set_state(task=task, execution_date=self.execution_dates[0],
                            upstream=True, downstream=False, future=False,
                            past=False, state=State.SUCCESS, commit=True)
        self.assertEqual(len(altered), 4)
        self.verify_state(self.dag1, task_ids, [self.execution_dates[0]],
                          State.SUCCESS, snapshot)

    def test_mark_tasks_future(self):
        # set one task to success towards end of scheduled dag runs
        snapshot = self.snapshot_state(self.dag1, self.execution_dates)
        task = self.dag1.get_task("runme_1")
        altered = set_state(task=task, execution_date=self.execution_dates[0],
                            upstream=False, downstream=False, future=True,
                            past=False, state=State.SUCCESS, commit=True)
        self.assertEqual(len(altered), 2)
        self.verify_state(self.dag1, [task.task_id], self.execution_dates,
                          State.SUCCESS, snapshot)

    def test_mark_tasks_past(self):
        # set one task to success towards end of scheduled dag runs
        snapshot = self.snapshot_state(self.dag1, self.execution_dates)
        task = self.dag1.get_task("runme_1")
        altered = set_state(task=task, execution_date=self.execution_dates[1],
                            upstream=False, downstream=False, future=False,
                            past=True, state=State.SUCCESS, commit=True)
        self.assertEqual(len(altered), 2)
        self.verify_state(self.dag1, [task.task_id], self.execution_dates,
                          State.SUCCESS, snapshot)

    def test_mark_tasks_subdag(self):
        # set one task to success towards end of scheduled dag runs
        task = self.dag2.get_task("section-1")
        relatives = task.get_flat_relatives(upstream=False)
        task_ids = [t.task_id for t in relatives]
        task_ids.append(task.task_id)

        altered = set_state(task=task, execution_date=self.execution_dates[0],
                            upstream=False, downstream=True, future=False,
                            past=False, state=State.SUCCESS, commit=True)
        self.assertEqual(len(altered), 14)

        # cannot use snapshot here as that would require drilling down the
        # sub dag tree, essentially recreating the same code as in the
        # tested logic.
        self.verify_state(self.dag2, task_ids, [self.execution_dates[0]],
                          State.SUCCESS, [])

    def tearDown(self):
        self.dag1.clear()
        self.dag2.clear()

        # just to make sure we are fully cleaned up
        self.session.query(models.DagRun).delete()
        self.session.query(models.TaskInstance).delete()
        self.session.commit()

        self.session.close()
Example #42
File: test_views.py Project: thbeh/airflow
 def setUpClass(cls):
     super(TestPoolModelView, cls).setUpClass()
     session = Session()
     session.query(models.Pool).delete()
     session.commit()
     session.close()
 def tearDown(self):
     session = Session()
     session.query(models.User).delete()
     session.commit()
     session.close()
Example #44
File: test_views.py Project: thbeh/airflow
class TestLogView(unittest.TestCase):
    DAG_ID = 'dag_for_testing_log_view'
    TASK_ID = 'task_for_testing_log_view'
    DEFAULT_DATE = datetime(2017, 9, 1)
    ENDPOINT = '/admin/airflow/log?dag_id={dag_id}&task_id={task_id}&execution_date={execution_date}'.format(
        dag_id=DAG_ID,
        task_id=TASK_ID,
        execution_date=DEFAULT_DATE,
    )

    @classmethod
    def setUpClass(cls):
        super(TestLogView, cls).setUpClass()
        session = Session()
        session.query(TaskInstance).filter(
            TaskInstance.dag_id == cls.DAG_ID,
            TaskInstance.task_id == cls.TASK_ID,
            TaskInstance.execution_date == cls.DEFAULT_DATE).delete()
        session.commit()
        session.close()

    def setUp(self):
        super(TestLogView, self).setUp()

        # Create a custom logging configuration
        configuration.load_test_config()
        logging_config = copy.deepcopy(DEFAULT_LOGGING_CONFIG)
        current_dir = os.path.dirname(os.path.abspath(__file__))
        logging_config['handlers']['task'][
            'base_log_folder'] = os.path.normpath(
                os.path.join(current_dir, 'test_logs'))
        logging_config['handlers']['task']['filename_template'] = \
            '{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts | replace(":", ".") }}/{{ try_number }}.log'

        # Write the custom logging configuration to a file
        self.settings_folder = tempfile.mkdtemp()
        settings_file = os.path.join(self.settings_folder,
                                     "airflow_local_settings.py")
        new_logging_file = "LOGGING_CONFIG = {}".format(logging_config)
        with open(settings_file, 'w') as handle:
            handle.writelines(new_logging_file)
        sys.path.append(self.settings_folder)
        conf.set('core', 'logging_config_class',
                 'airflow_local_settings.LOGGING_CONFIG')

        app = application.create_app(testing=True)
        self.app = app.test_client()
        self.session = Session()
        from airflow.www.views import dagbag
        dag = DAG(self.DAG_ID, start_date=self.DEFAULT_DATE)
        task = DummyOperator(task_id=self.TASK_ID, dag=dag)
        dagbag.bag_dag(dag, parent_dag=dag, root_dag=dag)
        ti = TaskInstance(task=task, execution_date=self.DEFAULT_DATE)
        ti.try_number = 1
        self.session.merge(ti)
        self.session.commit()

    def tearDown(self):
        logging.config.dictConfig(DEFAULT_LOGGING_CONFIG)
        self.session.query(TaskInstance).filter(
            TaskInstance.dag_id == self.DAG_ID,
            TaskInstance.task_id == self.TASK_ID,
            TaskInstance.execution_date == self.DEFAULT_DATE).delete()
        self.session.commit()
        self.session.close()

        sys.path.remove(self.settings_folder)
        shutil.rmtree(self.settings_folder)
        conf.set('core', 'logging_config_class', '')

        super(TestLogView, self).tearDown()

    def test_get_file_task_log(self):
        response = self.app.get(
            TestLogView.ENDPOINT,
            follow_redirects=True,
        )
        self.assertEqual(response.status_code, 200)
        self.assertIn('Log by attempts', response.data.decode('utf-8'))

    def test_get_logs_with_metadata(self):
        url_template = "/admin/airflow/get_logs_with_metadata?dag_id={}&" \
                       "task_id={}&execution_date={}&" \
                       "try_number={}&metadata={}"
        response = \
            self.app.get(url_template.format(self.DAG_ID,
                                             self.TASK_ID,
                                             quote_plus(self.DEFAULT_DATE.isoformat()),
                                             1,
                                             json.dumps({})))

        self.assertIn('"message":', response.data.decode('utf-8'))
        self.assertIn('"metadata":', response.data.decode('utf-8'))
        self.assertIn('Log for testing.', response.data.decode('utf-8'))
        self.assertEqual(200, response.status_code)

    def test_get_logs_with_null_metadata(self):
        url_template = "/admin/airflow/get_logs_with_metadata?dag_id={}&" \
                       "task_id={}&execution_date={}&" \
                       "try_number={}&metadata=null"
        response = \
            self.app.get(url_template.format(self.DAG_ID,
                                             self.TASK_ID,
                                             quote_plus(self.DEFAULT_DATE.isoformat()),
                                             1))

        self.assertIn('"message":', response.data.decode('utf-8'))
        self.assertIn('"metadata":', response.data.decode('utf-8'))
        self.assertIn('Log for testing.', response.data.decode('utf-8'))
        self.assertEqual(200, response.status_code)
Example #45
class TestLogView(unittest.TestCase):

    DAG_ID = 'dag_for_testing_log_view'
    TASK_ID = 'task_for_testing_log_view'
    DEFAULT_DATE = datetime(2017, 9, 1)
    ENDPOINT = '/admin/airflow/log?dag_id={dag_id}&task_id={task_id}&execution_date={execution_date}'.format(
        dag_id=DAG_ID,
        task_id=TASK_ID,
        execution_date=DEFAULT_DATE,
    )

    @classmethod
    def setUpClass(cls):
        super(TestLogView, cls).setUpClass()
        session = Session()
        session.query(TaskInstance).filter(
            TaskInstance.dag_id == cls.DAG_ID,
            TaskInstance.task_id == cls.TASK_ID,
            TaskInstance.execution_date == cls.DEFAULT_DATE).delete()
        session.commit()
        session.close()

    def setUp(self):
        super(TestLogView, self).setUp()

        configuration.load_test_config()
        logging_config = copy.deepcopy(DEFAULT_LOGGING_CONFIG)
        current_dir = os.path.dirname(os.path.abspath(__file__))
        logging_config['handlers']['file.task'][
            'base_log_folder'] = os.path.normpath(
                os.path.join(current_dir, 'test_logs'))
        logging.config.dictConfig(logging_config)

        app = application.create_app(testing=True)
        self.app = app.test_client()
        self.session = Session()
        from airflow.www.views import dagbag
        dag = DAG(self.DAG_ID, start_date=self.DEFAULT_DATE)
        task = DummyOperator(task_id=self.TASK_ID, dag=dag)
        dagbag.bag_dag(dag, parent_dag=dag, root_dag=dag)
        ti = TaskInstance(task=task, execution_date=self.DEFAULT_DATE)
        ti.try_number = 1
        self.session.merge(ti)
        self.session.commit()

    def tearDown(self):
        logging.config.dictConfig(DEFAULT_LOGGING_CONFIG)
        dagbag = models.DagBag(settings.DAGS_FOLDER)
        self.session.query(TaskInstance).filter(
            TaskInstance.dag_id == self.DAG_ID,
            TaskInstance.task_id == self.TASK_ID,
            TaskInstance.execution_date == self.DEFAULT_DATE).delete()
        self.session.commit()
        self.session.close()
        super(TestLogView, self).tearDown()

    def test_get_file_task_log(self):
        response = self.app.get(
            TestLogView.ENDPOINT,
            follow_redirects=True,
        )
        self.assertEqual(response.status_code, 200)
        self.assertIn(
            '<pre id="attempt-1">*** Reading local log.\nLog for testing.\n</pre>',
            response.data.decode('utf-8'))
Example #46
File: test_views.py Project: thbeh/airflow
class ViewWithDateTimeAndNumRunsAndDagRunsFormTester:
    DAG_ID = 'dag_for_testing_dt_nr_dr_form'
    DEFAULT_DATE = datetime(2017, 9, 1)
    RUNS_DATA = [
        ('dag_run_for_testing_dt_nr_dr_form_4', datetime(2018, 4, 4)),
        ('dag_run_for_testing_dt_nr_dr_form_3', datetime(2018, 3, 3)),
        ('dag_run_for_testing_dt_nr_dr_form_2', datetime(2018, 2, 2)),
        ('dag_run_for_testing_dt_nr_dr_form_1', datetime(2018, 1, 1)),
    ]

    def __init__(self, test, endpoint):
        self.test = test
        self.endpoint = endpoint

    def setUp(self):
        configuration.load_test_config()
        app = application.create_app(testing=True)
        app.config['WTF_CSRF_METHODS'] = []
        self.app = app.test_client()
        self.session = Session()
        from airflow.www.views import dagbag
        from airflow.utils.state import State
        dag = DAG(self.DAG_ID, start_date=self.DEFAULT_DATE)
        dagbag.bag_dag(dag, parent_dag=dag, root_dag=dag)
        self.runs = []
        for rd in self.RUNS_DATA:
            run = dag.create_dagrun(run_id=rd[0],
                                    execution_date=rd[1],
                                    state=State.SUCCESS,
                                    external_trigger=True)
            self.runs.append(run)

    def tearDown(self):
        self.session.query(DagRun).filter(
            DagRun.dag_id == self.DAG_ID).delete()
        self.session.commit()
        self.session.close()

    def assertBaseDateAndNumRuns(self, base_date, num_runs, data):
        self.test.assertNotIn('name="base_date" value="{}"'.format(base_date),
                              data)
        self.test.assertNotIn(
            '<option selected="" value="{}">{}</option>'.format(
                num_runs, num_runs), data)

    def assertRunIsNotInDropdown(self, run, data):
        self.test.assertNotIn(run.execution_date.isoformat(), data)
        self.test.assertNotIn(run.run_id, data)

    def assertRunIsInDropdownNotSelected(self, run, data):
        self.test.assertIn(
            '<option value="{}">{}</option>'.format(
                run.execution_date.isoformat(), run.run_id), data)

    def assertRunIsSelected(self, run, data):
        self.test.assertIn(
            '<option selected value="{}">{}</option>'.format(
                run.execution_date.isoformat(), run.run_id), data)

    def test_with_default_parameters(self):
        """
        Tests graph view with no URL parameter.
        Should show all dag runs in the drop down.
        Should select the latest dag run.
        Should set base date to current date (not asserted)
        """
        response = self.app.get(self.endpoint)
        self.test.assertEqual(response.status_code, 200)
        data = response.data.decode('utf-8')
        self.test.assertIn('Base date:', data)
        self.test.assertIn('Number of runs:', data)
        self.assertRunIsSelected(self.runs[0], data)
        self.assertRunIsInDropdownNotSelected(self.runs[1], data)
        self.assertRunIsInDropdownNotSelected(self.runs[2], data)
        self.assertRunIsInDropdownNotSelected(self.runs[3], data)

    def test_with_execution_date_parameter_only(self):
        """
        Tests graph view with execution_date URL parameter.
        Scenario: click link from dag runs view.
        Should only show dag runs older than execution_date in the drop down.
        Should select the particular dag run.
        Should set base date to execution date.
        """
        response = self.app.get(self.endpoint + '&execution_date={}'.format(
            self.runs[1].execution_date.isoformat()))
        self.test.assertEqual(response.status_code, 200)
        data = response.data.decode('utf-8')
        self.assertBaseDateAndNumRuns(
            self.runs[1].execution_date,
            configuration.getint('webserver',
                                 'default_dag_run_display_number'), data)
        self.assertRunIsNotInDropdown(self.runs[0], data)
        self.assertRunIsSelected(self.runs[1], data)
        self.assertRunIsInDropdownNotSelected(self.runs[2], data)
        self.assertRunIsInDropdownNotSelected(self.runs[3], data)

    def test_with_base_date_and_num_runs_parmeters_only(self):
        """
        Tests graph view with base_date and num_runs URL parameters.
        Should only show dag runs older than base_date in the drop down,
        limited to num_runs.
        Should select the latest dag run.
        Should set base date and num runs to submitted values.
        """
        response = self.app.get(self.endpoint +
                                '&base_date={}&num_runs=2'.format(
                                    self.runs[1].execution_date.isoformat()))
        self.test.assertEqual(response.status_code, 200)
        data = response.data.decode('utf-8')
        self.assertBaseDateAndNumRuns(self.runs[1].execution_date, 2, data)
        self.assertRunIsNotInDropdown(self.runs[0], data)
        self.assertRunIsSelected(self.runs[1], data)
        self.assertRunIsInDropdownNotSelected(self.runs[2], data)
        self.assertRunIsNotInDropdown(self.runs[3], data)

    def test_with_base_date_and_num_runs_and_execution_date_outside(self):
        """
        Tests graph view with base_date and num_runs and execution-date URL parameters.
        Scenario: change the base date and num runs and press "Go",
        the selected execution date is outside the new range.
        Should only show dag runs older than base_date in the drop down.
        Should select the latest dag run within the range.
        Should set base date and num runs to submitted values.
        """
        response = self.app.get(
            self.endpoint + '&base_date={}&num_runs=42&execution_date={}'.format(
                self.runs[1].execution_date.isoformat(),
                self.runs[0].execution_date.isoformat()))
        self.test.assertEqual(response.status_code, 200)
        data = response.data.decode('utf-8')
        self.assertBaseDateAndNumRuns(self.runs[1].execution_date, 42, data)
        self.assertRunIsNotInDropdown(self.runs[0], data)
        self.assertRunIsSelected(self.runs[1], data)
        self.assertRunIsInDropdownNotSelected(self.runs[2], data)
        self.assertRunIsInDropdownNotSelected(self.runs[3], data)

    def test_with_base_date_and_num_runs_and_execution_date_within(self):
        """
        Tests graph view with base_date and num_runs and execution-date URL parameters.
        Scenario: change the base date and num runs and press "Go",
        the selected execution date is within the new range.
        Should only show dag runs older than base_date in the drop down.
        Should select the dag run with the execution date.
        Should set base date and num runs to submitted values.
        """
        response = self.app.get(
            self.endpoint + '&base_date={}&num_runs=5&execution_date={}'.format(
                self.runs[2].execution_date.isoformat(),
                self.runs[3].execution_date.isoformat()))
        self.test.assertEqual(response.status_code, 200)
        data = response.data.decode('utf-8')
        self.assertBaseDateAndNumRuns(self.runs[2].execution_date, 5, data)
        self.assertRunIsNotInDropdown(self.runs[0], data)
        self.assertRunIsNotInDropdown(self.runs[1], data)
        self.assertRunIsInDropdownNotSelected(self.runs[2], data)
        self.assertRunIsSelected(self.runs[3], data)
Example #47
def init_hive_example():
    logging.info('Creating connections, pool and sql path')

    session = Session()

    def create_new_conn(session, attributes):
        new_conn = models.Connection()
        new_conn.conn_id = attributes.get("conn_id")
        new_conn.conn_type = attributes.get('conn_type')
        new_conn.host = attributes.get('host')
        new_conn.port = attributes.get('port')
        new_conn.schema = attributes.get('schema')
        new_conn.login = attributes.get('login')
        new_conn.set_extra(attributes.get('extra'))
        new_conn.set_password(attributes.get('password'))

        session.add(new_conn)
        session.commit()

    create_new_conn(
        session, {
            "conn_id": "postgres_oltp",
            "conn_type": "postgres",
            "host": "postgres",
            "port": 5432,
            "schema": "orders",
            "login": "******",
            "password": "******"
        })

    create_new_conn(
        session, {
            "conn_id": "hive_staging",
            "conn_type": "hive_cli",
            "host": "hive",
            "schema": "default",
            "port": 10000,
            "login": "******",
            "password": "******",
            "extra": json.dumps({
                "hive_cli_params": "",
                "auth": "none",
                "use_beeline": "true"
            })
        })

    new_var = models.Variable()
    new_var.key = "sql_path"
    new_var.set_val("/usr/local/airflow/sql")
    session.add(new_var)
    new_var = models.Variable()
    new_var.key = "hive_sql_path"
    new_var.set_val("/usr/local/airflow/hql")
    session.add(new_var)
    session.commit()

    session.close()
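
init_hive_example only writes connections and variables, so it just needs to run once before the example DAGs do. A minimal sketch of one way to wire that up, assuming the function is importable from the DAGs folder, is a one-off setup DAG with a PythonOperator; the dag_id and schedule below are illustrative placeholders.

from datetime import datetime

from airflow import DAG
from airflow.operators.python_operator import PythonOperator

# Hypothetical one-shot setup DAG that seeds the metadata database.
with DAG(dag_id='init_hive_example_setup',
         start_date=datetime(2018, 1, 1),
         schedule_interval='@once') as dag:
    PythonOperator(
        task_id='create_connections_and_variables',
        python_callable=init_hive_example,
    )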
Example #48
File: test_views.py Project: thbeh/airflow
 def tearDownClass(cls):
     session = Session()
     session.query(models.User).delete()
     session.commit()
     session.close()
     super(TestChartModelView, cls).tearDownClass()
Example #49
def set_state(task, execution_date, upstream=False, downstream=False,
              future=False, past=False, state=State.SUCCESS, commit=False):
    """
    Set the state of a task instance and if needed its relatives. Can set state
    for future tasks (calculated from execution_date) and retroactively
    for past tasks. Will verify integrity of past dag runs in order to create
    tasks that did not exist. It will not create dag runs that are missing
    on the schedule (but it will for subdag dag runs if needed).
    :param task: the task from which to work. task.task.dag needs to be set
    :param execution_date: the execution date from which to start looking
    :param upstream: Mark all parents (upstream tasks)
    :param downstream: Mark all descendants (downstream tasks) of task_id, including SubDags
    :param future: Mark all future tasks on the interval of the dag up until
        last execution date.
    :param past: Retroactively mark all tasks starting from start_date of the DAG
    :param state: State to which the tasks need to be set
    :param commit: Commit tasks to be altered to the database
    :return: list of tasks that have been created and updated
    """
    assert timezone.is_localized(execution_date)

    # microseconds are supported by the database, but are not handled
    # correctly by airflow on e.g. the filesystem and in other places
    execution_date = execution_date.replace(microsecond=0)

    assert task.dag is not None
    dag = task.dag

    latest_execution_date = dag.latest_execution_date
    assert latest_execution_date is not None

    # determine date range of dag runs and tasks to consider
    end_date = latest_execution_date if future else execution_date

    if 'start_date' in dag.default_args:
        start_date = dag.default_args['start_date']
    elif dag.start_date:
        start_date = dag.start_date
    else:
        start_date = execution_date

    start_date = execution_date if not past else start_date

    if dag.schedule_interval == '@once':
        dates = [start_date]
    else:
        dates = dag.date_range(start_date=start_date, end_date=end_date)

    # find relatives (descendants = downstream, ancestors = upstream) if needed
    task_ids = [task.task_id]
    if downstream:
        relatives = task.get_flat_relatives(upstream=False)
        task_ids += [t.task_id for t in relatives]
    if upstream:
        relatives = task.get_flat_relatives(upstream=True)
        task_ids += [t.task_id for t in relatives]

    # verify the integrity of the dag runs in case a task was added or removed
    # set the confirmed execution dates as they might be different
    # from what was provided
    confirmed_dates = []
    drs = DagRun.find(dag_id=dag.dag_id, execution_date=dates)
    for dr in drs:
        dr.dag = dag
        dr.verify_integrity()
        confirmed_dates.append(dr.execution_date)

    # go through subdagoperators and create dag runs. We will only work
    # within the scope of the subdag. We won't propagate to the parent dag,
    # but we will propagate from parent to subdag.
    session = Session()
    dags = [dag]
    sub_dag_ids = []
    while len(dags) > 0:
        current_dag = dags.pop()
        for task_id in task_ids:
            if not current_dag.has_task(task_id):
                continue

            current_task = current_dag.get_task(task_id)
            if isinstance(current_task, SubDagOperator):
                # this works as a kind of integrity check
                # it creates missing dag runs for subdagoperators,
                # maybe this should be moved to dagrun.verify_integrity
                drs = _create_dagruns(current_task.subdag,
                                      execution_dates=confirmed_dates,
                                      state=State.RUNNING,
                                      run_id_template=BackfillJob.ID_FORMAT_PREFIX)

                for dr in drs:
                    dr.dag = current_task.subdag
                    dr.verify_integrity()
                    if commit:
                        dr.state = state
                        session.merge(dr)

                dags.append(current_task.subdag)
                sub_dag_ids.append(current_task.subdag.dag_id)

    # now look for the task instances that are affected
    TI = TaskInstance

    # get all tasks of the main dag that will be affected by a state change
    qry_dag = session.query(TI).filter(
        TI.dag_id == dag.dag_id,
        TI.execution_date.in_(confirmed_dates),
        TI.task_id.in_(task_ids)).filter(
        or_(TI.state.is_(None),
            TI.state != state)
    )

    # get *all* tasks of the sub dags
    if len(sub_dag_ids) > 0:
        qry_sub_dag = session.query(TI).filter(
            TI.dag_id.in_(sub_dag_ids),
            TI.execution_date.in_(confirmed_dates)).filter(
            or_(TI.state.is_(None),
                TI.state != state)
        )

    if commit:
        tis_altered = qry_dag.with_for_update().all()
        if len(sub_dag_ids) > 0:
            tis_altered += qry_sub_dag.with_for_update().all()
        for ti in tis_altered:
            ti.state = state
        session.commit()
    else:
        tis_altered = qry_dag.all()
        if len(sub_dag_ids) > 0:
            tis_altered += qry_sub_dag.all()

    session.expunge_all()
    session.close()

    return tis_altered
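
As the TestMarkTasks example above suggests, set_state is called with a concrete task, a timezone-aware execution date, and flags that widen the selection; it assumes a matching DagRun already exists for that date. A minimal usage sketch follows, where the dag_id, task_id, and date are placeholders taken from the examples in this listing rather than anything prescribed by the function itself.

from airflow import models
from airflow.utils import timezone
from airflow.utils.state import State

dagbag = models.DagBag(include_examples=True)
dag = dagbag.dags['example_bash_operator']
task = dag.get_task('runme_0')

# Mark the task and everything downstream of it as successful for one
# already-created dag run; commit=False would only report which task
# instances would change without writing anything.
altered = set_state(task=task,
                    execution_date=timezone.datetime(2018, 1, 1),
                    downstream=True,
                    state=State.SUCCESS,
                    commit=True)
print('{} task instances updated'.format(len(altered)))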
Example #50
File: test_cli.py Project: zzmg/airflow
 def reset_dr_db(dag_id):
     session = Session()
     dr = session.query(models.DagRun).filter_by(dag_id=dag_id)
     dr.delete()
     session.commit()
     session.close()
Example #51
class TestKnownEventView(unittest.TestCase):

    CREATE_ENDPOINT = '/admin/knownevent/new/?url=/admin/knownevent/'

    @classmethod
    def setUpClass(cls):
        super(TestKnownEventView, cls).setUpClass()
        session = Session()
        session.query(models.KnownEvent).delete()
        session.query(models.User).delete()
        session.commit()
        user = models.User(username='******')
        session.add(user)
        session.commit()
        cls.user_id = user.id
        session.close()

    def setUp(self):
        super(TestKnownEventView, self).setUp()
        configuration.load_test_config()
        app = application.create_app(testing=True)
        app.config['WTF_CSRF_METHODS'] = []
        self.app = app.test_client()
        self.session = Session()
        self.known_event = {
            'label': 'event-label',
            'event_type': '1',
            'start_date': '2017-06-05 12:00:00',
            'end_date': '2017-06-05 13:00:00',
            'reported_by': self.user_id,
            'description': '',
        }

    def tearDown(self):
        self.session.query(models.KnownEvent).delete()
        self.session.commit()
        self.session.close()
        super(TestKnownEventView, self).tearDown()

    @classmethod
    def tearDownClass(cls):
        session = Session()
        session.query(models.User).delete()
        session.commit()
        session.close()
        super(TestKnownEventView, cls).tearDownClass()

    def test_create_known_event(self):
        response = self.app.post(
            self.CREATE_ENDPOINT,
            data=self.known_event,
            follow_redirects=True,
        )
        self.assertEqual(response.status_code, 200)
        self.assertEqual(self.session.query(models.KnownEvent).count(), 1)

    def test_create_known_event_with_end_data_earlier_than_start_date(self):
        self.known_event['end_date'] = '2017-06-05 11:00:00'
        response = self.app.post(
            self.CREATE_ENDPOINT,
            data=self.known_event,
            follow_redirects=True,
        )
        self.assertIn(
            'Field must be greater than or equal to Start Date.',
            response.data.decode('utf-8'),
        )
        self.assertEqual(self.session.query(models.KnownEvent).count(), 0)
Example #52
File: test_cli.py Project: zzmg/airflow
def reset(dag_id):
    session = Session()
    tis = session.query(models.TaskInstance).filter_by(dag_id=dag_id)
    tis.delete()
    session.commit()
    session.close()
Example #53
class TestPoolModelView(unittest.TestCase):

    CREATE_ENDPOINT = '/admin/pool/new/?url=/admin/pool/'

    @classmethod
    def setUpClass(cls):
        super(TestPoolModelView, cls).setUpClass()
        session = Session()
        session.query(models.Pool).delete()
        session.commit()
        session.close()

    def setUp(self):
        super(TestPoolModelView, self).setUp()
        configuration.load_test_config()
        app = application.create_app(testing=True)
        app.config['WTF_CSRF_METHODS'] = []
        self.app = app.test_client()
        self.session = Session()
        self.pool = {
            'pool': 'test-pool',
            'slots': 777,
            'description': 'test-pool-description',
        }

    def tearDown(self):
        self.session.query(models.Pool).delete()
        self.session.commit()
        self.session.close()
        super(TestPoolModelView, self).tearDown()

    def test_create_pool(self):
        response = self.app.post(
            self.CREATE_ENDPOINT,
            data=self.pool,
            follow_redirects=True,
        )
        self.assertEqual(response.status_code, 200)
        self.assertEqual(self.session.query(models.Pool).count(), 1)

    def test_create_pool_with_same_name(self):
        # create test pool
        self.app.post(
            self.CREATE_ENDPOINT,
            data=self.pool,
            follow_redirects=True,
        )
        # create pool with the same name
        response = self.app.post(
            self.CREATE_ENDPOINT,
            data=self.pool,
            follow_redirects=True,
        )
        self.assertIn('Already exists.', response.data.decode('utf-8'))
        self.assertEqual(self.session.query(models.Pool).count(), 1)

    def test_create_pool_with_empty_name(self):
        self.pool['pool'] = ''
        response = self.app.post(
            self.CREATE_ENDPOINT,
            data=self.pool,
            follow_redirects=True,
        )
        self.assertIn('This field is required.', response.data.decode('utf-8'))
        self.assertEqual(self.session.query(models.Pool).count(), 0)
Example #54
 def setUpClass(cls):
     super(TestDagRunsEndpoint, cls).setUpClass()
     session = Session()
     session.query(DagRun).delete()
     session.commit()
     session.close()
Example #55
class TestLogView(unittest.TestCase):
    DAG_ID = 'dag_for_testing_log_view'
    TASK_ID = 'task_for_testing_log_view'
    DEFAULT_DATE = datetime(2017, 9, 1)
    ENDPOINT = '/admin/airflow/log?dag_id={dag_id}&task_id={task_id}&execution_date={execution_date}'.format(
        dag_id=DAG_ID,
        task_id=TASK_ID,
        execution_date=DEFAULT_DATE,
    )

    @classmethod
    def setUpClass(cls):
        super(TestLogView, cls).setUpClass()
        session = Session()
        session.query(TaskInstance).filter(
            TaskInstance.dag_id == cls.DAG_ID,
            TaskInstance.task_id == cls.TASK_ID,
            TaskInstance.execution_date == cls.DEFAULT_DATE).delete()
        session.commit()
        session.close()

    def setUp(self):
        super(TestLogView, self).setUp()

        # Create a custom logging configuration
        configuration.load_test_config()
        logging_config = copy.deepcopy(DEFAULT_LOGGING_CONFIG)
        current_dir = os.path.dirname(os.path.abspath(__file__))
        logging_config['handlers']['task']['base_log_folder'] = os.path.normpath(
            os.path.join(current_dir, 'test_logs'))
        logging_config['handlers']['task']['filename_template'] = \
            '{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts | replace(":", ".") }}/{{ try_number }}.log'

        # Write the custom logging configuration to a file
        self.settings_folder = tempfile.mkdtemp()
        settings_file = os.path.join(self.settings_folder, "airflow_local_settings.py")
        new_logging_file = "LOGGING_CONFIG = {}".format(logging_config)
        with open(settings_file, 'w') as handle:
            handle.writelines(new_logging_file)
        sys.path.append(self.settings_folder)
        conf.set('core', 'logging_config_class', 'airflow_local_settings.LOGGING_CONFIG')

        app = application.create_app(testing=True)
        self.app = app.test_client()
        self.session = Session()
        from airflow.www.views import dagbag
        dag = DAG(self.DAG_ID, start_date=self.DEFAULT_DATE)
        task = DummyOperator(task_id=self.TASK_ID, dag=dag)
        dagbag.bag_dag(dag, parent_dag=dag, root_dag=dag)
        ti = TaskInstance(task=task, execution_date=self.DEFAULT_DATE)
        ti.try_number = 1
        self.session.merge(ti)
        self.session.commit()

    def tearDown(self):
        logging.config.dictConfig(DEFAULT_LOGGING_CONFIG)
        self.session.query(TaskInstance).filter(
            TaskInstance.dag_id == self.DAG_ID,
            TaskInstance.task_id == self.TASK_ID,
            TaskInstance.execution_date == self.DEFAULT_DATE).delete()
        self.session.commit()
        self.session.close()

        sys.path.remove(self.settings_folder)
        shutil.rmtree(self.settings_folder)
        conf.set('core', 'logging_config_class', '')

        super(TestLogView, self).tearDown()

    def test_get_file_task_log(self):
        response = self.app.get(
            TestLogView.ENDPOINT,
            follow_redirects=True,
        )
        self.assertEqual(response.status_code, 200)
        self.assertIn('Log by attempts',
                      response.data.decode('utf-8'))

    def test_get_logs_with_metadata(self):
        url_template = "/admin/airflow/get_logs_with_metadata?dag_id={}&" \
                       "task_id={}&execution_date={}&" \
                       "try_number={}&metadata={}"
        response = \
            self.app.get(url_template.format(self.DAG_ID,
                                             self.TASK_ID,
                                             quote_plus(self.DEFAULT_DATE.isoformat()),
                                             1,
                                             json.dumps({})))

        self.assertIn('"message":', response.data.decode('utf-8'))
        self.assertIn('"metadata":', response.data.decode('utf-8'))
        self.assertIn('Log for testing.', response.data.decode('utf-8'))
        self.assertEqual(200, response.status_code)

    def test_get_logs_with_null_metadata(self):
        url_template = "/admin/airflow/get_logs_with_metadata?dag_id={}&" \
                       "task_id={}&execution_date={}&" \
                       "try_number={}&metadata=null"
        response = \
            self.app.get(url_template.format(self.DAG_ID,
                                             self.TASK_ID,
                                             quote_plus(self.DEFAULT_DATE.isoformat()),
                                             1))

        self.assertIn('"message":', response.data.decode('utf-8'))
        self.assertIn('"metadata":', response.data.decode('utf-8'))
        self.assertIn('Log for testing.', response.data.decode('utf-8'))
        self.assertEqual(200, response.status_code)
Example #56
File: test_views.py Project: thbeh/airflow
class TestVariableView(unittest.TestCase):

    CREATE_ENDPOINT = '/admin/variable/new/?url=/admin/variable/'

    @classmethod
    def setUpClass(cls):
        super(TestVariableView, cls).setUpClass()
        session = Session()
        session.query(models.Variable).delete()
        session.commit()
        session.close()

    def setUp(self):
        super(TestVariableView, self).setUp()
        configuration.load_test_config()
        app = application.create_app(testing=True)
        app.config['WTF_CSRF_METHODS'] = []
        self.app = app.test_client()
        self.session = Session()
        self.variable = {
            'key': 'test_key',
            'val': 'text_val',
            'is_encrypted': True
        }

    def tearDown(self):
        self.session.query(models.Variable).delete()
        self.session.commit()
        self.session.close()
        super(TestVariableView, self).tearDown()

    def test_can_handle_error_on_decrypt(self):
        # create valid variable
        response = self.app.post(
            self.CREATE_ENDPOINT,
            data=self.variable,
            follow_redirects=True,
        )
        self.assertEqual(response.status_code, 200)

        # update the variable with a wrong value, given that is encrypted
        Var = models.Variable
        (self.session.query(Var).filter(
            Var.key == self.variable['key']).update(
                {'val': 'failed_value_not_encrypted'},
                synchronize_session=False))
        self.session.commit()

        # retrieve Variables page, should not fail and contain the Invalid
        # label for the variable
        response = self.app.get('/admin/variable', follow_redirects=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(self.session.query(models.Variable).count(), 1)

    def test_xss_prevention(self):
        xss = "/admin/airflow/variables/asdf<img%20src=''%20onerror='alert(1);'>"

        response = self.app.get(
            xss,
            follow_redirects=True,
        )
        self.assertEqual(response.status_code, 404)
        self.assertNotIn("<img src='' onerror='alert(1);'>",
                         response.data.decode("utf-8"))
Example #57
0
File: test_views.py Project: thbeh/airflow
    @classmethod
    def setUpClass(cls):
        super(TestVariableView, cls).setUpClass()
        session = Session()
        session.query(models.Variable).delete()
        session.commit()
        session.close()
Example #58
0
File: test_views.py Project: thbeh/airflow
class TestKnownEventView(unittest.TestCase):

    CREATE_ENDPOINT = '/admin/knownevent/new/?url=/admin/knownevent/'

    @classmethod
    def setUpClass(cls):
        super(TestKnownEventView, cls).setUpClass()
        session = Session()
        session.query(models.KnownEvent).delete()
        session.query(models.User).delete()
        session.commit()
        user = models.User(username='******')
        session.add(user)
        session.commit()
        cls.user_id = user.id
        session.close()

    def setUp(self):
        super(TestKnownEventView, self).setUp()
        configuration.load_test_config()
        app = application.create_app(testing=True)
        app.config['WTF_CSRF_METHODS'] = []
        self.app = app.test_client()
        self.session = Session()
        self.known_event = {
            'label': 'event-label',
            'event_type': '1',
            'start_date': '2017-06-05 12:00:00',
            'end_date': '2017-06-05 13:00:00',
            'reported_by': self.user_id,
            'description': '',
        }

    def tearDown(self):
        self.session.query(models.KnownEvent).delete()
        self.session.commit()
        self.session.close()
        super(TestKnownEventView, self).tearDown()

    @classmethod
    def tearDownClass(cls):
        session = Session()
        session.query(models.User).delete()
        session.commit()
        session.close()
        super(TestKnownEventView, cls).tearDownClass()

    def test_create_known_event(self):
        response = self.app.post(
            self.CREATE_ENDPOINT,
            data=self.known_event,
            follow_redirects=True,
        )
        self.assertEqual(response.status_code, 200)
        self.assertEqual(self.session.query(models.KnownEvent).count(), 1)

    def test_create_known_event_with_end_data_earlier_than_start_date(self):
        self.known_event['end_date'] = '2017-06-05 11:00:00'
        response = self.app.post(
            self.CREATE_ENDPOINT,
            data=self.known_event,
            follow_redirects=True,
        )
        self.assertIn(
            'Field must be greater than or equal to Start Date.',
            response.data.decode('utf-8'),
        )
        self.assertEqual(self.session.query(models.KnownEvent).count(), 0)
Example #59
0
    @classmethod
    def tearDownClass(cls):
        session = Session()
        session.query(models.User).delete()
        session.commit()
        session.close()
        super(TestChartModelView, cls).tearDownClass()
Example #60
0
def generate_config():
    """Seed the Airflow metadata DB with default connections, a pool and
    an SQL template path variable."""
    logging.info('Creating connections, pool and sql path')

    session = Session()

    def create_new_conn(session, attributes):
        # Check via the caller-provided session rather than the Session class
        if session.query(models.Connection).filter(
                models.Connection.conn_id == attributes.get(
                    "conn_id")).count() == 0:
            new_conn = models.Connection()
            new_conn.conn_id = attributes.get("conn_id")
            new_conn.conn_type = attributes.get('conn_type')
            new_conn.host = attributes.get('host')
            new_conn.port = attributes.get('port')
            new_conn.schema = attributes.get('schema')
            new_conn.login = attributes.get('login')
            new_conn.set_password(attributes.get('password'))
            session.add(new_conn)
            session.commit()
        else:
            logging.info('Connection {} already exists'.format(
                attributes.get("conn_id")))

    create_new_conn(
        session, {
            "conn_id": "mysql_oltp",
            "conn_type": "mysql",
            "host": "host.docker.internal",
            "port": 3306,
            "schema": "employees",
            "login": "******",
            "password": "******"
        })

    create_new_conn(
        session, {
            "conn_id": "mysql_dwh",
            "conn_type": "mysql",
            "host": "host.docker.internal",
            "port": 3306,
            "schema": "dwh",
            "login": "******",
            "password": "******"
        })

    create_new_conn(
        session, {
            "conn_id": "postgres_oltp",
            "conn_type": "postgres",
            "host": "host.docker.internal",
            "port": 5432,
            "schema": "dwh",
            "login": "******",
            "password": "******"
        })

    create_new_conn(
        session, {
            "conn_id": "postgres_dwh",
            "conn_type": "postgres",
            "host": "host.docker.internal",
            "port": 5432,
            "schema": "dwh",
            "login": "******",
            "password": "******"
        })

    if session.query(models.Variable).filter(
            models.Variable.key == "sql_template_paths").count() == 0:
        new_var = models.Variable()
        new_var.key = "sql_template_paths"
        new_var.set_val("./sql_templates")
        session.add(new_var)
        session.commit()
    else:
        logging.info('Variable sql_template_paths already exists')

    if session.query(
            models.Pool).filter(models.Pool.pool == "mysql_dwh").count() == 0:
        new_pool = models.Pool()
        new_pool.pool = "mysql_dwh"
        new_pool.slots = 10
        new_pool.description = "Allows max. 10 connections to the DWH"
        session.add(new_pool)
        session.commit()
    else:
        logging.info('Pool mysql_dwh already exists')

    session.close()
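
generate_config only defines the seeding logic; something still has to call it. Below is a minimal, hypothetical way to run it once from Airflow itself through a PythonOperator in a one-off init DAG — the dag_id, schedule_interval and start_date are assumptions, and only generate_config comes from the example above.

# Hypothetical wiring for the seeding helper above: run generate_config once
# through a PythonOperator. dag_id, schedule and start_date are assumptions.
from datetime import datetime

from airflow import DAG
from airflow.operators.python_operator import PythonOperator

init_dag = DAG(
    dag_id="init_config",
    start_date=datetime(2017, 1, 1),
    schedule_interval="@once",
)

seed_metadata = PythonOperator(
    task_id="generate_config",
    python_callable=generate_config,
    dag=init_dag,
)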