Example #1
 def tearDown(self):
     configuration.test_mode()
     session = Session()
     session.query(models.User).delete()
     session.commit()
     session.close()
     configuration.conf.set("webserver", "authenticate", "False")
Example #2
 def tearDown(self):
     configuration.test_mode()
     session = Session()
     session.query(models.User).delete()
     session.commit()
     session.close()
     configuration.conf.set("webserver", "authenticate", "False")
Example #3
File: core.py  Project: 0x68/airflow
 def setUp(self):
     configuration.test_mode()
     utils.initdb()
     args = {"owner": "airflow", "start_date": datetime(2015, 1, 1)}
     dag = DAG("hive_test", default_args=args)
     self.dag = dag
     self.hql = """
Example #4
 def setUp(self):
     configuration.test_mode()
     app = create_app()
     app.config['TESTING'] = True
     self.parser = cli.get_parser()
     self.dagbag = models.DagBag(
         dag_folder=DEV_NULL, include_examples=True)
Example #5
 def setUp(self):
     configuration.test_mode()
     utils.initdb()
     args = {'owner': 'airflow', 'start_date': datetime(2015, 1, 1)}
     dag = DAG(TEST_DAG_ID, default_args=args)
     dag.clear(start_date=DEFAULT_DATE, end_date=datetime.now())
     self.dag = dag
Example #6
File: core.py  Project: DingaGa/airflow
 def setUp(self):
     configuration.test_mode()
     app = application.create_app()
     app.config['TESTING'] = True
     self.parser = cli.get_parser()
     self.dagbag = models.DagBag(
         dag_folder=DEV_NULL, include_examples=True)
Example #7
 def setUp(self):
     configuration.test_mode()
     utils.initdb()
     args = {'owner': 'airflow', 'start_date': datetime(2015, 1, 1)}
     dag = DAG(TEST_DAG_ID, default_args=args)
     self.dag = dag
     self.hql = """
Example #8
File: core.py  Project: kundeng/airflow
 def setUp(self):
     configuration.test_mode()
     utils.initdb()
     args = {'owner': 'airflow', 'start_date': datetime(2015, 1, 1)}
     dag = DAG(TEST_DAG_ID, default_args=args)
     dag.clear(start_date=DEFAULT_DATE, end_date=datetime.now())
     self.dag = dag
Example #9
 def setUp(self):
     configuration.test_mode()
     utils.initdb()
     os.environ['AIRFLOW_CONN_TEST_URI'] = (
         'postgres://*****:*****@ec2.compute.com:5432/the_database')
     os.environ['AIRFLOW_CONN_TEST_URI_NO_CREDS'] = (
         'postgres://ec2.compute.com/the_database')
Example #10
 def setUp(self):
     configuration.test_mode()
     utils.initdb()
     os.environ['AIRFLOW_CONN_TEST_URI'] = (
         'postgres://*****:*****@ec2.compute.com:5432/the_database')
     os.environ['AIRFLOW_CONN_TEST_URI_NO_CREDS'] = (
         'postgres://ec2.compute.com/the_database')
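Examples #9 and #10 register throwaway connections purely through environment variables. For orientation, here is a minimal sketch of the same AIRFLOW_CONN_<CONN_ID> convention outside a test class; the connection ID and URI below are placeholders invented for illustration, not values from these projects.

    import os

    # Placeholder connection: Airflow resolves a connection ID by looking for an
    # environment variable named AIRFLOW_CONN_ plus the upper-cased conn_id,
    # whose value is a connection URI.
    os.environ['AIRFLOW_CONN_MY_TEST_DB'] = (
        'postgres://user:password@localhost:5432/mydb')

    # A hook or operator created with conn_id='my_test_db' would then pick up
    # this URI instead of a row in the metadata database.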
Example #11
File: core.py  Project: jbalogh/airflow
 def setUp(self):
     configuration.test_mode()
     utils.initdb()
     args = {'owner': 'airflow', 'start_date': datetime(2015, 1, 1)}
     dag = DAG('hive_test', default_args=args)
     self.dag = dag
     self.hql = """
Example #12
 def setUp(self):
     configuration.test_mode()
     args = {
         'owner': 'airflow',
         'start_date': DEFAULT_DATE
     }
     dag = DAG(TEST_DAG_ID, default_args=args)
     self.dag = dag
Example #13
File: core.py  Project: johnw424/airflow
 def setUp(self):
     configuration.test_mode()
     self.dagbag = models.DagBag(dag_folder=DEV_NULL, include_examples=True)
     self.args = {"owner": "airflow", "start_date": datetime(2015, 1, 1)}
     dag = DAG(TEST_DAG_ID, default_args=self.args)
     self.dag = dag
     self.dag_bash = self.dagbag.dags["example_bash_operator"]
     self.runme_0 = self.dag_bash.get_task("runme_0")
Example #14
 def setUp(self):
     configuration.test_mode()
     self.dagbag = models.DagBag(dag_folder=DEV_NULL, include_examples=True)
     args = {'owner': 'airflow', 'start_date': datetime(2015, 1, 1)}
     dag = DAG(TEST_DAG_ID, default_args=args)
     self.dag = dag
     self.dag_bash = self.dagbag.dags['example_bash_operator']
     self.runme_0 = self.dag_bash.get_task('runme_0')
Example #15
    def setUp(self):
        configuration.test_mode()

        # Mock out the emr_client (moto has incorrect response)
        mock_emr_client = MagicMock()
        mock_emr_client.add_job_flow_steps.return_value = ADD_STEPS_SUCCESS_RETURN

        # Mock out the emr_client creator
        self.boto3_client_mock = MagicMock(return_value=mock_emr_client)
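Example #15 only builds the fake EMR client; the test body (not shown in this listing) still has to hand it to the code under test in place of boto3.client. Below is a self-contained sketch of the mocking pattern itself using only the standard library; the response payload and identifiers are assumptions for illustration, not taken from the Airflow test suite.

    from unittest.mock import MagicMock

    # Assumed shape of a successful add_job_flow_steps response; the real tests
    # keep this in a module-level constant named ADD_STEPS_SUCCESS_RETURN.
    ADD_STEPS_SUCCESS_RETURN = {
        'ResponseMetadata': {'HTTPStatusCode': 200},
        'StepIds': ['s-PLACEHOLDER'],
    }

    # Fake EMR client whose add_job_flow_steps() always "succeeds".
    mock_emr_client = MagicMock()
    mock_emr_client.add_job_flow_steps.return_value = ADD_STEPS_SUCCESS_RETURN

    # Stand-in for boto3.client: calling it with any arguments returns the fake client.
    boto3_client_mock = MagicMock(return_value=mock_emr_client)

    client = boto3_client_mock('emr', region_name='us-east-1')
    assert client.add_job_flow_steps(JobFlowId='j-PLACEHOLDER', Steps=[]) is ADD_STEPS_SUCCESS_RETURN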
Example #16
 def setUp(self):
     configuration.test_mode()
     args = {
         'owner': 'airflow',
         'mysql_conn_id': 'airflow_db',
         'start_date': datetime(2015, 1, 1)
     }
     dag = DAG(TEST_DAG_ID, default_args=args)
     self.dag = dag
Example #17
    def setUp(self):
        configuration.test_mode()

        # Mock out the emr_client (moto has incorrect response)
        mock_emr_client = MagicMock()
        mock_emr_client.add_job_flow_steps.return_value = ADD_STEPS_SUCCESS_RETURN

        # Mock out the emr_client creator
        self.boto3_client_mock = MagicMock(return_value=mock_emr_client)
Example #18
 def setUp(self):
     configuration.test_mode()
     args = {
         'owner': 'airflow',
         'mysql_conn_id': 'airflow_db',
         'start_date': DEFAULT_DATE
     }
     dag = DAG(TEST_DAG_ID, default_args=args)
     self.dag = dag
Example #19
 def setUp(self):
     configuration.test_mode()
     self.dagbag = models.DagBag(
         dag_folder=DEV_NULL, include_examples=True)
     self.args = {'owner': 'airflow', 'start_date': datetime(2015, 1, 1)}
     dag = DAG(TEST_DAG_ID, default_args=self.args)
     self.dag = dag
     self.dag_bash = self.dagbag.dags['example_bash_operator']
     self.runme_0 = self.dag_bash.get_task('runme_0')
Example #20
    def setUp(self):
        configuration.test_mode()

        # Mock out the emr_client (moto has incorrect response)
        mock_emr_client = MagicMock()
        mock_emr_client.run_job_flow.return_value = RUN_JOB_FLOW_SUCCESS_RETURN

        # Mock out the emr_client creator
        self.boto3_client_mock = MagicMock(return_value=mock_emr_client)
Example #21
    def setUp(self):
        configuration.test_mode()

        # Mock out the emr_client (moto has incorrect response)
        mock_emr_client = MagicMock()
        mock_emr_client.terminate_job_flows.return_value = TERMINATE_SUCCESS_RETURN

        # Mock out the emr_client creator
        self.boto3_client_mock = MagicMock(return_value=mock_emr_client)
Example #22
    def setUp(self):
        configuration.test_mode()
        from airflow.contrib.hooks.fs_hook import FSHook

        hook = FSHook()
        args = {"owner": "airflow", "start_date": DEFAULT_DATE, "provide_context": True}
        dag = DAG(TEST_DAG_ID + "test_schedule_dag_once", default_args=args)
        dag.schedule_interval = "@once"
        self.hook = hook
        self.dag = dag
Example #23
File: core.py  Project: praveev/airflow
 def setUp(self):
     configuration.test_mode()
     self.dagbag = models.DagBag(dag_folder=DEV_NULL, include_examples=True)
     self.args = {"owner": "airflow", "start_date": DEFAULT_DATE}
     dag = DAG(TEST_DAG_ID, default_args=self.args)
     self.dag = dag
     self.dag_bash = self.dagbag.dags["example_bash_operator"]
     self.runme_0 = self.dag_bash.get_task("runme_0")
     self.run_after_loop = self.dag_bash.get_task("run_after_loop")
     self.run_this_last = self.dag_bash.get_task("run_this_last")
Example #24
 def setUp(self):
     configuration.test_mode()
     self.dagbag = models.DagBag(dag_folder=DEV_NULL, include_examples=True)
     self.args = {'owner': 'airflow', 'start_date': DEFAULT_DATE}
     dag = DAG(TEST_DAG_ID, default_args=self.args)
     self.dag = dag
     self.dag_bash = self.dagbag.dags['example_bash_operator']
     self.runme_0 = self.dag_bash.get_task('runme_0')
     self.run_after_loop = self.dag_bash.get_task('run_after_loop')
     self.run_this_last = self.dag_bash.get_task('run_this_last')
Example #25
 def setUp(self):
     configuration.test_mode()
     self.dagbag = models.DagBag(
         dag_folder=DEV_NULL, include_examples=True)
     self.args = {'owner': 'airflow', 'start_date': DEFAULT_DATE}
     dag = DAG(TEST_DAG_ID, default_args=self.args)
     self.dag = dag
     self.dag_bash = self.dagbag.dags['example_bash_operator']
     self.runme_0 = self.dag_bash.get_task('runme_0')
     self.run_after_loop = self.dag_bash.get_task('run_after_loop')
     self.run_this_last = self.dag_bash.get_task('run_this_last')
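Examples #23 through #25 capture tasks from the bundled example_bash_operator DAG so individual tests can execute them. A rough sketch of how such a fixture is typically exercised, assuming an Airflow install of the same vintage with an initialised test database; the test name is invented and the exact run() keyword arguments vary between Airflow versions.

 def test_run_example_bash_task(self):
     # runme_0 was pulled out of example_bash_operator in setUp(); running it
     # over a single schedule date executes the task end to end.
     self.runme_0.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)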
Example #26
    def setUp(self):
        configuration.test_mode()

        # Mock out the emr_client (moto has incorrect response)
        self.mock_emr_client = MagicMock()
        self.mock_emr_client.describe_cluster.side_effect = [
            DESCRIBE_CLUSTER_RUNNING_RETURN, DESCRIBE_CLUSTER_TERMINATED_RETURN
        ]

        # Mock out the emr_client creator
        self.boto3_client_mock = MagicMock(return_value=self.mock_emr_client)
Example #27
    def setUp(self):
        configuration.test_mode()

        # Mock out the emr_client (moto has incorrect response)
        self.mock_emr_client = MagicMock()
        self.mock_emr_client.describe_cluster.side_effect = [
            DESCRIBE_CLUSTER_RUNNING_RETURN,
            DESCRIBE_CLUSTER_TERMINATED_RETURN
        ]

        # Mock out the emr_client creator
        self.boto3_client_mock = MagicMock(return_value=self.mock_emr_client)
Example #28
 def setUp(self):
     configuration.test_mode()
     from airflow.contrib.hooks.fs_hook import FSHook
     hook = FSHook()
     args = {
         'owner': 'airflow',
         'start_date': DEFAULT_DATE,
         'provide_context': True
     }
     dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
     dag.schedule_interval = '@once'
     self.hook = hook
     self.dag = dag
Example #29
 def setUp(self):
     configuration.test_mode()
     from airflow.contrib.hooks.ssh_hook import SSHHook
     hook = SSHHook()
     hook.no_host_key_check = True
     args = {
         'owner': 'airflow',
         'start_date': DEFAULT_DATE,
         'provide_context': True
     }
     dag = DAG(TEST_DAG_ID+'test_schedule_dag_once', default_args=args)
     dag.schedule_interval = '@once'
     self.hook = hook
     self.dag = dag
Example #30
 def setUp(self):
     configuration.test_mode()
     configuration.conf.set("webserver", "authenticate", "False")
     app = application.create_app()
     app.config['TESTING'] = True
     self.app = app.test_client()
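Example #30 (and #36 below) wires up the Flask test client with authentication disabled. For context, a minimal sketch of how such a client is then driven; the route is a placeholder, not necessarily an endpoint in this Airflow version.

 def test_placeholder_page(self):
     # self.app is the Flask test client created in setUp(); '/admin/' is only
     # a placeholder path used for illustration here.
     response = self.app.get('/admin/')
     assert response.status_code < 500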
Example #31
File: core.py  Project: praveev/airflow
 def setUp(self):
     configuration.test_mode()
     args = {"owner": "airflow", "start_date": DEFAULT_DATE_ISO}
     dag = DAG(TEST_DAG_ID, default_args=args)
     self.dag = dag
Example #32
File: core.py  Project: jbalogh/airflow
 def setUp(self):
     configuration.test_mode()
     utils.initdb()
     app.config['TESTING'] = True
     self.app = app.test_client()
Example #33
 def setUp(self):
     configuration.test_mode()
     args = {'owner': 'airflow', 'start_date': DEFAULT_DATE}
     dag = DAG('test_dag_id', default_args=args)
     self.dag = dag
     self.hql = """
Example #34
 def setUp(self):
     configuration.test_mode()
Example #35
File: core.py  Project: DingaGa/airflow
 def tearDown(self):
     configuration.test_mode()
     configuration.conf.set("webserver", "authenticate", "False")
Example #36
 def setUp(self):
     configuration.test_mode()
     configuration.conf.set("webserver", "authenticate", "False")
     app = application.create_app()
     app.config['TESTING'] = True
     self.app = app.test_client()
Example #37
 def setUp(self):
     configuration.test_mode()
     from airflow.contrib.hooks.ssh_hook import SSHHook
     self.hook = SSHHook()
     self.hook.no_host_key_check = True
Example #38
 def setUp(self):
     configuration.test_mode()
     self.s3_test_url = "s3://test/this/is/not/a-real-key.txt"
Example #39
 def setUp(self):
     configuration.test_mode()
     self.s3_test_url = "s3://test/this/is/not/a-real-key.txt"
Example #40
import datetime
import logging
import unittest

from airflow import AirflowException, settings
from airflow.bin import cli
from airflow.executors import DEFAULT_EXECUTOR
from airflow.jobs import BackfillJob, SchedulerJob
from airflow.models import DAG, DagBag, DagRun, Pool, TaskInstance as TI
from airflow.operators import DummyOperator
from airflow.utils.db import provide_session
from airflow.utils.state import State
from airflow.utils.timeout import timeout

from airflow import configuration
configuration.test_mode()

DEV_NULL = '/dev/null'
DEFAULT_DATE = datetime.datetime(2016, 1, 1)


class BackfillJobTest(unittest.TestCase):
    def setUp(self):
        self.parser = cli.CLIFactory.get_parser()
        self.dagbag = DagBag(include_examples=True)

    def test_backfill_examples(self):
        """
        Test backfilling example dags
        """
Example #41
File: core.py  Project: johnw424/airflow
 def setUp(self):
     configuration.test_mode()
     args = {"owner": "airflow", "start_date": datetime(2015, 1, 1)}
     dag = DAG(TEST_DAG_ID, default_args=args)
     self.dag = dag
     self.hql = """
Example #42
 def setUp(self):
     configuration.test_mode()
     app = create_app()
     app.config['TESTING'] = True
     self.app = app.test_client()
Example #43
 def setUp(self):
     configuration.test_mode()
Example #44
File: core.py  Project: johnw424/airflow
 def setUp(self):
     configuration.test_mode()
     utils.initdb()
     os.environ["AIRFLOW_CONN_TEST_URI"] = "postgres://*****:*****@ec2.compute.com:5432/the_database"
Example #45
 def setUp(self):
     configuration.test_mode()
     app.config['TESTING'] = True
     self.app = app.test_client()
Example #46
File: core.py  Project: jbalogh/airflow
from datetime import datetime, time, timedelta
from time import sleep
import unittest
from airflow import configuration
configuration.test_mode()
from airflow import jobs, models, DAG, executors, utils, operators
from airflow.www.app import app
from airflow import utils

NUM_EXAMPLE_DAGS = 5
DEV_NULL = '/dev/null'
LOCAL_EXECUTOR = executors.LocalExecutor()
DEFAULT_DATE = datetime(2015, 1, 1)
configuration.test_mode()


class TransferTests(unittest.TestCase):

    def setUp(self):
        configuration.test_mode()
        utils.initdb()
        args = {'owner': 'airflow', 'start_date': datetime(2015, 1, 1)}
        dag = DAG('hive_test', default_args=args)
        self.dag = dag

    def test_mysql_to_hive(self):
        sql = "SELECT * FROM task_instance LIMIT 1000;"
        t = operators.MySqlToHiveTransfer(
            task_id='test_m2h',
            mysql_conn_id='airflow_db',
            sql=sql,
Example #47
File: core.py  Project: johnw424/airflow
 def setUp(self):
     configuration.test_mode()
     app.config["TESTING"] = True
     self.app = app.test_client()
Example #48
File: core.py  Project: sepsyk/airflow
 def setUp(self):
     configuration.test_mode()
     from airflow.contrib.hooks.ssh_hook import SSHHook
     self.hook = SSHHook()
     self.hook.no_host_key_check = True
Example #49
File: core.py  Project: umingpeng/airflow
 def tearDown(self):
     configuration.test_mode()
     configuration.conf.set("webserver", "authenticate", "False")