def tearDown(self):
    configuration.test_mode()
    session = Session()
    session.query(models.User).delete()
    session.commit()
    session.close()
    configuration.conf.set("webserver", "authenticate", "False")

def setUp(self): configuration.test_mode() utils.initdb() args = {"owner": "airflow", "start_date": datetime(2015, 1, 1)} dag = DAG("hive_test", default_args=args) self.dag = dag self.hql = """
def setUp(self):
    configuration.test_mode()
    app = create_app()
    app.config['TESTING'] = True
    self.parser = cli.get_parser()
    self.dagbag = models.DagBag(
        dag_folder=DEV_NULL, include_examples=True)

def setUp(self):
    configuration.test_mode()
    utils.initdb()
    args = {'owner': 'airflow', 'start_date': datetime(2015, 1, 1)}
    dag = DAG(TEST_DAG_ID, default_args=args)
    dag.clear(start_date=DEFAULT_DATE, end_date=datetime.now())
    self.dag = dag

def setUp(self):
    configuration.test_mode()
    app = application.create_app()
    app.config['TESTING'] = True
    self.parser = cli.get_parser()
    self.dagbag = models.DagBag(
        dag_folder=DEV_NULL, include_examples=True)

def setUp(self):
    configuration.test_mode()
    utils.initdb()
    args = {'owner': 'airflow', 'start_date': datetime(2015, 1, 1)}
    dag = DAG(TEST_DAG_ID, default_args=args)
    self.dag = dag
    self.hql = """

def setUp(self):
    configuration.test_mode()
    utils.initdb()
    os.environ['AIRFLOW_CONN_TEST_URI'] = (
        'postgres://*****:*****@ec2.compute.com:5432/the_database')
    os.environ['AIRFLOW_CONN_TEST_URI_NO_CREDS'] = (
        'postgres://ec2.compute.com/the_database')

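# A minimal sketch of how a test might read back the connection defined
# via the environment variable above. It assumes BaseHook.get_connection()
# resolves AIRFLOW_CONN_* URIs into Connection objects; the masked
# credentials are deliberately left unasserted.
def test_env_var_connection_sketch(self):
    from airflow.hooks.base_hook import BaseHook
    c = BaseHook.get_connection(conn_id='test_uri')
    self.assertEqual(c.host, 'ec2.compute.com')
    self.assertEqual(c.port, 5432)
    self.assertEqual(c.schema, 'the_database')
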
def setUp(self):
    configuration.test_mode()
    utils.initdb()
    args = {'owner': 'airflow', 'start_date': datetime(2015, 1, 1)}
    dag = DAG('hive_test', default_args=args)
    self.dag = dag
    self.hql = """

def setUp(self):
    configuration.test_mode()
    args = {
        'owner': 'airflow',
        'start_date': DEFAULT_DATE
    }
    dag = DAG(TEST_DAG_ID, default_args=args)
    self.dag = dag

def setUp(self): configuration.test_mode() self.dagbag = models.DagBag(dag_folder=DEV_NULL, include_examples=True) self.args = {"owner": "airflow", "start_date": datetime(2015, 1, 1)} dag = DAG(TEST_DAG_ID, default_args=self.args) self.dag = dag self.dag_bash = self.dagbag.dags["example_bash_operator"] self.runme_0 = self.dag_bash.get_task("runme_0")
def setUp(self):
    configuration.test_mode()
    self.dagbag = models.DagBag(dag_folder=DEV_NULL, include_examples=True)
    args = {'owner': 'airflow', 'start_date': datetime(2015, 1, 1)}
    dag = DAG(TEST_DAG_ID, default_args=args)
    self.dag = dag
    self.dag_bash = self.dagbag.dags['example_bash_operator']
    self.runme_0 = self.dag_bash.get_task('runme_0')

def setUp(self):
    configuration.test_mode()

    # Mock out the emr_client (moto has incorrect response)
    mock_emr_client = MagicMock()
    mock_emr_client.add_job_flow_steps.return_value = ADD_STEPS_SUCCESS_RETURN

    # Mock out the emr_client creator
    self.boto3_client_mock = MagicMock(return_value=mock_emr_client)

def setUp(self):
    configuration.test_mode()
    args = {
        'owner': 'airflow',
        'mysql_conn_id': 'airflow_db',
        'start_date': datetime(2015, 1, 1)
    }
    dag = DAG(TEST_DAG_ID, default_args=args)
    self.dag = dag

def setUp(self):
    configuration.test_mode()
    args = {
        'owner': 'airflow',
        'mysql_conn_id': 'airflow_db',
        'start_date': DEFAULT_DATE
    }
    dag = DAG(TEST_DAG_ID, default_args=args)
    self.dag = dag

def setUp(self):
    configuration.test_mode()
    self.dagbag = models.DagBag(
        dag_folder=DEV_NULL, include_examples=True)
    self.args = {'owner': 'airflow', 'start_date': datetime(2015, 1, 1)}
    dag = DAG(TEST_DAG_ID, default_args=self.args)
    self.dag = dag
    self.dag_bash = self.dagbag.dags['example_bash_operator']
    self.runme_0 = self.dag_bash.get_task('runme_0')

def setUp(self):
    configuration.test_mode()

    # Mock out the emr_client (moto has incorrect response)
    mock_emr_client = MagicMock()
    mock_emr_client.run_job_flow.return_value = RUN_JOB_FLOW_SUCCESS_RETURN

    # Mock out the emr_client creator
    self.boto3_client_mock = MagicMock(return_value=mock_emr_client)

def setUp(self):
    configuration.test_mode()

    # Mock out the emr_client (moto has incorrect response)
    mock_emr_client = MagicMock()
    mock_emr_client.terminate_job_flows.return_value = TERMINATE_SUCCESS_RETURN

    # Mock out the emr_client creator
    self.boto3_client_mock = MagicMock(return_value=mock_emr_client)

def setUp(self): configuration.test_mode() from airflow.contrib.hooks.fs_hook import FSHook hook = FSHook() args = {"owner": "airflow", "start_date": DEFAULT_DATE, "provide_context": True} dag = DAG(TEST_DAG_ID + "test_schedule_dag_once", default_args=args) dag.schedule_interval = "@once" self.hook = hook self.dag = dag
def setUp(self): configuration.test_mode() self.dagbag = models.DagBag(dag_folder=DEV_NULL, include_examples=True) self.args = {"owner": "airflow", "start_date": DEFAULT_DATE} dag = DAG(TEST_DAG_ID, default_args=self.args) self.dag = dag self.dag_bash = self.dagbag.dags["example_bash_operator"] self.runme_0 = self.dag_bash.get_task("runme_0") self.run_after_loop = self.dag_bash.get_task("run_after_loop") self.run_this_last = self.dag_bash.get_task("run_this_last")
def setUp(self):
    configuration.test_mode()
    self.dagbag = models.DagBag(dag_folder=DEV_NULL, include_examples=True)
    self.args = {'owner': 'airflow', 'start_date': DEFAULT_DATE}
    dag = DAG(TEST_DAG_ID, default_args=self.args)
    self.dag = dag
    self.dag_bash = self.dagbag.dags['example_bash_operator']
    self.runme_0 = self.dag_bash.get_task('runme_0')
    self.run_after_loop = self.dag_bash.get_task('run_after_loop')
    self.run_this_last = self.dag_bash.get_task('run_this_last')

def setUp(self):
    configuration.test_mode()
    self.dagbag = models.DagBag(
        dag_folder=DEV_NULL, include_examples=True)
    self.args = {'owner': 'airflow', 'start_date': DEFAULT_DATE}
    dag = DAG(TEST_DAG_ID, default_args=self.args)
    self.dag = dag
    self.dag_bash = self.dagbag.dags['example_bash_operator']
    self.runme_0 = self.dag_bash.get_task('runme_0')
    self.run_after_loop = self.dag_bash.get_task('run_after_loop')
    self.run_this_last = self.dag_bash.get_task('run_this_last')

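# A minimal sketch of driving one of the tasks pulled from the example
# DAG above; it assumes the classic BaseOperator.run() signature, where
# force=True re-runs the task even if it already succeeded.
def test_runme_0_sketch(self):
    self.runme_0.run(
        start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, force=True)
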
def setUp(self):
    configuration.test_mode()

    # Mock out the emr_client (moto has incorrect response)
    self.mock_emr_client = MagicMock()
    self.mock_emr_client.describe_cluster.side_effect = [
        DESCRIBE_CLUSTER_RUNNING_RETURN,
        DESCRIBE_CLUSTER_TERMINATED_RETURN
    ]

    # Mock out the emr_client creator
    self.boto3_client_mock = MagicMock(return_value=self.mock_emr_client)

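# A minimal sketch of how self.boto3_client_mock is typically applied:
# boto3.client is patched so code under test receives the canned EMR
# client built in setUp. The patch target and assertion here are
# illustrative, not taken from the snippets above.
def test_boto3_patch_sketch(self):
    import boto3
    from mock import patch  # unittest.mock.patch on Python 3

    with patch('boto3.client', self.boto3_client_mock):
        # anything calling boto3.client('emr') now gets the canned mock
        emr_client = boto3.client('emr')
        self.assertIs(emr_client, self.mock_emr_client)
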
def setUp(self):
    configuration.test_mode()
    from airflow.contrib.hooks.fs_hook import FSHook
    hook = FSHook()
    args = {
        'owner': 'airflow',
        'start_date': DEFAULT_DATE,
        'provide_context': True
    }
    dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
    dag.schedule_interval = '@once'
    self.hook = hook
    self.dag = dag

def setUp(self):
    configuration.test_mode()
    from airflow.contrib.hooks.ssh_hook import SSHHook
    hook = SSHHook()
    hook.no_host_key_check = True
    args = {
        'owner': 'airflow',
        'start_date': DEFAULT_DATE,
        'provide_context': True
    }
    dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
    dag.schedule_interval = '@once'
    self.hook = hook
    self.dag = dag

def setUp(self): configuration.test_mode() configuration.conf.set("webserver", "authenticate", "False") app = application.create_app() app.config['TESTING'] = True self.app = app.test_client()
def setUp(self): configuration.test_mode() args = {"owner": "airflow", "start_date": DEFAULT_DATE_ISO} dag = DAG(TEST_DAG_ID, default_args=args) self.dag = dag
def setUp(self):
    configuration.test_mode()
    utils.initdb()
    app.config['TESTING'] = True
    self.app = app.test_client()

def setUp(self):
    configuration.test_mode()
    args = {'owner': 'airflow', 'start_date': DEFAULT_DATE}
    dag = DAG('test_dag_id', default_args=args)
    self.dag = dag
    self.hql = """

def setUp(self):
    configuration.test_mode()

def tearDown(self): configuration.test_mode() configuration.conf.set("webserver", "authenticate", "False")
def setUp(self):
    configuration.test_mode()
    from airflow.contrib.hooks.ssh_hook import SSHHook
    self.hook = SSHHook()
    self.hook.no_host_key_check = True

def setUp(self):
    configuration.test_mode()
    self.s3_test_url = "s3://test/this/is/not/a-real-key.txt"

import datetime
import logging
import unittest

from airflow import AirflowException, settings
from airflow.bin import cli
from airflow.executors import DEFAULT_EXECUTOR
from airflow.jobs import BackfillJob, SchedulerJob
from airflow.models import DAG, DagBag, DagRun, Pool, TaskInstance as TI
from airflow.operators import DummyOperator
from airflow.utils.db import provide_session
from airflow.utils.state import State
from airflow.utils.timeout import timeout

from airflow import configuration
configuration.test_mode()

DEV_NULL = '/dev/null'
DEFAULT_DATE = datetime.datetime(2016, 1, 1)


class BackfillJobTest(unittest.TestCase):

    def setUp(self):
        self.parser = cli.CLIFactory.get_parser()
        self.dagbag = DagBag(include_examples=True)

    def test_backfill_examples(self):
        """
        Test backfilling example dags
        """
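        # The original body is truncated here. A minimal sketch of a
        # plausible continuation, assuming BackfillJob takes dag,
        # start_date and end_date as suggested by the imports above
        # (the 'example' prefix filter is an assumption, not the
        # original code):
        for dag in self.dagbag.dags.values():
            if not dag.dag_id.startswith('example'):
                continue
            dag.clear(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
            job = BackfillJob(
                dag=dag, start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
            job.run()
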
def setUp(self): configuration.test_mode() args = {"owner": "airflow", "start_date": datetime(2015, 1, 1)} dag = DAG(TEST_DAG_ID, default_args=args) self.dag = dag self.hql = """
def setUp(self):
    configuration.test_mode()
    app = create_app()
    app.config['TESTING'] = True
    self.app = app.test_client()

def setUp(self): configuration.test_mode() utils.initdb() os.environ["AIRFLOW_CONN_TEST_URI"] = "postgres://*****:*****@ec2.compute.com:5432/the_database"
def setUp(self):
    configuration.test_mode()
    app.config['TESTING'] = True
    self.app = app.test_client()

from datetime import datetime, time, timedelta
from time import sleep
import unittest

from airflow import configuration
configuration.test_mode()
from airflow import jobs, models, DAG, executors, utils, operators
from airflow.www.app import app

NUM_EXAMPLE_DAGS = 5
DEV_NULL = '/dev/null'
LOCAL_EXECUTOR = executors.LocalExecutor()
DEFAULT_DATE = datetime(2015, 1, 1)


class TransferTests(unittest.TestCase):

    def setUp(self):
        configuration.test_mode()
        utils.initdb()
        args = {'owner': 'airflow', 'start_date': datetime(2015, 1, 1)}
        dag = DAG('hive_test', default_args=args)
        self.dag = dag

    def test_mysql_to_hive(self):
        sql = "SELECT * FROM task_instance LIMIT 1000;"
        t = operators.MySqlToHiveTransfer(
            task_id='test_m2h',
            mysql_conn_id='airflow_db',
            sql=sql,

def setUp(self): configuration.test_mode() app.config["TESTING"] = True self.app = app.test_client()