def main():
    """Run a short, in-process Airflow scheduler experiment.

    Bootstraps dbnd, switches Airflow into unit-test mode, routes root
    logging to the experiment's scheduler log file, then drives a
    ``SchedulerJob`` for a fixed number of runs using an in-process
    executor over the experiment DAG.
    """
    from airflow import conf
    from airflow.jobs.scheduler_job import SchedulerJob
    from airflow.models import DagBag

    from dbnd import dbnd_bootstrap
    from dbnd._core.log.logging_utils import create_file_handler
    from dbnd_airflow.airflow_extensions.airflow_config import (
        reinit_airflow_sql_conn,
    )
    from dbnd_airflow.executors.simple_executor import InProcessExecutor
    from test_dbnd_airflow.scenarios.scheduler_perf_experiment import (
        dag_folder,
        dag_id,
        log_scheduler,
    )

    dbnd_bootstrap()
    reinit_airflow_sql_conn()

    # Force Airflow into unit-test mode before the scheduler starts.
    conf.set("core", "unit_test_mode", "True")

    # Capture all root-logger output in the experiment's log file.
    logging.root.addHandler(create_file_handler(log_file=log_scheduler))

    bag = DagBag(dag_folder=dag_folder)
    job = SchedulerJob(
        dag_ids=[dag_id],
        subdir=dag_folder,
        do_pickle=False,
        num_runs=3,  # bounded run so the experiment terminates
        executor=InProcessExecutor(dag_bag=bag),
    )
    job.run()
def setup_unittest_airflow():
    """Configure Airflow for unit tests and (re)initialize its database.

    Sets the unit-test-mode environment variable, layers Airflow's test
    config on top of the existing ``airflow.cfg`` (without overwriting
    it), runs ``initdb`` in a subprocess, then reconnects dbnd-airflow's
    SQL session to the freshly initialized database.
    """
    os.environ["AIRFLOW__CORE__UNIT_TEST_MODE"] = "True"

    from airflow import configuration as airflow_configuration
    from airflow.configuration import TEST_CONFIG_FILE

    # We can't call load_test_config, as it overrides airflow.cfg;
    # we want to keep airflow.cfg as the base configuration.
    # NOTE: lazy %-args so formatting is deferred to the logging layer.
    logger.info("Reading Airflow test config at %s", TEST_CONFIG_FILE)
    airflow_configuration.conf.read(TEST_CONFIG_FILE)

    # Init the db first...
    subprocess_airflow_initdb()

    # ...now reconnect against the initialized database.
    from dbnd_airflow.airflow_extensions.airflow_config import (
        reinit_airflow_sql_conn,
    )

    reinit_airflow_sql_conn()
def dbnd_setup_unittest():
    """Prepare Airflow for dbnd unit tests.

    Enables unit-test mode, layers Airflow's test config over the base
    ``airflow.cfg``, warns if ``sql_alchemy_conn`` does not point at the
    conventional ``unittests.db`` sqlite file, reconnects dbnd-airflow's
    SQL session, and finally runs ``airflow initdb`` in a subprocess.
    """
    os.environ["AIRFLOW__CORE__UNIT_TEST_MODE"] = "True"

    from airflow import configuration as airflow_configuration
    from airflow.configuration import TEST_CONFIG_FILE

    # We can't call load_test_config, as it overrides airflow.cfg;
    # we want to keep airflow.cfg as the base configuration.
    # NOTE: lazy %-args so formatting is deferred to the logging layer.
    logger.info("Reading Airflow test config at %s", TEST_CONFIG_FILE)
    airflow_configuration.conf.read(TEST_CONFIG_FILE)

    sql_alchemy_conn = airflow_configuration.get("core", "sql_alchemy_conn")
    # Idiomatic membership test instead of str.find(...) == -1.
    if "unittests.db" not in sql_alchemy_conn:
        logger.warning(
            "You should set SQL_ALCHEMY_CONN to sqlite:///.../unittests.db"
            " for tests! %s",
            sql_alchemy_conn,
        )

    from dbnd_airflow.airflow_extensions.airflow_config import (
        reinit_airflow_sql_conn,
    )

    reinit_airflow_sql_conn()

    from dbnd_airflow.dbnd_airflow_main import subprocess_airflow

    subprocess_airflow(args=["initdb"])
def configure_airflow_sql_alchemy_conn():
    """Reconnect dbnd-airflow's SQLAlchemy session to Airflow's database."""
    from dbnd_airflow.airflow_extensions import airflow_config

    airflow_config.reinit_airflow_sql_conn()