# mock_send_email is supplied by a @patch decorator on this method,
# which this listing has trimmed; it mocks Airflow's email-sending helper.
def test_email_alert_with_config(self, mock_send_email):
        dag = models.DAG(dag_id='test_failure_email')
        task = BashOperator(
            task_id='test_email_alert_with_config',
            dag=dag,
            bash_command='exit 1',
            start_date=DEFAULT_DATE,
            email='to')

        ti = TI(
            task=task, execution_date=datetime.datetime.now())

        conf.set('email', 'subject_template', '/subject/path')
        conf.set('email', 'html_content_template', '/html_content/path')

        opener = mock_open(read_data='template: {{ti.task_id}}')
        with patch('airflow.models.taskinstance.open', opener, create=True):
            try:
                ti.run()
            except AirflowException:
                pass

        (email, title, body), _ = mock_send_email.call_args
        self.assertEqual(email, 'to')
        self.assertEqual('template: test_email_alert_with_config', title)
        self.assertEqual('template: test_email_alert_with_config', body)
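For context, here is a minimal sketch (an assumption, not the Airflow source) of the mechanism exercised above: the template path stored via conf.set() is opened and rendered as a Jinja2 template against the task instance, which is why patching open() with mock_open controls both the rendered subject and body.

    # Hypothetical helper illustrating how the configured template path is used.
    import jinja2
    from airflow.configuration import conf

    def render_subject(ti):
        path = conf.get('email', 'subject_template')  # '/subject/path' above
        with open(path) as f:                         # intercepted by mock_open
            return jinja2.Template(f.read()).render(ti=ti)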
Example #2
 def test_resolve_xcom_class_fallback_to_basexcom_no_config(self):
     init = conf.get("core", "xcom_backend")
     conf.remove_option("core", "xcom_backend")
     cls = resolve_xcom_backend()
     assert issubclass(cls, BaseXCom)
     assert cls().serialize_value([1]) == b"[1]"
     conf.set("core", "xcom_backend", init)
Example #3
@contextmanager
def conf_vars(overrides):
    original = {}
    original_env_vars = {}
    for (section, key), value in overrides.items():

        env = conf._env_var_name(section, key)
        if env in os.environ:
            original_env_vars[env] = os.environ.pop(env)

        if conf.has_option(section, key):
            original[(section, key)] = conf.get(section, key)
        else:
            original[(section, key)] = None
        if value is not None:
            conf.set(section, key, value)
        else:
            conf.remove_option(section, key)
    settings.configure_vars()
    try:
        yield
    finally:
        for (section, key), value in original.items():
            if value is not None:
                conf.set(section, key, value)
            else:
                conf.remove_option(section, key)
        for env, value in original_env_vars.items():
            os.environ[env] = value
        settings.configure_vars()
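Typical usage of this helper as a context manager: overrides (including any masked AIRFLOW__SECTION__KEY environment variables) apply only inside the with block, and the finally clause rolls them back even if the body raises.

    with conf_vars({("core", "unit_test_mode"): "True",
                    ("email", "subject_template"): None}):  # None removes the option
        assert conf.getboolean("core", "unit_test_mode")
    # previous values and environment variables are restored here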
Example #4
def validate_logging_config(logging_config):
    # Now let's validate the other logging-related settings
    task_log_reader = conf.get('logging', 'task_log_reader')

    logger = logging.getLogger('airflow.task')

    def _get_handler(name):
        return next((h for h in logger.handlers if h.name == name), None)

    if _get_handler(task_log_reader) is None:
        # Check for pre 1.10 setting that might be in deployed airflow.cfg files
        if task_log_reader == "file.task" and _get_handler("task"):
            warnings.warn(
                "task_log_reader setting in [logging] has a deprecated value of "
                "{!r}, but no handler with this name was found. Please update "
                "your config to use {!r}. Running config has been adjusted to "
                "match".format(
                    task_log_reader,
                    "task",
                ),
                DeprecationWarning,
            )
            conf.set('logging', 'task_log_reader', 'task')
        else:
            raise AirflowConfigException(
                "Configured task_log_reader {!r} was not a handler of the 'airflow.task' "
                "logger.".format(task_log_reader))
Example #5
@contextmanager
def conf_vars(overrides):
    original = {}
    original_env_vars = {}
    reconfigure_vars = False
    for (section, key), value in overrides.items():

        env = conf._env_var_name(section, key)
        if env in os.environ:
            original_env_vars[env] = os.environ.pop(env)

        if conf.has_option(section, key):
            original[(section, key)] = conf.get(section, key)
        else:
            original[(section, key)] = None
        if value is not None:
            conf.set(section, key, value)
        else:
            conf.remove_option(section, key)

        if section == 'core' and key.lower().endswith('_folder'):
            reconfigure_vars = True
    if reconfigure_vars:
        settings.configure_vars()
    yield
    for (section, key), value in original.items():
        if value is not None:
            conf.set(section, key, value)
        else:
            conf.remove_option(section, key)
    for env, value in original_env_vars.items():
        os.environ[env] = value
    if reconfigure_vars:
        settings.configure_vars()
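Note the design difference from Example #3: this variant calls settings.configure_vars() only when a core *_folder option actually changed, avoiding needless reconfiguration, but it restores state after a bare yield rather than in a try/finally, so an exception raised inside the with block would leave the overrides in place.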
Example #6
    def test_xcom_enable_pickle_type(self):
        json_obj = {"key": "value"}
        execution_date = timezone.utcnow()
        key = "xcom_test2"
        dag_id = "test_dag2"
        task_id = "test_task2"

        conf.set("core", "enable_xcom_pickling", "True")

        XCom.set(key=key,
                 value=json_obj,
                 dag_id=dag_id,
                 task_id=task_id,
                 execution_date=execution_date)

        ret_value = XCom.get_one(key=key,
                                 dag_id=dag_id,
                                 task_id=task_id,
                                 execution_date=execution_date)

        self.assertEqual(ret_value, json_obj)

        session = settings.Session()
        ret_value = session.query(XCom).filter(
            XCom.key == key, XCom.dag_id == dag_id, XCom.task_id == task_id,
            XCom.execution_date == execution_date).first().value

        self.assertEqual(ret_value, json_obj)
Example #7
 def setUpClass(cls):
     os.environ['AIRFLOW__TESTSECTION__TESTKEY'] = 'testvalue'
     os.environ['AIRFLOW__TESTSECTION__TESTPERCENT'] = 'with%percent'
     os.environ[
         'AIRFLOW__KUBERNETES_ENVIRONMENT_VARIABLES__AIRFLOW__TESTSECTION__TESTKEY'] = 'nested'
     configuration.load_test_config()
     conf.set('core', 'percent', 'with%%inside')
Example #8
    def test_xcom_get_many(self):
        json_obj = {"key": "value"}
        execution_date = timezone.utcnow()
        key = "xcom_test4"
        dag_id1 = "test_dag4"
        task_id1 = "test_task4"
        dag_id2 = "test_dag5"
        task_id2 = "test_task5"

        conf.set("core", "xcom_enable_pickling", "True")

        XCom.set(key=key,
                 value=json_obj,
                 dag_id=dag_id1,
                 task_id=task_id1,
                 execution_date=execution_date)

        XCom.set(key=key,
                 value=json_obj,
                 dag_id=dag_id2,
                 task_id=task_id2,
                 execution_date=execution_date)

        results = XCom.get_many(key=key, execution_date=execution_date)

        for result in results:
            self.assertEqual(result.value, json_obj)
Example #9
def export_data_directly(sql_alchemy_conn, dag_folder, since, include_logs,
                         include_task_args, dag_ids, tasks):
    from airflow import models, settings, conf
    from airflow.settings import STORE_SERIALIZED_DAGS
    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    conf.set("core", "sql_alchemy_conn", value=sql_alchemy_conn)
    dagbag = models.DagBag(
        dag_folder if dag_folder else settings.DAGS_FOLDER,
        include_examples=True,
        store_serialized_dags=STORE_SERIALIZED_DAGS,
    )

    engine = create_engine(sql_alchemy_conn)
    session = sessionmaker(bind=engine)
    return _handle_export_data(
        dagbag,
        since,
        include_logs,
        include_task_args,
        dag_ids,
        tasks,
        session=session(),
    )
Example #10
    def test_s3_to_sftp_operation(self):
        # Setting
        conf.set("core", "enable_xcom_pickling", "True")
        test_remote_file_content = \
            "This is remote file content \n which is also multiline " \
            "another line here \n this is last line. EOF"

        # Test for creation of s3 bucket
        conn = boto3.client('s3')
        conn.create_bucket(Bucket=self.s3_bucket)
        self.assertTrue(self.s3_hook.check_for_bucket(self.s3_bucket))

        with open(LOCAL_FILE_PATH, 'w') as file:
            file.write(test_remote_file_content)
        self.s3_hook.load_file(LOCAL_FILE_PATH,
                               self.s3_key,
                               bucket_name=BUCKET)

        # Check if object was created in s3
        objects_in_dest_bucket = conn.list_objects(Bucket=self.s3_bucket,
                                                   Prefix=self.s3_key)
        # exactly one object should be found
        self.assertEqual(len(objects_in_dest_bucket['Contents']), 1)

        # the object found should be consistent with dest_key specified earlier
        self.assertEqual(objects_in_dest_bucket['Contents'][0]['Key'],
                         self.s3_key)

        # get remote file to local
        run_task = S3ToSFTPOperator(s3_bucket=BUCKET,
                                    s3_key=S3_KEY,
                                    sftp_path=SFTP_PATH,
                                    sftp_conn_id=SFTP_CONN_ID,
                                    s3_conn_id=S3_CONN_ID,
                                    task_id=TASK_ID,
                                    dag=self.dag)
        self.assertIsNotNone(run_task)

        run_task.execute(None)

        # Check that the file is created remotely
        check_file_task = SSHOperator(task_id="test_check_file",
                                      ssh_hook=self.hook,
                                      command="cat {0}".format(self.sftp_path),
                                      do_xcom_push=True,
                                      dag=self.dag)
        self.assertIsNotNone(check_file_task)
        ti3 = TaskInstance(task=check_file_task,
                           execution_date=timezone.utcnow())
        ti3.run()
        self.assertEqual(
            ti3.xcom_pull(task_ids='test_check_file',
                          key='return_value').strip(),
            test_remote_file_content.encode('utf-8'))

        # Clean up after finishing with test
        conn.delete_object(Bucket=self.s3_bucket, Key=self.s3_key)
        conn.delete_bucket(Bucket=self.s3_bucket)
        self.assertFalse((self.s3_hook.check_for_bucket(self.s3_bucket)))
Example #11
    def test_get_default_scheduler(self):
        conf.set('core', 'scheduler', SchedulerFactory.DEFAULT_SCHEDULER)
        scheduler_class = SchedulerFactory.get_default_scheduler()
        self.assertEqual(scheduler_class, SchedulerJob)

        conf.set('core', 'scheduler', SchedulerFactory.EVENT_BASED_SCHEDULER)
        scheduler_class = SchedulerFactory.get_default_scheduler()
        self.assertEqual(scheduler_class, EventBasedSchedulerJob)
Example #12
    def setUp(self):

        if not conf.has_section("kerberos"):
            conf.add_section("kerberos")
        conf.set("kerberos", "keytab",
                 os.environ['KRB5_KTNAME'])
        keytab_from_cfg = conf.get("kerberos", "keytab")
        self.args = Namespace(keytab=keytab_from_cfg, principal=None, pid=None,
                              daemon=None, stdout=None, stderr=None, log_file=None)
Example #13
 def __enter__(self):
     with open(self.settings_file, 'w') as handle:
         handle.writelines(self.content)
     sys.path.append(self.settings_root)
     conf.set(
         'logging',
         'logging_config_class',
         self.module
     )
     return self.settings_file
Example #14
    def test_deprecated_options_cmd(self):
        # Guarantee we have a deprecated setting, so we test the deprecation
        # lookup even if we remove this explicit fallback
        conf.deprecated_options['celery'] = {'result_backend': 'celery_result_backend'}
        conf.as_command_stdout.add(('celery', 'celery_result_backend'))

        conf.remove_option('celery', 'result_backend')
        conf.set('celery', 'celery_result_backend_cmd', '/bin/echo 99')

        with self.assertWarns(DeprecationWarning):
            self.assertEqual(conf.getint('celery', 'result_backend'), 99)
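The _cmd suffix works because the ('celery', 'celery_result_backend') pair is registered in conf.as_command_stdout: when the plain option is missing, Airflow runs the command stored under <option>_cmd and uses its stdout (here '99' from /bin/echo) as the value, which getint() then parses.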
Example #16
    def setUp(self):
        super().setUp()
        DEFAULT_LOGGING_CONFIG['handlers']['task'] = {
            'class': TASK_HANDLER_CLASS,
            'formatter': 'airflow',
            'stream': 'sys.stdout'
        }
        conf.set('logging', 'task_log_prefix_template', "{{ti.dag_id}}-{{ti.task_id}}")

        logging.config.dictConfig(DEFAULT_LOGGING_CONFIG)
        logging.root.disabled = False
Example #17
    def test_xcom_disable_pickle_type_fail_on_non_json(self):
        class PickleRce:
            def __reduce__(self):
                return os.system, ("ls -alt",)

        conf.set("core", "xcom_enable_pickling", "False")

        self.assertRaises(TypeError, XCom.set,
                          key="xcom_test3",
                          value=PickleRce(),
                          dag_id="test_dag3",
                          task_id="test_task3",
                          execution_date=timezone.utcnow())
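The PickleRce class makes the security motivation concrete: unpickling it would invoke os.system('ls -alt'), so with pickling disabled XCom.set serializes values as JSON and raises TypeError for objects JSON cannot represent, rather than ever round-tripping attacker-controlled pickles.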
Example #18
    def setUp(self):
        try:
            conf.add_section("api")
        except Exception:  # pylint:disable=broad-except
            pass
        conf.set("api", "auth_backend",
                 "airflow.api.auth.backend.kerberos_auth")
        try:
            conf.add_section("kerberos")
        except Exception:  # pylint:disable=broad-except
            pass
        conf.set("kerberos", "keytab", os.environ['KRB5_KTNAME'])

        self.app, _ = application.create_app(testing=True)
Example #19
    def test_deprecated_options_cmd(self):
        # Guarantee we have a deprecated setting, so we test the deprecation
        # lookup even if we remove this explicit fallback
        conf.deprecated_options[('celery', "result_backend")] = ('celery', 'celery_result_backend')
        conf.as_command_stdout.add(('celery', 'celery_result_backend'))

        conf.remove_option('celery', 'result_backend')
        conf.set('celery', 'celery_result_backend_cmd', '/bin/echo 99')

        with self.assertWarns(DeprecationWarning):
            tmp = None
            if 'AIRFLOW__CELERY__RESULT_BACKEND' in os.environ:
                tmp = os.environ.pop('AIRFLOW__CELERY__RESULT_BACKEND')
            self.assertEqual(conf.getint('celery', 'result_backend'), 99)
            if tmp:
                os.environ['AIRFLOW__CELERY__RESULT_BACKEND'] = tmp
Example #20
    def test_deprecated_options_cmd(self):
        # Guarantee we have a deprecated setting, so we test the deprecation
        # lookup even if we remove this explicit fallback
        conf.deprecated_options['celery'] = {'result_backend': 'celery_result_backend'}
        conf.as_command_stdout.add(('celery', 'celery_result_backend'))

        conf.remove_option('celery', 'result_backend')
        conf.set('celery', 'celery_result_backend_cmd', '/bin/echo 99')

        with self.assertWarns(DeprecationWarning):
            tmp = None
            if 'AIRFLOW__CELERY__RESULT_BACKEND' in os.environ:
                tmp = os.environ.pop('AIRFLOW__CELERY__RESULT_BACKEND')
            self.assertEqual(conf.getint('celery', 'result_backend'), 99)
            if tmp:
                os.environ['AIRFLOW__CELERY__RESULT_BACKEND'] = tmp
Example #21
@contextmanager
def conf_vars(overrides):
    original = {}
    for (section, key), value in overrides.items():
        if conf.has_option(section, key):
            original[(section, key)] = conf.get(section, key)
        else:
            original[(section, key)] = None
        if value is not None:
            conf.set(section, key, value)
        else:
            conf.remove_option(section, key)
    yield
    for (section, key), value in original.items():
        if value is not None:
            conf.set(section, key, value)
        else:
            conf.remove_option(section, key)
Example #22
    def test_deprecated_options(self):
        # Guarantee we have a deprecated setting, so we test the deprecation
        # lookup even if we remove this explicit fallback
        conf.deprecated_options = {
            ('celery', 'worker_concurrency'): ('celery', 'celeryd_concurrency'),
        }

        # Remove it so we are sure we use the right setting
        conf.remove_option('celery', 'worker_concurrency')

        with self.assertWarns(DeprecationWarning):
            with mock.patch.dict('os.environ', AIRFLOW__CELERY__CELERYD_CONCURRENCY="99"):
                self.assertEqual(conf.getint('celery', 'worker_concurrency'), 99)

        with self.assertWarns(DeprecationWarning):
            conf.set('celery', 'celeryd_concurrency', '99')
            self.assertEqual(conf.getint('celery', 'worker_concurrency'), 99)
            conf.remove_option('celery', 'celeryd_concurrency')
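Both lookup paths are exercised here: an AIRFLOW__CELERY__CELERYD_CONCURRENCY environment variable and a value stored via conf.set() under the deprecated name each satisfy a read of the new option name, emitting a DeprecationWarning either way. The AIRFLOW__{SECTION}__{KEY} naming convention is the same one conf._env_var_name() builds in the conf_vars helpers above.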
Example #23
    def test_deprecated_options_with_new_section(self):
        # Guarantee we have a deprecated setting, so we test the deprecation
        # lookup even if we remove this explicit fallback
        conf.deprecated_options = {
            ('logging', 'logging_level'): ('core', 'logging_level'),
        }

        # Remove it so we are sure we use the right setting
        conf.remove_option('core', 'logging_level')
        conf.remove_option('logging', 'logging_level')

        with self.assertWarns(DeprecationWarning):
            with mock.patch.dict('os.environ', AIRFLOW__CORE__LOGGING_LEVEL="VALUE"):
                self.assertEqual(conf.get('logging', 'logging_level'), "VALUE")

        with self.assertWarns(DeprecationWarning):
            conf.set('core', 'logging_level', 'VALUE')
            self.assertEqual(conf.get('logging', 'logging_level'), "VALUE")
            conf.remove_option('core', 'logging_level')
Example #24
    def test_deprecated_options(self):
        # Guarantee we have a deprecated setting, so we test the deprecation
        # lookup even if we remove this explicit fallback
        conf.deprecated_options['celery'] = {
            'worker_concurrency': 'celeryd_concurrency',
        }

        # Remove it so we are sure we use the right setting
        conf.remove_option('celery', 'worker_concurrency')

        with self.assertWarns(DeprecationWarning):
            os.environ['AIRFLOW__CELERY__CELERYD_CONCURRENCY'] = '99'
            self.assertEqual(conf.getint('celery', 'worker_concurrency'), 99)
            os.environ.pop('AIRFLOW__CELERY__CELERYD_CONCURRENCY')

        with self.assertWarns(DeprecationWarning):
            conf.set('celery', 'celeryd_concurrency', '99')
            self.assertEqual(conf.getint('celery', 'worker_concurrency'), 99)
            conf.remove_option('celery', 'celeryd_concurrency')
Example #25
    def tearDown(self):
        logging.config.dictConfig(DEFAULT_LOGGING_CONFIG)
        # pass criteria as separate filter args; chaining them with Python's
        # `and` would discard all but the last condition
        self.session.query(TaskInstance).filter(
            TaskInstance.dag_id == self.DAG_ID,
            TaskInstance.task_id == self.TASK_ID,
            TaskInstance.execution_date == self.DEFAULT_DATE).delete()
        self.session.commit()
        self.session.close()

        # Remove any new modules imported during the test run. This lets us
        # import the same source files for more than one test.
        for m in [m for m in sys.modules if m not in self.old_modules]:
            del sys.modules[m]

        sys.path.remove(self.settings_folder)
        shutil.rmtree(self.settings_folder)
        conf.set('core', 'logging_config_class', '')

        super(TestLogView, self).tearDown()
Example #27
    def setUp(self):
        try:
            conf.add_section("atlas")
        except AirflowConfigException:
            pass
        except DuplicateSectionError:
            pass

        conf.set("atlas", "username", "none")
        conf.set("atlas", "password", "none")
        conf.set("atlas", "host", "none")
        conf.set("atlas", "port", "0")

        self.atlas = AtlasBackend()
Example #28
def logging_config_for_log_view(tmpdir):
    # Create a custom logging configuration
    logging_config = copy.deepcopy(DEFAULT_LOGGING_CONFIG)
    current_dir = os.path.dirname(os.path.abspath(__file__))
    logging_config["handlers"]["task"]["base_log_folder"] = os.path.normpath(
        os.path.join(current_dir, "test_logs"))
    logging_config["handlers"]["task"]["filename_template"] = (
        "{{ ti.dag_id }}/{{ ti.task_id }}/"
        '{{ ts | replace(":", ".") }}/{{ try_number }}.log')

    # Write the custom logging configuration to a file
    settings_folder = str(tmpdir)
    settings_file = os.path.join(settings_folder, "airflow_local_settings.py")
    new_logging_file = "LOGGING_CONFIG = {}".format(logging_config)
    with open(settings_file, "w") as handle:
        handle.writelines(new_logging_file)
    sys.path.append(settings_folder)
    conf.set("core", "logging_config_class",
             "airflow_local_settings.LOGGING_CONFIG")

    yield
    sys.path.remove(settings_folder)
    conf.set("core", "logging_config_class", "")
Example #29
    def __set_mocked_executor(self):
        """Mock ECS such that there's nothing wrong with anything"""
        from airflow.configuration import conf

        if not conf.has_section('ecs_fargate'):
            conf.add_section('ecs_fargate')
        conf.set('ecs_fargate', 'region', 'us-west-1')
        conf.set('ecs_fargate', 'cluster', 'some-ecs-cluster')
        conf.set('ecs_fargate', 'task_definition', 'some-ecs-task-definition')
        conf.set('ecs_fargate', 'container_name', 'some-ecs-container')
        conf.set('ecs_fargate', 'launch_type', 'FARGATE')
        executor = AwsEcsFargateExecutor()
        executor.start()

        # replace boto3 ecs client with mock
        ecs_mock = mock.Mock(spec=executor.ecs)
        run_task_ret_val = {
            'tasks': [{'taskArn': '001'}],
            'failures': []
        }
        ecs_mock.run_task.return_value = run_task_ret_val
        executor.ecs = ecs_mock

        self.executor = executor
Example #30
    def setUp(self):
        super(TestLogView, self).setUp()
        # Make sure that the configure_logging is not cached
        self.old_modules = dict(sys.modules)

        # Create a custom logging configuration
        logging_config = copy.deepcopy(DEFAULT_LOGGING_CONFIG)
        current_dir = os.path.dirname(os.path.abspath(__file__))
        logging_config['handlers']['task'][
            'base_log_folder'] = os.path.normpath(
                os.path.join(current_dir, 'test_logs'))
        logging_config['handlers']['task']['filename_template'] = \
            '{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts | replace(":", ".") }}/{{ try_number }}.log'

        # Write the custom logging configuration to a file
        self.settings_folder = tempfile.mkdtemp()
        settings_file = os.path.join(self.settings_folder,
                                     "airflow_local_settings.py")
        new_logging_file = "LOGGING_CONFIG = {}".format(logging_config)
        with open(settings_file, 'w') as handle:
            handle.writelines(new_logging_file)
        sys.path.append(self.settings_folder)
        conf.set('core', 'logging_config_class',
                 'airflow_local_settings.LOGGING_CONFIG')

        app = application.create_app(testing=True)
        self.app = app.test_client()
        self.session = Session()
        from airflow.www.views import dagbag
        dag = DAG(self.DAG_ID, start_date=self.DEFAULT_DATE)
        task = DummyOperator(task_id=self.TASK_ID, dag=dag)
        dagbag.bag_dag(dag, parent_dag=dag, root_dag=dag)
        ti = TaskInstance(task=task, execution_date=self.DEFAULT_DATE)
        ti.try_number = 1
        self.session.merge(ti)
        self.session.commit()
Example #31
    def __set_mocked_executor(self):
        """Mock ECS such that there's nothing wrong with anything"""
        from airflow.configuration import conf

        if not conf.has_section('batch'):
            conf.add_section('batch')
        conf.set('batch', 'region', 'us-west-1')
        conf.set('batch', 'job_name', 'some-job-name')
        conf.set('batch', 'job_queue', 'some-job-queue')
        conf.set('batch', 'job_definition', 'some-job-def')
        executor = AwsBatchExecutor()
        executor.start()

        # replace boto3 batch client with mock
        batch_mock = mock.Mock(spec=executor.batch)
        submit_job_ret_val = {
            'jobName': conf.get('batch', 'job_name'),
            'jobId': 'ABC'
        }
        batch_mock.submit_job.return_value = submit_job_ret_val
        executor.batch = batch_mock

        self.executor = executor
Example #32
 def setUpClass(cls):
     os.environ['AIRFLOW__TESTSECTION__TESTKEY'] = 'testvalue'
     os.environ['AIRFLOW__TESTSECTION__TESTPERCENT'] = 'with%percent'
     configuration.load_test_config()
     conf.set('core', 'percent', 'with%%inside')
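The doubled percent sign is deliberate: Airflow's conf is a ConfigParser subclass, and ConfigParser interpolation treats % as special, so a literal percent must be escaped as %%. A quick sketch of the round trip, assuming default interpolation:

    conf.set('core', 'percent', 'with%%inside')           # store with the escape...
    assert conf.get('core', 'percent') == 'with%inside'   # ...read back unescaped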
Example #33
 def __exit__(self, *exc_info):
     # shutil.rmtree(self.settings_root)
     # Reset config
     conf.set('logging', 'logging_config_class', '')
     sys.path.remove(self.settings_root)