Example #1
# imports inferred from the calls below (os, Airflow's global conf and settings objects)
import os
from contextlib import contextmanager

from airflow import settings
from airflow.configuration import conf


@contextmanager  # the yield inside try/finally shows this is meant to be a context manager
def conf_vars(overrides):
    """Temporarily override Airflow config options (and any shadowing env vars); restore them on exit."""
    original = {}
    original_env_vars = {}
    for (section, key), value in overrides.items():

        env = conf._env_var_name(section, key)
        if env in os.environ:
            original_env_vars[env] = os.environ.pop(env)

        if conf.has_option(section, key):
            original[(section, key)] = conf.get(section, key)
        else:
            original[(section, key)] = None
        if value is not None:
            if not conf.has_section(section):
                conf.add_section(section)
            conf.set(section, key, value)
        else:
            conf.remove_option(section, key)
    settings.configure_vars()
    try:
        yield
    finally:
        for (section, key), value in original.items():
            if value is not None:
                conf.set(section, key, value)
            else:
                conf.remove_option(section, key)
        for env, value in original_env_vars.items():
            os.environ[env] = value
        settings.configure_vars()
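
A minimal usage sketch (not part of the example above; it assumes conf_vars is wrapped with contextlib.contextmanager as shown): the override is visible through the global conf object inside the with block and rolled back afterwards.

with conf_vars({("core", "load_examples"): "False"}):
    # inside the block the override is in effect
    assert conf.get("core", "load_examples") == "False"
# after the block the previous value (or the env var that shadowed it) is restored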
Example #2
    def setUp(self):

        if not conf.has_section("kerberos"):
            conf.add_section("kerberos")
        conf.set("kerberos", "keytab",
                 os.environ['KRB5_KTNAME'])
        keytab_from_cfg = conf.get("kerberos", "keytab")
        self.args = Namespace(keytab=keytab_from_cfg, principal=None, pid=None,
                              daemon=None, stdout=None, stderr=None, log_file=None)
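
A small sketch of the precondition this setUp relies on (the keytab path below is hypothetical): KRB5_KTNAME must be set before the test runs, otherwise the os.environ lookup raises KeyError.

import os

os.environ.setdefault("KRB5_KTNAME", "/tmp/airflow.keytab")  # hypothetical keytab path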
Example #3
def get_value(args):
    """Get one value from configuration"""
    if not conf.has_section(args.section):
        print(f'The section [{args.section}] is not found in config.', file=sys.stderr)
        sys.exit(1)

    if not conf.has_option(args.section, args.option):
        print(f'The option [{args.section}/{args.option}] is not found in config.', file=sys.stderr)
        sys.exit(1)

    value = conf.get(args.section, args.option)
    print(value)
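
A usage sketch for the function above (the section/option values are just illustrative): get_value only reads args.section and args.option, so a plain argparse Namespace is enough to drive it.

from argparse import Namespace

get_value(Namespace(section="core", option="dags_folder"))  # prints the configured DAGs folder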
Example #4
def get_value(args):
    """Get one value from configuration"""
    if not conf.has_section(args.section):
        raise SystemExit(
            f'The section [{args.section}] is not found in config.')

    if not conf.has_option(args.section, args.option):
        raise SystemExit(
            f'The option [{args.section}/{args.option}] is not found in config.'
        )

    value = conf.get(args.section, args.option)
    print(value)
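
A short, self-contained sketch comparing Examples #3 and #4: raising SystemExit with a string message makes the interpreter print that message to stderr and exit with status 1, so the two variants behave the same from the command line.

try:
    raise SystemExit("The option [core/missing] is not found in config.")
except SystemExit as exc:
    # left uncaught, the interpreter would print exc.code to stderr and exit with status 1
    print(exc.code)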
Example #5
    def __set_mocked_executor(self):
        """Mock ECS such that there's nothing wrong with anything"""
        from airflow.configuration import conf

        if not conf.has_section('batch'):
            conf.add_section('batch')
        conf.set('batch', 'region', 'us-west-1')
        conf.set('batch', 'job_name', 'some-job-name')
        conf.set('batch', 'job_queue', 'some-job-queue')
        conf.set('batch', 'job_definition', 'some-job-def')
        executor = AwsBatchExecutor()
        executor.start()

        # replace boto3 batch client with mock
        batch_mock = mock.Mock(spec=executor.batch)
        submit_job_ret_val = {
            'jobName': conf.get('batch', 'job_name'),
            'jobId': 'ABC'
        }
        batch_mock.submit_job.return_value = submit_job_ret_val
        executor.batch = batch_mock

        self.executor = executor
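
A self-contained sketch of the mocking idea above (names are illustrative, not taken from a real test): a Mock with a canned return value lets the submit path be exercised without AWS credentials.

from unittest import mock

batch_client = mock.Mock()
batch_client.submit_job.return_value = {'jobName': 'some-job-name', 'jobId': 'ABC'}

response = batch_client.submit_job(jobName='some-job-name',
                                   jobQueue='some-job-queue',
                                   jobDefinition='some-job-def')
assert response['jobId'] == 'ABC'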
Example #6
    def __set_mocked_executor(self):
        """Mock ECS such that there's nothing wrong with anything"""
        from airflow.configuration import conf

        if not conf.has_section('ecs_fargate'):
            conf.add_section('ecs_fargate')
        conf.set('ecs_fargate', 'region', 'us-west-1')
        conf.set('ecs_fargate', 'cluster', 'some-ecs-cluster')
        conf.set('ecs_fargate', 'task_definition', 'some-ecs-task-definition')
        conf.set('ecs_fargate', 'container_name', 'some-ecs-container')
        conf.set('ecs_fargate', 'launch_type', 'FARGATE')
        executor = AwsEcsFargateExecutor()
        executor.start()

        # replace boto3 ecs client with mock
        ecs_mock = mock.Mock(spec=executor.ecs)
        run_task_ret_val = {
            'tasks': [{'taskArn': '001'}],
            'failures': []
        }
        ecs_mock.run_task.return_value = run_task_ret_val
        executor.ecs = ecs_mock

        self.executor = executor
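
A complementary, self-contained sketch (illustrative rather than from the example): after driving the executor, a test would typically also assert on how the mocked client was called.

from unittest import mock

ecs_client = mock.Mock()
ecs_client.run_task.return_value = {'tasks': [{'taskArn': '001'}], 'failures': []}

ecs_client.run_task(cluster='some-ecs-cluster', launchType='FARGATE')
ecs_client.run_task.assert_called_once_with(cluster='some-ecs-cluster', launchType='FARGATE')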
Example #7
def patch_airflow_config(args):
    """
    Updates current Airflow configuration file to include defaults for cwl-airflow.
    If something went wrong, restores the original airflow.cfg from the backed up copy.
    If update to Airflow 2.0.0 is required, generates new airflow.cfg with some of the
    important parameters copied from the old airflow.cfg. Backed up copy is not deleted in
    this case.
    """

    # TODO: add cwl section with the following parameters:
    # - singularity
    # - use_container

    # CWL-Airflow specific settings
    patches = [
        ["sed", "-i", "-e", "s#^dags_are_paused_at_creation.*#dags_are_paused_at_creation = False#g", args.config],
        ["sed", "-i", "-e", "s#^load_examples.*#load_examples = False#g", args.config],
        ["sed", "-i", "-e", "s#^load_default_connections.*#load_default_connections = False#g", args.config],
        ["sed", "-i", "-e", "s#^logging_config_class.*#logging_config_class = cwl_airflow.config_templates.airflow_local_settings.DEFAULT_LOGGING_CONFIG#g", args.config],
        ["sed", "-i", "-e", "s#^hide_paused_dags_by_default.*#hide_paused_dags_by_default = True#g", args.config]
    ]

    # Minimum set of settings that should be enough to start
    # SequentialExecutor, LocalExecutor or CeleryExecutor with
    # the same DAGs and metadata database after updating to Airflow 2.0.0.
    # All other user-specific settings should be updated manually from the
    # backed-up airflow.cfg, as a lot of them have been refactored.
    transferable_settings = [
        ("core", "dags_folder"),
        ("core", "default_timezone"),
        ("core", "executor"),
        ("core", "sql_alchemy_conn"),
        ("core", "sql_engine_encoding"),   # just in case
        ("core", "fernet_key"),            # to be able to read from the old database
        ("celery", "broker_url"),
        ("celery", "result_backend")
    ]

    # create a temporary backup of airflow.cfg to restore from if we fail to apply patches;
    # this backup is deleted after all patches are applied, unless it was created right
    # before the Airflow 2.0.0 version update
    airflow_config_backup = args.config + "_backup_" + str(uuid.uuid4())
    try:
        # read airflow.cfg before applying any patches and create a backup
        conf.read(args.config)
        shutil.copyfile(args.config, airflow_config_backup)

        # check if we need to make airflow.cfg correspond to Airflow 2.0.0;
        # we search for the [logging] section as it is present only in Airflow >= 2.0.0
        airflow_version_update = not conf.has_section("logging")
        if airflow_version_update:
            logging.info("Airflow config will be upgraded to correspond to Airflow 2.0.0")
            for section, key in transferable_settings:
                try:
                    patches.append(
                        ["sed", "-i", "-e", f"s#^{key}.*#{key} = {conf.get(section, key)}#g", args.config]
                    )
                except AirflowConfigException:  # just skip a section/key missing from the config
                    pass
            os.remove(args.config)              # remove old config
            create_airflow_config(args)         # create new airflow.cfg with the default values

        # Apply all patches
        for patch in patches:
            logging.debug(f"Applying patch {patch}")
            run(
                patch,
                shell=False,  # for proper handling of filenames with spaces
                check=True,
                stdout=DEVNULL,
                stderr=DEVNULL
            )
    except (CalledProcessError, FileNotFoundError) as err:
        logging.error(f"""Failed to patch Airflow configuration file. Restoring from the backup and exiting.\n{err}""")
        if os.path.isfile(airflow_config_backup):
            shutil.copyfile(airflow_config_backup, args.config)
        sys.exit(1)
    finally:
        if os.path.isfile(airflow_config_backup) and not airflow_version_update:
            os.remove(airflow_config_backup)
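
A hypothetical invocation sketch: patch_airflow_config only reads args.config, so any object with a config attribute pointing at an existing airflow.cfg is enough.

import os
from argparse import Namespace

patch_airflow_config(Namespace(config=os.path.join(os.path.expanduser("~"), "airflow", "airflow.cfg")))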