Example #1
def generate(arguments,
             extra_templates=list(),
             extra_sources=list(),
             extra_targets=list()):
    # Keep the old API, where this parameter was named 'arguments' rather than
    # the new name 'user_arguments'.
    user_arguments = arguments
    arguments = None

    sources, targets, templates = get_dcosconfig_source_target_and_templates(
        user_arguments, extra_templates, extra_sources)

    # TODO(cmaloney): Make it so we only get the dcosconfig target arguments, not all the config target arguments.
    resolver = gen.internals.resolve_configuration(sources,
                                                   targets + extra_targets)
    status = resolver.status_dict

    if status['status'] == 'errors':
        raise ValidationError(errors=status['errors'], unset=status['unset'])

    # Gather the late variables. The presence of late variables determines
    # whether or not a late package is created.
    late_variables = dict()
    # TODO(branden): Get the late vars and expressions from resolver.late
    for source in sources:
        for setter_list in source.setters.values():
            for setter in setter_list:
                if not setter.is_late:
                    continue

                if setter.name not in resolver.late:
                    continue

                # Skip late vars that aren't referenced by config.
                if not resolver.arguments[setter.name].is_finalized:
                    continue

                # Validate that a late variable has only one source.
                assert setter.name not in late_variables

                late_variables[setter.name] = setter.late_expression

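    # Only keep arguments that were actually resolved (finalized) for the
    # requested targets.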
    argument_dict = {
        k: v.value
        for k, v in resolver.arguments.items() if v.is_finalized
    }

    # expanded_config is a special result which contains all other arguments. It has to come after
    # the calculation of all the other arguments so it can be filled with everything which was
    # calculated. Can't be calculated because that would have an infinite recursion problem (the set
    # of all arguments would want to include itself).
    # Explicitly / manually set up so that it'll fit where we want it.
    # TODO(cmaloney): Make this late-bound by gen.internals
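    # Values that are still late-bind placeholders are excluded here, since
    # their final values are only known on the node at bootstrap time.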
    argument_dict['expanded_config'] = textwrap.indent(
        json_prettyprint({
            k: v
            for k, v in argument_dict.items()
            if not v.startswith(gen.internals.LATE_BIND_PLACEHOLDER_START)
        }),
        prefix='  ' * 3,
    )
    log.debug("Final arguments:" + json_prettyprint(argument_dict))

    # Fill in the template parameters
    # TODO(cmaloney): render_templates should ideally take the template targets.
    rendered_templates = render_templates(templates, argument_dict)

    # Validate there aren't any unexpected top level directives in any of the files
    # (likely indicates a misspelling)
    for name, template in rendered_templates.items():
        if name == 'dcos-services.yaml':  # yaml list of the service files
            assert isinstance(template, list)
        elif name == 'cloud-config.yaml':
            assert template.keys() <= CLOUDCONFIG_KEYS, template.keys()
        elif isinstance(template, str):  # Not a yaml template
            pass
        else:  # yaml template file
            log.debug("validating template file %s", name)
            assert template.keys() <= PACKAGE_KEYS, template.keys()

    # Find all files which contain late bind variables and turn them into a "late bind package"
    # TODO(cmaloney): check there are no late bound variables in cloud-config.yaml
    late_files, regular_files = extract_files_containing_late_variables(
        rendered_templates['dcos-config.yaml']['package'])
    # put the regular files right back
    rendered_templates['dcos-config.yaml'] = {'package': regular_files}

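    # Artifacts are laid out as packages/<package name>/<package id><extension>.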
    def make_package_filename(package_id, extension):
        return 'packages/{0}/{1}{2}'.format(package_id.name, repr(package_id),
                                            extension)

    # Render all the cluster packages
    cluster_package_info = {}

    # Prepare late binding config, if any.
    late_package = build_late_package(late_files, argument_dict['config_id'],
                                      argument_dict['provider'])
    if late_variables:
        # Render the late binding package. This package will be downloaded onto
        # each cluster node during bootstrap and rendered into the final config
        # using the values from the late config file.
        late_package_id = PackageId(late_package['name'])
        late_package_filename = make_package_filename(late_package_id,
                                                      '.dcos_config')
        os.makedirs(os.path.dirname(late_package_filename), mode=0o755)
        write_yaml(late_package_filename, {'package': late_package['package']},
                   default_flow_style=False)
        cluster_package_info[late_package_id.name] = {
            'id': late_package['name'],
            'filename': late_package_filename
        }

        # Add the late config file to cloud config. The expressions in
        # late_variables will be resolved by the service handling the cloud
        # config (e.g. Amazon CloudFormation). The rendered late config file
        # on a cluster node's filesystem will contain the final values.
        rendered_templates['cloud-config.yaml']['root'].append({
            'path': '/etc/mesosphere/setup-flags/late-config.yaml',
            'permissions': '0644',
            'owner': 'root',
            # TODO(cmaloney): don't prettyprint to save bytes.
            # NOTE: Use yaml here simply to make avoiding painful escaping and
            # unescaping easier.
            'content': render_yaml({
                'late_bound_package_id': late_package['name'],
                'bound_values': late_variables
            })
        })

    # Render the rest of the packages.
    for package_id_str in json.loads(argument_dict['cluster_packages']):
        package_id = PackageId(package_id_str)
        package_filename = make_package_filename(package_id, '.tar.xz')

        # Build the package
        do_gen_package(rendered_templates[package_id.name + '.yaml'],
                       package_filename)

        cluster_package_info[package_id.name] = {
            'id': package_id_str,
            'filename': package_filename
        }

    # Convert cloud-config to just contain write_files rather than root
    cc = rendered_templates['cloud-config.yaml']

    # Shouldn't contain any packages. Providers should pull what they need to
    # late bind out of other packages via cc_package_file.
    assert 'package' not in cc
    cc_root = cc.pop('root', [])
    # Make sure write_files exists.
    assert 'write_files' not in cc
    cc['write_files'] = []
    # Do the transform
    for item in cc_root:
        assert item['path'].startswith('/')
        cc['write_files'].append(item)
    rendered_templates['cloud-config.yaml'] = cc

    # Add in the add_services util. Done here instead of the initial
    # map since we need to bind in parameters
    def add_services(cloudconfig, cloud_init_implementation):
        return add_units(cloudconfig, rendered_templates['dcos-services.yaml'],
                         cloud_init_implementation)

    utils.add_services = add_services

    return Bunch({
        'arguments': argument_dict,
        'cluster_packages': cluster_package_info,
        'templates': rendered_templates,
        'utils': utils
    })
Example #2
def generate(
        arguments,
        extra_templates=list(),
        cc_package_files=list()):
    # Keep the old API, where this parameter was named 'arguments' rather than
    # the new name 'user_arguments'.
    user_arguments = arguments
    arguments = None

    sources, targets, templates = get_dcosconfig_source_target_and_templates(user_arguments, extra_templates)

    # TODO(cmaloney): Make it so we only get the dcosconfig target arguments, not all the config target arguments.
    resolver = gen.internals.resolve_configuration(sources, targets, user_arguments)
    status = resolver.status_dict

    if status['status'] == 'errors':
        raise ValidationError(errors=status['errors'], unset=status['unset'])

    argument_dict = {k: v.value for k, v in resolver.arguments.items()}
    log.debug("Final arguments:" + json_prettyprint(argument_dict))

    # expanded_config is a special result which contains all other arguments. It has to come after
    # the calculation of all the other arguments so it can be filled with everything which was
    # calculated. Can't be calculated because that would have an infinite recursion problem (the set
    # of all arguments would want to include itself).
    # Explicitly / manually set up so that it'll fit where we want it.
    # TODO(cmaloney): Make this late-bound by gen.internals
    argument_dict['expanded_config'] = textwrap.indent(json_prettyprint(argument_dict), prefix='  ' * 3)

    # Fill in the template parameters
    # TODO(cmaloney): render_templates should ideally take the template targets.
    rendered_templates = render_templates(templates, argument_dict)

    # Validate there aren't any unexpected top level directives in any of the files
    # (likely indicates a misspelling)
    for name, template in rendered_templates.items():
        if name == 'dcos-services.yaml':  # yaml list of the service files
            assert isinstance(template, list)
        elif name == 'cloud-config.yaml':
            assert template.keys() <= CLOUDCONFIG_KEYS, template.keys()
        elif isinstance(template, str):  # Not a yaml template
            pass
        else:  # yaml template file
            log.debug("validating template file %s", name)
            assert template.keys() <= PACKAGE_KEYS, template.keys()

    # Extract cc_package_files out of the dcos-config template and put them into
    # the cloud-config package.
    cc_package_files, dcos_config_files = extract_files_with_path(rendered_templates['dcos-config.yaml']['package'],
                                                                  cc_package_files)
    rendered_templates['dcos-config.yaml'] = {'package': dcos_config_files}

    # Add an empty pkginfo.json to the cc_package_files.
    # Also assert there isn't one already (can only write out a file once).
    for item in cc_package_files:
        assert item['path'] != '/pkginfo.json'

    # Only create a cloud-config package (by adding a pkginfo.json) if there
    # are files to go into it.
    if len(cc_package_files) > 0:
        cc_package_files.append({
            "path": "/pkginfo.json",
            "content": "{}"})

    for item in cc_package_files:
        assert item['path'].startswith('/')
        item['path'] = '/etc/mesosphere/setup-packages/dcos-provider-{}--setup'.format(
            argument_dict['provider']) + item['path']
        rendered_templates['cloud-config.yaml']['root'].append(item)

    cluster_package_info = {}

    # Render all the cluster packages
    for package_id_str in json.loads(argument_dict['cluster_packages']):
        package_id = PackageId(package_id_str)
        package_filename = 'packages/{}/{}.tar.xz'.format(
            package_id.name,
            package_id_str)

        # Build the package
        do_gen_package(rendered_templates[package_id.name + '.yaml'], package_filename)

        cluster_package_info[package_id.name] = {
            'id': package_id_str,
            'filename': package_filename
        }

    # Convert cloud-config to just contain write_files rather than root
    cc = rendered_templates['cloud-config.yaml']

    # Shouldn't contain any packages. Providers should pull what they need to
    # late bind out of other packages via cc_package_file.
    assert 'package' not in cc
    cc_root = cc.pop('root', [])
    # Make sure write_files exists.
    assert 'write_files' not in cc
    cc['write_files'] = []
    # Do the transform
    for item in cc_root:
        assert item['path'].startswith('/')
        cc['write_files'].append(item)
    rendered_templates['cloud-config.yaml'] = cc

    # Add in the add_services util. Done here instead of the initial
    # map since we need to bind in parameters
    def add_services(cloudconfig, cloud_init_implementation):
        return add_units(cloudconfig, rendered_templates['dcos-services.yaml'], cloud_init_implementation)

    utils.add_services = add_services

    return Bunch({
        'arguments': argument_dict,
        'cluster_packages': cluster_package_info,
        'templates': rendered_templates,
        'utils': utils
    })
Example #3
def generate(
        arguments,
        extra_templates=list(),
        extra_sources=list(),
        extra_targets=list()):
    # Keep the old API, where this parameter was named 'arguments' rather than
    # the new name 'user_arguments'.
    user_arguments = arguments
    arguments = None

    sources, targets, templates = get_dcosconfig_source_target_and_templates(
        user_arguments, extra_templates, extra_sources)

    resolver = validate_and_raise(sources, targets + extra_targets)
    argument_dict = get_final_arguments(resolver)
    late_variables = get_late_variables(resolver, sources)

    # expanded_config is a special result which contains all other arguments. It has to come after
    # the calculation of all the other arguments so it can be filled with everything which was
    # calculated. Can't be calculated because that would have an infinite recursion problem (the set
    # of all arguments would want to include itself).
    # Explicitly / manually set up so that it'll fit where we want it.
    # TODO(cmaloney): Make this late-bound by gen.internals
    argument_dict['expanded_config'] = textwrap.indent(
        json_prettyprint(
            {k: v for k, v in argument_dict.items() if not v.startswith(gen.internals.LATE_BIND_PLACEHOLDER_START)}
        ),
        prefix='  ' * 3,
    )
    log.debug("Final arguments:" + json_prettyprint(argument_dict))

    # Fill in the template parameters
    # TODO(cmaloney): render_templates should ideally take the template targets.
    rendered_templates = render_templates(templates, argument_dict)

    # Validate there aren't any unexpected top level directives in any of the files
    # (likely indicates a misspelling)
    for name, template in rendered_templates.items():
        if name == 'dcos-services.yaml':  # yaml list of the service files
            assert isinstance(template, list)
        elif name == 'cloud-config.yaml':
            assert template.keys() <= CLOUDCONFIG_KEYS, template.keys()
        elif isinstance(template, str):  # Not a yaml template
            pass
        else:  # yaml template file
            log.debug("validating template file %s", name)
            assert template.keys() <= PACKAGE_KEYS, template.keys()

    # Find all files which contain late bind variables and turn them into a "late bind package"
    # TODO(cmaloney): check there are no late bound variables in cloud-config.yaml
    late_files, regular_files = extract_files_containing_late_variables(
        rendered_templates['dcos-config.yaml']['package'])
    # put the regular files right back
    rendered_templates['dcos-config.yaml'] = {'package': regular_files}

    def make_package_filename(package_id, extension):
        return 'packages/{0}/{1}{2}'.format(
            package_id.name,
            repr(package_id),
            extension)

    # Render all the cluster packages
    cluster_package_info = {}

    # Prepare late binding config, if any.
    late_package = build_late_package(late_files, argument_dict['config_id'], argument_dict['provider'])
    if late_variables:
        # Render the late binding package. This package will be downloaded onto
        # each cluster node during bootstrap and rendered into the final config
        # using the values from the late config file.
        late_package_id = PackageId(late_package['name'])
        late_package_filename = make_package_filename(late_package_id, '.dcos_config')
        os.makedirs(os.path.dirname(late_package_filename), mode=0o755)
        write_yaml(late_package_filename, {'package': late_package['package']}, default_flow_style=False)
        log.info('Package filename: {}'.format(late_package_filename))

        # Add the late config file to cloud config. The expressions in
        # late_variables will be resolved by the service handling the cloud
        # config (e.g. Amazon CloudFormation). The rendered late config file
        # on a cluster node's filesystem will contain the final values.
        rendered_templates['cloud-config.yaml']['root'].append({
            'path': '/etc/mesosphere/setup-flags/late-config.yaml',
            'permissions': '0644',
            'owner': 'root',
            # TODO(cmaloney): don't prettyprint to save bytes.
            # NOTE: Use yaml here simply to make avoiding painful escaping and
            # unescaping easier.
            'content': render_yaml({
                'late_bound_package_id': late_package['name'],
                'bound_values': late_variables
            })})

    # Collect metadata for cluster packages.
    for package_id_str in json.loads(argument_dict['cluster_packages']):
        package_id = PackageId(package_id_str)
        package_filename = make_package_filename(package_id, '.tar.xz')

        cluster_package_info[package_id.name] = {
            'id': package_id_str,
            'filename': package_filename
        }

    # Render config packages.
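    # Only the config packages are rendered by gen itself; the other cluster
    # packages are referenced through the metadata collected above.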
    config_package_ids = json.loads(argument_dict['config_package_ids'])
    for package_id_str in config_package_ids:
        package_id = PackageId(package_id_str)
        do_gen_package(rendered_templates[package_id.name + '.yaml'],
                       cluster_package_info[package_id.name]['filename'])

    # Convert cloud-config to just contain write_files rather than root
    cc = rendered_templates['cloud-config.yaml']

    # Shouldn't contain any packages. Providers should pull what they need to
    # late bind out of other packages via cc_package_file.
    assert 'package' not in cc
    cc_root = cc.pop('root', [])
    # Make sure write_files exists.
    assert 'write_files' not in cc
    cc['write_files'] = []
    # Do the transform
    for item in cc_root:
        assert item['path'].startswith('/')
        cc['write_files'].append(item)
    rendered_templates['cloud-config.yaml'] = cc

    # Add in the add_services util. Done here instead of the initial
    # map since we need to bind in parameters
    def add_services(cloudconfig, cloud_init_implementation):
        return add_units(cloudconfig, rendered_templates['dcos-services.yaml'], cloud_init_implementation)

    utils.add_services = add_services

    return Bunch({
        'arguments': argument_dict,
        'cluster_packages': cluster_package_info,
        'config_package_ids': config_package_ids,
        'late_package_id': late_package['name'] if late_package else None,
        'templates': rendered_templates,
        'utils': utils
    })
Example #4
def generate(arguments,
             extra_templates=list(),
             extra_sources=list(),
             extra_targets=list()):
    # Keep the old API, where this parameter was named 'arguments' rather than
    # the new name 'user_arguments'.
    user_arguments = arguments
    arguments = None

    sources, targets, templates = get_dcosconfig_source_target_and_templates(
        user_arguments, extra_templates, extra_sources)

    resolver = validate_and_raise(sources, targets + extra_targets)
    argument_dict = get_final_arguments(resolver)
    late_variables = get_late_variables(resolver, sources)
    secret_builtins = [
        'expanded_config_full', 'user_arguments_full', 'config_yaml_full'
    ]
    secret_variables = set(get_secret_variables(sources) + secret_builtins)
    masked_value = '**HIDDEN**'

    # Calculate values for builtin variables.
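    # Secret values are masked in the user-facing variants; the *_full
    # variants keep the real values and are themselves treated as secrets.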
    user_arguments_masked = {
        k: (masked_value if k in secret_variables else v)
        for k, v in user_arguments.items()
    }
    argument_dict['user_arguments_full'] = json_prettyprint(user_arguments)
    argument_dict['user_arguments'] = json_prettyprint(user_arguments_masked)
    argument_dict['config_yaml_full'] = user_arguments_to_yaml(user_arguments)
    argument_dict['config_yaml'] = user_arguments_to_yaml(
        user_arguments_masked)

    # The expanded_config and expanded_config_full variables contain all other variables and their values.
    # expanded_config is a copy of expanded_config_full with secret values removed. Calculating these variables' values
    # must come after the calculation of all other variables to prevent infinite recursion.
    # TODO(cmaloney): Make this late-bound by gen.internals
    expanded_config_full = {
        k: v
        for k, v in argument_dict.items()
        # Omit late-bound variables whose values have not yet been calculated.
        if not v.startswith(gen.internals.LATE_BIND_PLACEHOLDER_START)
    }
    expanded_config_scrubbed = {
        k: v
        for k, v in expanded_config_full.items() if k not in secret_variables
    }
    argument_dict['expanded_config_full'] = format_expanded_config(
        expanded_config_full)
    argument_dict['expanded_config'] = format_expanded_config(
        expanded_config_scrubbed)

    log.debug("Final arguments:" + json_prettyprint({
        # Mask secret config values.
        k: (masked_value if k in secret_variables else v)
        for k, v in argument_dict.items()
    }))

    # Fill in the template parameters
    # TODO(cmaloney): render_templates should ideally take the template targets.
    rendered_templates = render_templates(templates, argument_dict)

    # Validate there aren't any unexpected top level directives in any of the files
    # (likely indicates a misspelling)
    for name, template in rendered_templates.items():
        if name == 'dcos-services.yaml':  # yaml list of the service files
            assert isinstance(template, list)
        elif name == 'cloud-config.yaml':
            assert template.keys() <= CLOUDCONFIG_KEYS, template.keys()
        elif isinstance(template, str):  # Not a yaml template
            pass
        else:  # yaml template file
            log.debug("validating template file %s", name)
            assert template.keys() <= PACKAGE_KEYS, template.keys()

    stable_artifacts = []

    # Find all files which contain late bind variables and turn them into a "late bind package"
    # TODO(cmaloney): check there are no late bound variables in cloud-config.yaml
    late_files, regular_files = extract_files_containing_late_variables(
        rendered_templates['dcos-config.yaml']['package'])
    # put the regular files right back
    rendered_templates['dcos-config.yaml'] = {'package': regular_files}

    # Render cluster package list artifact.
    cluster_package_list_filename = 'package_lists/{}.package_list.json'.format(
        argument_dict['cluster_package_list_id'])
    os.makedirs(os.path.dirname(cluster_package_list_filename),
                mode=0o755,
                exist_ok=True)
    write_string(cluster_package_list_filename,
                 argument_dict['cluster_packages'])
    log.info('Cluster package list: {}'.format(cluster_package_list_filename))
    stable_artifacts.append(cluster_package_list_filename)

    def make_package_filename(package_id, extension):
        return 'packages/{0}/{1}{2}'.format(package_id.name, repr(package_id),
                                            extension)

    # Render all the cluster packages
    cluster_package_info = {}

    # Prepare late binding config, if any.
    late_package = build_late_package(late_files, argument_dict['config_id'],
                                      argument_dict['provider'])
    if late_variables:
        # Render the late binding package. This package will be downloaded onto
        # each cluster node during bootstrap and rendered into the final config
        # using the values from the late config file.
        late_package_id = PackageId(late_package['name'])
        late_package_filename = make_package_filename(late_package_id,
                                                      '.dcos_config')
        os.makedirs(os.path.dirname(late_package_filename), mode=0o755)
        write_yaml(late_package_filename, {'package': late_package['package']},
                   default_flow_style=False)
        log.info('Package filename: {}'.format(late_package_filename))
        stable_artifacts.append(late_package_filename)

        # Add the late config file to cloud config. The expressions in
        # late_variables will be resolved by the service handling the cloud
        # config (e.g. Amazon CloudFormation). The rendered late config file
        # on a cluster node's filesystem will contain the final values.
        rendered_templates['cloud-config.yaml']['root'].append({
            'path': '/etc/mesosphere/setup-flags/late-config.yaml',
            'permissions': '0644',
            'owner': 'root',
            # TODO(cmaloney): don't prettyprint to save bytes.
            # NOTE: Use yaml here simply to make avoiding painful escaping and
            # unescaping easier.
            'content': render_yaml({
                'late_bound_package_id': late_package['name'],
                'bound_values': late_variables
            })
        })

    # Collect metadata for cluster packages.
    for package_id_str in json.loads(argument_dict['cluster_packages']):
        package_id = PackageId(package_id_str)
        package_filename = make_package_filename(package_id, '.tar.xz')

        cluster_package_info[package_id.name] = {
            'id': package_id_str,
            'filename': package_filename
        }

    # Render config packages.
    config_package_ids = json.loads(argument_dict['config_package_ids'])
    for package_id_str in config_package_ids:
        package_id = PackageId(package_id_str)
        package_filename = cluster_package_info[package_id.name]['filename']
        do_gen_package(rendered_templates[package_id.name + '.yaml'],
                       package_filename)
        stable_artifacts.append(package_filename)

    # Convert cloud-config to just contain write_files rather than root
    cc = rendered_templates['cloud-config.yaml']

    # Shouldn't contain any packages. Providers should pull what they need to
    # late bind out of other packages via cc_package_file.
    assert 'package' not in cc
    cc_root = cc.pop('root', [])
    # Make sure write_files exists.
    assert 'write_files' not in cc
    cc['write_files'] = []
    # Do the transform
    for item in cc_root:
        assert item['path'].startswith('/')
        cc['write_files'].append(item)
    rendered_templates['cloud-config.yaml'] = cc

    # Add in the add_services util. Done here instead of the initial
    # map since we need to bind in parameters
    def add_services(cloudconfig, cloud_init_implementation):
        return add_units(cloudconfig, rendered_templates['dcos-services.yaml'],
                         cloud_init_implementation)

    utils.add_services = add_services

    return Bunch({
        'arguments': argument_dict,
        'cluster_packages': cluster_package_info,
        'stable_artifacts': stable_artifacts,
        'templates': rendered_templates,
        'utils': utils
    })
Example #5
def generate(
        arguments,
        extra_templates=list(),
        cc_package_files=list()):
    # Keep the old API, where this parameter was named 'arguments' rather than
    # the new name 'user_arguments'.
    user_arguments = arguments
    arguments = None

    config_target, templates = get_dcosconfig_target_and_templates(user_arguments, extra_templates)

    # TODO(cmaloney): Make it so we only get the dcosconfig target arguments, not all the config target arguments.
    arguments = calculate_config_for_targets([config_target], user_arguments)
    log.debug("Final arguments:" + json.dumps(arguments, **json_prettyprint_args))

    # expanded_config is a special result which contains all other arguments. It has to come after
    # the calculation of all the other arguments so it can be filled with everything which was
    # calculated. Can't be calculated because that would have an infinite recursion problem (the set
    # of all arguments would want to include itself).
    # Explicitly / manually set up so that it'll fit where we want it.
    arguments['expanded_config'] = textwrap.indent(json.dumps(arguments, **json_prettyprint_args),
                                                   prefix='  ' * 3)

    # Fill in the template parameters
    rendered_templates = render_templates(templates, arguments)

    # Validate there aren't any unexpected top level directives in any of the files
    # (likely indicates a misspelling)
    for name, template in rendered_templates.items():
        if name == 'dcos-services.yaml':  # yaml list of the service files
            assert isinstance(template, list)
        elif name == 'cloud-config.yaml':
            assert template.keys() <= CLOUDCONFIG_KEYS, template.keys()
        elif isinstance(template, str):  # Not a yaml template
            pass
        else:  # yaml template file
            log.debug("validating template file %s", name)
            assert template.keys() <= PACKAGE_KEYS, template.keys()

    # Extract cc_package_files out of the dcos-config template and put them into
    # the cloud-config package.
    cc_package_files, dcos_config_files = extract_files_with_path(rendered_templates['dcos-config.yaml']['package'],
                                                                  cc_package_files)
    rendered_templates['dcos-config.yaml'] = {'package': dcos_config_files}

    # Add an empty pkginfo.json to the cc_package_files.
    # Also assert there isn't one already (can only write out a file once).
    for item in cc_package_files:
        assert item['path'] != '/pkginfo.json'

    # Only create a cloud-config package (by adding a pkginfo.json) if there
    # are files to go into it.
    if len(cc_package_files) > 0:
        cc_package_files.append({
            "path": "/pkginfo.json",
            "content": "{}"})

    for item in cc_package_files:
        assert item['path'].startswith('/')
        item['path'] = '/etc/mesosphere/setup-packages/dcos-provider-{}--setup'.format(
            arguments['provider']) + item['path']
        rendered_templates['cloud-config.yaml']['root'].append(item)

    cluster_package_info = {}

    # Render all the cluster packages
    for package_id_str in json.loads(arguments['cluster_packages']):
        package_id = PackageId(package_id_str)
        package_filename = 'packages/{}/{}.tar.xz'.format(
            package_id.name,
            package_id_str)

        # Build the package
        do_gen_package(rendered_templates[package_id.name + '.yaml'], package_filename)

        cluster_package_info[package_id.name] = {
            'id': package_id_str,
            'filename': package_filename
        }

    # Convert cloud-config to just contain write_files rather than root
    cc = rendered_templates['cloud-config.yaml']

    # Shouldn't contain any packages. Providers should pull what they need to
    # late bind out of other packages via cc_package_file.
    assert 'package' not in cc
    cc_root = cc.pop('root', [])
    # Make sure write_files exists.
    assert 'write_files' not in cc
    cc['write_files'] = []
    # Do the transform
    for item in cc_root:
        assert item['path'].startswith('/')
        cc['write_files'].append(item)
    rendered_templates['cloud-config.yaml'] = cc

    # Add in the add_services util. Done here instead of the initial
    # map since we need to bind in parameters
    def add_services(cloudconfig, cloud_init_implementation):
        return add_units(cloudconfig, rendered_templates['dcos-services.yaml'], cloud_init_implementation)

    utils.add_services = add_services

    return Bunch({
        'arguments': arguments,
        'cluster_packages': cluster_package_info,
        'templates': rendered_templates,
        'utils': utils
    })
Example #6
def generate(
        arguments,
        extra_templates=list(),
        extra_sources=list(),
        extra_targets=list()):
    # Keep the old API, where this parameter was named 'arguments' rather than
    # the new name 'user_arguments'.
    user_arguments = arguments
    arguments = None

    sources, targets, templates = get_dcosconfig_source_target_and_templates(
        user_arguments, extra_templates, extra_sources)

    resolver = validate_and_raise(sources, targets + extra_targets)
    argument_dict = get_final_arguments(resolver)
    late_variables = get_late_variables(resolver, sources)
    secret_builtins = ['expanded_config_full', 'user_arguments_full', 'config_yaml_full']
    secret_variables = set(get_secret_variables(sources) + secret_builtins)
    masked_value = '**HIDDEN**'

    # Calculate values for builtin variables.
    user_arguments_masked = {k: (masked_value if k in secret_variables else v) for k, v in user_arguments.items()}
    argument_dict['user_arguments_full'] = json_prettyprint(user_arguments)
    argument_dict['user_arguments'] = json_prettyprint(user_arguments_masked)
    argument_dict['config_yaml_full'] = user_arguments_to_yaml(user_arguments)
    argument_dict['config_yaml'] = user_arguments_to_yaml(user_arguments_masked)

    # The expanded_config and expanded_config_full variables contain all other variables and their values.
    # expanded_config is a copy of expanded_config_full with secret values removed. Calculating these variables' values
    # must come after the calculation of all other variables to prevent infinite recursion.
    # TODO(cmaloney): Make this late-bound by gen.internals
    expanded_config_full = {
        k: v for k, v in argument_dict.items()
        # Omit late-bound variables whose values have not yet been calculated.
        if not v.startswith(gen.internals.LATE_BIND_PLACEHOLDER_START)
    }
    expanded_config_scrubbed = {k: v for k, v in expanded_config_full.items() if k not in secret_variables}
    argument_dict['expanded_config_full'] = format_expanded_config(expanded_config_full)
    argument_dict['expanded_config'] = format_expanded_config(expanded_config_scrubbed)

    log.debug(
        "Final arguments:" + json_prettyprint({
            # Mask secret config values.
            k: (masked_value if k in secret_variables else v) for k, v in argument_dict.items()
        })
    )

    # Fill in the template parameters
    # TODO(cmaloney): render_templates should ideally take the template targets.
    rendered_templates = render_templates(templates, argument_dict)

    # Validate there aren't any unexpected top level directives in any of the files
    # (likely indicates a misspelling)
    for name, template in rendered_templates.items():
        if name == 'dcos-services.yaml':  # yaml list of the service files
            assert isinstance(template, list)
        elif name == 'cloud-config.yaml':
            assert template.keys() <= CLOUDCONFIG_KEYS, template.keys()
        elif isinstance(template, str):  # Not a yaml template
            pass
        else:  # yaml template file
            log.debug("validating template file %s", name)
            assert template.keys() <= PACKAGE_KEYS, template.keys()

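    # Filenames of generated artifacts, returned to the caller and extended via
    # utils.add_stable_artifact / utils.add_channel_artifact defined below.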
    stable_artifacts = []
    channel_artifacts = []

    # Find all files which contain late bind variables and turn them into a "late bind package"
    # TODO(cmaloney): check there are no late bound variables in cloud-config.yaml
    late_files, regular_files = extract_files_containing_late_variables(
        rendered_templates['dcos-config.yaml']['package'])
    # put the regular files right back
    rendered_templates['dcos-config.yaml'] = {'package': regular_files}

    # Render cluster package list artifact.
    cluster_package_list_filename = 'package_lists/{}.package_list.json'.format(
        argument_dict['cluster_package_list_id']
    )
    os.makedirs(os.path.dirname(cluster_package_list_filename), mode=0o755, exist_ok=True)
    write_string(cluster_package_list_filename, argument_dict['cluster_packages'])
    log.info('Cluster package list: {}'.format(cluster_package_list_filename))
    stable_artifacts.append(cluster_package_list_filename)

    def make_package_filename(package_id, extension):
        return 'packages/{0}/{1}{2}'.format(
            package_id.name,
            repr(package_id),
            extension)

    # Render all the cluster packages
    cluster_package_info = {}

    # Prepare late binding config, if any.
    late_package = build_late_package(late_files, argument_dict['config_id'], argument_dict['provider'])
    if late_variables:
        # Render the late binding package. This package will be downloaded onto
        # each cluster node during bootstrap and rendered into the final config
        # using the values from the late config file.
        late_package_id = PackageId(late_package['name'])
        late_package_filename = make_package_filename(late_package_id, '.dcos_config')
        os.makedirs(os.path.dirname(late_package_filename), mode=0o755)
        write_yaml(late_package_filename, {'package': late_package['package']}, default_flow_style=False)
        log.info('Package filename: {}'.format(late_package_filename))
        stable_artifacts.append(late_package_filename)

        # Add the late config file to cloud config. The expressions in
        # late_variables will be resolved by the service handling the cloud
        # config (e.g. Amazon CloudFormation). The rendered late config file
        # on a cluster node's filesystem will contain the final values.
        rendered_templates['cloud-config.yaml']['root'].append({
            'path': '/etc/mesosphere/setup-flags/late-config.yaml',
            'permissions': '0644',
            'owner': 'root',
            # TODO(cmaloney): don't prettyprint to save bytes.
            # NOTE: Use yaml here simply to make avoiding painful escaping and
            # unescaping easier.
            'content': render_yaml({
                'late_bound_package_id': late_package['name'],
                'bound_values': late_variables
            })})

    # Collect metadata for cluster packages.
    for package_id_str in json.loads(argument_dict['cluster_packages']):
        package_id = PackageId(package_id_str)
        package_filename = make_package_filename(package_id, '.tar.xz')

        cluster_package_info[package_id.name] = {
            'id': package_id_str,
            'filename': package_filename
        }

    # Render config packages.
    config_package_ids = json.loads(argument_dict['config_package_ids'])
    for package_id_str in config_package_ids:
        package_id = PackageId(package_id_str)
        package_filename = cluster_package_info[package_id.name]['filename']
        do_gen_package(rendered_templates[package_id.name + '.yaml'], package_filename)
        stable_artifacts.append(package_filename)

    # Convert cloud-config to just contain write_files rather than root
    cc = rendered_templates['cloud-config.yaml']

    # Shouldn't contain any packages. Providers should pull what they need to
    # late bind out of other packages via cc_package_file.
    assert 'package' not in cc
    cc_root = cc.pop('root', [])
    # Make sure write_files exists.
    assert 'write_files' not in cc
    cc['write_files'] = []
    # Do the transform
    for item in cc_root:
        assert item['path'].startswith('/')
        cc['write_files'].append(item)
    rendered_templates['cloud-config.yaml'] = cc

    # Add utils that need to be defined here so they can be bound to locals.
    def add_services(cloudconfig, cloud_init_implementation):
        return add_units(cloudconfig, rendered_templates['dcos-services.yaml'], cloud_init_implementation)

    utils.add_services = add_services

    def add_stable_artifact(filename):
        assert filename not in stable_artifacts + channel_artifacts
        stable_artifacts.append(filename)

    utils.add_stable_artifact = add_stable_artifact

    def add_channel_artifact(filename):
        assert filename not in stable_artifacts + channel_artifacts
        channel_artifacts.append(filename)

    utils.add_channel_artifact = add_channel_artifact

    return Bunch({
        'arguments': argument_dict,
        'cluster_packages': cluster_package_info,
        'stable_artifacts': stable_artifacts,
        'channel_artifacts': channel_artifacts,
        'templates': rendered_templates,
        'utils': utils
    })
Example #7
def generate(
        arguments,
        extra_templates=list(),
        cc_package_files=list(),
        validate_only=False):
    log.info("Generating configuration files...")

    assert isinstance(extra_templates, list)

    # Keep the old API, where this parameter was named 'arguments' rather than
    # the new name 'user_arguments'.
    user_arguments = arguments
    arguments = None

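    # setters maps each argument name to the list of Setter objects that can
    # provide its value; validate collects the validation functions to run.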
    setters = dict()
    validate = list()

    # Make sure all user provided arguments are strings.
    validate_arguments_strings(user_arguments)

    # TODO(cmaloney): Make these all just defined by the base calc.py
    package_names = ['dcos-config', 'dcos-metadata']
    template_filenames = ['dcos-config.yaml', 'cloud-config.yaml', 'dcos-metadata.yaml', 'dcos-services.yaml']

    # TODO(cmaloney): Check there are no duplicates between templates and extra_template_files
    template_filenames += extra_templates

    def add_setter(name, value, is_optional, conditions, is_user, replace_existing):
        if replace_existing:
            if name in setters:
                del setters[name]
        setters.setdefault(name, list()).append(Setter(name, value, is_optional, conditions, is_user))

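    # Recursively register setters from a calc scope: 'must' entries become
    # required setters, 'default' entries optional ones, and nested
    # 'conditional' blocks add (name, value) conditions to their contents.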
    def add_conditional_scope(scope, conditions, replace_existing):
        nonlocal validate

        # TODO(cmaloney): 'defaults' are the same as 'can' and 'must' is identical to 'arguments' except
        # that one takes functions and one takes strings. Simplify to just 'can', 'must'.
        assert scope.keys() <= {'validate', 'default', 'must', 'conditional'}

        validate += scope.get('validate', list())

        for name, fn in scope.get('must', dict()).items():
            add_setter(name, fn, False, conditions, False, replace_existing)

        for name, fn in scope.get('default', dict()).items():
            add_setter(name, fn, True, conditions, False, replace_existing)

        for name, cond_options in scope.get('conditional', dict()).items():
            for value, sub_scope in cond_options.items():
                add_conditional_scope(sub_scope, conditions + [(name, value)], replace_existing=replace_existing)

    add_conditional_scope(gen.calc.entry, [], replace_existing=False)

    # Allow overriding calculators with a `gen_extra/calc.py` if it exists
    if os.path.exists('gen_extra/calc.py'):
        mod = importlib.machinery.SourceFileLoader('gen_extra.calc', 'gen_extra/calc.py').load_module()
        add_conditional_scope(mod.entry, [], replace_existing=True)

    # Add in all user arguments as setters.
    # Happens last so that they are never overwritten with replace_existing=True
    for name, value in user_arguments.items():
        add_setter(name, value, False, [], True, False)

    # Re-arrange templates to be indexed by common name. Only allow multiple for one key if the key
    # is yaml (ends in .yaml).
    templates = dict()
    for filename in template_filenames:
        key = os.path.basename(filename)
        templates.setdefault(key, list())
        templates[key].append(filename)

        if len(templates[key]) > 1 and not key.endswith('.yaml'):
            raise Exception(
                "Internal Error: Only know how to merge YAML templates at this point in time. "
                "Can't merge template {} in template_list {}".format(name, templates[key]))

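    # Every parameter referenced by the templates must be satisfiable by a
    # setter or a user argument.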
    mandatory_parameters = get_parameters(templates)

    validate_all_arguments_match_parameters(mandatory_parameters, setters, user_arguments)

    def add_builtin(name, value):
        add_setter(name, json.dumps(value, **json_prettyprint_args), False, [], False, False)

    # TODO(cmaloney): Hash the contents of all the templates rather than using the list of filenames
    # since the filenames might not live in this git repo, or may be locally modified.
    add_builtin('template_filenames', template_filenames)
    add_builtin('package_names', list(package_names))
    add_builtin('user_arguments', user_arguments)

    # Add a builtin for expanded_config, so that we won't get unset argument errors. The temporary
    # value will get replaced with the set of all arguments once calculation is complete
    temporary_str = 'DO NOT USE THIS AS AN ARGUMENT TO OTHER ARGUMENTS. IT IS TEMPORARY'
    add_builtin('expanded_config', temporary_str)

    # Calculate the remaining arguments.
    arguments = DFSArgumentCalculator(setters, validate).calculate(mandatory_parameters)

    # Validate all new / calculated arguments are strings.
    validate_arguments_strings(arguments)

    log.info("Final arguments:" + json.dumps(arguments, **json_prettyprint_args))

    # expanded_config is a special result which contains all other arguments. It has to come after
    # the calculation of all the other arguments so it can be filled with everything which was
    # calculated. Can't be calculated because that would have an infinite recursion problem (the set
    # of all arguments would want to include itself).
    # Explicitly / manually set up so that it'll fit where we want it.
    arguments['expanded_config'] = textwrap.indent(json.dumps(arguments, **json_prettyprint_args),
                                                   prefix='  ' * 3)

    if validate_only:
        return

    # Fill in the template parameters
    rendered_templates = render_templates(templates, arguments)

    # Validate there aren't any unexpected top level directives in any of the files
    # (likely indicates a misspelling)
    for name, template in rendered_templates.items():
        if name == 'dcos-services.yaml':  # yaml list of the service files
            assert isinstance(template, list)
        elif name == 'cloud-config.yaml':
            assert template.keys() <= CLOUDCONFIG_KEYS, template.keys()
        elif isinstance(template, str):  # Not a yaml template
            pass
        else:  # yaml template file
            log.debug("validating template file %s", name)
            assert template.keys() <= PACKAGE_KEYS, template.keys()

    # Extract cc_package_files out of the dcos-config template and put them into
    # the cloud-config package.
    cc_package_files, dcos_config_files = extract_files_with_path(rendered_templates['dcos-config.yaml']['package'],
                                                                  cc_package_files)
    rendered_templates['dcos-config.yaml'] = {'package': dcos_config_files}

    # Add an empty pkginfo.json to the cc_package_files.
    # Also assert there isn't one already (can only write out a file once).
    for item in cc_package_files:
        assert item['path'] != '/pkginfo.json'

    # Only create a cloud-config package (by adding a pkginfo.json) if there
    # are files to go into it.
    if len(cc_package_files) > 0:
        cc_package_files.append({
            "path": "/pkginfo.json",
            "content": "{}"})

    for item in cc_package_files:
        assert item['path'].startswith('/')
        item['path'] = '/etc/mesosphere/setup-packages/dcos-provider-{}--setup'.format(
            arguments['provider']) + item['path']
        rendered_templates['cloud-config.yaml']['root'].append(item)

    cluster_package_info = {}

    # Render all the cluster packages
    for package_id_str in json.loads(arguments['cluster_packages']):
        package_id = PackageId(package_id_str)
        package_filename = 'packages/{}/{}.tar.xz'.format(
            package_id.name,
            package_id_str)

        # Build the package
        do_gen_package(rendered_templates[package_id.name + '.yaml'], package_filename)

        cluster_package_info[package_id.name] = {
            'id': package_id_str,
            'filename': package_filename
        }

    # Convert cloud-config to just contain write_files rather than root
    cc = rendered_templates['cloud-config.yaml']

    # Shouldn't contain any packages. Providers should pull what they need to
    # late bind out of other packages via cc_package_file.
    assert 'package' not in cc
    cc_root = cc.pop('root', [])
    # Make sure write_files exists.
    assert 'write_files' not in cc
    cc['write_files'] = []
    # Do the transform
    for item in cc_root:
        assert item['path'].startswith('/')
        cc['write_files'].append(item)
    rendered_templates['cloud-config.yaml'] = cc

    # Add in the add_services util. Done here instead of the initial
    # map since we need to bind in parameters
    def add_services(cloudconfig, cloud_init_implementation):
        return add_units(cloudconfig, rendered_templates['dcos-services.yaml'], cloud_init_implementation)

    utils.add_services = add_services

    return Bunch({
        'arguments': arguments,
        'cluster_packages': cluster_package_info,
        'templates': rendered_templates,
        'utils': utils
    })
Example #8
def generate(arguments,
             extra_templates=list(),
             cc_package_files=list(),
             validate_only=False):
    log.info("Generating configuration files...")

    assert isinstance(extra_templates, list)

    # Keep the old API, where this parameter was named 'arguments' rather than
    # the new name 'user_arguments'.
    user_arguments = arguments
    arguments = None

    setters = dict()
    validate = list()

    # Make sure all user provided arguments are strings.
    validate_arguments_strings(user_arguments)

    # TODO(cmaloney): Make these all just defined by the base calc.py
    package_names = ['dcos-config', 'dcos-metadata']
    template_filenames = [
        'dcos-config.yaml', 'cloud-config.yaml', 'dcos-metadata.yaml',
        'dcos-services.yaml'
    ]

    # TODO(cmaloney): Check there are no duplicates between templates and extra_template_files
    template_filenames += extra_templates

    def add_setter(name, value, is_optional, conditions, is_user,
                   replace_existing):
        if replace_existing:
            if name in setters:
                del setters[name]
        setters.setdefault(name, list()).append(
            Setter(name, value, is_optional, conditions, is_user))

    def add_conditional_scope(scope, conditions, replace_existing):
        nonlocal validate

        # TODO(cmaloney): 'defaults' are the same as 'can' and 'must' is identical to 'arguments' except
        # that one takes functions and one takes strings. Simplify to just 'can', 'must'.
        assert scope.keys() <= {'validate', 'default', 'must', 'conditional'}

        validate += scope.get('validate', list())

        for name, fn in scope.get('must', dict()).items():
            add_setter(name, fn, False, conditions, False, replace_existing)

        for name, fn in scope.get('default', dict()).items():
            add_setter(name, fn, True, conditions, False, replace_existing)

        for name, cond_options in scope.get('conditional', dict()).items():
            for value, sub_scope in cond_options.items():
                add_conditional_scope(sub_scope,
                                      conditions + [(name, value)],
                                      replace_existing=replace_existing)

    add_conditional_scope(gen.calc.entry, [], replace_existing=False)

    # Allow overriding calculators with a `gen_extra/calc.py` if it exists
    if os.path.exists('gen_extra/calc.py'):
        mod = importlib.machinery.SourceFileLoader(
            'gen_extra.calc', 'gen_extra/calc.py').load_module()
        add_conditional_scope(mod.entry, [], replace_existing=True)

    # Add in all user arguments as setters.
    # Happens last so that they are never overwritten with replace_existing=True
    for name, value in user_arguments.items():
        add_setter(name, value, False, [], True, False)

    # Re-arrange templates to be indexed by common name. Only allow multiple for one key if the key
    # is yaml (ends in .yaml).
    templates = dict()
    for filename in template_filenames:
        key = os.path.basename(filename)
        templates.setdefault(key, list())
        templates[key].append(filename)

        if len(templates[key]) > 1 and not key.endswith('.yaml'):
            raise Exception(
                "Internal Error: Only know how to merge YAML templates at this point in time. "
                "Can't merge template {} in template_list {}".format(
                    name, templates[key]))

    mandatory_parameters = get_parameters(templates)

    validate_all_arguments_match_parameters(mandatory_parameters, setters,
                                            user_arguments)

    def add_builtin(name, value):
        add_setter(name, json.dumps(value, **json_prettyprint_args), False, [],
                   False, False)

    # TODO(cmaloney): Hash the contents of all the templates rather than using the list of filenames
    # since the filenames might not live in this git repo, or may be locally modified.
    add_builtin('template_filenames', template_filenames)
    add_builtin('package_names', list(package_names))
    add_builtin('user_arguments', user_arguments)

    # Add a builtin for expanded_config, so that we won't get unset argument errors. The temporary
    # value will get replaced with the set of all arguments once calculation is complete
    temporary_str = 'DO NOT USE THIS AS AN ARGUMENT TO OTHER ARGUMENTS. IT IS TEMPORARY'
    add_builtin('expanded_config', temporary_str)

    # Calculate the remaining arguments.
    arguments = DFSArgumentCalculator(setters,
                                      validate).calculate(mandatory_parameters)

    # Validate all new / calculated arguments are strings.
    validate_arguments_strings(arguments)

    log.info("Final arguments:" +
             json.dumps(arguments, **json_prettyprint_args))

    # expanded_config is a special result which contains all other arguments. It has to come after
    # the calculation of all the other arguments so it can be filled with everything which was
    # calculated. Can't be calculated because that would have an infinite recursion problem (the set
    # of all arguments would want to include itself).
    # Explicitly / manually set up so that it'll fit where we want it.
    arguments['expanded_config'] = textwrap.indent(
        json.dumps(arguments, **json_prettyprint_args),
        prefix='  ' * 3)

    if validate_only:
        return

    # Fill in the template parameters
    rendered_templates = render_templates(templates, arguments)

    # Validate there aren't any unexpected top level directives in any of the files
    # (likely indicates a misspelling)
    for name, template in rendered_templates.items():
        if name == 'dcos-services.yaml':  # yaml list of the service files
            assert isinstance(template, list)
        elif name == 'cloud-config.yaml':
            assert template.keys() <= CLOUDCONFIG_KEYS, template.keys()
        elif isinstance(template, str):  # Not a yaml template
            pass
        else:  # yaml template file
            log.debug("validating template file %s", name)
            assert template.keys() <= PACKAGE_KEYS, template.keys()

    # Extract cc_package_files out of the dcos-config template and put them into
    # the cloud-config package.
    cc_package_files, dcos_config_files = extract_files_with_path(
        rendered_templates['dcos-config.yaml']['package'], cc_package_files)
    rendered_templates['dcos-config.yaml'] = {'package': dcos_config_files}

    # Add an empty pkginfo.json to the cc_package_files.
    # Also assert there isn't one already (can only write out a file once).
    for item in cc_package_files:
        assert item['path'] != '/pkginfo.json'

    # Only create a cloud-config package (by adding a pkginfo.json) if there
    # are files to go into it.
    if len(cc_package_files) > 0:
        cc_package_files.append({"path": "/pkginfo.json", "content": "{}"})

    for item in cc_package_files:
        assert item['path'].startswith('/')
        item['path'] = '/etc/mesosphere/setup-packages/dcos-provider-{}--setup'.format(
            arguments['provider']) + item['path']
        rendered_templates['cloud-config.yaml']['root'].append(item)

    cluster_package_info = {}

    # Render all the cluster packages
    for package_id_str in json.loads(arguments['cluster_packages']):
        package_id = PackageId(package_id_str)
        package_filename = 'packages/{}/{}.tar.xz'.format(
            package_id.name, package_id_str)

        # Build the package
        do_gen_package(rendered_templates[package_id.name + '.yaml'],
                       package_filename)

        cluster_package_info[package_id.name] = {
            'id': package_id_str,
            'filename': package_filename
        }

    # Convert cloud-config to just contain write_files rather than root
    cc = rendered_templates['cloud-config.yaml']

    # Shouldn't contain any packages. Providers should pull what they need to
    # late bind out of other packages via cc_package_file.
    assert 'package' not in cc
    cc_root = cc.pop('root', [])
    # Make sure write_files exists.
    assert 'write_files' not in cc
    cc['write_files'] = []
    # Do the transform
    for item in cc_root:
        assert item['path'].startswith('/')
        cc['write_files'].append(item)
    rendered_templates['cloud-config.yaml'] = cc

    # Add in the add_services util. Done here instead of the initial
    # map since we need to bind in parameters
    def add_services(cloudconfig, cloud_init_implementation):
        return add_units(cloudconfig, rendered_templates['dcos-services.yaml'],
                         cloud_init_implementation)

    utils.add_services = add_services

    return Bunch({
        'arguments': arguments,
        'cluster_packages': cluster_package_info,
        'templates': rendered_templates,
        'utils': utils
    })