def resolve_configuration(sources: list, targets: list, user_arguments: dict):
    # Make sure all user provided arguments are strings.
    # TODO(cmaloney): Loosen this restriction / allow arbitrary types as long
    # as they all have a gen specific string form.
    validate_arguments_strings(user_arguments)

    # Merge the sources into a big dictionary of setters + validators, ensuring
    # that all setters are either strings or functions.
    validate = list()
    setters = dict()

    # Merge all the config targets into one big group of setters for providing
    # to the calculator.
    # TODO(cmaloney): The setter management / set code is very similar to that in ConfigTarget, they
    # could probably be joined.
    for source in sources:
        for name, setter_list in source.setters.items():
            setters.setdefault(name, list())
            setters[name] += setter_list
        validate += source.validate

    # Validate that targets is a list of Targets.
    for target in targets:
        assert isinstance(target, Target), \
            "target should be a Target found a {} with value: {}".format(type(target), target)

    # TODO(cmaloney): Re-enable this after sorting out how to have "optional" config targets which
    # add in extra "acceptable" parameters (SSH Config, AWS Advanced Template config, etc)
    # validate_all_arguments_match_parameters(mandatory_parameters, setters, user_arguments)

    # Add in all user arguments as setters.
    # Happens last so that they are never overwritten with replace_existing=True
    user_config = Source(is_user=True)
    user_config.add_value_dict(user_arguments)

    # Merge all the setters and validate functions into one uber list.
    setters = copy.deepcopy(user_config.setters)
    validate = copy.deepcopy(user_config.validate)
    for source in sources:
        for name, setter_list in source.setters.items():
            # TODO(cmaloney): Make a setter manager already...
            setters.setdefault(name, list())
            setters[name] += setter_list
        validate += source.validate

    # Use setters to calculate every required parameter.
    arguments = DFSArgumentCalculator(setters, validate).calculate(targets)

    # Validate all new / calculated arguments are strings.
    validate_arguments_strings(arguments)

    log.info("Final arguments:" + json_prettyprint(arguments))

    # TODO(cmaloney): Give each config target the values for all its parameters that were hit as
    # well as any parameters that led to those parameters.
    return arguments

def do_main(args):
    if args['create']:
        info_path = args['--info-path']
        if os.path.exists(info_path):
            raise LauncherError('InputConflict', 'Target info path already exists!')
        config = load_yaml(args['--config-path'])
        check_keys(config, [
            'type', 'provider_info', 'this_is_a_temporary_config_format_do_not_put_in_production'])
        write_json(
            info_path,
            get_launcher(config['type'], config['provider_info']).create(config))
        return 0

    info = load_json(args['--info-path'])
    check_keys(info, ['type', 'provider'])
    launcher = get_launcher(info['type'], info['provider'])

    if args['wait']:
        launcher.wait(info)
        print('Cluster is ready!')
        return 0

    if args['describe']:
        print(json_prettyprint(launcher.describe(info)))
        return 0

    if args['pytest']:
        test_cmd = 'py.test'
        if args['--env'] is not None:
            if '=' in args['--env']:
                # User is attempting to do an assignment with the option.
                raise LauncherError(
                    'OptionError',
                    "The '--env' option can only pass through environment variables "
                    "from the current environment. Set variables according to the shell being used.")
            var_list = args['--env'].split(',')
            check_keys(os.environ, var_list)
            test_cmd = ' '.join(['{}={}'.format(e, os.environ[e]) for e in var_list]) + ' ' + test_cmd
        if len(args['<pytest_extras>']) > 0:
            test_cmd += ' ' + ' '.join(args['<pytest_extras>'])
        launcher.test(info, test_cmd)
        return 0

    if args['delete']:
        launcher.delete(info)
        return 0

def do_main(args):
    _handle_logging(args['--log-level'].upper())

    config_path = args['--config-path']
    if args['create']:
        config = launch.config.get_validated_config(config_path)
        info_path = args['--info-path']
        if os.path.exists(info_path):
            raise launch.util.LauncherError('InputConflict', 'Target info path already exists!')
        write_json(info_path, launch.get_launcher(config).create())
        return 0

    try:
        info = load_json(args['--info-path'])
    except FileNotFoundError as ex:
        raise launch.util.LauncherError('MissingInfoJSON', None) from ex

    launcher = launch.get_launcher(info)

    if args['wait']:
        launcher.wait()
        print('Cluster is ready!')
        return 0

    if args['describe']:
        print(json_prettyprint(launcher.describe()))
        return 0

    if args['pytest']:
        var_list = list()
        if args['--env'] is not None:
            if '=' in args['--env']:
                # User is attempting to do an assignment with the option.
                raise launch.util.LauncherError(
                    'OptionError',
                    "The '--env' option can only pass through environment variables "
                    "from the current environment. Set variables according to the shell being used.")
            var_list = args['--env'].split(',')
            missing = [v for v in var_list if v not in os.environ]
            if len(missing) > 0:
                raise launch.util.LauncherError(
                    'MissingInput',
                    'Environment variable arguments have been indicated '
                    'but not set: {}'.format(repr(missing)))
        env_dict = {e: os.environ[e] for e in var_list}
        return launcher.test(args['<pytest_extras>'], env_dict)

    if args['delete']:
        launcher.delete()
        return 0

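# A minimal sketch of the docopt-style arguments dict the do_main handler above expects.
# The command and option names come straight from the branches in do_main; the values
# shown here (paths, log level, env var names) are purely hypothetical examples.
example_args = {
    'create': True, 'wait': False, 'describe': False, 'pytest': False, 'delete': False,
    '--config-path': 'config.yaml',        # hypothetical path to the launch config
    '--info-path': 'cluster_info.json',    # hypothetical path for the cluster info JSON
    '--log-level': 'info',
    '--env': None,                         # e.g. 'AWS_REGION,AWS_PROFILE' to pass variables through
    '<pytest_extras>': [],
}
# do_main(example_args) would validate the config, create the cluster, and write the
# resulting cluster info to --info-path.
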
def do_main(args):
    _handle_logging(args['--log-level'].upper())

    config_path = args['--config-path']
    if args['create']:
        config = launch.config.get_validated_config(config_path)
        info_path = args['--info-path']
        if os.path.exists(info_path):
            raise launch.util.LauncherError('InputConflict', 'Target info path already exists!')
        write_json(info_path, launch.get_launcher(config).create(config))
        return 0

    info = load_json(args['--info-path'])
    launcher = launch.get_launcher(info)

    if args['wait']:
        launcher.wait(info)
        print('Cluster is ready!')
        return 0

    if args['describe']:
        print(json_prettyprint(launcher.describe(info)))
        return 0

    if args['pytest']:
        test_cmd = 'py.test'
        if args['--env'] is not None:
            if '=' in args['--env']:
                # User is attempting to do an assignment with the option.
                raise launch.util.LauncherError(
                    'OptionError',
                    "The '--env' option can only pass through environment variables "
                    "from the current environment. Set variables according to the shell being used.")
            var_list = args['--env'].split(',')
            launch.util.check_keys(os.environ, var_list)
            test_cmd = ' '.join(['{}={}'.format(e, os.environ[e]) for e in var_list]) + ' ' + test_cmd
        if len(args['<pytest_extras>']) > 0:
            test_cmd += ' ' + ' '.join(args['<pytest_extras>'])
        launcher.test(info, test_cmd)
        return 0

    if args['delete']:
        launcher.delete(info)
        return 0

def generate(
        arguments,
        extra_templates=list(),
        extra_sources=list(),
        extra_targets=list()):
    # To maintain the old API where we passed arguments rather than the new name.
    user_arguments = arguments
    arguments = None

    sources, targets, templates = get_dcosconfig_source_target_and_templates(
        user_arguments, extra_templates, extra_sources)

    resolver = validate_and_raise(sources, targets + extra_targets)
    argument_dict = get_final_arguments(resolver)
    late_variables = get_late_variables(resolver, sources)

    # expanded_config is a special result which contains all other arguments. It has to come after
    # the calculation of all the other arguments so it can be filled with everything which was
    # calculated. Can't be calculated because that would have an infinite recursion problem (the set
    # of all arguments would want to include itself).
    # Explicitly / manually set up so that it'll fit where we want it.
    # TODO(cmaloney): Make this late-bound by gen.internals
    argument_dict['expanded_config'] = textwrap.indent(
        json_prettyprint(
            {k: v for k, v in argument_dict.items() if not v.startswith(gen.internals.LATE_BIND_PLACEHOLDER_START)}
        ),
        prefix=' ' * 3,
    )

    log.debug("Final arguments:" + json_prettyprint(argument_dict))

    # Fill in the template parameters
    # TODO(cmaloney): render_templates should ideally take the template targets.
    rendered_templates = render_templates(templates, argument_dict)

    # Validate there aren't any unexpected top level directives in any of the files
    # (likely indicates a misspelling)
    for name, template in rendered_templates.items():
        if name == 'dcos-services.yaml':
            # yaml list of the service files
            assert isinstance(template, list)
        elif name == 'cloud-config.yaml':
            assert template.keys() <= CLOUDCONFIG_KEYS, template.keys()
        elif isinstance(template, str):
            # Not a yaml template
            pass
        else:
            # yaml template file
            log.debug("validating template file %s", name)
            assert template.keys() <= PACKAGE_KEYS, template.keys()

    # Find all files which contain late bind variables and turn them into a "late bind package"
    # TODO(cmaloney): check there are no late bound variables in cloud-config.yaml
    late_files, regular_files = extract_files_containing_late_variables(
        rendered_templates['dcos-config.yaml']['package'])
    # put the regular files right back
    rendered_templates['dcos-config.yaml'] = {'package': regular_files}

    def make_package_filename(package_id, extension):
        return 'packages/{0}/{1}{2}'.format(
            package_id.name,
            repr(package_id),
            extension)

    # Render all the cluster packages
    cluster_package_info = {}

    # Prepare late binding config, if any.
    late_package = build_late_package(late_files, argument_dict['config_id'], argument_dict['provider'])
    if late_variables:
        # Render the late binding package. This package will be downloaded onto
        # each cluster node during bootstrap and rendered into the final config
        # using the values from the late config file.
        late_package_id = PackageId(late_package['name'])
        late_package_filename = make_package_filename(late_package_id, '.dcos_config')
        os.makedirs(os.path.dirname(late_package_filename), mode=0o755)
        write_yaml(late_package_filename, {'package': late_package['package']}, default_flow_style=False)
        log.info('Package filename: {}'.format(late_package_filename))

        # Add the late config file to cloud config. The expressions in
        # late_variables will be resolved by the service handling the cloud
        # config (e.g. Amazon CloudFormation). The rendered late config file
        # on a cluster node's filesystem will contain the final values.
        rendered_templates['cloud-config.yaml']['root'].append({
            'path': '/etc/mesosphere/setup-flags/late-config.yaml',
            'permissions': '0644',
            'owner': 'root',
            # TODO(cmaloney): don't prettyprint to save bytes.
            # NOTE: Use yaml here simply to make avoiding painful escaping and
            # unescaping easier.
            'content': render_yaml({
                'late_bound_package_id': late_package['name'],
                'bound_values': late_variables})})

    # Collect metadata for cluster packages.
    for package_id_str in json.loads(argument_dict['cluster_packages']):
        package_id = PackageId(package_id_str)
        package_filename = make_package_filename(package_id, '.tar.xz')

        cluster_package_info[package_id.name] = {
            'id': package_id_str,
            'filename': package_filename
        }

    # Render config packages.
    config_package_ids = json.loads(argument_dict['config_package_ids'])
    for package_id_str in config_package_ids:
        package_id = PackageId(package_id_str)
        do_gen_package(rendered_templates[package_id.name + '.yaml'], cluster_package_info[package_id.name]['filename'])

    # Convert cloud-config to just contain write_files rather than root
    cc = rendered_templates['cloud-config.yaml']

    # Shouldn't contain any packages. Providers should pull what they need to
    # late bind out of other packages via cc_package_file.
    assert 'package' not in cc
    cc_root = cc.pop('root', [])
    # Make sure write_files exists.
    assert 'write_files' not in cc
    cc['write_files'] = []
    # Do the transform
    for item in cc_root:
        assert item['path'].startswith('/')
        cc['write_files'].append(item)
    rendered_templates['cloud-config.yaml'] = cc

    # Add in the add_services util. Done here instead of the initial
    # map since we need to bind in parameters
    def add_services(cloudconfig, cloud_init_implementation):
        return add_units(cloudconfig, rendered_templates['dcos-services.yaml'], cloud_init_implementation)

    utils.add_services = add_services

    return Bunch({
        'arguments': argument_dict,
        'cluster_packages': cluster_package_info,
        'config_package_ids': config_package_ids,
        'late_package_id': late_package['name'] if late_package else None,
        'templates': rendered_templates,
        'utils': utils
    })

def add_builtin(name, value):
    base_source.add_must(name, json_prettyprint(value))

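# Minimal runnable sketch of add_builtin's effect, using stand-ins for base_source and
# json_prettyprint (both are assumptions; the real objects live in gen). The point it shows:
# builtin values are stored on the source as JSON strings via add_must, so downstream
# consumers always see string-valued arguments.
import json


class _SourceStub:
    def __init__(self):
        self.must = {}

    def add_must(self, name, value):
        self.must[name] = value


_base_source_stub = _SourceStub()


def _add_builtin_stub(name, value):
    _base_source_stub.add_must(name, json.dumps(value, indent=2, sort_keys=True))


_add_builtin_stub('config_package_ids', ['dcos-config--setup'])
print(type(_base_source_stub.must['config_package_ids']))  # <class 'str'>, not a Python list
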
def generate(
        arguments,
        extra_templates=list(),
        cc_package_files=list()):
    # To maintain the old API where we passed arguments rather than the new name.
    user_arguments = arguments
    arguments = None

    sources, targets, templates = get_dcosconfig_source_target_and_templates(user_arguments, extra_templates)

    # TODO(cmaloney): Make it so we only get out the dcosconfig target arguments not all the config target arguments.
    resolver = gen.internals.resolve_configuration(sources, targets, user_arguments)
    status = resolver.status_dict

    if status['status'] == 'errors':
        raise ValidationError(errors=status['errors'], unset=status['unset'])

    argument_dict = {k: v.value for k, v in resolver.arguments.items()}

    log.debug("Final arguments:" + json_prettyprint(argument_dict))

    # expanded_config is a special result which contains all other arguments. It has to come after
    # the calculation of all the other arguments so it can be filled with everything which was
    # calculated. Can't be calculated because that would have an infinite recursion problem (the set
    # of all arguments would want to include itself).
    # Explicitly / manually set up so that it'll fit where we want it.
    # TODO(cmaloney): Make this late-bound by gen.internals
    argument_dict['expanded_config'] = textwrap.indent(json_prettyprint(argument_dict), prefix=' ' * 3)

    # Fill in the template parameters
    # TODO(cmaloney): render_templates should ideally take the template targets.
    rendered_templates = render_templates(templates, argument_dict)

    # Validate there aren't any unexpected top level directives in any of the files
    # (likely indicates a misspelling)
    for name, template in rendered_templates.items():
        if name == 'dcos-services.yaml':
            # yaml list of the service files
            assert isinstance(template, list)
        elif name == 'cloud-config.yaml':
            assert template.keys() <= CLOUDCONFIG_KEYS, template.keys()
        elif isinstance(template, str):
            # Not a yaml template
            pass
        else:
            # yaml template file
            log.debug("validating template file %s", name)
            assert template.keys() <= PACKAGE_KEYS, template.keys()

    # Extract cc_package_files out of the dcos-config template and put them into
    # the cloud-config package.
    cc_package_files, dcos_config_files = extract_files_with_path(
        rendered_templates['dcos-config.yaml']['package'], cc_package_files)
    rendered_templates['dcos-config.yaml'] = {'package': dcos_config_files}

    # Add an empty pkginfo.json to the cc_package_files.
    # Also assert there isn't one already (can only write out a file once).
    for item in cc_package_files:
        assert item['path'] != '/pkginfo.json'

    # If there aren't any files for a cloud-config package, don't make one start
    # existing by adding a pkginfo.json
    if len(cc_package_files) > 0:
        cc_package_files.append({
            "path": "/pkginfo.json",
            "content": "{}"})

    for item in cc_package_files:
        assert item['path'].startswith('/')
        item['path'] = '/etc/mesosphere/setup-packages/dcos-provider-{}--setup'.format(
            argument_dict['provider']) + item['path']
        rendered_templates['cloud-config.yaml']['root'].append(item)

    cluster_package_info = {}

    # Render all the cluster packages
    for package_id_str in json.loads(argument_dict['cluster_packages']):
        package_id = PackageId(package_id_str)
        package_filename = 'packages/{}/{}.tar.xz'.format(
            package_id.name,
            package_id_str)

        # Build the package
        do_gen_package(rendered_templates[package_id.name + '.yaml'], package_filename)

        cluster_package_info[package_id.name] = {
            'id': package_id_str,
            'filename': package_filename
        }

    # Convert cloud-config to just contain write_files rather than root
    cc = rendered_templates['cloud-config.yaml']

    # Shouldn't contain any packages. Providers should pull what they need to
    # late bind out of other packages via cc_package_file.
    assert 'package' not in cc
    cc_root = cc.pop('root', [])
    # Make sure write_files exists.
    assert 'write_files' not in cc
    cc['write_files'] = []
    # Do the transform
    for item in cc_root:
        assert item['path'].startswith('/')
        cc['write_files'].append(item)
    rendered_templates['cloud-config.yaml'] = cc

    # Add in the add_services util. Done here instead of the initial
    # map since we need to bind in parameters
    def add_services(cloudconfig, cloud_init_implementation):
        return add_units(cloudconfig, rendered_templates['dcos-services.yaml'], cloud_init_implementation)

    utils.add_services = add_services

    return Bunch({
        'arguments': argument_dict,
        'cluster_packages': cluster_package_info,
        'templates': rendered_templates,
        'utils': utils
    })

def generate(arguments, extra_templates=list(), extra_sources=list(), extra_targets=list()):
    # To maintain the old API where we passed arguments rather than the new name.
    user_arguments = arguments
    arguments = None

    sources, targets, templates = get_dcosconfig_source_target_and_templates(
        user_arguments, extra_templates, extra_sources)

    resolver = validate_and_raise(sources, targets + extra_targets)
    argument_dict = get_final_arguments(resolver)
    late_variables = get_late_variables(resolver, sources)
    secret_builtins = ['expanded_config_full', 'user_arguments_full', 'config_yaml_full']
    secret_variables = set(get_secret_variables(sources) + secret_builtins)
    masked_value = '**HIDDEN**'

    # Calculate values for builtin variables.
    user_arguments_masked = {k: (masked_value if k in secret_variables else v) for k, v in user_arguments.items()}
    argument_dict['user_arguments_full'] = json_prettyprint(user_arguments)
    argument_dict['user_arguments'] = json_prettyprint(user_arguments_masked)
    argument_dict['config_yaml_full'] = user_arguments_to_yaml(user_arguments)
    argument_dict['config_yaml'] = user_arguments_to_yaml(user_arguments_masked)

    # The expanded_config and expanded_config_full variables contain all other variables and their values.
    # expanded_config is a copy of expanded_config_full with secret values removed. Calculating these variables'
    # values must come after the calculation of all other variables to prevent infinite recursion.
    # TODO(cmaloney): Make this late-bound by gen.internals
    expanded_config_full = {
        k: v for k, v in argument_dict.items()
        # Omit late-bound variables whose values have not yet been calculated.
        if not v.startswith(gen.internals.LATE_BIND_PLACEHOLDER_START)
    }
    expanded_config_scrubbed = {k: v for k, v in expanded_config_full.items() if k not in secret_variables}
    argument_dict['expanded_config_full'] = format_expanded_config(expanded_config_full)
    argument_dict['expanded_config'] = format_expanded_config(expanded_config_scrubbed)

    log.debug("Final arguments:" + json_prettyprint({
        # Mask secret config values.
        k: (masked_value if k in secret_variables else v) for k, v in argument_dict.items()
    }))

    # Fill in the template parameters
    # TODO(cmaloney): render_templates should ideally take the template targets.
    rendered_templates = render_templates(templates, argument_dict)

    # Validate there aren't any unexpected top level directives in any of the files
    # (likely indicates a misspelling)
    for name, template in rendered_templates.items():
        if name == 'dcos-services.yaml':
            # yaml list of the service files
            assert isinstance(template, list)
        elif name == 'cloud-config.yaml':
            assert template.keys() <= CLOUDCONFIG_KEYS, template.keys()
        elif isinstance(template, str):
            # Not a yaml template
            pass
        else:
            # yaml template file
            log.debug("validating template file %s", name)
            assert template.keys() <= PACKAGE_KEYS, template.keys()

    stable_artifacts = []

    # Find all files which contain late bind variables and turn them into a "late bind package"
    # TODO(cmaloney): check there are no late bound variables in cloud-config.yaml
    late_files, regular_files = extract_files_containing_late_variables(
        rendered_templates['dcos-config.yaml']['package'])
    # put the regular files right back
    rendered_templates['dcos-config.yaml'] = {'package': regular_files}

    # Render cluster package list artifact.
    cluster_package_list_filename = 'package_lists/{}.package_list.json'.format(
        argument_dict['cluster_package_list_id'])
    os.makedirs(os.path.dirname(cluster_package_list_filename), mode=0o755, exist_ok=True)
    write_string(cluster_package_list_filename, argument_dict['cluster_packages'])
    log.info('Cluster package list: {}'.format(cluster_package_list_filename))
    stable_artifacts.append(cluster_package_list_filename)

    def make_package_filename(package_id, extension):
        return 'packages/{0}/{1}{2}'.format(package_id.name, repr(package_id), extension)

    # Render all the cluster packages
    cluster_package_info = {}

    # Prepare late binding config, if any.
    late_package = build_late_package(late_files, argument_dict['config_id'], argument_dict['provider'])
    if late_variables:
        # Render the late binding package. This package will be downloaded onto
        # each cluster node during bootstrap and rendered into the final config
        # using the values from the late config file.
        late_package_id = PackageId(late_package['name'])
        late_package_filename = make_package_filename(late_package_id, '.dcos_config')
        os.makedirs(os.path.dirname(late_package_filename), mode=0o755)
        write_yaml(late_package_filename, {'package': late_package['package']}, default_flow_style=False)
        log.info('Package filename: {}'.format(late_package_filename))
        stable_artifacts.append(late_package_filename)

        # Add the late config file to cloud config. The expressions in
        # late_variables will be resolved by the service handling the cloud
        # config (e.g. Amazon CloudFormation). The rendered late config file
        # on a cluster node's filesystem will contain the final values.
        rendered_templates['cloud-config.yaml']['root'].append({
            'path': '/etc/mesosphere/setup-flags/late-config.yaml',
            'permissions': '0644',
            'owner': 'root',
            # TODO(cmaloney): don't prettyprint to save bytes.
            # NOTE: Use yaml here simply to make avoiding painful escaping and
            # unescaping easier.
            'content': render_yaml({
                'late_bound_package_id': late_package['name'],
                'bound_values': late_variables})})

    # Collect metadata for cluster packages.
    for package_id_str in json.loads(argument_dict['cluster_packages']):
        package_id = PackageId(package_id_str)
        package_filename = make_package_filename(package_id, '.tar.xz')

        cluster_package_info[package_id.name] = {
            'id': package_id_str,
            'filename': package_filename
        }

    # Render config packages.
    config_package_ids = json.loads(argument_dict['config_package_ids'])
    for package_id_str in config_package_ids:
        package_id = PackageId(package_id_str)
        package_filename = cluster_package_info[package_id.name]['filename']
        do_gen_package(rendered_templates[package_id.name + '.yaml'], cluster_package_info[package_id.name]['filename'])
        stable_artifacts.append(package_filename)

    # Convert cloud-config to just contain write_files rather than root
    cc = rendered_templates['cloud-config.yaml']

    # Shouldn't contain any packages. Providers should pull what they need to
    # late bind out of other packages via cc_package_file.
    assert 'package' not in cc
    cc_root = cc.pop('root', [])
    # Make sure write_files exists.
    assert 'write_files' not in cc
    cc['write_files'] = []
    # Do the transform
    for item in cc_root:
        assert item['path'].startswith('/')
        cc['write_files'].append(item)
    rendered_templates['cloud-config.yaml'] = cc

    # Add in the add_services util. Done here instead of the initial
    # map since we need to bind in parameters
    def add_services(cloudconfig, cloud_init_implementation):
        return add_units(cloudconfig, rendered_templates['dcos-services.yaml'], cloud_init_implementation)

    utils.add_services = add_services

    return Bunch({
        'arguments': argument_dict,
        'cluster_packages': cluster_package_info,
        'stable_artifacts': stable_artifacts,
        'templates': rendered_templates,
        'utils': utils
    })

def format_expanded_config(config):
    return textwrap.indent(json_prettyprint(config), prefix=(' ' * 3))

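# A small, self-contained sketch of the formatting this helper produces, assuming
# json_prettyprint is essentially json.dumps(..., indent=2, sort_keys=True) (an assumption;
# only the indentation behaviour matters here): every line of the pretty-printed JSON is
# indented by three spaces so the result can be embedded under a key in a rendered YAML file.
import json
import textwrap


def _json_prettyprint_stub(data):
    # Stand-in for gen's json_prettyprint.
    return json.dumps(data, indent=2, sort_keys=True)


def _format_expanded_config_stub(config):
    return textwrap.indent(_json_prettyprint_stub(config), prefix=' ' * 3)


print(_format_expanded_config_stub({'provider': 'onprem', 'num_masters': '3'}))
#    {
#      "num_masters": "3",
#      "provider": "onprem"
#    }
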
def generate(
        arguments,
        extra_templates=list(),
        cc_package_files=list(),
        extra_sources=list()):
    # To maintain the old API where we passed arguments rather than the new name.
    user_arguments = arguments
    arguments = None

    sources, targets, templates = get_dcosconfig_source_target_and_templates(
        user_arguments, extra_templates, extra_sources)

    # TODO(cmaloney): Make it so we only get out the dcosconfig target arguments not all the config target arguments.
    resolver = gen.internals.resolve_configuration(sources, targets, user_arguments)
    status = resolver.status_dict

    if status['status'] == 'errors':
        raise ValidationError(errors=status['errors'], unset=status['unset'])

    argument_dict = {k: v.value for k, v in resolver.arguments.items()}

    # expanded_config is a special result which contains all other arguments. It has to come after
    # the calculation of all the other arguments so it can be filled with everything which was
    # calculated. Can't be calculated because that would have an infinite recursion problem (the set
    # of all arguments would want to include itself).
    # Explicitly / manually set up so that it'll fit where we want it.
    # TODO(cmaloney): Make this late-bound by gen.internals
    argument_dict['expanded_config'] = textwrap.indent(json_prettyprint(argument_dict), prefix=' ' * 3)

    log.debug("Final arguments:" + json_prettyprint(argument_dict))

    # Fill in the template parameters
    # TODO(cmaloney): render_templates should ideally take the template targets.
    rendered_templates = render_templates(templates, argument_dict)

    # Validate there aren't any unexpected top level directives in any of the files
    # (likely indicates a misspelling)
    for name, template in rendered_templates.items():
        if name == 'dcos-services.yaml':
            # yaml list of the service files
            assert isinstance(template, list)
        elif name == 'cloud-config.yaml':
            assert template.keys() <= CLOUDCONFIG_KEYS, template.keys()
        elif isinstance(template, str):
            # Not a yaml template
            pass
        else:
            # yaml template file
            log.debug("validating template file %s", name)
            assert template.keys() <= PACKAGE_KEYS, template.keys()

    # Extract cc_package_files out of the dcos-config template and put them into
    # the cloud-config package.
    cc_package_files, dcos_config_files = extract_files_with_path(
        rendered_templates['dcos-config.yaml']['package'], cc_package_files)
    rendered_templates['dcos-config.yaml'] = {'package': dcos_config_files}

    # Add an empty pkginfo.json to the cc_package_files.
    # Also assert there isn't one already (can only write out a file once).
    for item in cc_package_files:
        assert item['path'] != '/pkginfo.json'

    # If there aren't any files for a cloud-config package, don't make one start
    # existing by adding a pkginfo.json
    if len(cc_package_files) > 0:
        cc_package_files.append({
            "path": "/pkginfo.json",
            "content": "{}"})

    for item in cc_package_files:
        assert item['path'].startswith('/')
        item['path'] = '/etc/mesosphere/setup-packages/dcos-provider-{}--setup'.format(
            argument_dict['provider']) + item['path']
        rendered_templates['cloud-config.yaml']['root'].append(item)

    cluster_package_info = {}

    # Render all the cluster packages
    for package_id_str in json.loads(argument_dict['cluster_packages']):
        package_id = PackageId(package_id_str)
        package_filename = 'packages/{}/{}.tar.xz'.format(
            package_id.name,
            package_id_str)

        # Build the package
        do_gen_package(rendered_templates[package_id.name + '.yaml'], package_filename)

        cluster_package_info[package_id.name] = {
            'id': package_id_str,
            'filename': package_filename
        }

    # Convert cloud-config to just contain write_files rather than root
    cc = rendered_templates['cloud-config.yaml']

    # Shouldn't contain any packages. Providers should pull what they need to
    # late bind out of other packages via cc_package_file.
    assert 'package' not in cc
    cc_root = cc.pop('root', [])
    # Make sure write_files exists.
    assert 'write_files' not in cc
    cc['write_files'] = []
    # Do the transform
    for item in cc_root:
        assert item['path'].startswith('/')
        cc['write_files'].append(item)
    rendered_templates['cloud-config.yaml'] = cc

    # Add in the add_services util. Done here instead of the initial
    # map since we need to bind in parameters
    def add_services(cloudconfig, cloud_init_implementation):
        return add_units(cloudconfig, rendered_templates['dcos-services.yaml'], cloud_init_implementation)

    utils.add_services = add_services

    return Bunch({
        'arguments': argument_dict,
        'cluster_packages': cluster_package_info,
        'templates': rendered_templates,
        'utils': utils
    })

def generate(arguments, extra_templates=list(), extra_sources=list(), extra_targets=list()):
    # To maintain the old API where we passed arguments rather than the new name.
    user_arguments = arguments
    arguments = None

    sources, targets, templates = get_dcosconfig_source_target_and_templates(
        user_arguments, extra_templates, extra_sources)

    # TODO(cmaloney): Make it so we only get out the dcosconfig target arguments not all the config target arguments.
    resolver = gen.internals.resolve_configuration(sources, targets + extra_targets)
    status = resolver.status_dict

    if status['status'] == 'errors':
        raise ValidationError(errors=status['errors'], unset=status['unset'])

    # Gather out the late variables. The presence of late variables changes
    # whether or not a late package is created
    late_variables = dict()
    # TODO(branden): Get the late vars and expressions from resolver.late
    for source in sources:
        for setter_list in source.setters.values():
            for setter in setter_list:
                if not setter.is_late:
                    continue
                if setter.name not in resolver.late:
                    continue
                # Skip late vars that aren't referenced by config.
                if not resolver.arguments[setter.name].is_finalized:
                    continue
                # Validate a late variable should only have one source.
                assert setter.name not in late_variables
                late_variables[setter.name] = setter.late_expression

    argument_dict = {k: v.value for k, v in resolver.arguments.items() if v.is_finalized}

    # expanded_config is a special result which contains all other arguments. It has to come after
    # the calculation of all the other arguments so it can be filled with everything which was
    # calculated. Can't be calculated because that would have an infinite recursion problem (the set
    # of all arguments would want to include itself).
    # Explicitly / manually set up so that it'll fit where we want it.
    # TODO(cmaloney): Make this late-bound by gen.internals
    argument_dict['expanded_config'] = textwrap.indent(
        json_prettyprint({
            k: v for k, v in argument_dict.items()
            if not v.startswith(gen.internals.LATE_BIND_PLACEHOLDER_START)
        }),
        prefix=' ' * 3,
    )

    log.debug("Final arguments:" + json_prettyprint(argument_dict))

    # Fill in the template parameters
    # TODO(cmaloney): render_templates should ideally take the template targets.
    rendered_templates = render_templates(templates, argument_dict)

    # Validate there aren't any unexpected top level directives in any of the files
    # (likely indicates a misspelling)
    for name, template in rendered_templates.items():
        if name == 'dcos-services.yaml':
            # yaml list of the service files
            assert isinstance(template, list)
        elif name == 'cloud-config.yaml':
            assert template.keys() <= CLOUDCONFIG_KEYS, template.keys()
        elif isinstance(template, str):
            # Not a yaml template
            pass
        else:
            # yaml template file
            log.debug("validating template file %s", name)
            assert template.keys() <= PACKAGE_KEYS, template.keys()

    # Find all files which contain late bind variables and turn them into a "late bind package"
    # TODO(cmaloney): check there are no late bound variables in cloud-config.yaml
    late_files, regular_files = extract_files_containing_late_variables(
        rendered_templates['dcos-config.yaml']['package'])
    # put the regular files right back
    rendered_templates['dcos-config.yaml'] = {'package': regular_files}

    def make_package_filename(package_id, extension):
        return 'packages/{0}/{1}{2}'.format(package_id.name, repr(package_id), extension)

    # Render all the cluster packages
    cluster_package_info = {}

    # Prepare late binding config, if any.
    late_package = build_late_package(late_files, argument_dict['config_id'], argument_dict['provider'])
    if late_variables:
        # Render the late binding package. This package will be downloaded onto
        # each cluster node during bootstrap and rendered into the final config
        # using the values from the late config file.
        late_package_id = PackageId(late_package['name'])
        late_package_filename = make_package_filename(late_package_id, '.dcos_config')
        os.makedirs(os.path.dirname(late_package_filename), mode=0o755)
        write_yaml(late_package_filename, {'package': late_package['package']}, default_flow_style=False)

        cluster_package_info[late_package_id.name] = {
            'id': late_package['name'],
            'filename': late_package_filename
        }

        # Add the late config file to cloud config. The expressions in
        # late_variables will be resolved by the service handling the cloud
        # config (e.g. Amazon CloudFormation). The rendered late config file
        # on a cluster node's filesystem will contain the final values.
        rendered_templates['cloud-config.yaml']['root'].append({
            'path': '/etc/mesosphere/setup-flags/late-config.yaml',
            'permissions': '0644',
            'owner': 'root',
            # TODO(cmaloney): don't prettyprint to save bytes.
            # NOTE: Use yaml here simply to make avoiding painful escaping and
            # unescaping easier.
            'content': render_yaml({
                'late_bound_package_id': late_package['name'],
                'bound_values': late_variables})})

    # Render the rest of the packages.
    for package_id_str in json.loads(argument_dict['cluster_packages']):
        package_id = PackageId(package_id_str)
        package_filename = make_package_filename(package_id, '.tar.xz')

        # Build the package
        do_gen_package(rendered_templates[package_id.name + '.yaml'], package_filename)

        cluster_package_info[package_id.name] = {
            'id': package_id_str,
            'filename': package_filename
        }

    # Convert cloud-config to just contain write_files rather than root
    cc = rendered_templates['cloud-config.yaml']

    # Shouldn't contain any packages. Providers should pull what they need to
    # late bind out of other packages via cc_package_file.
    assert 'package' not in cc
    cc_root = cc.pop('root', [])
    # Make sure write_files exists.
    assert 'write_files' not in cc
    cc['write_files'] = []
    # Do the transform
    for item in cc_root:
        assert item['path'].startswith('/')
        cc['write_files'].append(item)
    rendered_templates['cloud-config.yaml'] = cc

    # Add in the add_services util. Done here instead of the initial
    # map since we need to bind in parameters
    def add_services(cloudconfig, cloud_init_implementation):
        return add_units(cloudconfig, rendered_templates['dcos-services.yaml'], cloud_init_implementation)

    utils.add_services = add_services

    return Bunch({
        'arguments': argument_dict,
        'cluster_packages': cluster_package_info,
        'templates': rendered_templates,
        'utils': utils
    })

def generate(
        arguments,
        extra_templates=list(),
        extra_sources=list(),
        extra_targets=list()):
    # To maintain the old API where we passed arguments rather than the new name.
    user_arguments = arguments
    arguments = None

    sources, targets, templates = get_dcosconfig_source_target_and_templates(
        user_arguments, extra_templates, extra_sources)

    resolver = validate_and_raise(sources, targets + extra_targets)
    argument_dict = get_final_arguments(resolver)
    late_variables = get_late_variables(resolver, sources)
    secret_builtins = ['expanded_config_full', 'user_arguments_full', 'config_yaml_full']
    secret_variables = set(get_secret_variables(sources) + secret_builtins)
    masked_value = '**HIDDEN**'

    # Calculate values for builtin variables.
    user_arguments_masked = {k: (masked_value if k in secret_variables else v) for k, v in user_arguments.items()}
    argument_dict['user_arguments_full'] = json_prettyprint(user_arguments)
    argument_dict['user_arguments'] = json_prettyprint(user_arguments_masked)
    argument_dict['config_yaml_full'] = user_arguments_to_yaml(user_arguments)
    argument_dict['config_yaml'] = user_arguments_to_yaml(user_arguments_masked)

    # The expanded_config and expanded_config_full variables contain all other variables and their values.
    # expanded_config is a copy of expanded_config_full with secret values removed. Calculating these variables'
    # values must come after the calculation of all other variables to prevent infinite recursion.
    # TODO(cmaloney): Make this late-bound by gen.internals
    expanded_config_full = {
        k: v for k, v in argument_dict.items()
        # Omit late-bound variables whose values have not yet been calculated.
        if not v.startswith(gen.internals.LATE_BIND_PLACEHOLDER_START)
    }
    expanded_config_scrubbed = {k: v for k, v in expanded_config_full.items() if k not in secret_variables}
    argument_dict['expanded_config_full'] = format_expanded_config(expanded_config_full)
    argument_dict['expanded_config'] = format_expanded_config(expanded_config_scrubbed)

    log.debug(
        "Final arguments:" + json_prettyprint({
            # Mask secret config values.
            k: (masked_value if k in secret_variables else v) for k, v in argument_dict.items()
        })
    )

    # Fill in the template parameters
    # TODO(cmaloney): render_templates should ideally take the template targets.
    rendered_templates = render_templates(templates, argument_dict)

    # Validate there aren't any unexpected top level directives in any of the files
    # (likely indicates a misspelling)
    for name, template in rendered_templates.items():
        if name == 'dcos-services.yaml':
            # yaml list of the service files
            assert isinstance(template, list)
        elif name == 'cloud-config.yaml':
            assert template.keys() <= CLOUDCONFIG_KEYS, template.keys()
        elif isinstance(template, str):
            # Not a yaml template
            pass
        else:
            # yaml template file
            log.debug("validating template file %s", name)
            assert template.keys() <= PACKAGE_KEYS, template.keys()

    stable_artifacts = []
    channel_artifacts = []

    # Find all files which contain late bind variables and turn them into a "late bind package"
    # TODO(cmaloney): check there are no late bound variables in cloud-config.yaml
    late_files, regular_files = extract_files_containing_late_variables(
        rendered_templates['dcos-config.yaml']['package'])
    # put the regular files right back
    rendered_templates['dcos-config.yaml'] = {'package': regular_files}

    # Render cluster package list artifact.
    cluster_package_list_filename = 'package_lists/{}.package_list.json'.format(
        argument_dict['cluster_package_list_id'])
    os.makedirs(os.path.dirname(cluster_package_list_filename), mode=0o755, exist_ok=True)
    write_string(cluster_package_list_filename, argument_dict['cluster_packages'])
    log.info('Cluster package list: {}'.format(cluster_package_list_filename))
    stable_artifacts.append(cluster_package_list_filename)

    def make_package_filename(package_id, extension):
        return 'packages/{0}/{1}{2}'.format(
            package_id.name,
            repr(package_id),
            extension)

    # Render all the cluster packages
    cluster_package_info = {}

    # Prepare late binding config, if any.
    late_package = build_late_package(late_files, argument_dict['config_id'], argument_dict['provider'])
    if late_variables:
        # Render the late binding package. This package will be downloaded onto
        # each cluster node during bootstrap and rendered into the final config
        # using the values from the late config file.
        late_package_id = PackageId(late_package['name'])
        late_package_filename = make_package_filename(late_package_id, '.dcos_config')
        os.makedirs(os.path.dirname(late_package_filename), mode=0o755)
        write_yaml(late_package_filename, {'package': late_package['package']}, default_flow_style=False)
        log.info('Package filename: {}'.format(late_package_filename))
        stable_artifacts.append(late_package_filename)

        # Add the late config file to cloud config. The expressions in
        # late_variables will be resolved by the service handling the cloud
        # config (e.g. Amazon CloudFormation). The rendered late config file
        # on a cluster node's filesystem will contain the final values.
        rendered_templates['cloud-config.yaml']['root'].append({
            'path': '/etc/mesosphere/setup-flags/late-config.yaml',
            'permissions': '0644',
            'owner': 'root',
            # TODO(cmaloney): don't prettyprint to save bytes.
            # NOTE: Use yaml here simply to make avoiding painful escaping and
            # unescaping easier.
            'content': render_yaml({
                'late_bound_package_id': late_package['name'],
                'bound_values': late_variables})})

    # Collect metadata for cluster packages.
    for package_id_str in json.loads(argument_dict['cluster_packages']):
        package_id = PackageId(package_id_str)
        package_filename = make_package_filename(package_id, '.tar.xz')

        cluster_package_info[package_id.name] = {
            'id': package_id_str,
            'filename': package_filename
        }

    # Render config packages.
    config_package_ids = json.loads(argument_dict['config_package_ids'])
    for package_id_str in config_package_ids:
        package_id = PackageId(package_id_str)
        package_filename = cluster_package_info[package_id.name]['filename']
        do_gen_package(rendered_templates[package_id.name + '.yaml'], cluster_package_info[package_id.name]['filename'])
        stable_artifacts.append(package_filename)

    # Convert cloud-config to just contain write_files rather than root
    cc = rendered_templates['cloud-config.yaml']

    # Shouldn't contain any packages. Providers should pull what they need to
    # late bind out of other packages via cc_package_file.
    assert 'package' not in cc
    cc_root = cc.pop('root', [])
    # Make sure write_files exists.
    assert 'write_files' not in cc
    cc['write_files'] = []
    # Do the transform
    for item in cc_root:
        assert item['path'].startswith('/')
        cc['write_files'].append(item)
    rendered_templates['cloud-config.yaml'] = cc

    # Add utils that need to be defined here so they can be bound to locals.
    def add_services(cloudconfig, cloud_init_implementation):
        return add_units(cloudconfig, rendered_templates['dcos-services.yaml'], cloud_init_implementation)

    utils.add_services = add_services

    def add_stable_artifact(filename):
        assert filename not in stable_artifacts + channel_artifacts
        stable_artifacts.append(filename)

    utils.add_stable_artifact = add_stable_artifact

    def add_channel_artifact(filename):
        assert filename not in stable_artifacts + channel_artifacts
        channel_artifacts.append(filename)

    utils.add_channel_artifact = add_channel_artifact

    return Bunch({
        'arguments': argument_dict,
        'cluster_packages': cluster_package_info,
        'stable_artifacts': stable_artifacts,
        'channel_artifacts': channel_artifacts,
        'templates': rendered_templates,
        'utils': utils
    })