def parse_args(settings_dir, app_settings_dir, args=None):
    """Parse command-line input according to the app's spec files.

    Merges the common specs with the application's specs, strips the
    custom (non-clg) fields from them and feeds the result to clg.

    :param settings_dir: path to the common settings directory.
    :param app_settings_dir: path to the base directory holding the
        application's settings (settings/<app_name>/).
    :param args: optional explicit argument list (e.g. for testing)
        used instead of the real CLI input.
    :return: dict of parsed arguments based on the spec files.
    """
    # Merge every spec that applies to this app into a single dict;
    # the common specs are folded on top of the app specs.
    merged_specs = _get_specs(app_settings_dir)
    utils.dict_merge(merged_specs,
                     _get_specs(settings_dir, include_subfolders=False))

    # Snapshot the per-subparser options as-is; this also trims some
    # custom fields from the options before they are passed to clg.
    all_subparser_options = _get_subparsers_options(merged_specs)

    # Hand the trimmed spec (with modified help data) to clg and parse.
    parsed = vars(clg.CommandLine(merged_specs).parse(args))

    active_subcommand = parsed['command0']
    ValueArgument.init_missing_args(merged_specs, parsed, app_settings_dir,
                                    subcommand=active_subcommand)

    # Resolve values only for the sub-parser that was actually invoked.
    override_default_values(
        parsed, all_subparser_options.get(active_subcommand, {}))

    return parsed
def _get_specs(root_folder, include_subfolders=True):
    """Load and merge all spec files from a settings directory.

    :param root_folder: path to the base directory holding the
        application's settings (settings/<app_name>/).
    :param include_subfolders: when True, subfolders of root_folder
        are also searched for spec files; otherwise only root_folder
        itself is scanned.
    :return: dict with all spec files merged into a single dict.
    :raises exceptions.IRFileNotFoundException: if root_folder does
        not exist.
    """
    if not os.path.exists(root_folder):
        raise exceptions.IRFileNotFoundException(root_folder)

    # Collect all app's spec
    spec_files = []
    if include_subfolders:
        for root, _, files in os.walk(root_folder):
            spec_files.extend([os.path.join(root, a_file)
                               for a_file in files
                               if a_file.endswith(SPEC_EXTENSION)])
    else:
        # os.path.join instead of './' + root_folder + '/*...' so that
        # absolute root_folder paths work and separators are not doubled.
        spec_files = glob.glob(
            os.path.join(root_folder, '*' + SPEC_EXTENSION))

    res = {}
    for spec_file in spec_files:
        # TODO (aopincar): print spec_file in debug mode
        with open(spec_file) as fd:
            spec = yaml.load(fd, Loader=yamlordereddictloader.Loader)
        # TODO (aopincar): preserve OrderedDict when merging?!?
        utils.dict_merge(res, spec)

    return res
def _get_specs(root_folder, include_subfolders=True):
    """Load and merge all spec files from a settings directory.

    :param root_folder: path to the base directory holding the
        application's settings (settings/<app_name>/).
    :param include_subfolders: when True, subfolders of root_folder
        are also searched for spec files; otherwise only root_folder
        itself is scanned.
    :return: dict with all spec files merged into a single dict.
    :raises exceptions.IRFileNotFoundException: if root_folder does
        not exist.
    """
    if not os.path.exists(root_folder):
        raise exceptions.IRFileNotFoundException(root_folder)

    # Collect all app's spec
    spec_files = []
    if include_subfolders:
        for root, _, files in os.walk(root_folder):
            spec_files.extend([
                os.path.join(root, a_file)
                for a_file in files if a_file.endswith(SPEC_EXTENSION)
            ])
    else:
        # os.path.join instead of './' + root_folder + '/*...' so that
        # absolute root_folder paths work and separators are not doubled.
        spec_files = glob.glob(
            os.path.join(root_folder, '*' + SPEC_EXTENSION))

    res = {}
    for spec_file in spec_files:
        # TODO (aopincar): print spec_file in debug mode
        with open(spec_file) as fd:
            spec = yaml.load(fd, Loader=yamlordereddictloader.Loader)
        # TODO (aopincar): preserve OrderedDict when merging?!?
        utils.dict_merge(res, spec)

    return res
def _get_specs(app_settings_dir):
    """Load every spec file under the app settings tree into one dict.

    :param app_settings_dir: path to the base directory holding the
        application's settings (settings/<app_name>/).
    :return: dict with all spec files merged into a single dict.
    :raises exceptions.IRFileNotFoundException: if the directory is
        missing.
    """
    if not os.path.exists(app_settings_dir):
        raise exceptions.IRFileNotFoundException(app_settings_dir)

    merged = {}
    # Walk the whole settings tree and fold each spec file found
    # into the accumulated result, in walk order.
    for root, _, files in os.walk(app_settings_dir):
        for file_name in files:
            if not file_name.endswith(SPEC_EXTENSION):
                continue
            spec_path = os.path.join(root, file_name)
            # TODO (aopincar): print spec_path in debug mode
            with open(spec_path) as fd:
                spec = yaml.load(fd, Loader=yamlordereddictloader.Loader)
            # TODO (aopincar): preserve OrderedDict when merging?!?
            utils.dict_merge(merged, spec)
    return merged
def get_settings_dict(self):
    """Build the provisioner settings dict from the parsed CLI args.

    Combines the image and host connection details with the network
    and topology settings files selected on the command line.

    :return: dict ready to be merged into the full settings tree.
    :raises exceptions.IRConfigurationException: if the 'network' or
        'topology' argument was not provided.
    """
    # todo(obaranov) this is virsh specific
    # rework that and make this part of lookup or something.
    image = dict(
        name=self.args['image-file'],
        base_url=self.args['image-server']
    )

    host = dict(
        ssh_host=self.args['host'],
        ssh_user=self.args['ssh-user'],
        ssh_key_file=self.args['ssh-key']
    )

    settings_dict = utils.dict_merge(
        {'provisioner': {'image': image}},
        {'provisioner': {'hosts': {'host1': host}}})

    # load network and image settings
    for arg_dir in ('network', 'topology'):
        if self.args[arg_dir] is None:
            raise exceptions.IRConfigurationException(
                "A value for for the '{}' "
                "argument should be provided!".format(arg_dir))
        with open(set_network(self.args[arg_dir], os.path.join(
                self.settings_dir, arg_dir))) as settings_file:
            # NOTE(review): yaml.load without an explicit Loader —
            # presumably these are trusted local settings files; confirm.
            settings = yaml.load(settings_file)
            utils.dict_merge(settings_dict, settings)

    return settings_dict
def _get_specs(cls, module_name, config):
    """Gets specs files as a dict from settings/<module_name> folder.

    :param module_name: the module name: installer|provisioner|tester
    :param config: application config object holding the paths used
        by _get_all_specs.
    :return: dict with all the module's spec files merged together.
    """
    res = {}
    for spec_file in cls._get_all_specs(config, subfolder=module_name):
        # Use a context manager so the file handle is closed promptly;
        # the original leaked it via yaml.load(open(...)).
        with open(spec_file) as fd:
            spec = yaml.load(fd, Loader=yamlordereddictloader.Loader)
        utils.dict_merge(res, spec)
    return res
def from_files(cls, settings_folders, app_name, *spec_files):
    """Build a Spec instance by reading and merging spec files.

    :param settings_folders: folders the Spec resolves settings from.
    :param app_name: name of the application the spec belongs to.
    :param spec_files: paths of the YAML spec files, merged in order.
    :return: a Spec constructed from the merged content.
    """
    merged = {}
    for path in spec_files:
        with open(path) as stream:
            # An empty YAML document parses to None; treat it as {}.
            content = yaml.load(stream) or {}
            utils.dict_merge(
                merged, content,
                utils.ConflictResolver.unique_append_list_resolver)
    return Spec(merged, settings_folders, app_name)
def lookup(self, settings_files, settings_dict):
    """Resolve !lookup values in settings using other settings files.

    :param settings_files: list of settings files loaded as the base.
    :param settings_dict: dict merged on top of the loaded files.
    :return: the fully merged settings with lookups replaced.
    """
    combined = utils.load_settings_files(settings_files)
    utils.dict_merge(combined, settings_dict)
    # Extra vars from the CLI take part in the lookup resolution too.
    utils.merge_extra_vars(combined, self.args['extra-vars'])

    yamls.replace_lookup(combined)

    return combined
def from_files(cls, settings_folders, app_name, *spec_files):
    """Build a Spec instance by reading and merging spec files.

    :param settings_folders: folders the Spec resolves settings from.
    :param app_name: name of the application the spec belongs to.
    :param spec_files: paths of the YAML spec files, merged in order.
    :return: a Spec constructed from the merged content.
    """
    merged = {}
    for path in spec_files:
        with open(path) as stream:
            # An empty YAML document parses to None; treat it as {}.
            content = yaml.load(stream) or {}
            utils.dict_merge(
                merged,
                content,
                utils.ConflictResolver.unique_append_list_resolver)
    return Spec(merged, settings_folders, app_name)
def lookup(self, settings_files, settings_dict):
    """Resolve !lookup values in settings using other settings files.

    :param settings_files: list of settings files loaded as the base.
    :param settings_dict: dict merged on top of the loaded files.
    :return: the fully merged settings with lookups replaced.
    """
    combined = utils.load_settings_files(settings_files)
    utils.dict_merge(combined, settings_dict)
    # Extra vars (if any) take part in the lookup resolution too.
    utils.merge_extra_vars(combined,
                           self.control_args.get('extra-vars', None))

    yamls.replace_lookup(combined)

    return combined
def test_dict_merge():
    """dict_merge deep-merges the second dict into the first in place."""
    from cli.utils import dict_merge

    first_dict = {
        'a': 1,
        'b': 2,
        'c': {'d': 'foo1', 'e': 'bar', 'list1': ['a', 'b', 'c']},
        'list2': [1, 2, 3],
    }
    second_dict = {
        'a': 2,
        'c': {'d': 'foo2', 'f': 5, 'list1': [3, 4, 5]},
        'g': 'bla',
        5: 'yy',
        'list3': ['a', 2],
    }
    # Second dict wins on conflicts; nested dicts merge key by key.
    expected_result = {
        'a': 2,
        'b': 2,
        'c': {'d': 'foo2', 'e': 'bar', 'f': 5, 'list1': [3, 4, 5]},
        'g': 'bla',
        5: 'yy',
        'list2': [1, 2, 3],
        'list3': ['a', 2],
    }

    dict_merge(first_dict, second_dict)
    assert first_dict == expected_result
def get_config_file_args(self, cli_args):
    """Extract the arguments supplied through a configuration file.

    Walks every received argument; for options whose spec action is
    'read-config' the file content is merged into the result and the
    option itself is removed from the CLI args.

    :param cli_args: dict of parsed CLI arguments (mutated in place).
    :return: dict mapping parser name -> args read from config files.
    """
    from_file = {}
    received = self._iterate_received_arguments(cli_args)
    for parser_name, parser_dict, arg_name, _, option_spec in received:
        from_file.setdefault(parser_name, {})
        if option_spec and option_spec.get("action", "") == "read-config":
            # A config option was given: normalize it and absorb it.
            self._convert_non_cli_args(parser_name, parser_dict[arg_name])
            utils.dict_merge(from_file[parser_name], parser_dict[arg_name])
            # The option itself must not stay among the CLI args.
            parser_dict.pop(arg_name)
    return from_file
def _get_subparsers_options(spec_dict):
    """Collect per-subparser options, stripped of custom parameters.

    Goes over every subparser in the spec and merges its direct
    options with its group options (removing fields such as defaults
    along the way).

    :param spec_dict: the dictionary with keys obtained from spec files
    :return: dict mapping subparser name -> its processed options.
    """
    collected = {}
    for name, params in spec_dict.get('subparsers', {}).iteritems():
        merged_options = _get_parser_options(params)
        # Group-level options are folded into the parser-level ones.
        utils.dict_merge(merged_options, _get_parser_group_options(params))
        collected[name] = merged_options
    return collected
def validate_requires_args(self, args):
    """Check if all the required arguments have been provided.

    An argument counts as missing when it is marked required and has
    no value, when its value still starts with the 'Required
    argument.' placeholder, or when it is conditionally required —
    unless it appears among the silenced arguments.

    :param args: dict of parsed arguments keyed by parser name.
    :raises exceptions.IRRequiredArgsMissingException: with a mapping
        of parser name -> missing argument names, if any are missing.
    """

    silent_args = self.get_silent_args(args)

    def validate_parser(parser_name, expected_options, parser_args):
        # Maps the parser name to the list of its missing required args.
        result = collections.defaultdict(list)
        condition_req_args = self._get_conditionally_required_args(
            parser_name, expected_options, args)

        required_str = 'Required argument.'
        for option in expected_options:
            name = option['name']
            option_required = option.get('required', False)
            name_arg = parser_args.get(name)

            # check required options.
            if ((option_required and not name_arg) or
                    (isinstance(name_arg, str) and
                     name_arg.startswith(required_str)) or
                    (name in condition_req_args)) and \
                    (name not in silent_args):
                result[parser_name].append(name)

        return result

    res = {}
    # Only validate parsers that actually received arguments.
    for command_data in self.spec_helper.iterate_parsers():
        cmd_name = command_data['name']
        if cmd_name in args:
            utils.dict_merge(
                res,
                validate_parser(
                    cmd_name,
                    self.spec_helper.get_parser_option_specs(cmd_name),
                    args[cmd_name]))

    # Drop parsers with an empty missing-args list before raising.
    missing_args = {
        cmd_name: args
        for cmd_name, args in res.items()
        if len(args) > 0
    }
    if missing_args:
        raise exceptions.IRRequiredArgsMissingException(missing_args)
def get_config_file_args(self, cli_args):
    """Extract the arguments supplied through a configuration file.

    Walks every received argument; for options whose spec action is
    'read-config' the file content is merged into the result and the
    option itself is removed from the CLI args.

    :param cli_args: dict of parsed CLI arguments (mutated in place).
    :return: dict mapping parser name -> args read from config files.
    """
    from_file = {}
    received = self._iterate_received_arguments(cli_args)
    for parser_name, parser_dict, arg_name, _, option_spec in received:
        from_file.setdefault(parser_name, {})
        if option_spec and option_spec.get('action', '') == 'read-config':
            # A config option was given: normalize it and absorb it.
            self._convert_non_cli_args(parser_name, parser_dict[arg_name])
            utils.dict_merge(from_file[parser_name], parser_dict[arg_name])
            # The option itself must not stay among the CLI args.
            parser_dict.pop(arg_name)
    return from_file
def validate_requires_args(self, args):
    """Check if all the required arguments have been provided.

    An argument counts as missing when it is marked required and has
    no value, when its value still starts with the 'Required
    argument.' placeholder, or when it is conditionally required —
    unless it appears among the silenced arguments.

    :param args: dict of parsed arguments keyed by parser name.
    :raises exceptions.IRRequiredArgsMissingException: with a mapping
        of parser name -> missing argument names, if any are missing.
    """
    silent_args = self.get_silent_args(args)

    def validate_parser(parser_name, expected_options, parser_args):
        # Maps the parser name to the list of its missing required args.
        result = collections.defaultdict(list)
        condition_req_args = self._get_conditionally_required_args(
            parser_name, expected_options, args)
        required_str = 'Required argument.'
        for option in expected_options:
            name = option['name']
            option_required = option.get('required', False)
            name_arg = parser_args.get(name)

            # check required options.
            if (
                (option_required and not name_arg) or
                (isinstance(name_arg, str) and
                 name_arg.startswith(required_str)) or
                (name in condition_req_args)
            ) and (name not in silent_args):
                result[parser_name].append(name)
        return result

    res = {}
    # Only validate parsers that actually received arguments.
    for command_data in self.spec_helper.iterate_parsers():
        cmd_name = command_data['name']
        if cmd_name in args:
            utils.dict_merge(
                res,
                validate_parser(
                    cmd_name,
                    self.spec_helper.get_parser_option_specs(cmd_name),
                    args[cmd_name]))

    # Drop parsers with an empty missing-args list before raising.
    missing_args = {cmd_name: args
                    for cmd_name, args in res.items()
                    if len(args) > 0}
    if missing_args:
        raise exceptions.IRRequiredArgsMissingException(missing_args)
def test_dict_merge():
    """dict_merge deep-merges the second dict into the first in place.

    Uses a plain ``==`` comparison instead of ``not cmp(...)``:
    ``cmp`` was removed in Python 3 and equality is what the check
    actually means (matches the other dict_merge test in this file).
    """
    from cli.utils import dict_merge

    first_dict = {
        'a': 1,
        'b': 2,
        'c': {
            'd': 'foo1',
            'e': 'bar',
            'list1': ['a', 'b', 'c']
        },
        'list2': [1, 2, 3]
    }
    second_dict = {
        'a': 2,
        'c': {
            'd': 'foo2',
            'f': 5,
            'list1': [3, 4, 5]
        },
        'g': 'bla',
        5: 'yy',
        'list3': ['a', 2]
    }
    # Second dict wins on conflicts; nested dicts merge key by key.
    expected_result = {
        'a': 2,
        'b': 2,
        'c': {
            'd': 'foo2',
            'e': 'bar',
            'f': 5,
            'list1': [3, 4, 5]
        },
        'g': 'bla',
        5: 'yy',
        'list2': [1, 2, 3],
        'list3': ['a', 2]
    }

    dict_merge(first_dict, second_dict)
    assert first_dict == expected_result
def main():
    """Entry point: build the installer settings tree and run Ansible.

    Parses CLI args, collects every settings file, merges the
    product/network/image/storage details into one settings dict,
    resolves !lookup references, then dumps the result and (unless
    --dry-run) hands it to the playbook runner.
    """
    args = get_args(ENTRY_POINT)
    settings_files = []

    set_logger_verbosity(args.verbose)

    # Explicit input files are loaded first, in the given order.
    for input_file in args['input'] or []:
        settings_files.append(utils.normalize_file(input_file))

    # The sub-command's own settings file is appended last.
    settings_files.append(os.path.join(get_settings_dir(ENTRY_POINT, args),
                                       args["command0"] + '.yml'))

    # todo(yfried): ospd specific
    settings_dict = set_product_repo(args)

    utils.dict_merge(settings_dict, set_network_details(args))

    utils.dict_merge(settings_dict, set_image(args))

    utils.dict_merge(settings_dict, set_storage(args))

    # Load the network template and graft it into the settings tree.
    net_template = yaml.load(
        open(set_network_template(args["network-template"], os.path.join(
            get_settings_dir(ENTRY_POINT, args), "network", "templates"))))
    settings_dict["installer"]["overcloud"]["network"]["template"] = \
        net_template

    # The storage template path comes from the settings built above.
    storage_template = yaml.load(
        open(set_network_template(
            settings_dict["installer"]["overcloud"]["storage"]["template"],
            os.path.join(get_settings_dir(ENTRY_POINT, args), "storage",
                         "templates"))))
    settings_dict["installer"]["overcloud"]["storage"]["template"] = \
        storage_template

    LOG.debug("All settings files to be loaded:\n%s" % settings_files)

    cli.yamls.Lookup.settings = utils.generate_settings(settings_files)

    utils.merge_extra_vars(cli.yamls.Lookup.settings, args['extra-vars'])

    # todo(yfried): ospd specific
    cli.yamls.Lookup.settings = cli.yamls.Lookup.settings.merge(settings_dict)

    # Resolve the remaining in-string !lookup references.
    cli.yamls.Lookup.in_string_lookup()

    LOG.debug("Dumping settings...")

    output = yaml.safe_dump(cli.yamls.Lookup.settings,
                            default_flow_style=False)

    if args['output']:
        with open(args['output'], 'w') as output_file:
            output_file.write(output)
    else:
        print output

    # playbook execution stage
    if not args['dry-run']:
        # Round-trip through YAML to get plain dicts for Ansible.
        vars(args)['settings'] = yaml.load(yaml.safe_dump(
            cli.yamls.Lookup.settings, default_flow_style=False))
        vars(args)['install'] = True
        cli.execute.ansible_wrapper(args)
def process(self, spec_dict):
    """Strip custom parameters from all subparser options.

    For every subparser in the spec, its direct options and its
    group options are processed (removing parameters such as
    defaults and adding additional help info) and merged together.

    :param spec_dict: the dictionary with keys obtained from spec files
    :return: dict mapping subparser name -> processed options.
    """
    processed = {}
    subparsers = spec_dict.get('subparsers', {})
    for name, params in subparsers.iteritems():
        merged = self._process_options(params, name)

        # Fold in the options declared inside groups, if present.
        from_groups = {}
        for group in params.get('groups', {}):
            from_groups.update(self._process_options(group, name))
        utils.dict_merge(merged, from_groups)

        processed[name] = merged
    return processed
def override_default_values(clg_args, sub_parser_options): """ Collects arguments values from the different sources and resolve values. Each argument value is resolved in the following priority: 1. Explicitly provided from cmd-line 2. Environment variable 3. Provided configuration file. 4. Spec defaults :param clg_args: Dictionary based on cmd-line args parsed by clg :param sub_parser_options: the sub-parser spec options """ # Get the sub-parser's default values # todo(yfried): move to init_missing_args defaults = { option: attributes['default'] for option, attributes in sub_parser_options.iteritems() if 'default' in attributes } # todo(yfried): move this outside # Generate config file if required if clg_args.get('generate-conf-file'): _generate_config_file(file_name=clg_args['generate-conf-file'], subcommand=clg_args['command0'], defaults=defaults) else: # Override defaults with the ini file args if provided file_args = getattr(clg_args.get('from-file'), "value", {}).get(clg_args['command0'], {}) utils.dict_merge(defaults, file_args) # Resolve defaults and load values to clg_args for arg_name, arg_obj in clg_args.iteritems(): if isinstance(arg_obj, ValueArgument): arg_obj.resolve_value(arg_name, defaults) _check_required_arguments(clg_args, sub_parser_options)
def override_default_values(clg_args, sub_parser_options): """ Collects arguments values from the different sources and resolve values. Each argument value is resolved in the following priority: 1. Explicitly provided from cmd-line 2. Environment variable 3. Provided configuration file. 4. Spec defaults :param clg_args: Dictionary based on cmd-line args parsed by clg :param sub_parser_options: the sub-parser spec options """ # Get the sub-parser's default values # todo(yfried): move to init_missing_args defaults = {option: attributes['default'] for option, attributes in sub_parser_options.iteritems() if 'default' in attributes} # todo(yfried): move this outside # Generate config file if required if clg_args.get('generate-conf-file'): _generate_config_file( file_name=clg_args['generate-conf-file'], subcommand=clg_args['command0'], defaults=defaults) else: # Override defaults with the ini file args if provided file_args = getattr(clg_args.get('from-file'), "value", {}).get( clg_args['command0'], {}) utils.dict_merge(defaults, file_args) # Resolve defaults and load values to clg_args for arg_name, arg_obj in clg_args.iteritems(): if isinstance(arg_obj, ValueArgument): arg_obj.resolve_value(arg_name, defaults) _check_required_arguments(clg_args, sub_parser_options)
def parse_args(cls, module_name, config, args=None):
    """Looks for all the specs for specified module and parses the
    commandline input arguments accordingly.

    :param module_name: the module name: installer|provisioner|tester
    :param config: application config object with the settings paths.
    :param args: optional explicit argument list (e.g. for testing).
    :return: dict of parsed arguments for the chosen sub-command.
    """
    cmd = clg.CommandLine(cls._get_specs(module_name, config))
    res_args = vars(cmd.parse(args))

    # always load default values for command0
    default_file = os.path.join(
        config.get('defaults', 'settings'),
        module_name,
        res_args['command0'],
        DEFAULT_INI
    )
    defaults = IniFileType(default_file)[res_args['command0']]

    # override defaults with env variables
    for arg_name, arg_value in res_args.iteritems():
        upper_arg_name = arg_name.upper()
        # Only args left unset on the CLI can come from the env.
        if arg_value is None and upper_arg_name in os.environ:
            defaults[arg_name] = os.getenv(upper_arg_name)

    # override defaults with the ini file args
    if 'from-file' in res_args:
        file_args = res_args['from-file']
        if file_args is not None and res_args['command0'] in file_args:
            defaults = utils.dict_merge(
                file_args[res_args['command0']], defaults)

    # replace defaults with cli
    # NOTE(review): dict_merge lets the second dict win on conflicts,
    # so this appears to override CLI values with defaults — confirm
    # this matches the intended precedence.
    utils.dict_merge(res_args, defaults)

    return res_args
def parse_args(self):
    """Parses all the arguments (cli, file, env) and resolves them.

    :return: tuple of (nested args dict passed to the playbooks,
        control args dict driving the IR logic, list of unknown
        args), or None when a config file was generated and nothing
        should run.
    :raises exceptions.IRUnrecognizedOptionsException: if unknown
        options were supplied on the command line.
    """
    spec_defaults = self.get_spec_defaults()
    env_defaults = self.get_env_defaults()
    cli_args, unknown_args = CliParser.parse_args(self)

    file_args = self.get_config_file_args(cli_args)

    # generate config file and exit
    if self.generate_config_file(cli_args, spec_defaults):
        LOG.warning("Config file has been generated. Exiting.")
        return None

    # print warnings when something was overridden from non-cli source.
    self.validate_arg_sources(
        cli_args, env_defaults, file_args, spec_defaults)

    # todo(obaranov) Pass all the unknown arguments to the ansible
    # For now just raise exception
    if unknown_args:
        raise exceptions.IRUnrecognizedOptionsException(unknown_args)

    # merge defaults into one
    utils.dict_merge(spec_defaults, env_defaults)
    # now filter defaults to have only parser defined in cli
    defaults = {key: spec_defaults[key]
                for key in cli_args.keys()
                if key in spec_defaults}

    # copy cli args with the same name to all parser groups
    self._merge_duplicated_cli_args(cli_args)
    self._merge_duplicated_cli_args(file_args)

    # later merges win: file args beat defaults, cli args beat both.
    utils.dict_merge(defaults, file_args)
    utils.dict_merge(defaults, cli_args)
    self.validate_requires_args(defaults)

    # now resolve complex types.
    self.resolve_custom_types(defaults)
    nested, control = self.get_nested_and_control_args(defaults)
    return nested, control, unknown_args
def parse_args(self):
    """Parses all the arguments (cli, file, env) and resolves them.

    :return: tuple of (nested args dict passed to the playbooks,
        control args dict driving the IR logic, list of unknown
        args), or None when a config file was generated and nothing
        should run.
    :raises exceptions.IRUnrecognizedOptionsException: if unknown
        options were supplied on the command line.
    """
    spec_defaults = self.get_spec_defaults()
    env_defaults = self.get_env_defaults()
    cli_args, unknown_args = CliParser.parse_args(self)

    file_args = self.get_config_file_args(cli_args)

    # generate config file and exit
    if self.generate_config_file(cli_args, spec_defaults):
        LOG.warning("Config file has been generated. Exiting.")
        return None

    # print warnings when something was overridden from non-cli source.
    self.validate_arg_sources(cli_args, env_defaults, file_args,
                              spec_defaults)

    # todo(obaranov) Pass all the unknown arguments to the ansible
    # For now just raise exception
    if unknown_args:
        raise exceptions.IRUnrecognizedOptionsException(unknown_args)

    # merge defaults into one
    utils.dict_merge(spec_defaults, env_defaults)
    # now filter defaults to have only parser defined in cli
    defaults = {
        key: spec_defaults[key]
        for key in cli_args.keys()
        if key in spec_defaults
    }

    # copy cli args with the same name to all parser groups
    self._merge_duplicated_cli_args(cli_args)
    self._merge_duplicated_cli_args(file_args)

    # later merges win: file args beat defaults, cli args beat both.
    utils.dict_merge(defaults, file_args)
    utils.dict_merge(defaults, cli_args)
    self.validate_requires_args(defaults)

    # now resolve complex types.
    self.resolve_custom_types(defaults)
    nested, control = self.get_nested_and_control_args(defaults)
    return nested, control, unknown_args
def test_dict_merge_none_resolver(first, second, expected):
    """With the none resolver, conflicting keys keep their first value."""
    from cli.utils import ConflictResolver
    from cli.utils import dict_merge

    dict_merge(first, second,
               conflict_resolver=ConflictResolver.none_resolver)
    assert first == expected
def override_default_values(app_settings_dir, clg_args, sub_parser_options):
    """Collects arguments values from the different sources and resolve values.

    Each argument value is resolved in the following priority:
    1. Explicitly provided from cmd-line
    2. Environment variable
    3. Provided configuration file.
    4. Spec defaults

    :param app_settings_dir: the application settings dir.
    :param clg_args: Dictionary based on cmd-line args parsed by clg
    :param sub_parser_options: the sub-parser spec options
    """
    # Get the sub-parser's default values
    # todo(yfried): move to init_missing_args
    defaults = {option: attributes['default']
                for option, attributes in sub_parser_options.iteritems()
                if 'default' in attributes}

    # todo(yfried): move this outside
    # Generate config file if required
    if clg_args.get('generate-conf-file'):
        _generate_config_file(
            file_name=clg_args['generate-conf-file'],
            app_settings_dir=app_settings_dir,
            subcommand=clg_args['command0'],
            defaults=defaults,
            all_options=sub_parser_options)
    else:
        # Override defaults with the ini file args if provided
        file_args = getattr(clg_args.get('from-file'), "value", {}).get(
            clg_args['command0'], {})
        utils.dict_merge(defaults, file_args)

        # Override defaults with env values
        # TODO (aopincar): IR env vars should be more uniques
        env_vars = {}
        for arg_name, arg_obj in clg_args.iteritems():
            # e.g. 'ssh-key' is looked up as the SSH_KEY env variable.
            arg_value = os.getenv(arg_name.upper().replace("-", "_"))
            if arg_value:
                env_vars[arg_name] = arg_value
        utils.dict_merge(defaults, env_vars)

        # Resolve defaults and load values to clg_args
        non_cli_args = []
        for arg_name, arg_obj in clg_args.iteritems():
            if isinstance(arg_obj, ValueArgument):
                # check what values were not provided in cli
                if arg_obj.value is None:
                    non_cli_args.append(arg_name)
                arg_obj.resolve_value(arg_name, defaults)

        # Now when we have all the values check what default values we have
        # and show warning to inform user that we took something from
        # defaults
        default_args = set(non_cli_args).difference(
            file_args.keys()).difference(env_vars.keys())
        for arg_name in default_args:
            if arg_name in defaults:
                LOG.warning(
                    "Argument '{}' was not supplied. "
                    "Using: '{}' as default.".format(
                        arg_name, defaults[arg_name]))

        # NOTE(review): placed inside the else so the required-args
        # check is skipped when only generating a config file — confirm.
        _check_required_arguments(clg_args, sub_parser_options)
def main():
    """Entry point: build the installer settings tree and run Ansible.

    Parses CLI args, collects every settings file, merges the
    product/network/image/storage details into one settings dict,
    resolves !lookup references, then dumps the result and (unless
    --dry-run) hands it to the playbook runner.
    """
    args = get_args(ENTRY_POINT)
    settings_files = []

    set_logger_verbosity(args.verbose)

    # Explicit input files are loaded first, in the given order.
    for input_file in args['input'] or []:
        settings_files.append(utils.normalize_file(input_file))

    # The sub-command's own settings file is appended last.
    settings_files.append(
        os.path.join(get_settings_dir(ENTRY_POINT, args),
                     args["command0"] + '.yml'))

    # todo(yfried): ospd specific
    settings_dict = set_product_repo(args)

    utils.dict_merge(settings_dict, set_network_details(args))

    utils.dict_merge(settings_dict, set_image(args))

    utils.dict_merge(settings_dict, set_storage(args))

    # Load the network template and graft it into the settings tree.
    net_template = yaml.load(
        open(
            set_network_template(
                args["network-template"],
                os.path.join(get_settings_dir(ENTRY_POINT, args),
                             "network",
                             "templates"))))
    settings_dict["installer"]["overcloud"]["network"]["template"] = \
        net_template

    # The storage template path comes from the settings built above.
    storage_template = yaml.load(
        open(
            set_network_template(
                settings_dict["installer"]["overcloud"]["storage"]["template"],
                os.path.join(get_settings_dir(ENTRY_POINT, args),
                             "storage",
                             "templates"))))
    settings_dict["installer"]["overcloud"]["storage"]["template"] = \
        storage_template

    LOG.debug("All settings files to be loaded:\n%s" % settings_files)

    cli.yamls.Lookup.settings = utils.generate_settings(settings_files)

    utils.merge_extra_vars(cli.yamls.Lookup.settings, args['extra-vars'])

    # todo(yfried): ospd specific
    cli.yamls.Lookup.settings = cli.yamls.Lookup.settings.merge(settings_dict)

    # Resolve the remaining in-string !lookup references.
    cli.yamls.Lookup.in_string_lookup()

    LOG.debug("Dumping settings...")

    output = yaml.safe_dump(cli.yamls.Lookup.settings,
                            default_flow_style=False)

    if args['output']:
        with open(args['output'], 'w') as output_file:
            output_file.write(output)
    else:
        print output

    # playbook execution stage
    if not args['dry-run']:
        # Round-trip through YAML to get plain dicts for Ansible.
        vars(args)['settings'] = yaml.load(
            yaml.safe_dump(cli.yamls.Lookup.settings,
                           default_flow_style=False))
        vars(args)['install'] = True
        cli.execute.ansible_wrapper(args)
def test_dict_merge_none_resolver(first, second, expected):
    """With the none resolver, conflicting keys keep their first value.

    Uses a plain ``==`` comparison instead of ``not cmp(...)``:
    ``cmp`` was removed in Python 3 and equality is what the check
    actually means (matches the other resolver test in this file).
    """
    from cli.utils import dict_merge, ConflictResolver

    dict_merge(first, second,
               conflict_resolver=ConflictResolver.none_resolver)
    assert first == expected