def _load_vars(self, attr, ds):
    '''
    Vars in a play can be specified either as a dictionary directly, or
    as a list of dictionaries. If the latter, this method will turn the
    list into a single dictionary.
    '''

    def _validate_variable_keys(ds):
        # every key must be a legal variable identifier
        for key in ds:
            if not isidentifier(key):
                raise TypeError("'%s' is not a valid variable name" % key)

    try:
        if ds is None:
            return {}
        if isinstance(ds, dict):
            _validate_variable_keys(ds)
            return combine_vars(self.vars, ds)
        if isinstance(ds, list):
            merged = self.vars
            for entry in ds:
                if not isinstance(entry, dict):
                    raise ValueError
                _validate_variable_keys(entry)
                merged = combine_vars(merged, entry)
            return merged
        # anything else is an unsupported vars structure
        raise ValueError
    except ValueError as e:
        raise AssibleParserError("Vars in a %s must be specified as a dictionary, or a list of dictionaries" % self.__class__.__name__,
                                 obj=ds, orig_exc=e)
    except TypeError as e:
        raise AssibleParserError("Invalid variable name in vars specified for %s: %s" % (self.__class__.__name__, e),
                                 obj=ds, orig_exc=e)
def get_role_params(self, dep_chain=None):
    '''
    Merge role params along the dependency chain, in order, with this
    role's own params merged last.
    '''
    params = {}
    for parent in (dep_chain or []):
        params = combine_vars(params, parent._role_params)
    return combine_vars(params, self._role_params)
def get_default_vars(self, dep_chain=None):
    '''
    Merge default vars from all dependencies first, then from each parent
    in the dependency chain, and finally this role's own defaults.
    '''
    merged = {}
    for dep in self.get_all_dependencies():
        merged = combine_vars(merged, dep.get_default_vars())
    for parent in (dep_chain or []):
        merged = combine_vars(merged, parent._default_vars)
    return combine_vars(merged, self._default_vars)
def parse(self, inventory, loader, path, cache=False):
    ''' parses the inventory file

    Reads the constructed-inventory config at ``path`` and, for every host
    already present in ``inventory``, evaluates the configured ``compose``,
    ``groups`` and ``keyed_groups`` options against that host's variables.

    :param inventory: inventory object whose hosts are processed and to
        which constructed groups are added
    :param loader: data loader (used by the base class)
    :param path: path to the constructed inventory config file
    :param cache: passed through to the base class parse()
    :raises AssibleParserError: wraps any failure raised while processing
    '''
    super(InventoryModule, self).parse(inventory, loader, path, cache=cache)
    self._read_config_data(path)

    # 'strict' decides whether templating errors abort parsing or are skipped
    strict = self.get_option('strict')
    fact_cache = FactCache()
    try:
        # Go over hosts (less var copies)
        for host in inventory.hosts:
            # get available variables to templar
            hostvars = combine_vars(get_group_vars(inventory.hosts[host].get_groups()), inventory.hosts[host].get_vars())

            if host in fact_cache:  # adds facts if cache is active
                hostvars = combine_vars(hostvars, fact_cache[host])

            # create composite vars
            self._set_composite_vars(self.get_option('compose'), hostvars, host, strict=strict)

            # refetch host vars in case new ones have been created above
            hostvars = combine_vars(get_group_vars(inventory.hosts[host].get_groups()), inventory.hosts[host].get_vars())

            # NOTE(review): the merge above uses the module-level FactCache()
            # while this one uses self._cache — presumably intentional, but
            # worth confirming the two caches are meant to differ here.
            if host in self._cache:  # adds facts if cache is active
                hostvars = combine_vars(hostvars, self._cache[host])

            # constructed groups based on conditionals
            self._add_host_to_composed_groups(self.get_option('groups'), hostvars, host, strict=strict)

            # constructed groups based variable values
            self._add_host_to_keyed_groups(self.get_option('keyed_groups'), hostvars, host, strict=strict)

    except Exception as e:
        raise AssibleParserError("failed to parse %s: %s " % (to_native(path), to_native(e)))
def _load_role_yaml(self, subdir, main=None, allow_dir=False):
    '''
    Load vars data from a role subdirectory (e.g. defaults/ or vars/).

    :param subdir: subdirectory of the role to search
    :param main: explicit base file name; when None, 'main' is used and
        extensioned files are preferred over the bare name
    :param allow_dir: when True, all found files are merged together with
        combine_vars; otherwise each load simply replaces earlier data
    :returns: loaded data, or None when the subdirectory does not exist or
        nothing was found and no explicit `main` was requested
    :raises AssibleParserError: when an explicitly requested file is missing
    '''
    file_path = os.path.join(self._role_path, subdir)
    if self._loader.path_exists(file_path) and self._loader.is_directory(file_path):
        # Valid extensions and ordering for roles is hard-coded to maintain
        # role portability
        extensions = ['.yml', '.yaml', '.json']
        # If no <main> is specified by the user, look for files with
        # extensions before bare name. Otherwise, look for bare name first.
        if main is None:
            _main = 'main'
            extensions.append('')
        else:
            _main = main
            extensions.insert(0, '')
        found_files = self._loader.find_vars_files(file_path, _main, extensions, allow_dir)
        if found_files:
            data = {}
            for found in found_files:
                new_data = self._loader.load_from_file(found)
                if new_data and allow_dir:
                    # directory mode: accumulate across files
                    data = combine_vars(data, new_data)
                else:
                    # single-file mode: last load wins
                    data = new_data
            return data
        elif main is not None:
            raise AssibleParserError("Could not find specified file in role: %s/%s" % (subdir, main))
    return None
def _add_host_to_composed_groups(self, groups, variables, host, strict=False): ''' helper to create complex groups for plugins based on jinja2 conditionals, hosts that meet the conditional are added to group''' # process each 'group entry' if groups and isinstance(groups, dict): variables = combine_vars(variables, self.inventory.get_host(host).get_vars()) self.templar.available_variables = variables for group_name in groups: conditional = "{%% if %s %%} True {%% else %%} False {%% endif %%}" % groups[ group_name] group_name = original_safe(group_name, force=True) try: result = boolean(self.templar.template(conditional)) except Exception as e: if strict: raise AssibleParserError( "Could not add host %s to group %s: %s" % (host, group_name, to_native(e))) continue if result: # ensure group exists, use sanitized name group_name = self.inventory.add_group(group_name) # add host to group self.inventory.add_child(group_name, host)
def get_vars(self, dep_chain=None, include_params=True):
    '''
    Merge inherited vars, dependency vars, play-supplied vars and this
    role's own vars, optionally layering role params on top.
    '''
    chain = dep_chain or []
    merged = self.get_inherited_vars(chain)

    for dep in self.get_all_dependencies():
        merged = combine_vars(merged, dep.get_vars(include_params=include_params))

    merged = combine_vars(merged, self.vars)
    merged = combine_vars(merged, self._role_vars)

    if include_params:
        merged = combine_vars(merged, self.get_role_params(dep_chain=chain))
    return merged
def test_combine_vars_improper_args(self):
    '''Non-dict arguments must raise AssibleError under both hash behaviours.'''
    for behaviour in ('replace', 'merge'):
        with mock.patch('assible.constants.DEFAULT_HASH_BEHAVIOUR', behaviour):
            with self.assertRaises(AssibleError):
                combine_vars([1, 2, 3], dict(a=1))
            with self.assertRaises(AssibleError):
                combine_vars(dict(a=1), [1, 2, 3])
def get_inherited_vars(self, dep_chain=None):
    '''Merge the _role_vars of each parent in the dependency chain, in order.'''
    inherited = {}
    for parent in (dep_chain or []):
        inherited = combine_vars(inherited, parent._role_vars)
    return inherited
def get_vars(self, loader, path, entities, cache=True):
    ''' parses the inventory file '''
    # Loads host_vars/ or group_vars/ files adjacent to the basedir for the
    # given Host/Group entities, merging the data file-by-file.
    if not isinstance(entities, list):
        entities = [entities]

    super(VarsModule, self).get_vars(loader, path, entities)

    data = {}
    for entity in entities:
        if isinstance(entity, Host):
            subdir = 'host_vars'
        elif isinstance(entity, Group):
            subdir = 'group_vars'
        else:
            raise AssibleParserError("Supplied entity must be Host or Group, got %s instead" % (type(entity)))

        # avoid 'chroot' type inventory hostnames /path/to/chroot
        if not entity.name.startswith(os.path.sep):
            try:
                found_files = []
                # load vars
                b_opath = os.path.realpath(to_bytes(os.path.join(self._basedir, subdir)))
                opath = to_text(b_opath)
                # FOUND memoizes discovered file lists per (entity, dir) pair
                key = '%s.%s' % (entity.name, opath)
                if cache and key in FOUND:
                    found_files = FOUND[key]
                else:
                    # no need to do much if path does not exist for basedir
                    if os.path.exists(b_opath):
                        if os.path.isdir(b_opath):
                            self._display.debug("\tprocessing dir %s" % opath)
                            found_files = loader.find_vars_files(opath, entity.name)
                            FOUND[key] = found_files
                        else:
                            self._display.warning("Found %s that is not a directory, skipping: %s" % (subdir, opath))

                for found in found_files:
                    new_data = loader.load_from_file(found, cache=True, unsafe=True)
                    if new_data:  # ignore empty files
                        data = combine_vars(data, new_data)

            except Exception as e:
                raise AssibleParserError(to_native(e))
    return data
def _combine_and_track(data, new_data, source):
    '''
    Wrapper function to update var sources dict and call combine_vars()

    See notes in the VarsWithSources docstring for caveats and limitations
    of the source tracking
    '''
    if C.DEFAULT_DEBUG:
        # Populate var sources dict: every incoming key is attributed to `source`
        _vars_sources.update(dict.fromkeys(new_data, source))
    return combine_vars(data, new_data)
def set_variable(self, key, value):
    '''
    Set a group variable. The special 'assible_group_priority' key is
    routed to set_priority(); mapping-onto-mapping assignments are merged.
    '''
    if key == 'assible_group_priority':
        self.set_priority(int(value))
        return

    existing = self.vars.get(key)
    if isinstance(existing, MutableMapping) and isinstance(value, Mapping):
        self.vars[key] = combine_vars(existing, value)
    else:
        self.vars[key] = value
def _get_group_variables(self, group):
    '''
    Collect variables to display for an inventory group: the group's own
    vars, vars-plugin data from the inventory sources (and from the
    --playbook-dir when given), plus a non-default priority exposed as
    'assible_group_priority'. Internal keys are stripped before returning.
    '''
    # get info from inventory source
    res = group.get_vars()

    # Always load vars plugins
    res = combine_vars(res, get_vars_from_inventory_sources(self.loader, self.inventory._sources, [group], 'all'))
    if context.CLIARGS['basedir']:
        res = combine_vars(res, get_vars_from_path(self.loader, context.CLIARGS['basedir'], [group], 'all'))

    if group.priority != 1:
        res['assible_group_priority'] = group.priority

    return self._remove_internal(res)
def get_group_vars(groups):
    """ Combine all the group vars from a list of inventory groups.

    :param groups: list of assible.inventory.group.Group objects
    :rtype: dict
    """
    merged = {}
    # sort_groups fixes the merge order before folding the vars together
    for grp in sort_groups(groups):
        merged = combine_vars(merged, grp.get_vars())
    return merged
def set_host_variable(self, host, varname, value):
    ''' Sets a value in the vars_cache for a host. '''
    host_cache = self._vars_cache.setdefault(host, dict())
    existing = host_cache.get(varname)
    # merge mapping-onto-mapping; anything else is a plain overwrite
    if isinstance(existing, MutableMapping) and isinstance(value, MutableMapping):
        self._vars_cache[host] = combine_vars(host_cache, {varname: value})
    else:
        host_cache[varname] = value
def parse_sources(self, cache=False):
    ''' iterate over inventory sources and parse each one to populate it'''

    parsed = False
    # allow for multiple inventory parsing
    for source in self._sources:

        if source:
            if ',' not in source:
                # not an inline host list; normalize the filesystem path
                source = unfrackpath(source, follow=False)
            parse = self.parse_source(source, cache=cache)
            if parse and not parsed:
                parsed = True

    if parsed:
        # do post processing
        self._inventory.reconcile_inventory()
    else:
        if C.INVENTORY_UNPARSED_IS_FAILED:
            raise AssibleError("No inventory was parsed, please check your configuration and options.")
        else:
            display.warning("No inventory was parsed, only implicit localhost is available")

    # layer 'inventory'-stage vars-plugin data on top of parser-set vars
    for group in self.groups.values():
        group.vars = combine_vars(group.vars, get_vars_from_inventory_sources(self._loader, self._sources, [group], 'inventory'))
    for host in self.hosts.values():
        host.vars = combine_vars(host.vars, get_vars_from_inventory_sources(self._loader, self._sources, [host], 'inventory'))
def _get_host_variables(self, host):
    '''
    Collect variables to display for a host. In --export mode only
    directly-defined vars (plus vars-plugin data) are returned; otherwise
    the fully flattened set from the variable manager is used. Internal
    keys are stripped before returning.
    '''
    if context.CLIARGS['export']:
        # only get vars defined directly host
        hostvars = host.get_vars()

        # Always load vars plugins
        hostvars = combine_vars(hostvars, get_vars_from_inventory_sources(self.loader, self.inventory._sources, [host], 'all'))
        if context.CLIARGS['basedir']:
            hostvars = combine_vars(hostvars, get_vars_from_path(self.loader, context.CLIARGS['basedir'], [host], 'all'))
    else:
        # get all vars flattened by host, but skip magic hostvars
        hostvars = self.vm.get_vars(host=host, include_hostvars=False, stage='all')

    return self._remove_internal(hostvars)
def get_vars_from_inventory_sources(loader, sources, entities, stage):
    '''
    Merge vars-plugin data for every usable inventory source path.
    Inline host lists (comma-separated, non-existent paths) are skipped,
    and file sources are replaced by their containing directory.
    '''
    data = {}
    for source in sources:
        if source is None:
            continue
        if ',' in source and not os.path.exists(source):
            # skip host lists
            continue
        if not os.path.isdir(to_bytes(source)):
            # always pass the directory of the inventory source file
            source = os.path.dirname(source)
        data = combine_vars(data, get_vars_from_path(loader, source, entities, stage))
    return data
def reconcile_inventory(self):
    ''' Ensure inventory basic rules, run after updates '''

    display.debug('Reconcile groups and hosts in inventory.')
    self.current_source = None

    group_names = set()
    # set group vars from group_vars/ files and vars plugins
    for g in self.groups:
        group = self.groups[g]
        group_names.add(group.name)

        # ensure all groups inherit from 'all'
        if group.name != 'all' and not group.get_ancestors():
            self.add_child('all', group.name)

    host_names = set()
    # get host vars from host_vars/ files and vars plugins
    for host in self.hosts.values():
        host_names.add(host.name)

        mygroups = host.get_groups()

        if self.groups['ungrouped'] in mygroups:
            # clear ungrouped of any incorrectly stored by parser
            if set(mygroups).difference(set([self.groups['all'], self.groups['ungrouped']])):
                self.groups['ungrouped'].remove_host(host)
        elif not host.implicit:
            # add ungrouped hosts to ungrouped, except implicit
            length = len(mygroups)
            if length == 0 or (length == 1 and self.groups['all'] in mygroups):
                self.add_child('ungrouped', host.name)

        # special case for implicit hosts: they inherit the 'all' group vars
        if host.implicit:
            host.vars = combine_vars(self.groups['all'].get_vars(), host.vars)

    # warn if overloading identifier as both group and host
    for conflict in group_names.intersection(host_names):
        display.warning("Found both group and host with same name: %s" % conflict)

    # invalidate the cached groups dict after any reconciliation
    self._groups_dict_cache = {}
def _compose(self, template, variables):
    ''' helper method for plugins to compose variables for Assible based on jinja2 expression and inventory vars'''
    templar = self.templar

    try:
        use_extra = self.get_option('use_extra_vars')
    except Exception:
        # plugin does not declare the option; fall back to inventory vars only
        use_extra = False

    if use_extra:
        templar.available_variables = combine_vars(variables, self._vars)
    else:
        templar.available_variables = variables

    # wrap the bare expression in the environment's variable delimiters
    expression = '%s%s%s' % (templar.environment.variable_start_string, template, templar.environment.variable_end_string)
    return templar.template(expression, disable_lookups=True)
def get_vars_from_path(loader, path, entities, stage):
    '''
    Merge data from every enabled vars plugin for the given path/entities.

    FQCR-enabled plugins are appended to the auto-discovered list; each
    plugin then decides (via its own 'stage' option or the global
    RUN_VARS_PLUGINS setting) whether it runs at this stage.
    '''
    data = {}

    vars_plugin_list = list(vars_loader.all())
    for plugin_name in C.VARIABLE_PLUGINS_ENABLED:
        if AssibleCollectionRef.is_valid_fqcr(plugin_name):
            vars_plugin = vars_loader.get(plugin_name)
            if vars_plugin is None:
                # Error if there's no play directory or the name is wrong?
                continue
            if vars_plugin not in vars_plugin_list:
                vars_plugin_list.append(vars_plugin)

    for plugin in vars_plugin_list:
        if plugin._load_name not in C.VARIABLE_PLUGINS_ENABLED and getattr(plugin, 'REQUIRES_WHITELIST', False):
            # 2.x plugins shipped with assible should require whitelisting, older or non shipped should load automatically
            continue

        has_stage = hasattr(plugin, 'get_option') and plugin.has_option('stage')

        # if a plugin-specific setting has not been provided, use the global setting
        # older/non shipped plugins that don't support the plugin-specific setting should also use the global setting
        use_global = (has_stage and plugin.get_option('stage') is None) or not has_stage

        if use_global:
            if C.RUN_VARS_PLUGINS == 'demand' and stage == 'inventory':
                continue
            elif C.RUN_VARS_PLUGINS == 'start' and stage == 'task':
                continue
        elif has_stage and plugin.get_option('stage') not in ('all', stage):
            # plugin opted into a specific stage that doesn't match
            continue

        data = combine_vars(data, get_plugin_vars(loader, plugin, path, entities))

    return data
def test_combine_vars_merge(self):
    '''Each (a, b) pair must merge into the expected result under 'merge'.'''
    with mock.patch('assible.constants.DEFAULT_HASH_BEHAVIOUR', 'merge'):
        for case in self.combine_vars_merge_data:
            self.assertEqual(combine_vars(case['a'], case['b']), case['result'])
def get_vars(self, play=None, host=None, task=None, include_hostvars=True, include_delegate_to=True, use_cache=True, _hosts=None, _hosts_all=None, stage='task'):
    '''
    Returns the variables, with optional "context" given via the parameters
    for the play, host, and task (which could possibly result in different
    sets of variables being returned due to the additional context).

    The order of precedence is:
    - play->roles->get_default_vars (if there is a play context)
    - group_vars_files[host] (if there is a host context)
    - host_vars_files[host] (if there is a host context)
    - host->get_vars (if there is a host context)
    - fact_cache[host] (if there is a host context)
    - play vars (if there is a play context)
    - play vars_files (if there's no host context, ignore
      file names that cannot be templated)
    - task->get_vars (if there is a task context)
    - vars_cache[host] (if there is a host context)
    - extra vars

    ``_hosts`` and ``_hosts_all`` should be considered private args, with
    only internal trusted callers relying on the functionality they provide.
    These arguments may be removed at a later date without a deprecation
    period and without warning.
    '''
    display.debug("in VariableManager get_vars()")

    all_vars = dict()
    magic_variables = self._get_magic_variables(
        play=play,
        host=host,
        task=task,
        include_hostvars=include_hostvars,
        include_delegate_to=include_delegate_to,
        _hosts=_hosts,
        _hosts_all=_hosts_all,
    )

    _vars_sources = {}

    def _combine_and_track(data, new_data, source):
        '''
        Wrapper function to update var sources dict and call combine_vars()

        See notes in the VarsWithSources docstring for caveats and limitations of the source tracking
        '''
        if C.DEFAULT_DEBUG:
            # Populate var sources dict
            for key in new_data:
                _vars_sources[key] = source
        return combine_vars(data, new_data)

    # default for all cases
    basedirs = []
    if self.safe_basedir:  # avoid adhoc/console loading cwd
        basedirs = [self._loader.get_basedir()]

    if play:
        # first we compile any vars specified in defaults/main.yml
        # for all roles within the specified play
        for role in play.get_roles():
            all_vars = _combine_and_track(all_vars, role.get_default_vars(), "role '%s' defaults" % role.name)

    if task:
        # set basedirs
        if C.PLAYBOOK_VARS_ROOT == 'all':  # should be default
            basedirs = task.get_search_path()
        elif C.PLAYBOOK_VARS_ROOT in ('bottom', 'playbook_dir'):  # only option in 2.4.0
            basedirs = [task.get_search_path()[0]]
        elif C.PLAYBOOK_VARS_ROOT != 'top':
            # preserves default basedirs, only option pre 2.3
            raise AssibleError('Unknown playbook vars logic: %s' % C.PLAYBOOK_VARS_ROOT)

        # if we have a task in this context, and that task has a role, make
        # sure it sees its defaults above any other roles, as we previously
        # (v1) made sure each task had a copy of its roles default vars
        if task._role is not None and (play or task.action == 'include_role'):
            all_vars = _combine_and_track(all_vars, task._role.get_default_vars(dep_chain=task.get_dep_chain()),
                                          "role '%s' defaults" % task._role.name)

    if host:
        # THE 'all' group and the rest of groups for a host, used below
        all_group = self._inventory.groups.get('all')
        host_groups = sort_groups([g for g in host.get_groups() if g.name not in ['all']])

        def _get_plugin_vars(plugin, path, entities):
            # Call the v2 plugin API, falling back to the legacy
            # get_host_vars/get_group_vars API for older plugins.
            data = {}
            try:
                data = plugin.get_vars(self._loader, path, entities)
            except AttributeError:
                try:
                    for entity in entities:
                        if isinstance(entity, Host):
                            data.update(plugin.get_host_vars(entity.name))
                        else:
                            data.update(plugin.get_group_vars(entity.name))
                except AttributeError:
                    if hasattr(plugin, 'run'):
                        raise AssibleError("Cannot use v1 type vars plugin %s from %s" % (plugin._load_name, plugin._original_path))
                    else:
                        raise AssibleError("Invalid vars plugin %s from %s" % (plugin._load_name, plugin._original_path))
            return data

        # internal functions that actually do the work
        def _plugins_inventory(entities):
            ''' merges all entities by inventory source '''
            return get_vars_from_inventory_sources(self._loader, self._inventory._sources, entities, stage)

        def _plugins_play(entities):
            ''' merges all entities adjacent to play '''
            data = {}
            for path in basedirs:
                data = _combine_and_track(data, get_vars_from_path(self._loader, path, entities, stage), "path '%s'" % path)
            return data

        # configurable functions that are sortable via config, remember to add to _ALLOWED if expanding this list
        def all_inventory():
            return all_group.get_vars()

        def all_plugins_inventory():
            return _plugins_inventory([all_group])

        def all_plugins_play():
            return _plugins_play([all_group])

        def groups_inventory():
            ''' gets group vars from inventory '''
            return get_group_vars(host_groups)

        def groups_plugins_inventory():
            ''' gets plugin sources from inventory for groups '''
            return _plugins_inventory(host_groups)

        def groups_plugins_play():
            ''' gets plugin sources from play for groups '''
            return _plugins_play(host_groups)

        def plugins_by_groups():
            '''
            merges all plugin sources by group,
            This should be used instead, NOT in combination with the other groups_plugins* functions
            '''
            # NOTE(review): data[group] is read before it is ever assigned, so
            # the first _combine_and_track call would raise KeyError — confirm
            # whether this precedence entry is actually exercised anywhere.
            data = {}
            for group in host_groups:
                data[group] = _combine_and_track(data[group], _plugins_inventory(group), "inventory group_vars for '%s'" % group)
                data[group] = _combine_and_track(data[group], _plugins_play(group), "playbook group_vars for '%s'" % group)
            return data

        # Merge groups as per precedence config
        # only allow to call the functions we want exposed
        for entry in C.VARIABLE_PRECEDENCE:
            if entry in self._ALLOWED:
                display.debug('Calling %s to load vars for %s' % (entry, host.name))
                all_vars = _combine_and_track(all_vars, locals()[entry](), "group vars, precedence entry '%s'" % entry)
            else:
                display.warning('Ignoring unknown variable precedence entry: %s' % (entry))

        # host vars, from inventory, inventory adjacent and play adjacent via plugins
        all_vars = _combine_and_track(all_vars, host.get_vars(), "host vars for '%s'" % host)
        all_vars = _combine_and_track(all_vars, _plugins_inventory([host]), "inventory host_vars for '%s'" % host)
        all_vars = _combine_and_track(all_vars, _plugins_play([host]), "playbook host_vars for '%s'" % host)

        # finally, the facts caches for this host, if it exists
        # TODO: cleaning of facts should eventually become part of taskresults instead of vars
        try:
            facts = wrap_var(self._fact_cache.get(host.name, {}))
            all_vars.update(namespace_facts(facts))

            # push facts to main namespace
            if C.INJECT_FACTS_AS_VARS:
                all_vars = _combine_and_track(all_vars, wrap_var(clean_facts(facts)), "facts")
            else:
                # always 'promote' assible_local
                all_vars = _combine_and_track(all_vars, wrap_var({'assible_local': facts.get('assible_local', {})}), "facts")
        except KeyError:
            pass

    if play:
        all_vars = _combine_and_track(all_vars, play.get_vars(), "play vars")

        vars_files = play.get_vars_files()
        try:
            for vars_file_item in vars_files:
                # create a set of temporary vars here, which incorporate the extra
                # and magic vars so we can properly template the vars_files entries
                temp_vars = combine_vars(all_vars, self._extra_vars)
                temp_vars = combine_vars(temp_vars, magic_variables)
                templar = Templar(loader=self._loader, variables=temp_vars)

                # we assume each item in the list is itself a list, as we
                # support "conditional includes" for vars_files, which mimics
                # the with_first_found mechanism.
                vars_file_list = vars_file_item
                if not isinstance(vars_file_list, list):
                    vars_file_list = [vars_file_list]

                # now we iterate through the (potential) files, and break out
                # as soon as we read one from the list. If none are found, we
                # raise an error, which is silently ignored at this point.
                try:
                    for vars_file in vars_file_list:
                        vars_file = templar.template(vars_file)
                        if not (isinstance(vars_file, Sequence)):
                            raise AssibleError(
                                "Invalid vars_files entry found: %r\n"
                                "vars_files entries should be either a string type or "
                                "a list of string types after template expansion" % vars_file
                            )
                        try:
                            data = preprocess_vars(self._loader.load_from_file(vars_file, unsafe=True))
                            if data is not None:
                                for item in data:
                                    all_vars = _combine_and_track(all_vars, item, "play vars_files from '%s'" % vars_file)
                            break
                        except AssibleFileNotFound:
                            # we continue on loader failures
                            continue
                        except AssibleParserError:
                            raise
                    else:
                        # if include_delegate_to is set to False, we ignore the missing
                        # vars file here because we're working on a delegated host
                        if include_delegate_to:
                            raise AssibleFileNotFound("vars file %s was not found" % vars_file_item)
                except (UndefinedError, AssibleUndefinedVariable):
                    if host is not None and self._fact_cache.get(host.name, dict()).get('module_setup') and task is not None:
                        raise AssibleUndefinedVariable("an undefined variable was found when attempting to template the vars_files item '%s'"
                                                       % vars_file_item, obj=vars_file_item)
                    else:
                        # we do not have a full context here, and the missing variable could be because of that
                        # so just show a warning and continue
                        display.vvv("skipping vars_file '%s' due to an undefined variable" % vars_file_item)
                        continue

                display.vvv("Read vars_file '%s'" % vars_file_item)
        except TypeError:
            raise AssibleParserError("Error while reading vars files - please supply a list of file names. "
                                     "Got '%s' of type %s" % (vars_files, type(vars_files)))

        # By default, we now merge in all vars from all roles in the play,
        # unless the user has disabled this via a config option
        if not C.DEFAULT_PRIVATE_ROLE_VARS:
            for role in play.get_roles():
                all_vars = _combine_and_track(all_vars, role.get_vars(include_params=False), "role '%s' vars" % role.name)

    # next, we merge in the vars from the role, which will specifically
    # follow the role dependency chain, and then we merge in the tasks
    # vars (which will look at parent blocks/task includes)
    if task:
        if task._role:
            all_vars = _combine_and_track(all_vars, task._role.get_vars(task.get_dep_chain(), include_params=False),
                                          "role '%s' vars" % task._role.name)
        all_vars = _combine_and_track(all_vars, task.get_vars(), "task vars")

    # next, we merge in the vars cache (include vars) and nonpersistent
    # facts cache (set_fact/register), in that order
    if host:
        # include_vars non-persistent cache
        all_vars = _combine_and_track(all_vars, self._vars_cache.get(host.get_name(), dict()), "include_vars")
        # fact non-persistent cache
        all_vars = _combine_and_track(all_vars, self._nonpersistent_fact_cache.get(host.name, dict()), "set_fact")

    # next, we merge in role params and task include params
    if task:
        if task._role:
            all_vars = _combine_and_track(all_vars, task._role.get_role_params(task.get_dep_chain()),
                                          "role '%s' params" % task._role.name)

        # special case for include tasks, where the include params
        # may be specified in the vars field for the task, which should
        # have higher precedence than the vars/np facts above
        all_vars = _combine_and_track(all_vars, task.get_include_params(), "include params")

    # extra vars
    all_vars = _combine_and_track(all_vars, self._extra_vars, "extra vars")

    # magic variables
    all_vars = _combine_and_track(all_vars, magic_variables, "magic vars")

    # special case for the 'environment' magic variable, as someone
    # may have set it as a variable and we don't want to stomp on it
    if task:
        all_vars['environment'] = task.environment

    # 'vars' magic var
    if task or play:
        # has to be copy, otherwise recursive ref
        all_vars['vars'] = all_vars.copy()

    # if we have a task and we're delegating to another host, figure out the
    # variables for that host now so we don't have to rely on hostvars later
    if task and task.delegate_to is not None and include_delegate_to:
        all_vars['assible_delegated_vars'], all_vars['_assible_loop_cache'] = self._get_delegated_vars(play, task, all_vars)

    display.debug("done with get_vars()")

    if C.DEFAULT_DEBUG:
        # Use VarsWithSources wrapper class to display var sources
        return VarsWithSources.new_vars_with_sources(all_vars, _vars_sources)
    else:
        return all_vars
def set_variable(self, key, value):
    '''Assign a host variable, merging when both old and new values are mappings.'''
    existing = self.vars.get(key)
    if isinstance(existing, MutableMapping) and isinstance(value, Mapping):
        self.vars[key] = combine_vars(existing, value)
    else:
        self.vars[key] = value
def run(self, tmp=None, task_vars=None):
    '''
    Action plugin entry point for add_host.

    Parses name/groups/variable arguments (raw params take lowest
    precedence) and returns an 'add_host' result dict for the caller to
    apply; the inventory itself is not modified here.

    :raises AssibleActionFail: on malformed raw params, missing name, or
        an invalid groups specification
    '''
    self._supports_check_mode = True

    result = super(ActionModule, self).run(tmp, task_vars)
    del tmp  # tmp no longer has any effect

    args = self._task.args
    raw = args.pop('_raw_params', {})
    if isinstance(raw, Mapping):
        # TODO: create 'conflict' detection in base class to deal with repeats and aliases and warn user
        args = combine_vars(raw, args)
    else:
        raise AssibleActionFail('Invalid raw parameters passed, requires a dictonary/mapping got a %s' % type(raw))

    # Parse out any hostname:port patterns
    new_name = args.get('name', args.get('hostname', args.get('host', None)))
    if new_name is None:
        raise AssibleActionFail('name, host or hostname needs to be provided')

    display.vv("creating host via 'add_host': hostname=%s" % new_name)

    try:
        name, port = parse_address(new_name, allow_ranges=False)
    except Exception:
        # not a parsable hostname, but might still be usable
        name = new_name
        port = None

    if port:
        args['assible_ssh_port'] = port

    groups = args.get('groupname', args.get('groups', args.get('group', '')))
    # add it to the group if that was specified
    new_groups = []
    if groups:
        if isinstance(groups, list):
            group_list = groups
        elif isinstance(groups, string_types):
            group_list = groups.split(",")
        else:
            raise AssibleActionFail("Groups must be specified as a list.", obj=self._task)

        # NOTE(review): the membership test uses the unstripped name while
        # the stripped name is appended, so "a, a" produces duplicates —
        # confirm whether that matches the intended dedupe behavior.
        for group_name in group_list:
            if group_name not in new_groups:
                new_groups.append(group_name.strip())

    # Add any variables to the new_host
    host_vars = dict()
    special_args = frozenset(('name', 'hostname', 'groupname', 'groups'))
    for k in args.keys():
        if k not in special_args:
            host_vars[k] = args[k]

    result['changed'] = False
    result['add_host'] = dict(host_name=name, groups=new_groups, host_vars=host_vars)
    return result
def get_vars(self):
    '''Return this object's vars merged with its magic vars.'''
    merged = combine_vars(self.vars, self.get_magic_vars())
    return merged
def _add_host_to_keyed_groups(self, keys, variables, host, strict=False):
    ''' helper to create groups for plugins based on variable values and add the corresponding hosts to it'''
    if keys and isinstance(keys, list):
        for keyed in keys:
            if keyed and isinstance(keyed, dict):

                # merge in the host's own inventory vars before templating the key
                variables = combine_vars(variables, self.inventory.get_host(host).get_vars())
                try:
                    key = self._compose(keyed.get('key'), variables)
                except Exception as e:
                    # strict mode aborts parsing; otherwise skip this entry
                    if strict:
                        raise AssibleParserError("Could not generate group for host %s from %s entry: %s" % (host, keyed.get('key'), to_native(e)))
                    continue

                if key:
                    prefix = keyed.get('prefix', '')
                    sep = keyed.get('separator', '_')
                    raw_parent_name = keyed.get('parent_group', None)
                    if raw_parent_name:
                        try:
                            raw_parent_name = self.templar.template(raw_parent_name)
                        except AssibleError as e:
                            if strict:
                                raise AssibleParserError("Could not generate parent group %s for group %s: %s" % (raw_parent_name, key, to_native(e)))
                            continue

                    # a key may expand to one name (string), several (list),
                    # or name/value pairs (mapping)
                    new_raw_group_names = []
                    if isinstance(key, string_types):
                        new_raw_group_names.append(key)
                    elif isinstance(key, list):
                        for name in key:
                            new_raw_group_names.append(name)
                    elif isinstance(key, Mapping):
                        for (gname, gval) in key.items():
                            name = '%s%s%s' % (gname, sep, gval)
                            new_raw_group_names.append(name)
                    else:
                        raise AssibleParserError("Invalid group name format, expected a string or a list of them or dictionary, got: %s" % type(key))

                    for bare_name in new_raw_group_names:
                        # sanitize '<prefix><sep><name>' and register the group
                        gname = self._sanitize_group_name('%s%s%s' % (prefix, sep, bare_name))
                        result_gname = self.inventory.add_group(gname)
                        self.inventory.add_host(host, result_gname)

                        if raw_parent_name:
                            parent_name = self._sanitize_group_name(raw_parent_name)
                            self.inventory.add_group(parent_name)
                            self.inventory.add_child(parent_name, result_gname)

                else:
                    # exclude case of empty list and dictionary, because these are valid constructions
                    # simply no groups need to be constructed, but are still falsy
                    if strict and key not in ([], {}):
                        raise AssibleParserError("No key or key resulted empty for %s in host %s, invalid entry" % (keyed.get('key'), host))

            else:
                raise AssibleParserError("Invalid keyed group entry, it must be a dictionary: %s " % keyed)