def _get_magic_variables(self, loader, play, host, task, include_hostvars, include_delegate_to):
    '''
    Returns a dictionary of so-called "magic" variables in Ansible,
    which are special variables we set internally for use.
    '''

    variables = dict()
    variables['playbook_dir'] = loader.get_basedir()

    if host:
        variables['group_names'] = [group.name for group in host.get_groups() if group.name != 'all']

        if self._inventory is not None:
            variables['groups'] = dict()
            for (group_name, group) in iteritems(self._inventory.groups):
                variables['groups'][group_name] = [h.name for h in group.get_hosts()]

            if include_hostvars:
                hostvars_cache_entry = self._get_cache_entry(play=play)
                if hostvars_cache_entry in HOSTVARS_CACHE:
                    hostvars = HOSTVARS_CACHE[hostvars_cache_entry]
                else:
                    hostvars = HostVars(play=play, inventory=self._inventory, loader=loader, variable_manager=self)
                    HOSTVARS_CACHE[hostvars_cache_entry] = hostvars
                variables['hostvars'] = hostvars
                variables['vars'] = hostvars[host.get_name()]

    if play:
        variables['role_names'] = [r._role_name for r in play.roles]

    if task:
        if task._role:
            variables['role_path'] = task._role._role_path

    if self._inventory is not None:
        variables['inventory_dir'] = self._inventory.basedir()
        variables['inventory_file'] = self._inventory.src()
        if play:
            # add the list of hosts in the play, as adjusted for limit/filters
            # DEPRECATED: play_hosts should be deprecated in favor of ansible_play_hosts,
            # however this would take work in the templating engine, so for now
            # we'll add both so we can give users something transitional to use
            host_list = [x.name for x in self._inventory.get_hosts()]
            variables['play_hosts'] = host_list
            variables['ansible_play_hosts'] = host_list

    # the 'omit' value allows params to be left out if the variable they are based on is undefined
    variables['omit'] = self._omit_token
    variables['ansible_version'] = CLI.version_info(gitinfo=False)

    return variables
def test():
    try:
        loader = DataLoader()
        ds = loader.load(get_values())
        inventory = InventoryManager(loader=loader)
        if ds is not None:
            for key, value in ds.items():
                inventory.groups['all'].set_variable(key, value)
        variable_manager = VariableManager(loader=loader, inventory=inventory,
                                           version_info=CLI.version_info(gitinfo=False))
        templar = Templar(loader=loader)
        templar.available_variables = variable_manager.get_vars(host=Host(name='all'))
        try:
            rendered = templar.template(get_template(), convert_data=False, cache=False)
        except Exception as e:
            rendered = "Template rendering failed: {0}".format(e)
    except Exception as e:
        rendered = "Template syntax error: {0}".format(e)
    result = {"result": str(rendered)}
    return json.dumps(result)
def _get_magic_variables(self, loader, play, host, task, include_hostvars, include_delegate_to):
    '''
    Returns a dictionary of so-called "magic" variables in Ansible,
    which are special variables we set internally for use.
    '''

    variables = dict()
    variables['playbook_dir'] = loader.get_basedir()
    variables['ansible_playbook_python'] = sys.executable

    if host:
        # host already provides some magic vars via host.get_vars()
        if self._inventory:
            variables['groups'] = self._inventory.get_group_dict()

    if play:
        variables['role_names'] = [r._role_name for r in play.roles]

    if task:
        if task._role:
            variables['role_name'] = task._role.get_name()
            variables['role_path'] = task._role._role_path
            variables['role_uuid'] = text_type(task._role._uuid)

    if self._inventory is not None:
        variables['inventory_dir'] = self._inventory.basedir()
        variables['inventory_file'] = self._inventory.src()
        if play:
            # add the list of hosts in the play, as adjusted for limit/filters
            variables['ansible_play_hosts_all'] = [x.name for x in self._inventory.get_hosts(pattern=play.hosts or 'all', ignore_restrictions=True)]
            variables['ansible_play_hosts'] = [x for x in variables['ansible_play_hosts_all'] if x not in play._removed_hosts]
            variables['ansible_play_batch'] = [x.name for x in self._inventory.get_hosts() if x.name not in play._removed_hosts]

            # DEPRECATED: play_hosts should be deprecated in favor of ansible_play_batch,
            # however this would take work in the templating engine, so for now we'll add both
            variables['play_hosts'] = variables['ansible_play_batch']

    # the 'omit' value allows params to be left out if the variable they are based on is undefined
    variables['omit'] = self._omit_token
    variables['ansible_version'] = CLI.version_info(gitinfo=False)

    # Set options vars
    for option, option_value in iteritems(self._options_vars):
        variables[option] = option_value

    if self._hostvars is not None and include_hostvars:
        variables['hostvars'] = self._hostvars

    return variables
def install_dns_playbook():
    content = request.get_json(force=True)
    tagsexc = content['tagsexc']
    ipmanage = content['ipmanage']
    passwd = content['passwd']
    user = content['user']
    logging.info('running ansible-playbook install dns ' + tagsexc + ' ' + ipmanage)
    file = open('app/ansible/hosts', 'w')
    file.write('[dnsservers]\n')
    file.write(ipmanage)
    file.close()
    loader = DataLoader()
    context.CLIARGS = ImmutableDict(tags={tagsexc}, listtags=False, listtasks=False,
                                    listhosts=False, syntax=False, connection='ssh',
                                    module_path=None, forks=10, remote_user=user,
                                    private_key_file=None, ssh_common_args=None,
                                    ssh_extra_args=None, sftp_extra_args=None,
                                    scp_extra_args=None, become=True,
                                    become_method='sudo', become_user='******',
                                    verbosity=True, check=False, start_at_task=None,
                                    extra_vars={
                                        'ansible_ssh_user='******'',
                                        'ansible_ssh_pass='******'',
                                        'ansible_become_pass='******''
                                    })
    inventory = InventoryManager(loader=loader, sources=('app/ansible/hosts'))
    variable_manager = VariableManager(loader=loader, inventory=inventory,
                                       version_info=CLI.version_info(gitinfo=False))
    pbex = PlaybookExecutor(playbooks=['app/ansible/webadmindns.yml'],
                            inventory=inventory,
                            variable_manager=variable_manager,
                            loader=loader,
                            passwords={})
    results = pbex.run()
    db.session.commit()
    return jsonify({'status': results})
def _get_magic_variables(self, loader, play, host, task, include_hostvars, include_delegate_to):
    '''
    Returns a dictionary of so-called "magic" variables in Ansible,
    which are special variables we set internally for use.
    '''

    variables = dict()
    variables['playbook_dir'] = loader.get_basedir()

    if host:
        variables['group_names'] = sorted([group.name for group in host.get_groups() if group.name != 'all'])

        if self._inventory:
            variables['groups'] = self._inventory.get_group_dict()

    if play:
        variables['role_names'] = [r._role_name for r in play.roles]

    if task:
        if task._role:
            variables['role_name'] = task._role.get_name()
            variables['role_path'] = task._role._role_path
            variables['role_uuid'] = text_type(task._role._uuid)

    if self._inventory is not None:
        variables['inventory_dir'] = self._inventory.basedir()
        variables['inventory_file'] = self._inventory.src()
        if play:
            # add the list of hosts in the play, as adjusted for limit/filters
            # DEPRECATED: play_hosts should be deprecated in favor of ansible_play_hosts,
            # however this would take work in the templating engine, so for now
            # we'll add both so we can give users something transitional to use
            host_list = [x.name for x in self._inventory.get_hosts()]
            variables['play_hosts'] = host_list
            variables['ansible_play_hosts'] = host_list

    # the 'omit' value allows params to be left out if the variable they are based on is undefined
    variables['omit'] = self._omit_token
    variables['ansible_version'] = CLI.version_info(gitinfo=False)

    # Set options vars
    for option, option_value in iteritems(self._options_vars):
        variables[option] = option_value

    if self._hostvars is not None and include_hostvars:
        variables['hostvars'] = self._hostvars

    return variables
def execute(self, playbook_source, hosts_source, extra_vars=None, tags=[]):
    context.CLIARGS = ImmutableDict(
        tags=tags, listtags=False, listtasks=False, listhosts=False, syntax=False,
        connection='ssh', module_path=None, forks=100, remote_user='******',
        private_key_file=None, ssh_common_args=None, ssh_extra_args=None,
        sftp_extra_args=None, scp_extra_args=None, become=False,
        become_method='sudo', become_user='******', verbosity=True,
        check=False, start_at_task=None,
    )

    with NamedTemporaryFile(mode='w') as hosts_file:
        hosts_file.write(hosts_source)
        hosts_file.seek(0)
        inventory = InventoryManager(loader=self.loader, sources=(hosts_file.name, ))
        variable_manager = VariableManager(loader=self.loader, inventory=inventory,
                                           version_info=CLI.version_info(gitinfo=False))
        if extra_vars:
            variable_manager._extra_vars = extra_vars

        playbook = os.path.abspath("playbooks/" + playbook_source)
        pbex = PlaybookExecutor(playbooks=[playbook],
                                inventory=inventory,
                                variable_manager=variable_manager,
                                loader=self.loader,
                                passwords={})
        result_callback = ResultCallback()
        pbex._tqm._stdout_callback = result_callback
        result_code = pbex.run()

    # Remove ansible tmpdir
    shutil.rmtree(constants.DEFAULT_LOCAL_TMP, True)

    return result_code, result_callback
def _get_magic_variables(self, loader, play, host, task, include_hostvars, include_delegate_to):
    '''
    Returns a dictionary of so-called "magic" variables in Ansible,
    which are special variables we set internally for use.
    '''

    variables = dict()
    variables['playbook_dir'] = loader.get_basedir()
    variables['ansible_playbook_python'] = sys.executable

    if host:
        variables['group_names'] = sorted([group.name for group in host.get_groups() if group.name != 'all'])

        if self._inventory:
            variables['groups'] = self._inventory.get_group_dict()

    if play:
        variables['role_names'] = [r._role_name for r in play.roles]

    if task:
        if task._role:
            variables['role_name'] = task._role.get_name()
            variables['role_path'] = task._role._role_path
            variables['role_uuid'] = text_type(task._role._uuid)

    if self._inventory is not None:
        variables['inventory_dir'] = self._inventory.basedir()
        variables['inventory_file'] = self._inventory.src()
        if play:
            # add the list of hosts in the play, as adjusted for limit/filters
            variables['ansible_play_hosts_all'] = [x.name for x in self._inventory.get_hosts(pattern=play.hosts or 'all', ignore_restrictions=True)]
            variables['ansible_play_hosts'] = [x for x in variables['ansible_play_hosts_all'] if x not in play._removed_hosts]
            variables['ansible_play_batch'] = [x.name for x in self._inventory.get_hosts() if x.name not in play._removed_hosts]

            # DEPRECATED: play_hosts should be deprecated in favor of ansible_play_batch,
            # however this would take work in the templating engine, so for now we'll add both
            variables['play_hosts'] = variables['ansible_play_batch']

    # the 'omit' value allows params to be left out if the variable they are based on is undefined
    variables['omit'] = self._omit_token
    variables['ansible_version'] = CLI.version_info(gitinfo=False)

    # Set options vars
    for option, option_value in iteritems(self._options_vars):
        variables[option] = option_value

    if self._hostvars is not None and include_hostvars:
        variables['hostvars'] = self._hostvars

    return variables
def _run_ansible(playbook: str, variables: dict = None):
    if variables is None:
        variables = {}

    from ansible import context
    from ansible.cli import CLI
    from ansible.module_utils.common.collections import ImmutableDict
    from ansible.executor.playbook_executor import PlaybookExecutor
    from ansible.parsing.dataloader import DataLoader
    from ansible.inventory.manager import InventoryManager
    from ansible.vars.manager import VariableManager

    loader = DataLoader()
    context.CLIARGS = ImmutableDict(
        tags={}, listtags=False, listtasks=False, listhosts=False, syntax=False,
        connection="ssh", module_path=None, forks=100, remote_user="******",
        private_key_file=None, ssh_common_args=None, ssh_extra_args=None,
        sftp_extra_args=None, scp_extra_args=None, become=True,
        become_method="sudo", become_user="******", verbosity=True,
        check=False, start_at_task=None,
    )
    inventory = InventoryManager(loader=loader, sources=())
    variable_manager = VariableManager(loader=loader, inventory=inventory,
                                       version_info=CLI.version_info(gitinfo=False))
    variable_manager._extra_vars = variables
    pbex = PlaybookExecutor(
        playbooks=[os.path.join(SETUP_DIR, "ansible", playbook)],
        inventory=inventory,
        variable_manager=variable_manager,
        loader=loader,
        passwords={},
    )
    return pbex.run()
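# Usage sketch for _run_ansible() above (not from the original source): the playbook
# name and extra variable below are placeholders; the real playbooks are expected to
# live under SETUP_DIR/ansible.
rc = _run_ansible("site.yml", {"app_env": "staging"})   # both values are illustrative
if rc != 0:
    raise RuntimeError("playbook failed with exit code %d" % rc)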
def _get_magic_variables(self, loader, play, host, task, include_hostvars, include_delegate_to):
    '''
    Returns a dictionary of so-called "magic" variables in Ansible,
    which are special variables we set internally for use.
    '''

    variables = dict()
    variables['playbook_dir'] = loader.get_basedir()

    if host:
        variables['group_names'] = [group.name for group in host.get_groups()]

        if self._inventory is not None:
            variables['groups'] = dict()
            for (group_name, group) in iteritems(self._inventory.groups):
                variables['groups'][group_name] = [h.name for h in group.get_hosts()]

            if include_hostvars:
                hostvars_cache_entry = self._get_cache_entry(play=play)
                if hostvars_cache_entry in HOSTVARS_CACHE:
                    hostvars = HOSTVARS_CACHE[hostvars_cache_entry]
                else:
                    hostvars = HostVars(play=play, inventory=self._inventory, loader=loader, variable_manager=self)
                    HOSTVARS_CACHE[hostvars_cache_entry] = hostvars
                variables['hostvars'] = hostvars
                variables['vars'] = hostvars[host.get_name()]

    if task:
        if task._role:
            variables['role_path'] = task._role._role_path

    if self._inventory is not None:
        variables['inventory_dir'] = self._inventory.basedir()
        if play:
            # add the list of hosts in the play, as adjusted for limit/filters
            # DEPRECATED: play_hosts should be deprecated in favor of ansible_play_hosts,
            # however this would take work in the templating engine, so for now
            # we'll add both so we can give users something transitional to use
            host_list = [x.name for x in self._inventory.get_hosts()]
            variables['play_hosts'] = host_list
            variables['ansible_play_hosts'] = host_list

    # the 'omit' value allows params to be left out if the variable they are based on is undefined
    variables['omit'] = self._omit_token
    variables['ansible_version'] = CLI.version_info(gitinfo=False)

    return variables
def __init__(self, host_list, callback=None):
    Options = namedtuple('Options', [
        'connection', 'module_path', 'forks', 'become', 'become_method',
        'become_user', 'check', 'diff', 'listtags', 'listtasks', 'listhosts',
        'syntax'
    ])
    self.logger = logging.getLogger('copilot')

    # initialize needed objects
    self.loader = DataLoader()
    self.options = Options(syntax=False, listtags=False, listtasks=False,
                           listhosts=False, connection='ssh', module_path='',
                           forks=100, become=True, become_method='sudo',
                           become_user='******', check=False, diff=False)

    # create inventory and pass to variable manager
    self.inventory = InventoryManager(loader=self.loader, sources=host_list)
    self.host_list = host_list
    self.variable_manager = VariableManager(loader=self.loader, inventory=self.inventory)

    # from ansible 2.4 the ansible_version is set in the cli module, and
    # since we're using the api we need to set it explicitly to make it
    # available to any playbooks we're asked to run
    self.variable_manager.extra_vars = {
        "ansible_version": cli.version_info(gitinfo=False)
    }

    self.callback = callback
    self.pb_file = None
    self.playbook = None
    self.rc = 0
def cluster_playbook_executer():
    loader = DataLoader()
    playbook_base_path = os.getcwd()
    context.CLIARGS = ImmutableDict(tags={}, listtags=False, listtasks=False,
                                    listhosts=False, syntax=False, connection='ssh',
                                    module_path=None, forks=100, remote_user='******',
                                    private_key_file=None, ssh_common_args=None,
                                    ssh_extra_args=None, sftp_extra_args=None,
                                    scp_extra_args=None, become=True,
                                    become_method='sudo', become_user='******',
                                    verbosity=True, check=False, start_at_task=None)
    inventory = InventoryManager(
        loader=loader,
        sources=(playbook_base_path + '/kubespray/inventory/eks-automated-cluster/inventory.ini', ))
    variable_manager = VariableManager(loader=loader, inventory=inventory,
                                       version_info=CLI.version_info(gitinfo=False))
    pbex = PlaybookExecutor(playbooks=[playbook_base_path + '/kubespray/cluster.yml'],
                            inventory=inventory,
                            variable_manager=variable_manager,
                            loader=loader,
                            passwords={})
    results = pbex.run()
def playthebook(book, host):
    loader = DataLoader()
    context.CLIARGS = ImmutableDict(tags={}, listtags=False, listtasks=False,
                                    listhosts=False, syntax=False, connection='ssh',
                                    module_path=None, forks=100, remote_user='******',
                                    private_key_file=None, ssh_common_args=None,
                                    ssh_extra_args=None, sftp_extra_args=None,
                                    scp_extra_args=None, become=True,
                                    become_method='sudo', become_user='******',
                                    verbosity=True, check=False, start_at_task=None)
    rc = ResultCallback()
    inventory = InventoryManager(loader=loader, sources=('inventory', ))
    variable_manager = VariableManager(loader=loader, inventory=inventory,
                                       version_info=CLI.version_info(gitinfo=False))
    pbex = PlaybookExecutor(playbooks=['play.yml'],
                            inventory=inventory,
                            variable_manager=variable_manager,
                            loader=loader,
                            passwords={})
    pbex._tqm._stdout_callback = rc
    return pbex
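# Usage sketch (not from the original source): playthebook() only wires up the
# executor and returns it, so the caller is expected to invoke run() and read results
# through the callback it attached. Argument values here are placeholders.
pbex = playthebook('play.yml', 'somehost')        # arguments are illustrative only
callback = pbex._tqm._stdout_callback             # the ResultCallback attached above
exit_code = pbex.run()                            # 0 means no failed/unreachable hosts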
def get_vars(self, loader, play=None, host=None, task=None, include_hostvars=True, use_cache=True):
    '''
    Returns the variables, with optional "context" given via the parameters
    for the play, host, and task (which could possibly result in different
    sets of variables being returned due to the additional context).

    The order of precedence is:
    - play->roles->get_default_vars (if there is a play context)
    - group_vars_files[host] (if there is a host context)
    - host_vars_files[host] (if there is a host context)
    - host->get_vars (if there is a host context)
    - fact_cache[host] (if there is a host context)
    - play vars (if there is a play context)
    - play vars_files (if there's no host context, ignore file names that cannot be templated)
    - task->get_vars (if there is a task context)
    - vars_cache[host] (if there is a host context)
    - extra vars
    '''

    debug("in VariableManager get_vars()")
    cache_entry = self._get_cache_entry(play=play, host=host, task=task)
    if cache_entry in CACHED_VARS and use_cache:
        debug("vars are cached, returning them now")
        return CACHED_VARS[cache_entry]

    all_vars = defaultdict(dict)

    if play:
        # first we compile any vars specified in defaults/main.yml
        # for all roles within the specified play
        for role in play.get_roles():
            all_vars = combine_vars(all_vars, role.get_default_vars())

        # if we have a task in this context, and that task has a role, make
        # sure it sees its defaults above any other roles, as we previously
        # (v1) made sure each task had a copy of its roles default vars
        if task and task._role is not None:
            all_vars = combine_vars(all_vars, task._role.get_default_vars())

    if host:
        # next, if a host is specified, we load any vars from group_vars
        # files and then any vars from host_vars files which may apply to
        # this host or the groups it belongs to

        # we merge in vars from groups specified in the inventory (INI or script)
        all_vars = combine_vars(all_vars, host.get_group_vars())

        # then we merge in the special 'all' group_vars first, if they exist
        if 'all' in self._group_vars_files:
            data = preprocess_vars(self._group_vars_files['all'])
            for item in data:
                all_vars = combine_vars(all_vars, item)

        for group in host.get_groups():
            if group.name in self._group_vars_files and group.name != 'all':
                for data in self._group_vars_files[group.name]:
                    data = preprocess_vars(data)
                    for item in data:
                        all_vars = combine_vars(all_vars, item)

        # then we merge in vars from the host specified in the inventory (INI or script)
        all_vars = combine_vars(all_vars, host.get_vars())

        # then we merge in the host_vars/<hostname> file, if it exists
        host_name = host.get_name()
        if host_name in self._host_vars_files:
            for data in self._host_vars_files[host_name]:
                data = preprocess_vars(data)
                for item in data:
                    all_vars = combine_vars(all_vars, item)

        # finally, the facts caches for this host, if it exists
        try:
            host_facts = self._fact_cache.get(host.name, dict())
            for k in host_facts.keys():
                if host_facts[k] is not None and not isinstance(host_facts[k], UnsafeProxy):
                    host_facts[k] = UnsafeProxy(host_facts[k])
            all_vars = combine_vars(all_vars, host_facts)
            all_vars = combine_vars(all_vars, self._nonpersistent_fact_cache.get(host.name, dict()))
        except KeyError:
            pass

    if play:
        all_vars = combine_vars(all_vars, play.get_vars())

        for vars_file_item in play.get_vars_files():
            try:
                # create a set of temporary vars here, which incorporate the
                # extra vars so we can properly template the vars_files entries
                temp_vars = combine_vars(all_vars, self._extra_vars)
                templar = Templar(loader=loader, variables=temp_vars)

                # we assume each item in the list is itself a list, as we
                # support "conditional includes" for vars_files, which mimics
                # the with_first_found mechanism.
                vars_file_list = templar.template(vars_file_item)
                if not isinstance(vars_file_list, list):
                    vars_file_list = [vars_file_list]

                # now we iterate through the (potential) files, and break out
                # as soon as we read one from the list. If none are found, we
                # raise an error, which is silently ignored at this point.
                for vars_file in vars_file_list:
                    data = preprocess_vars(loader.load_from_file(vars_file))
                    if data is not None:
                        for item in data:
                            all_vars = combine_vars(all_vars, item)
                        break
                else:
                    raise AnsibleError("vars file %s was not found" % vars_file_item)
            except UndefinedError:
                continue

        if not C.DEFAULT_PRIVATE_ROLE_VARS:
            for role in play.get_roles():
                all_vars = combine_vars(all_vars, role.get_vars())

    if task:
        if task._role:
            all_vars = combine_vars(all_vars, task._role.get_vars())
        all_vars = combine_vars(all_vars, task.get_vars())

    if host:
        all_vars = combine_vars(all_vars, self._vars_cache.get(host.get_name(), dict()))

    all_vars = combine_vars(all_vars, self._extra_vars)

    # FIXME: make sure all special vars are here
    # Finally, we create special vars
    all_vars['playbook_dir'] = loader.get_basedir()

    if host:
        all_vars['groups'] = [group.name for group in host.get_groups()]

        if self._inventory is not None:
            all_vars['groups'] = self._inventory.groups_list()
            if include_hostvars:
                hostvars = HostVars(vars_manager=self, play=play, inventory=self._inventory, loader=loader)
                all_vars['hostvars'] = hostvars

    if task:
        if task._role:
            all_vars['role_path'] = task._role._role_path

    if self._inventory is not None:
        all_vars['inventory_dir'] = self._inventory.basedir()
        if play:
            # add the list of hosts in the play, as adjusted for limit/filters
            # DEPRECATED: play_hosts should be deprecated in favor of ansible_play_hosts,
            # however this would take work in the templating engine, so for now
            # we'll add both so we can give users something transitional to use
            host_list = [x.name for x in self._inventory.get_hosts()]
            all_vars['play_hosts'] = host_list
            all_vars['ansible_play_hosts'] = host_list

    # the 'omit' value allows params to be left out if the variable they are based on is undefined
    all_vars['omit'] = self._omit_token

    all_vars['ansible_version'] = CLI.version_info(gitinfo=False)

    if 'hostvars' in all_vars and host:
        all_vars['vars'] = all_vars['hostvars'][host.get_name()]

    #CACHED_VARS[cache_entry] = all_vars

    debug("done with get_vars()")
    return all_vars
class VariableManager:

    def __init__(self):
        self._fact_cache = FactCache()
        self._vars_cache = defaultdict(dict)
        self._extra_vars = defaultdict(dict)
        self._host_vars_files = defaultdict(dict)
        self._group_vars_files = defaultdict(dict)
        self._inventory = None
        self._omit_token = '__omit_place_holder__%s' % sha1(os.urandom(64)).hexdigest()

    def _get_cache_entry(self, play=None, host=None, task=None):
        play_id = "NONE"
        if play:
            play_id = play._uuid

        host_id = "NONE"
        if host:
            host_id = host.get_name()

        task_id = "NONE"
        if task:
            task_id = task._uuid

        return "PLAY:%s;HOST:%s;TASK:%s" % (play_id, host_id, task_id)

    @property
    def extra_vars(self):
        ''' ensures a clean copy of the extra_vars are made '''
        return self._extra_vars.copy()

    @extra_vars.setter
    def extra_vars(self, value):
        ''' ensures a clean copy of the extra_vars are used to set the value '''
        assert isinstance(value, MutableMapping)
        self._extra_vars = value.copy()

    def set_inventory(self, inventory):
        self._inventory = inventory

    def _preprocess_vars(self, a):
        '''
        Ensures that vars contained in the parameter passed in are
        returned as a list of dictionaries, to ensure for instance
        that vars loaded from a file conform to an expected state.
        '''

        if a is None:
            return None
        elif not isinstance(a, list):
            data = [a]
        else:
            data = a

        for item in data:
            if not isinstance(item, MutableMapping):
                raise AnsibleError("variable files must contain either a dictionary of variables, or a list of dictionaries. Got: %s (%s)" % (a, type(a)))

        return data

    def _validate_both_dicts(self, a, b):
        '''
        Validates that both arguments are dictionaries, or an error is raised.
        '''
        if not (isinstance(a, MutableMapping) and isinstance(b, MutableMapping)):
            raise AnsibleError("failed to combine variables, expected dicts but got a '%s' and a '%s'" % (type(a).__name__, type(b).__name__))

    def _combine_vars(self, a, b):
        '''
        Combines dictionaries of variables, based on the hash behavior
        '''

        self._validate_both_dicts(a, b)

        if C.DEFAULT_HASH_BEHAVIOUR == "merge":
            return self._merge_dicts(a, b)
        else:
            return dict(a.items() + b.items())

    def _merge_dicts(self, a, b):
        '''
        Recursively merges dict b into a, so that keys
        from b take precedence over keys from a.
        '''

        result = dict()

        self._validate_both_dicts(a, b)

        for dicts in a, b:
            # next, iterate over b keys and values
            for k, v in dicts.iteritems():
                # if there's already such key in a
                # and that key contains dict
                if k in result and isinstance(result[k], dict):
                    # merge those dicts recursively
                    result[k] = self._merge_dicts(a[k], v)
                else:
                    # otherwise, just copy a value from b to a
                    result[k] = v

        return result

    def get_vars(self, loader, play=None, host=None, task=None, include_hostvars=True, use_cache=True):
        '''
        Returns the variables, with optional "context" given via the parameters
        for the play, host, and task (which could possibly result in different
        sets of variables being returned due to the additional context).

        The order of precedence is:
        - play->roles->get_default_vars (if there is a play context)
        - group_vars_files[host] (if there is a host context)
        - host_vars_files[host] (if there is a host context)
        - host->get_vars (if there is a host context)
        - fact_cache[host] (if there is a host context)
        - vars_cache[host] (if there is a host context)
        - play vars (if there is a play context)
        - play vars_files (if there's no host context, ignore file names that cannot be templated)
        - task->get_vars (if there is a task context)
        - extra vars
        '''

        debug("in VariableManager get_vars()")
        cache_entry = self._get_cache_entry(play=play, host=host, task=task)
        if cache_entry in CACHED_VARS and use_cache:
            debug("vars are cached, returning them now")
            return CACHED_VARS[cache_entry]

        all_vars = defaultdict(dict)

        if play:
            # first we compile any vars specified in defaults/main.yml
            # for all roles within the specified play
            for role in play.get_roles():
                all_vars = self._combine_vars(all_vars, role.get_default_vars())

        if host:
            # next, if a host is specified, we load any vars from group_vars
            # files and then any vars from host_vars files which may apply to
            # this host or the groups it belongs to

            # we merge in the special 'all' group_vars first, if they exist
            if 'all' in self._group_vars_files:
                data = self._preprocess_vars(self._group_vars_files['all'])
                for item in data:
                    all_vars = self._combine_vars(all_vars, item)

            for group in host.get_groups():
                all_vars = self._combine_vars(all_vars, group.get_vars())
                if group.name in self._group_vars_files and group.name != 'all':
                    data = self._preprocess_vars(self._group_vars_files[group.name])
                    for item in data:
                        all_vars = self._combine_vars(all_vars, item)

            host_name = host.get_name()
            if host_name in self._host_vars_files:
                data = self._preprocess_vars(self._host_vars_files[host_name])
                for item in data:
                    all_vars = self._combine_vars(all_vars, item)

            # then we merge in vars specified for this host
            all_vars = self._combine_vars(all_vars, host.get_vars())

            # next comes the facts cache and the vars cache, respectively
            try:
                all_vars = self._combine_vars(all_vars, self._fact_cache.get(host.name, dict()))
            except KeyError:
                pass

        if play:
            all_vars = self._combine_vars(all_vars, play.get_vars())

            for vars_file_item in play.get_vars_files():
                try:
                    # create a set of temporary vars here, which incorporate the
                    # extra vars so we can properly template the vars_files entries
                    temp_vars = self._combine_vars(all_vars, self._extra_vars)
                    templar = Templar(loader=loader, variables=temp_vars)

                    # we assume each item in the list is itself a list, as we
                    # support "conditional includes" for vars_files, which mimics
                    # the with_first_found mechanism.
                    vars_file_list = templar.template(vars_file_item)
                    if not isinstance(vars_file_list, list):
                        vars_file_list = [vars_file_list]

                    # now we iterate through the (potential) files, and break out
                    # as soon as we read one from the list. If none are found, we
                    # raise an error, which is silently ignored at this point.
                    for vars_file in vars_file_list:
                        data = self._preprocess_vars(loader.load_from_file(vars_file))
                        if data is not None:
                            for item in data:
                                all_vars = self._combine_vars(all_vars, item)
                            break
                    else:
                        raise AnsibleError("vars file %s was not found" % vars_file_item)
                except UndefinedError as e:
                    continue

            if not C.DEFAULT_PRIVATE_ROLE_VARS:
                for role in play.get_roles():
                    all_vars = self._combine_vars(all_vars, role.get_vars())

        if task:
            if task._role:
                all_vars = self._combine_vars(all_vars, task._role.get_vars())
            all_vars = self._combine_vars(all_vars, task.get_vars())

        if host:
            all_vars = self._combine_vars(all_vars, self._vars_cache.get(host.get_name(), dict()))

        all_vars = self._combine_vars(all_vars, self._extra_vars)

        # FIXME: make sure all special vars are here
        # Finally, we create special vars
        all_vars['playbook_dir'] = loader.get_basedir()

        if host:
            all_vars['groups'] = [group.name for group in host.get_groups()]

            if self._inventory is not None:
                all_vars['groups'] = self._inventory.groups_list()
                if include_hostvars:
                    hostvars = HostVars(vars_manager=self, play=play, inventory=self._inventory, loader=loader)
                    all_vars['hostvars'] = hostvars

        if task:
            if task._role:
                all_vars['role_path'] = task._role._role_path

        if self._inventory is not None:
            all_vars['inventory_dir'] = self._inventory.basedir()
            if play:
                # add the list of hosts in the play, as adjusted for limit/filters
                # DEPRECATED: play_hosts should be deprecated in favor of ansible_play_hosts,
                # however this would take work in the templating engine, so for now
                # we'll add both so we can give users something transitional to use
                host_list = [x.name for x in self._inventory.get_hosts()]
                all_vars['play_hosts'] = host_list
                all_vars['ansible_play_hosts'] = host_list

        # the 'omit' value allows params to be left out if the variable they are based on is undefined
        all_vars['omit'] = self._omit_token

        all_vars['ansible_version'] = CLI.version_info(gitinfo=False)

        # make vars self referential, so people can do things like 'vars[var_name]'
        copied_vars = all_vars.copy()
        if 'hostvars' in copied_vars:
            del copied_vars['hostvars']
        all_vars['vars'] = copied_vars

        #CACHED_VARS[cache_entry] = all_vars

        debug("done with get_vars()")
        return all_vars
def ansible_run(loader, inventory, playbook_path, extra_vars):
    frame_info = getframeinfo(currentframe())
    print(f"Start [{frame_info.function}]!")

    variable_manager = VariableManager(loader=loader, inventory=inventory,
                                       version_info=CLI.version_info(gitinfo=False))
    if not extra_vars:
        print(f"[{frame_info.function}] extra_vars is empty")
    else:
        variable_manager._extra_vars = extra_vars

    pbex = PlaybookExecutor(playbooks=[playbook_path],
                            inventory=inventory,
                            variable_manager=variable_manager,
                            loader=loader,
                            passwords={})
    callback = SampleCallback()
    pbex._tqm._stdout_callback = callback

    return_code = pbex.run()
    results = callback.results
    return return_code, results
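# Usage sketch for ansible_run() above (not from the original source): it assumes
# context.CLIARGS has already been populated, as in the surrounding snippets. The
# inventory path, playbook name, and extra variable are placeholders.
loader = DataLoader()
inventory = InventoryManager(loader=loader, sources=('hosts',))   # 'hosts' is a placeholder path
rc, results = ansible_run(loader, inventory, 'site.yml', {'target_env': 'staging'})
print(rc, results)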
def pb_exec():
    with open('/home/dmitryd/ansible_ui/scripts/config/conf.yml') as f:
        conf = yaml.safe_load(f)
    h_dir = conf['variables']['dirs']['inventory_dir']
    pb_dir = conf['variables']['dirs']['pb_dir']
    retry_dir = conf['variables']['dirs']['pb_retry_dir']
    user_name = conf['main']['ssh_config']['user']
    user_key = conf['main']['ssh_config']['key']
    if user_key == 'None':
        user_key = None
    pb_files = pb_ui.pb_list(pb_dir)
    host_file = pb_ui.hosts_list(h_dir)
    loader = DataLoader()
    context.CLIARGS = ImmutableDict(tags={}, listtags=False, listtasks=False,
                                    listhosts=False, syntax=False, connection='ssh',
                                    module_path=None, forks=100, remote_user=user_name,
                                    private_key_file=user_key, ssh_common_args=None,
                                    ssh_extra_args=None, sftp_extra_args=None,
                                    scp_extra_args=None, become=True,
                                    become_method='sudo', become_user='******',
                                    verbosity=True, check=False, start_at_task=None,
                                    retry_files_enabled=True,
                                    retry_files_save_path=retry_dir)
    inventory = InventoryManager(loader=loader, sources=(host_file,))
    variable_manager = VariableManager(loader=loader, inventory=inventory,
                                       version_info=CLI.version_info(gitinfo=False))
    pbex = PlaybookExecutor(playbooks=pb_files,
                            inventory=inventory,
                            variable_manager=variable_manager,
                            loader=loader,
                            passwords={})
    results = pbex.run()
    print()
    #print("unreachable hosts:")
    #print(pbex._unreachable_hosts)
    print("Do you want to save the new host file? (Yes/No)")
    y = str(input())
    if y in ['Yes', 'YES', 'yes', 'y', 'Y']:
        pb_ui.new_inv(host_file, h_dir)
    else:
        os.remove(host_file)
def run_playbook(self, affected_host, host_file, playbook_path, namePlay, selectuser):
    if not os.path.exists(playbook_path):
        print('[INFO] The playbook does not exist: "{0}"'.format(playbook_path))
        sys.exit()
    if not os.path.isfile(host_file):
        print('[INFO] Host file does not exist: "{0}"'.format(host_file))
        sys.exit()
    try:
        loader = DataLoader()
        passwords = dict(vault_pass='******')
        context.CLIARGS = ImmutableDict(tags={}, listtags=False, listtasks=False,
                                        listhosts=False, syntax=False, connection='ssh',
                                        module_path=None, forks=100,
                                        remote_user=selectuser, private_key_file=None,
                                        extra_vars=[{
                                            'affected_hosts': '' + affected_host + ''
                                        }],
                                        ssh_common_args=None, ssh_extra_args=None,
                                        sftp_extra_args=None, scp_extra_args=None,
                                        become=True, become_method='sudo',
                                        become_user=selectuser, verbosity=True,
                                        check=False, start_at_task=None)
        inventory = InventoryManager(loader=loader, sources=(host_file))
        variable_manager = VariableManager(loader=loader, inventory=inventory,
                                           version_info=CLI.version_info(gitinfo=False))
        pbex = PlaybookExecutor(playbooks=[playbook_path],
                                inventory=inventory,
                                variable_manager=variable_manager,
                                loader=loader,
                                passwords=passwords)
        try:
            execPlay = json.dumps({affected_host: pbex.run()}, indent=4)
            check = json.loads(execPlay)
            if (check[affected_host] == 2):
                flow = ModuleResultsCollector()
                flow.failed(IDOPeration, affected_host, namePlay, 'FAILED')
                print('[INFO] Playbook failed: {}'.format(playbook_path))
            elif (check[affected_host] == 4):
                flow = ModuleResultsCollector()
                flow.failed(IDOPeration, affected_host, namePlay, 'FAILED')
                print('[INFO] Playbook unreachable: {}'.format(playbook_path))
            else:
                flow = ModuleResultsCollector()
                flow.passed(IDOPeration, affected_host, namePlay, 'PASSED')
                print('[INFO] Playbook pass: {} [ok]'.format(playbook_path))
        except AnsibleError as ansError:
            flow = ModuleResultsCollector()
            flow.failed(IDOPeration, affected_host, namePlay, 'FAILED')
            print('[INFO] Ansible error: {}'.format(ansError))
    except Exception as e:
        flow = ModuleResultsCollector()
        flow.failed(IDOPeration, affected_host, namePlay, 'FAILED')
        print('[INFO]: {} - Failed caused by: {}'.format(playbook_path, e))
def v2_runner_on_ok(self, result, *args, **kwargs):
    """Print a json representation of the result.

    Also, store the result in an instance attribute for retrieval later
    """
    host = result._host
    self.host_ok[host.get_name()] = result
    print(json.dumps({host.name: result._result}, indent=4))

def v2_runner_on_failed(self, result, *args, **kwargs):
    host = result._host
    self.host_failed[host.get_name()] = result
    print("aaaa")

results_callback = ResultCallback()
context.CLIARGS = ImmutableDict(tags={}, listtags=False, listtasks=False,
                                listhosts=False, syntax=False, connection='ssh',
                                module_path=None, forks=100, remote_user='******',
                                private_key_file=None, ssh_common_args=None,
                                ssh_extra_args=None, sftp_extra_args=None,
                                scp_extra_args=None, become=True,
                                become_method='sudo', become_user='******',
                                verbosity=True, check=False, start_at_task=None,
                                stdout_callback=results_callback)
inventory = InventoryManager(loader=loader, sources=('10.0.0.27,'))
variable_manager = VariableManager(loader=loader, inventory=inventory,
                                   version_info=CLI.version_info(gitinfo=False))
pbex = PlaybookExecutor(playbooks=['./static/ansible/ls.yml'],
                        inventory=inventory,
                        variable_manager=variable_manager,
                        loader=loader,
                        passwords={})
results = pbex.run()
print(results)
def load_playbook(user: str, yaml: str, inv: str) -> None:
    """ Add user and initiate YAML playbook """
    load_data = DataLoader()
    context.CLIARGS = ImmutableDict(
        tags={}, listtags=False, listtasks=False, listhosts=False, syntax=False,
        connection='ssh', module_path=None, forks=100, remote_user=user,
        private_key_file=None, ssh_common_args=None, ssh_extra_args=None,
        sftp_extra_args=None, scp_extra_args=None, become=True,
        become_method='sudo', become_user='******', verbosity=True,
        check=False, start_at_task=None
    )
    inventory = InventoryManager(loader=load_data, sources=(inv,))
    variable_manager = VariableManager(loader=load_data, inventory=inventory,
                                       version_info=CLI.version_info(gitinfo=False))
    play = PlaybookExecutor(playbooks=[yaml],
                            inventory=inventory,
                            variable_manager=variable_manager,
                            loader=load_data,
                            passwords={})
    results = play.run()
    pprint(results)
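# Usage sketch for load_playbook() above (not from the original source): the user
# name, playbook file, and inventory file are placeholders chosen for illustration.
load_playbook(user='deploy', yaml='site.yml', inv='hosts.ini')   # all three values are illustrative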
def get_vars(self, loader, play=None, host=None, task=None, include_hostvars=True, include_delegate_to=True, use_cache=True):
    '''
    Returns the variables, with optional "context" given via the parameters
    for the play, host, and task (which could possibly result in different
    sets of variables being returned due to the additional context).

    The order of precedence is:
    - play->roles->get_default_vars (if there is a play context)
    - group_vars_files[host] (if there is a host context)
    - host_vars_files[host] (if there is a host context)
    - host->get_vars (if there is a host context)
    - fact_cache[host] (if there is a host context)
    - play vars (if there is a play context)
    - play vars_files (if there's no host context, ignore file names that cannot be templated)
    - task->get_vars (if there is a task context)
    - vars_cache[host] (if there is a host context)
    - extra vars
    '''

    debug("in VariableManager get_vars()")
    cache_entry = self._get_cache_entry(play=play, host=host, task=task)
    if cache_entry in VARIABLE_CACHE and use_cache:
        debug("vars are cached, returning them now")
        return VARIABLE_CACHE[cache_entry]

    all_vars = defaultdict(dict)

    if play:
        # first we compile any vars specified in defaults/main.yml
        # for all roles within the specified play
        for role in play.get_roles():
            all_vars = combine_vars(all_vars, role.get_default_vars())

        # if we have a task in this context, and that task has a role, make
        # sure it sees its defaults above any other roles, as we previously
        # (v1) made sure each task had a copy of its roles default vars
        if task and task._role is not None:
            all_vars = combine_vars(all_vars, task._role.get_default_vars())

    if host:
        # next, if a host is specified, we load any vars from group_vars
        # files and then any vars from host_vars files which may apply to
        # this host or the groups it belongs to

        # we merge in vars from groups specified in the inventory (INI or script)
        all_vars = combine_vars(all_vars, host.get_group_vars())

        # then we merge in the special 'all' group_vars first, if they exist
        if 'all' in self._group_vars_files:
            data = preprocess_vars(self._group_vars_files['all'])
            for item in data:
                all_vars = combine_vars(all_vars, item)

        for group in host.get_groups():
            if group.name in self._group_vars_files and group.name != 'all':
                for data in self._group_vars_files[group.name]:
                    data = preprocess_vars(data)
                    for item in data:
                        all_vars = combine_vars(all_vars, item)

        # then we merge in vars from the host specified in the inventory (INI or script)
        all_vars = combine_vars(all_vars, host.get_vars())

        # then we merge in the host_vars/<hostname> file, if it exists
        host_name = host.get_name()
        if host_name in self._host_vars_files:
            for data in self._host_vars_files[host_name]:
                data = preprocess_vars(data)
                for item in data:
                    all_vars = combine_vars(all_vars, item)

        # finally, the facts caches for this host, if it exists
        try:
            host_facts = self._fact_cache.get(host.name, dict())
            for k in host_facts.keys():
                if host_facts[k] is not None and not isinstance(host_facts[k], UnsafeProxy):
                    host_facts[k] = UnsafeProxy(host_facts[k])
            all_vars = combine_vars(all_vars, host_facts)
        except KeyError:
            pass

    if play:
        all_vars = combine_vars(all_vars, play.get_vars())

        for vars_file_item in play.get_vars_files():
            # create a set of temporary vars here, which incorporate the
            # extra vars so we can properly template the vars_files entries
            temp_vars = combine_vars(all_vars, self._extra_vars)
            templar = Templar(loader=loader, variables=temp_vars)

            # we assume each item in the list is itself a list, as we
            # support "conditional includes" for vars_files, which mimics
            # the with_first_found mechanism.
            #vars_file_list = templar.template(vars_file_item)
            vars_file_list = vars_file_item
            if not isinstance(vars_file_list, list):
                vars_file_list = [vars_file_list]

            # now we iterate through the (potential) files, and break out
            # as soon as we read one from the list. If none are found, we
            # raise an error, which is silently ignored at this point.
            try:
                for vars_file in vars_file_list:
                    vars_file = templar.template(vars_file)
                    try:
                        data = preprocess_vars(loader.load_from_file(vars_file))
                        if data is not None:
                            for item in data:
                                all_vars = combine_vars(all_vars, item)
                            break
                    except AnsibleFileNotFound as e:
                        # we continue on loader failures
                        continue
                    except AnsibleParserError as e:
                        raise
                else:
                    raise AnsibleFileNotFound("vars file %s was not found" % vars_file_item)
            except (UndefinedError, AnsibleUndefinedVariable):
                if host is not None and self._fact_cache.get(host.name, dict()).get('module_setup') and task is not None:
                    raise AnsibleUndefinedVariable("an undefined variable was found when attempting to template the vars_files item '%s'" % vars_file_item, obj=vars_file_item)
                else:
                    # we do not have a full context here, and the missing variable could be
                    # because of that, so just show a warning and continue
                    display.vvv("skipping vars_file '%s' due to an undefined variable" % vars_file_item)
                    continue

        if not C.DEFAULT_PRIVATE_ROLE_VARS:
            for role in play.get_roles():
                all_vars = combine_vars(all_vars, role.get_vars(include_params=False))

    if task:
        if task._role:
            all_vars = combine_vars(all_vars, task._role.get_vars())
        all_vars = combine_vars(all_vars, task.get_vars())

    if host:
        all_vars = combine_vars(all_vars, self._vars_cache.get(host.get_name(), dict()))
        all_vars = combine_vars(all_vars, self._nonpersistent_fact_cache.get(host.name, dict()))

    all_vars = combine_vars(all_vars, self._extra_vars)

    # FIXME: make sure all special vars are here
    # Finally, we create special vars
    all_vars['playbook_dir'] = loader.get_basedir()

    if host:
        all_vars['group_names'] = [group.name for group in host.get_groups()]

        if self._inventory is not None:
            all_vars['groups'] = dict()
            for (group_name, group) in iteritems(self._inventory.groups):
                all_vars['groups'][group_name] = [h.name for h in group.get_hosts()]

            if include_hostvars:
                hostvars_cache_entry = self._get_cache_entry(play=play)
                if hostvars_cache_entry in HOSTVARS_CACHE:
                    hostvars = HOSTVARS_CACHE[hostvars_cache_entry]
                else:
                    hostvars = HostVars(play=play, inventory=self._inventory, loader=loader, variable_manager=self)
                    HOSTVARS_CACHE[hostvars_cache_entry] = hostvars
                all_vars['hostvars'] = hostvars

    if task:
        if task._role:
            all_vars['role_path'] = task._role._role_path

        # if we have a task and we're delegating to another host, figure out the
        # variables for that host now so we don't have to rely on hostvars later
        if task.delegate_to is not None and include_delegate_to:
            # we unfortunately need to template the delegate_to field here,
            # as we're fetching vars before post_validate has been called on
            # the task that has been passed in
            templar = Templar(loader=loader, variables=all_vars)

            items = []
            if task.loop is not None:
                if task.loop in lookup_loader:
                    #TODO: remove convert_bare true and deprecate this in with_
                    try:
                        loop_terms = listify_lookup_plugin_terms(terms=task.loop_args, templar=templar, loader=loader, fail_on_undefined=True, convert_bare=True)
                    except AnsibleUndefinedVariable as e:
                        if 'has no attribute' in str(e):
                            loop_terms = []
                            self._display.deprecated("Skipping task due to undefined attribute, in the future this will be a fatal error.")
                        else:
                            raise
                    items = lookup_loader.get(task.loop, loader=loader, templar=templar).run(terms=loop_terms, variables=all_vars)
                else:
                    raise AnsibleError("Unexpected failure in finding the lookup named '%s' in the available lookup plugins" % task.loop)
            else:
                items = [None]

            vars_copy = all_vars.copy()
            delegated_host_vars = dict()
            for item in items:
                # update the variables with the item value for templating, in case we need it
                if item is not None:
                    vars_copy['item'] = item

                templar.set_available_variables(vars_copy)
                delegated_host_name = templar.template(task.delegate_to, fail_on_undefined=False)

                if delegated_host_name in delegated_host_vars:
                    # no need to repeat ourselves, as the delegate_to value
                    # does not appear to be tied to the loop item variable
                    continue

                # a dictionary of variables to use if we have to create a new host below
                new_delegated_host_vars = dict(
                    ansible_host=delegated_host_name,
                    ansible_user=C.DEFAULT_REMOTE_USER,
                    ansible_connection=C.DEFAULT_TRANSPORT,
                )

                # now try to find the delegated-to host in inventory, or failing that,
                # create a new host on the fly so we can fetch variables for it
                delegated_host = None
                if self._inventory is not None:
                    delegated_host = self._inventory.get_host(delegated_host_name)
                    # try looking it up based on the address field, and finally
                    # fall back to creating a host on the fly to use for the var lookup
                    if delegated_host is None:
                        for h in self._inventory.get_hosts(ignore_limits_and_restrictions=True):
                            # check if the address matches, or if both the delegated_to host
                            # and the current host are in the list of localhost aliases
                            if h.address == delegated_host_name or h.name in C.LOCALHOST and delegated_host_name in C.LOCALHOST:
                                delegated_host = h
                                break
                        else:
                            delegated_host = Host(name=delegated_host_name)
                            delegated_host.vars.update(new_delegated_host_vars)
                else:
                    delegated_host = Host(name=delegated_host_name)
                    delegated_host.vars.update(new_delegated_host_vars)

                # now we go fetch the vars for the delegated-to host and save them in our
                # master dictionary of variables to be used later in the TaskExecutor/PlayContext
                delegated_host_vars[delegated_host_name] = self.get_vars(
                    loader=loader,
                    play=play,
                    host=delegated_host,
                    task=task,
                    include_delegate_to=False,
                    include_hostvars=False,
                )
            all_vars['ansible_delegated_vars'] = delegated_host_vars

    if self._inventory is not None:
        all_vars['inventory_dir'] = self._inventory.basedir()
        if play:
            # add the list of hosts in the play, as adjusted for limit/filters
            # DEPRECATED: play_hosts should be deprecated in favor of ansible_play_hosts,
            # however this would take work in the templating engine, so for now
            # we'll add both so we can give users something transitional to use
            host_list = [x.name for x in self._inventory.get_hosts()]
            all_vars['play_hosts'] = host_list
            all_vars['ansible_play_hosts'] = host_list

    # the 'omit' value allows params to be left out if the variable they are based on is undefined
    all_vars['omit'] = self._omit_token

    all_vars['ansible_version'] = CLI.version_info(gitinfo=False)

    if 'hostvars' in all_vars and host:
        all_vars['vars'] = all_vars['hostvars'][host.get_name()]

    #VARIABLE_CACHE[cache_entry] = all_vars

    debug("done with get_vars()")
    return all_vars
loader = DataLoader()
loader.set_vault_secrets([('default', VaultSecret(_bytes=to_bytes('123456')))])

context.CLIARGS = ImmutableDict(tags={}, listtags=False, listtasks=False,
                                listhosts=False, syntax=False, module_path=None,
                                forks=100, private_key_file=None, start_at_task=None)

inventory = InventoryManager(loader=loader, sources=[code_path + '/ansible/inventory'])
variable_manager = VariableManager(loader=loader, inventory=inventory,
                                   version_info=CLI.version_info(gitinfo=False))
variable_manager._extra_vars = {'firstvar': False}

pbex = PlaybookExecutor(playbooks=[code_path + '/ansible/variables.yml'],
                        inventory=inventory,
                        variable_manager=variable_manager,
                        loader=loader,
                        passwords={})
results = pbex.run()
print(results)
if self._inventory is not None:
    all_vars['inventory_dir'] = self._inventory.basedir()
    if play:
        # add the list of hosts in the play, as adjusted for limit/filters
        # FIXME: play_hosts should be deprecated in favor of ansible_play_hosts,
        # however this would take work in the templating engine, so for now
        # we'll add both so we can give users something transitional to use
        host_list = [x.name for x in self._inventory.get_hosts()]
        all_vars['play_hosts'] = host_list
        all_vars['ansible_play_hosts'] = host_list

# the 'omit' value allows params to be left out if the variable they are based on is undefined
all_vars['omit'] = self._omit_token

all_vars['ansible_version'] = CLI.version_info(gitinfo=False)

# make vars self referential, so people can do things like 'vars[var_name]'
copied_vars = all_vars.copy()
if 'hostvars' in copied_vars:
    del copied_vars['hostvars']
all_vars['vars'] = copied_vars

#CACHED_VARS[cache_entry] = all_vars

debug("done with get_vars()")
return all_vars

def _get_inventory_basename(self, path):
    '''
    Returns the basename minus the extension of the given path, so the
def run_playbook(self, playbook, extra_vars=None, check_result=False):
    """ Run an ansible playbook """
    # https://stackoverflow.com/questions/27590039/running-ansible-playbook-using-python-api
    loader = DataLoader()
    if extra_vars:
        extra_vars = set(extra_vars)
    else:
        extra_vars = {}
    context.CLIARGS = ImmutableDict(tags={"classic"}, listtags=False, listtasks=False,
                                    listhosts=False, syntax=False, connection='ssh',
                                    module_path=None, forks=100, remote_user='******',
                                    private_key_file=None, ssh_common_args=None,
                                    ssh_extra_args=None, sftp_extra_args=None,
                                    extra_vars=extra_vars, scp_extra_args=None,
                                    become=True, become_method='sudo',
                                    become_user='******', verbosity=True,
                                    check=False, start_at_task=None)
    inventory = InventoryManager(loader=loader, sources=(self.inventory_file, ))
    variable_manager = VariableManager(loader=loader, inventory=inventory,
                                       version_info=CLI.version_info(gitinfo=False))
    passwords = {}

    os.environ['TEST_ARTIFACTS'] = os.path.abspath(self.test_artifacts)
    self.logger.debug("TEST_ARTIFACTS = {}".format(os.environ['TEST_ARTIFACTS']))

    pbex = playbook_executor.PlaybookExecutor(playbooks=[playbook],
                                              inventory=inventory,
                                              variable_manager=variable_manager,
                                              loader=loader,
                                              passwords=passwords)
    pbex._tqm._stdout_callback = "yaml"  # pylint: disable=protected-access

    self.logger.info("Running playbook {}".format(playbook))
    # https://stackoverflow.com/questions/10415028/how-can-i-recover-the-return-value-of-a-function-passed-to-multiprocessing-proce
    manager = multiprocessing.Manager()
    return_dict = manager.dict()
    run_process = multiprocessing.Process(target=_run_pbex, args=(pbex, return_dict))
    run_process.start()
    # run playbook with timeout of 4hrs
    run_process.join(4 * 60 * 60)
    if run_process.is_alive():
        self.logger.error("Playbook has been running for too long. Aborting it.")
        _terminate_tree(run_process.pid)
        run_process.join()
        return_code = 124
    else:
        if "exit_code" in return_dict:
            return_code = return_dict["exit_code"]
        else:
            return_code = 1

    self.logger.debug("Playbook {} finished with {}".format(playbook, return_code))

    if check_result:
        results_yml_file = "{}/results.yml".format(self.test_artifacts)
        # check if result matches: https://docs.fedoraproject.org/en-US/ci/standard-test-interface/
        if not os.path.isfile("{}/test.log".format(self.test_artifacts)):
            err_msg = "Playbook finished without creating test.log"
            # overwrite any results.yml to make sure there is an error
            test_result = {}
            test_result['test'] = os.path.splitext(os.path.basename(playbook))[0]
            test_result['result'] = "fail"
            test_result["error_reason"] = err_msg
            result = {}
            result['results'] = [test_result]
            with open(results_yml_file, "w") as _file:
                yaml.safe_dump(result, _file)
            self.logger.error(err_msg)
            return_code = 1
        if not os.path.isfile(results_yml_file):
            self.logger.debug("playbook didn't create results.yml, creating one...")
            # playbook didn't create results.yml, so create one
            test_result = {}
            test_result['test'] = os.path.splitext(os.path.basename(playbook))[0]
            if return_code == 0:
                test_result['result'] = "pass"
            else:
                test_result['result'] = "fail"
            result = {}
            result['results'] = [test_result]
            with open(results_yml_file, "w") as _file:
                yaml.safe_dump(result, _file)
        self.logger.debug("parsing results.yml")
        with open(results_yml_file) as _file:
            parsed_yaml = yaml.load(_file, Loader=yaml.FullLoader)
            for result in parsed_yaml["results"]:
                if result['result'] != "pass":
                    self.logger.debug("{} has result {}, setting whole playbook as failed".format(result['test'], result['result']))
                    return_code = 1
    return return_code
def get_vars(self, loader, play=None, host=None, task=None, include_hostvars=True, use_cache=True):
    '''
    Returns the variables, with optional "context" given via the parameters
    for the play, host, and task (which could possibly result in different
    sets of variables being returned due to the additional context).

    The order of precedence is:
    - play->roles->get_default_vars (if there is a play context)
    - group_vars_files[host] (if there is a host context)
    - host_vars_files[host] (if there is a host context)
    - host->get_vars (if there is a host context)
    - fact_cache[host] (if there is a host context)
    - vars_cache[host] (if there is a host context)
    - play vars (if there is a play context)
    - play vars_files (if there's no host context, ignore file names that cannot be templated)
    - task->get_vars (if there is a task context)
    - extra vars
    '''

    debug("in VariableManager get_vars()")
    cache_entry = self._get_cache_entry(play=play, host=host, task=task)
    if cache_entry in CACHED_VARS and use_cache:
        debug("vars are cached, returning them now")
        return CACHED_VARS[cache_entry]

    all_vars = defaultdict(dict)

    if play:
        # first we compile any vars specified in defaults/main.yml
        # for all roles within the specified play
        for role in play.get_roles():
            all_vars = self._combine_vars(all_vars, role.get_default_vars())

    if host:
        # next, if a host is specified, we load any vars from group_vars
        # files and then any vars from host_vars files which may apply to
        # this host or the groups it belongs to

        # we merge in the special 'all' group_vars first, if they exist
        if 'all' in self._group_vars_files:
            data = self._preprocess_vars(self._group_vars_files['all'])
            for item in data:
                all_vars = self._combine_vars(all_vars, item)

        for group in host.get_groups():
            all_vars = self._combine_vars(all_vars, group.get_vars())
            if group.name in self._group_vars_files and group.name != 'all':
                data = self._preprocess_vars(self._group_vars_files[group.name])
                for item in data:
                    all_vars = self._combine_vars(all_vars, item)

        host_name = host.get_name()
        if host_name in self._host_vars_files:
            data = self._preprocess_vars(self._host_vars_files[host_name])
            for item in data:
                all_vars = self._combine_vars(all_vars, item)

        # then we merge in vars specified for this host
        all_vars = self._combine_vars(all_vars, host.get_vars())

        # next comes the facts cache and the vars cache, respectively
        try:
            all_vars = self._combine_vars(all_vars, self._fact_cache.get(host.name, dict()))
        except KeyError:
            pass

    if play:
        all_vars = self._combine_vars(all_vars, play.get_vars())

        for vars_file_item in play.get_vars_files():
            try:
                # create a set of temporary vars here, which incorporate the
                # extra vars so we can properly template the vars_files entries
                temp_vars = self._combine_vars(all_vars, self._extra_vars)
                templar = Templar(loader=loader, variables=temp_vars)

                # we assume each item in the list is itself a list, as we
                # support "conditional includes" for vars_files, which mimics
                # the with_first_found mechanism.
                vars_file_list = templar.template(vars_file_item)
                if not isinstance(vars_file_list, list):
                    vars_file_list = [vars_file_list]

                # now we iterate through the (potential) files, and break out
                # as soon as we read one from the list. If none are found, we
                # raise an error, which is silently ignored at this point.
                for vars_file in vars_file_list:
                    data = self._preprocess_vars(loader.load_from_file(vars_file))
                    if data is not None:
                        for item in data:
                            all_vars = self._combine_vars(all_vars, item)
                        break
                else:
                    raise AnsibleError("vars file %s was not found" % vars_file_item)
            except UndefinedError as e:
                continue

        if not C.DEFAULT_PRIVATE_ROLE_VARS:
            for role in play.get_roles():
                all_vars = self._combine_vars(all_vars, role.get_vars())

    if task:
        if task._role:
            all_vars = self._combine_vars(all_vars, task._role.get_vars())
        all_vars = self._combine_vars(all_vars, task.get_vars())

    if host:
        all_vars = self._combine_vars(all_vars, self._vars_cache.get(host.get_name(), dict()))

    all_vars = self._combine_vars(all_vars, self._extra_vars)

    # FIXME: make sure all special vars are here
    # Finally, we create special vars
    all_vars['playbook_dir'] = loader.get_basedir()

    if host:
        all_vars['groups'] = [group.name for group in host.get_groups()]

        if self._inventory is not None:
            all_vars['groups'] = self._inventory.groups_list()
            if include_hostvars:
                hostvars = HostVars(vars_manager=self, play=play, inventory=self._inventory, loader=loader)
                all_vars['hostvars'] = hostvars

    if task:
        if task._role:
            all_vars['role_path'] = task._role._role_path

    if self._inventory is not None:
        all_vars['inventory_dir'] = self._inventory.basedir()
        if play:
            # add the list of hosts in the play, as adjusted for limit/filters
            # DEPRECATED: play_hosts should be deprecated in favor of ansible_play_hosts,
            # however this would take work in the templating engine, so for now
            # we'll add both so we can give users something transitional to use
            host_list = [x.name for x in self._inventory.get_hosts()]
            all_vars['play_hosts'] = host_list
            all_vars['ansible_play_hosts'] = host_list

    # the 'omit' value allows params to be left out if the variable they are based on is undefined
    all_vars['omit'] = self._omit_token

    all_vars['ansible_version'] = CLI.version_info(gitinfo=False)

    if 'hostvars' in all_vars and host:
        all_vars['vars'] = all_vars['hostvars'][host.get_name()]

    #CACHED_VARS[cache_entry] = all_vars

    debug("done with get_vars()")
    return all_vars
def ansiblePlaybook(mainTfDir, baseCWD, providerName, kubeconfig, noTerraform, test, configs, usePrivateIPs, freeMaster):
    """Runs ansible-playbook with the given playbook.

    Parameters:
        mainTfDir (str): Path where the .tf file is.
        baseCWD (str): Path to go back.
        providerName (str): Provider name.
        kubeconfig (str): Path to kubeconfig file.
        noTerraform (bool): Specifies whether current run uses terraform.
        test (str): Cluster identification.
        configs (dict): Content of configs.yaml.
        usePrivateIPs (bool): Indicates whether private IPs should be used.
        freeMaster (bool): If True, pods can't run on the master node.

    Returns:
        int: 0 for success, 1 for failure
    """

    btspMsg = "...bootstrapping Kubernetes cluster..."
    writeToFile("src/logging/%s" % test, btspMsg, True)

    hostsFilePath = "%s/hosts" % mainTfDir
    createHostsFile(mainTfDir, baseCWD, providerName, hostsFilePath, configs,
                    usePrivateIPs, noTerraform=noTerraform, test=test)

    loader = DataLoader()
    masterIP = getMasterIP(hostsFilePath)
    context.CLIARGS = ImmutableDict(
        tags={},
        private_key_file=configs["pathToKey"],
        connection='ssh',
        timeout=120,
        remote_user=tryTakeFromYaml(configs, "openUser", "root"),
        become_method='sudo',
        ssh_common_args='-o StrictHostKeyChecking=no',
        extra_vars=[{
            'kubeconfig': kubeconfig,
            'masterIP': masterIP
        }],
        forks=100,
        verbosity=4,  #True,
        listtags=False,
        listtasks=False,
        listhosts=False,
        syntax=False,
        check=False,
        start_at_task=None)
    inventory = InventoryManager(loader=loader, sources=hostsFilePath)
    variable_manager = VariableManager(loader=loader, inventory=inventory,
                                       version_info=CLI.version_info(gitinfo=False))

    # ----- to hide ansible logs
    if aggregateLogs:
        p = Process(target=subprocPrint, args=(test, ))
        p.start()
    with open(ansibleLogs % test, 'a') as f:
        # from now on, logs go to ansibleLogs
        with contextlib.redirect_stdout(f):
            with contextlib.redirect_stderr(f):
                playbooksArray = [playbookPath]
                if not freeMaster:
                    playbooksArray.append("src/provisionment/playbooks/allowMasterRuns.yaml")
                # --------------- GPU support
                if test in ("dlTest", "proGANTest"):
                    playbooksArray.append("src/provisionment/playbooks/gpuSupport.yaml")
                # --------------- MPI support
                if test in ("dlTest", "hpcTest"):
                    playbooksArray.append("src/provisionment/playbooks/kubeflow_mpiOperator.yaml")
                # --------------- OCI's Grow File System
                if providerName == "oci":
                    playbooksArray.append("src/provisionment/playbooks/oci_growfs.yaml")
                res = PlaybookExecutor(playbooks=playbooksArray,
                                       inventory=inventory,
                                       variable_manager=variable_manager,
                                       loader=loader,
                                       passwords=None).run(), masterIP
    if aggregateLogs:
        p.terminate()
    return res