Example #1
    def __init__(self, filename):
        self._loader = loader = DataLoader()
        self._inventory = inventory = InventoryManager(
            loader=loader, sources=filename)
        self._variable_manager = variable_manager = VariableManager(
            loader=loader, inventory=inventory)
        self._hostvars = HostVars(inventory, variable_manager, loader)
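The __init__ above assumes a handful of imports and a surrounding class that the snippet does not show. A self-contained sketch using the Ansible 2.4+ module paths (the wrapper class name, the inventory path, and the host name below are placeholders, not part of the original example):

from ansible.parsing.dataloader import DataLoader
from ansible.inventory.manager import InventoryManager
from ansible.vars.hostvars import HostVars
from ansible.vars.manager import VariableManager


class AnsibleInventory:
    """Hypothetical wrapper around the __init__ shown above."""

    def __init__(self, filename):
        self._loader = loader = DataLoader()
        self._inventory = inventory = InventoryManager(
            loader=loader, sources=filename)
        self._variable_manager = variable_manager = VariableManager(
            loader=loader, inventory=inventory)
        self._hostvars = HostVars(inventory, variable_manager, loader)


inv = AnsibleInventory('/etc/ansible/hosts')           # placeholder inventory path
print(inv._hostvars['web01'].get('ansible_host'))      # 'web01' is a placeholder host name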
Example #2
    def _get_magic_variables(self, loader, play, host, task, include_hostvars,
                             include_delegate_to):
        '''
        Returns a dictionary of so-called "magic" variables in Ansible,
        which are special variables we set internally for use.
        '''

        variables = dict()
        variables['playbook_dir'] = loader.get_basedir()

        if host:
            variables['group_names'] = [
                group.name for group in host.get_groups()
                if group.name != 'all'
            ]

            if self._inventory is not None:
                variables['groups'] = dict()
                for (group_name, group) in iteritems(self._inventory.groups):
                    variables['groups'][group_name] = [
                        h.name for h in group.get_hosts()
                    ]

                if include_hostvars:
                    hostvars_cache_entry = self._get_cache_entry(play=play)
                    if hostvars_cache_entry in HOSTVARS_CACHE:
                        hostvars = HOSTVARS_CACHE[hostvars_cache_entry]
                    else:
                        hostvars = HostVars(play=play,
                                            inventory=self._inventory,
                                            loader=loader,
                                            variable_manager=self)
                        HOSTVARS_CACHE[hostvars_cache_entry] = hostvars
                    variables['hostvars'] = hostvars
                    variables['vars'] = hostvars[host.get_name()]

        if play:
            variables['role_names'] = [r._role_name for r in play.roles]

        if task:
            if task._role:
                variables['role_path'] = task._role._role_path

        if self._inventory is not None:
            variables['inventory_dir'] = self._inventory.basedir()
            variables['inventory_file'] = self._inventory.src()
            if play:
                # add the list of hosts in the play, as adjusted for limit/filters
                # DEPRECATED: play_hosts should be deprecated in favor of ansible_play_hosts,
                #             however this would take work in the templating engine, so for now
                #             we'll add both so we can give users something transitional to use
                host_list = [x.name for x in self._inventory.get_hosts()]
                variables['play_hosts'] = host_list
                variables['ansible_play_hosts'] = host_list

        # the 'omit' value allows params to be left out if the variable they are based on is undefined
        variables['omit'] = self._omit_token
        variables['ansible_version'] = CLI.version_info(gitinfo=False)

        return variables
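For orientation, when both a play and a host are in context, the dictionary returned by _get_magic_variables() above ends up shaped roughly like the sketch below. The keys come from the code; every value is an invented placeholder.

magic_vars = {
    'playbook_dir': '/path/to/playbook',                      # placeholder
    'group_names': ['webservers'],                            # groups of the current host, minus 'all'
    'groups': {'all': ['web01', 'db01'], 'webservers': ['web01']},
    'hostvars': '<HostVars object, lazily resolves per-host vars>',
    'vars': "<the current host's own hostvars entry>",
    'role_names': ['common'],
    'role_path': '/path/to/roles/common',                     # only when the task belongs to a role
    'inventory_dir': '/path/to/inventory',
    'inventory_file': '/path/to/inventory/hosts',
    'play_hosts': ['web01', 'db01'],                          # kept for backwards compatibility
    'ansible_play_hosts': ['web01', 'db01'],
    'omit': '__omit_place_holder__<random sha1>',
    'ansible_version': '<dict from CLI.version_info(gitinfo=False)>',
}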
Example #3
    def get_host_vars(self, host_name):
        all_vars = self.vars_manager.get_vars(
            play=self.play, host=self.inventory.get_host(host_name))
        host_vars = HostVars(
            inventory=self.inventory,
            variable_manager=self.vars_manager,
            loader=self.loader,
        )
        all_vars.update(dict(hostvars=host_vars))
        return all_vars
Example #4
    def get_vars(self,
                 loader,
                 play=None,
                 host=None,
                 task=None,
                 include_hostvars=True,
                 use_cache=True):
        '''
        Returns the variables, with optional "context" given via the parameters
        for the play, host, and task (which could possibly result in different
        sets of variables being returned due to the additional context).

        The order of precedence is:
        - play->roles->get_default_vars (if there is a play context)
        - group_vars_files[host] (if there is a host context)
        - host_vars_files[host] (if there is a host context)
        - host->get_vars (if there is a host context)
        - fact_cache[host] (if there is a host context)
        - vars_cache[host] (if there is a host context)
        - play vars (if there is a play context)
        - play vars_files (if there's no host context, ignore
          file names that cannot be templated)
        - task->get_vars (if there is a task context)
        - extra vars
        '''

        debug("in VariableManager get_vars()")
        cache_entry = self._get_cache_entry(play=play, host=host, task=task)
        if cache_entry in CACHED_VARS and use_cache:
            debug("vars are cached, returning them now")
            return CACHED_VARS[cache_entry]

        all_vars = defaultdict(dict)

        if play:
            # first we compile any vars specified in defaults/main.yml
            # for all roles within the specified play
            for role in play.get_roles():
                all_vars = self._combine_vars(all_vars,
                                              role.get_default_vars())

        if host:
            # next, if a host is specified, we load any vars from group_vars
            # files and then any vars from host_vars files which may apply to
            # this host or the groups it belongs to

            # we merge in the special 'all' group_vars first, if they exist
            if 'all' in self._group_vars_files:
                data = self._preprocess_vars(self._group_vars_files['all'])
                for item in data:
                    all_vars = self._combine_vars(all_vars, item)

            for group in host.get_groups():
                all_vars = self._combine_vars(all_vars, group.get_vars())
                if group.name in self._group_vars_files and group.name != 'all':
                    data = self._preprocess_vars(
                        self._group_vars_files[group.name])
                    for item in data:
                        all_vars = self._combine_vars(all_vars, item)

            host_name = host.get_name()
            if host_name in self._host_vars_files:
                data = self._preprocess_vars(self._host_vars_files[host_name])
                for item in data:
                    all_vars = self._combine_vars(all_vars, item)

            # then we merge in vars specified for this host
            all_vars = self._combine_vars(all_vars, host.get_vars())

            # next comes the facts cache and the vars cache, respectively
            try:
                all_vars = self._combine_vars(
                    all_vars, self._fact_cache.get(host.name, dict()))
            except KeyError:
                pass

        if play:
            all_vars = self._combine_vars(all_vars, play.get_vars())

            for vars_file_item in play.get_vars_files():
                try:
                    # create a set of temporary vars here, which incorporate the
                    # extra vars so we can properly template the vars_files entries
                    temp_vars = self._combine_vars(all_vars, self._extra_vars)
                    templar = Templar(loader=loader, variables=temp_vars)

                    # we assume each item in the list is itself a list, as we
                    # support "conditional includes" for vars_files, which mimics
                    # the with_first_found mechanism.
                    vars_file_list = templar.template(vars_file_item)
                    if not isinstance(vars_file_list, list):
                        vars_file_list = [vars_file_list]

                    # now we iterate through the (potential) files, and break out
                    # as soon as we read one from the list. If none are found, we
                    # raise an error, which is silently ignored at this point.
                    for vars_file in vars_file_list:
                        data = self._preprocess_vars(
                            loader.load_from_file(vars_file))
                        if data is not None:
                            for item in data:
                                all_vars = self._combine_vars(all_vars, item)
                            break
                    else:
                        raise AnsibleError("vars file %s was not found" %
                                           vars_file_item)
                except UndefinedError as e:
                    continue

            if not C.DEFAULT_PRIVATE_ROLE_VARS:
                for role in play.get_roles():
                    all_vars = self._combine_vars(all_vars, role.get_vars())

        if task:
            if task._role:
                all_vars = self._combine_vars(all_vars, task._role.get_vars())
            all_vars = self._combine_vars(all_vars, task.get_vars())

        if host:
            all_vars = self._combine_vars(
                all_vars, self._vars_cache.get(host.get_name(), dict()))

        all_vars = self._combine_vars(all_vars, self._extra_vars)

        # FIXME: make sure all special vars are here
        # Finally, we create special vars

        all_vars['playbook_dir'] = loader.get_basedir()

        if host:
            all_vars['groups'] = [group.name for group in host.get_groups()]

            if self._inventory is not None:
                all_vars['groups'] = self._inventory.groups_list()
                if include_hostvars:
                    hostvars = HostVars(vars_manager=self,
                                        play=play,
                                        inventory=self._inventory,
                                        loader=loader)
                    all_vars['hostvars'] = hostvars

        if task:
            if task._role:
                all_vars['role_path'] = task._role._role_path

        if self._inventory is not None:
            all_vars['inventory_dir'] = self._inventory.basedir()
            if play:
                # add the list of hosts in the play, as adjusted for limit/filters
                # DEPRECATED: play_hosts should be deprecated in favor of ansible_play_hosts,
                #             however this would take work in the templating engine, so for now
                #             we'll add both so we can give users something transitional to use
                host_list = [x.name for x in self._inventory.get_hosts()]
                all_vars['play_hosts'] = host_list
                all_vars['ansible_play_hosts'] = host_list

        # the 'omit' value allows params to be left out if the variable they are based on is undefined
        all_vars['omit'] = self._omit_token

        all_vars['ansible_version'] = CLI.version_info(gitinfo=False)

        if 'hostvars' in all_vars and host:
            all_vars['vars'] = all_vars['hostvars'][host.get_name()]

        #CACHED_VARS[cache_entry] = all_vars

        debug("done with get_vars()")
        return all_vars
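The precedence list in the docstring boils down to folding later sources over earlier ones, one layer at a time. A tiny standalone illustration of that folding with the default 'replace' hash behaviour (the layer contents are invented):

from functools import reduce

# Invented variable layers, ordered from lowest to highest precedence,
# mirroring the order described in the docstring above.
layers = [
    {'http_port': 80, 'region': 'role-default'},   # role defaults
    {'region': 'eu-west-1'},                       # group_vars / host_vars
    {'http_port': 8080},                           # play / task vars
    {'region': 'override'},                        # extra vars always win
]

def combine(a, b):
    merged = dict(a)
    merged.update(b)   # 'replace' behaviour: the later layer wins per key
    return merged

print(reduce(combine, layers, {}))
# -> {'http_port': 8080, 'region': 'override'}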
Example #5
    def run(self, play):
        '''
        Iterates over the roles/tasks in a play, using the given (or default)
        strategy for queueing tasks. The default is the linear strategy, which
        operates like classic Ansible by keeping all hosts in lock-step with
        a given task (meaning no hosts move on to the next task until all hosts
        are done with the current task).
        '''

        if not self._callbacks_loaded:
            self.load_callbacks()

        all_vars = self._variable_manager.get_vars(play=play)
        warn_if_reserved(all_vars)
        templar = Templar(loader=self._loader, variables=all_vars)

        new_play = play.copy()
        new_play.post_validate(templar)
        new_play.handlers = new_play.compile_roles_handlers(
        ) + new_play.handlers

        self.hostvars = HostVars(
            inventory=self._inventory,
            variable_manager=self._variable_manager,
            loader=self._loader,
        )

        # Fork # of forks, # of hosts or serial, whichever is lowest
        num_hosts = len(
            self._inventory.get_hosts(new_play.hosts,
                                      ignore_restrictions=True))

        max_serial = 0
        if new_play.serial:
            # the play has not been post_validated here, so we may need
            # to convert the scalar value to a list at this point
            serial_items = new_play.serial
            if not isinstance(serial_items, list):
                serial_items = [serial_items]
            max_serial = max([pct_to_int(x, num_hosts) for x in serial_items])

        contenders = [self._options.forks, max_serial, num_hosts]
        contenders = [v for v in contenders if v is not None and v > 0]
        self._initialize_processes(min(contenders))

        play_context = PlayContext(new_play, self._options, self.passwords,
                                   self._connection_lockfile.fileno())
        for callback_plugin in self._callback_plugins:
            if hasattr(callback_plugin, 'set_play_context'):
                callback_plugin.set_play_context(play_context)

        self.send_callback('v2_playbook_on_play_start', new_play)

        # initialize the shared dictionary containing the notified handlers
        self._initialize_notified_handlers(new_play)

        # load the specified strategy (or the default linear one)
        strategy = strategy_loader.get(new_play.strategy, self)
        if strategy is None:
            raise AnsibleError("Invalid play strategy specified: %s" %
                               new_play.strategy,
                               obj=play._ds)

        # build the iterator
        iterator = PlayIterator(
            inventory=self._inventory,
            play=new_play,
            play_context=play_context,
            variable_manager=self._variable_manager,
            all_vars=all_vars,
            start_at_done=self._start_at_done,
        )

        # Because the TQM may survive multiple play runs, we start by marking
        # any hosts as failed in the iterator here which may have been marked
        # as failed in previous runs. Then we clear the internal list of failed
        # hosts so we know what failed this round.
        for host_name in self._failed_hosts.keys():
            host = self._inventory.get_host(host_name)
            iterator.mark_host_failed(host)

        self.clear_failed_hosts()

        # during initialization, the PlayContext will clear the start_at_task
        # field to signal that a matching task was found, so check that here
        # and remember it so we don't try to skip tasks on future plays
        if getattr(self._options, 'start_at_task',
                   None) is not None and play_context.start_at_task is None:
            self._start_at_done = True

        # and run the play using the strategy and cleanup on way out
        play_return = strategy.run(iterator, play_context)

        # now re-save the hosts that failed from the iterator to our internal list
        for host_name in iterator.get_failed_hosts():
            self._failed_hosts[host_name] = True

        strategy.cleanup()
        self._cleanup_processes()
        return play_return
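The serial handling above turns each serial entry (an integer or a percentage string) into a host count via pct_to_int() before taking the maximum. Below is a rough standalone equivalent for illustration only; the real helper lives in ansible.utils and may differ in rounding details.

def pct_to_int(value, num_items, min_value=1):
    # Sketch: accept either an int-like value or a string such as "30%",
    # and return at least min_value (mirrors how serial batch sizes are computed).
    if isinstance(value, str) and value.endswith('%'):
        value_pct = int(value.replace('%', ''))
        return int(value_pct / 100.0 * num_items) or min_value
    return int(value) or min_value

# With 10 hosts and serial: ["30%", 5] the batch sizes are 3 and 5, so max_serial == 5.
print(max(pct_to_int(x, 10) for x in ['30%', 5]))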
Example #6
    def run(self, play):
        '''
        Iterates over the roles/tasks in a play, using the given (or default)
        strategy for queueing tasks. The default is the linear strategy, which
        operates like classic Ansible by keeping all hosts in lock-step with
        a given task (meaning no hosts move on to the next task until all hosts
        are done with the current task).
        '''

        if not self._callbacks_loaded:
            self.load_callbacks()

        all_vars = self._variable_manager.get_vars(play=play)
        warn_if_reserved(all_vars)
        templar = Templar(loader=self._loader, variables=all_vars)

        new_play = play.copy()
        new_play.post_validate(templar)
        new_play.handlers = new_play.compile_roles_handlers(
        ) + new_play.handlers

        self.hostvars = HostVars(
            inventory=self._inventory,
            variable_manager=self._variable_manager,
            loader=self._loader,
        )

        play_context = PlayContext(new_play, self.passwords,
                                   self._connection_lockfile.fileno())
        if (self._stdout_callback
                and hasattr(self._stdout_callback, 'set_play_context')):
            self._stdout_callback.set_play_context(play_context)

        for callback_plugin in self._callback_plugins:
            if hasattr(callback_plugin, 'set_play_context'):
                callback_plugin.set_play_context(play_context)

        self.send_callback('v2_playbook_on_play_start', new_play)

        # build the iterator
        iterator = PlayIterator(
            inventory=self._inventory,
            play=new_play,
            play_context=play_context,
            variable_manager=self._variable_manager,
            all_vars=all_vars,
            start_at_done=self._start_at_done,
        )

        # adjust the number of workers to the configured forks or the batch size, whichever is lower
        self._initialize_processes(min(self._forks, iterator.batch_size))

        # load the specified strategy (or the default linear one)
        strategy = strategy_loader.get(new_play.strategy, self)
        if strategy is None:
            raise AnsibleError("Invalid play strategy specified: %s" %
                               new_play.strategy,
                               obj=play._ds)

        # Because the TQM may survive multiple play runs, we start by marking
        # any hosts as failed in the iterator here which may have been marked
        # as failed in previous runs. Then we clear the internal list of failed
        # hosts so we know what failed this round.
        for host_name in self._failed_hosts.keys():
            host = self._inventory.get_host(host_name)
            iterator.mark_host_failed(host)

        self.clear_failed_hosts()

        # during initialization, the PlayContext will clear the start_at_task
        # field to signal that a matching task was found, so check that here
        # and remember it so we don't try to skip tasks on future plays
        if context.CLIARGS.get(
                'start_at_task'
        ) is not None and play_context.start_at_task is None:
            self._start_at_done = True

        # and run the play using the strategy and cleanup on way out
        play_return = strategy.run(iterator, play_context)

        # now re-save the hosts that failed from the iterator to our internal list
        for host_name in iterator.get_failed_hosts():
            self._failed_hosts[host_name] = True

        strategy.cleanup()
        self._cleanup_processes()
        return play_return
Example #7
class VariableManager:

    def __init__(self):

        self._fact_cache       = FactCache()
        self._vars_cache       = defaultdict(dict)
        self._extra_vars       = defaultdict(dict)
        self._host_vars_files  = defaultdict(dict)
        self._group_vars_files = defaultdict(dict)
        self._inventory        = None

        self._omit_token       = '__omit_place_holder__%s' % sha1(os.urandom(64)).hexdigest()

    def _get_cache_entry(self, play=None, host=None, task=None):
        play_id = "NONE"
        if play:
            play_id = play._uuid

        host_id = "NONE"
        if host:
            host_id = host.get_name()

        task_id = "NONE"
        if task:
            task_id = task._uuid

        return "PLAY:%s;HOST:%s;TASK:%s" % (play_id, host_id, task_id)

    @property
    def extra_vars(self):
        ''' ensures a clean copy of the extra_vars are made '''
        return self._extra_vars.copy()

    @extra_vars.setter
    def extra_vars(self, value):
        ''' ensures a clean copy of the extra_vars are used to set the value '''
        assert isinstance(value, MutableMapping)
        self._extra_vars = value.copy()

    def set_inventory(self, inventory):
        self._inventory = inventory

    def _preprocess_vars(self, a):
        '''
        Ensures that vars contained in the parameter passed in are
        returned as a list of dictionaries, to ensure for instance
        that vars loaded from a file conform to an expected state.
        '''

        if a is None:
            return None
        elif not isinstance(a, list):
            data = [ a ]
        else:
            data = a

        for item in data:
            if not isinstance(item, MutableMapping):
                raise AnsibleError("variable files must contain either a dictionary of variables, or a list of dictionaries. Got: %s (%s)" % (a, type(a)))

        return data

    def _validate_both_dicts(self, a, b):
        '''
        Validates that both arguments are dictionaries, or an error is raised.
        '''
        if not (isinstance(a, MutableMapping) and isinstance(b, MutableMapping)):
            raise AnsibleError("failed to combine variables, expected dicts but got a '%s' and a '%s'" % (type(a).__name__, type(b).__name__))

    def _combine_vars(self, a, b):
        '''
        Combines dictionaries of variables, based on the hash behavior
        '''

        self._validate_both_dicts(a, b)

        if C.DEFAULT_HASH_BEHAVIOUR == "merge":
            return self._merge_dicts(a, b)
        else:
            return dict(a.items() + b.items())

    def _merge_dicts(self, a, b):
        '''
        Recursively merges dict b into a, so that keys
        from b take precedence over keys from a.
        '''

        result = dict()

        self._validate_both_dicts(a, b)

        for dicts in a, b:
            # next, iterate over b keys and values
            for k, v in dicts.iteritems():
                # if there's already such key in a
                # and that key contains dict
                if k in result and isinstance(result[k], dict):
                    # merge those dicts recursively
                    result[k] = self._merge_dicts(a[k], v)
                else:
                    # otherwise, just copy a value from b to a
                    result[k] = v

        return result

    def get_vars(self, loader, play=None, host=None, task=None, include_hostvars=True, use_cache=True):
        '''
        Returns the variables, with optional "context" given via the parameters
        for the play, host, and task (which could possibly result in different
        sets of variables being returned due to the additional context).

        The order of precedence is:
        - play->roles->get_default_vars (if there is a play context)
        - group_vars_files[host] (if there is a host context)
        - host_vars_files[host] (if there is a host context)
        - host->get_vars (if there is a host context)
        - fact_cache[host] (if there is a host context)
        - vars_cache[host] (if there is a host context)
        - play vars (if there is a play context)
        - play vars_files (if there's no host context, ignore
          file names that cannot be templated)
        - task->get_vars (if there is a task context)
        - extra vars
        '''

        debug("in VariableManager get_vars()")
        cache_entry = self._get_cache_entry(play=play, host=host, task=task)
        if cache_entry in CACHED_VARS and use_cache:
            debug("vars are cached, returning them now")
            return CACHED_VARS[cache_entry]

        all_vars = defaultdict(dict)

        if play:
            # first we compile any vars specified in defaults/main.yml
            # for all roles within the specified play
            for role in play.get_roles():
                all_vars = self._combine_vars(all_vars, role.get_default_vars())

        if host:
            # next, if a host is specified, we load any vars from group_vars
            # files and then any vars from host_vars files which may apply to
            # this host or the groups it belongs to

            # we merge in the special 'all' group_vars first, if they exist
            if 'all' in self._group_vars_files:
                data = self._preprocess_vars(self._group_vars_files['all'])
                for item in data:
                    all_vars = self._combine_vars(all_vars, item)

            for group in host.get_groups():
                all_vars = self._combine_vars(all_vars, group.get_vars())
                if group.name in self._group_vars_files and group.name != 'all':
                    data = self._preprocess_vars(self._group_vars_files[group.name])
                    for item in data:
                        all_vars = self._combine_vars(all_vars, item)

            host_name = host.get_name()
            if host_name in self._host_vars_files:
                data = self._preprocess_vars(self._host_vars_files[host_name])
                for item in data:
                    all_vars = self._combine_vars(all_vars, item)

            # then we merge in vars specified for this host
            all_vars = self._combine_vars(all_vars, host.get_vars())

            # next comes the facts cache and the vars cache, respectively
            try:
                all_vars = self._combine_vars(all_vars, self._fact_cache.get(host.name, dict()))
            except KeyError:
                pass

        if play:
            all_vars = self._combine_vars(all_vars, play.get_vars())

            for vars_file_item in play.get_vars_files():
                try:
                    # create a set of temporary vars here, which incorporate the
                    # extra vars so we can properly template the vars_files entries
                    temp_vars = self._combine_vars(all_vars, self._extra_vars)
                    templar = Templar(loader=loader, variables=temp_vars)

                    # we assume each item in the list is itself a list, as we
                    # support "conditional includes" for vars_files, which mimics
                    # the with_first_found mechanism.
                    vars_file_list = templar.template(vars_file_item)
                    if not isinstance(vars_file_list, list):
                         vars_file_list = [ vars_file_list ]

                    # now we iterate through the (potential) files, and break out
                    # as soon as we read one from the list. If none are found, we
                    # raise an error, which is silently ignored at this point.
                    for vars_file in vars_file_list:
                        data = self._preprocess_vars(loader.load_from_file(vars_file))
                        if data is not None:
                            for item in data:
                                all_vars = self._combine_vars(all_vars, item)
                            break
                    else:
                        raise AnsibleError("vars file %s was not found" % vars_file_item)
                except UndefinedError as e:
                    continue

            if not C.DEFAULT_PRIVATE_ROLE_VARS:
                for role in play.get_roles():
                    all_vars = self._combine_vars(all_vars, role.get_vars())

        if task:
            if task._role:
                all_vars = self._combine_vars(all_vars, task._role.get_vars())
            all_vars = self._combine_vars(all_vars, task.get_vars())

        if host:
            all_vars = self._combine_vars(all_vars, self._vars_cache.get(host.get_name(), dict()))

        all_vars = self._combine_vars(all_vars, self._extra_vars)

        # FIXME: make sure all special vars are here
        # Finally, we create special vars

        all_vars['playbook_dir'] = loader.get_basedir()

        if host:
            all_vars['groups'] = [group.name for group in host.get_groups()]

            if self._inventory is not None:
                all_vars['groups']   = self._inventory.groups_list()
                if include_hostvars:
                    hostvars = HostVars(vars_manager=self, play=play, inventory=self._inventory, loader=loader)
                    all_vars['hostvars'] = hostvars

        if task:
            if task._role:
                all_vars['role_path'] = task._role._role_path

        if self._inventory is not None:
            all_vars['inventory_dir'] = self._inventory.basedir()
            if play:
                # add the list of hosts in the play, as adjusted for limit/filters
                # DEPRECATED: play_hosts should be deprecated in favor of ansible_play_hosts,
                #             however this would take work in the templating engine, so for now
                #             we'll add both so we can give users something transitional to use
                host_list = [x.name for x in self._inventory.get_hosts()]
                all_vars['play_hosts'] = host_list
                all_vars['ansible_play_hosts'] = host_list


        # the 'omit' value allows params to be left out if the variable they are based on is undefined
        all_vars['omit'] = self._omit_token

        all_vars['ansible_version'] = CLI.version_info(gitinfo=False)

        # make vars self referential, so people can do things like 'vars[var_name]'
        copied_vars = all_vars.copy()
        if 'hostvars' in copied_vars:
            del copied_vars['hostvars']
        all_vars['vars'] = copied_vars

        #CACHED_VARS[cache_entry] = all_vars

        debug("done with get_vars()")
        return all_vars
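As a quick standalone illustration of the two behaviours that _combine_vars()/_merge_dicts() in the class above switch on (this is not Ansible's own implementation): with the default 'replace' behaviour the right-hand dict wins wholesale per top-level key, while 'merge' recurses into nested dicts.

a = {'web': {'port': 80, 'tls': False}, 'name': 'a'}
b = {'web': {'tls': True}, 'name': 'b'}

# 'replace' (the default): top-level keys from b overwrite those from a entirely.
replaced = dict(list(a.items()) + list(b.items()))
# -> {'web': {'tls': True}, 'name': 'b'}

# 'merge': nested dicts are combined key by key, with b taking precedence.
def merge(x, y):
    out = dict(x)
    for k, v in y.items():
        if k in out and isinstance(out[k], dict) and isinstance(v, dict):
            out[k] = merge(out[k], v)
        else:
            out[k] = v
    return out

merged = merge(a, b)
# -> {'web': {'port': 80, 'tls': True}, 'name': 'b'}
print(replaced, merged)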
Example #8
    def get_vars(self,
                 loader,
                 play=None,
                 host=None,
                 task=None,
                 use_cache=True):
        '''
        Returns the variables, with optional "context" given via the parameters
        for the play, host, and task (which could possibly result in different
        sets of variables being returned due to the additional context).

        The order of precedence is:
        - play->roles->get_default_vars (if there is a play context)
        - group_vars_files[host] (if there is a host context)
        - host_vars_files[host] (if there is a host context)
        - host->get_vars (if there is a host context)
        - fact_cache[host] (if there is a host context)
        - vars_cache[host] (if there is a host context)
        - play vars (if there is a play context)
        - play vars_files (if there's no host context, ignore
          file names that cannot be templated)
        - task->get_vars (if there is a task context)
        - extra vars
        '''

        debug("in VariableManager get_vars()")
        cache_entry = self._get_cache_entry(play=play, host=host, task=task)
        if cache_entry in CACHED_VARS and use_cache:
            debug("vars are cached, returning them now")
            return CACHED_VARS[cache_entry]

        all_vars = defaultdict(dict)

        if play:
            # first we compile any vars specified in defaults/main.yml
            # for all roles within the specified play
            for role in play.get_roles():
                all_vars = self._combine_vars(all_vars,
                                              role.get_default_vars())

        if host:
            # next, if a host is specified, we load any vars from group_vars
            # files and then any vars from host_vars files which may apply to
            # this host or the groups it belongs to

            # we merge in the special 'all' group_vars first, if they exist
            if 'all' in self._group_vars_files:
                all_vars = self._combine_vars(all_vars,
                                              self._group_vars_files['all'])

            for group in host.get_groups():
                all_vars = self._combine_vars(all_vars, group.get_vars())
                if group.name in self._group_vars_files and group.name != 'all':
                    all_vars = self._combine_vars(
                        all_vars, self._group_vars_files[group.name])

            host_name = host.get_name()
            if host_name in self._host_vars_files:
                all_vars = self._combine_vars(all_vars,
                                              self._host_vars_files[host_name])

            # then we merge in vars specified for this host
            all_vars = self._combine_vars(all_vars, host.get_vars())

            # next comes the facts cache and the vars cache, respectively
            all_vars = self._combine_vars(
                all_vars, self._fact_cache.get(host.get_name(), dict()))

        if play:
            all_vars = self._combine_vars(all_vars, play.get_vars())
            templar = Templar(loader=loader, variables=all_vars)
            for vars_file in play.get_vars_files():
                try:
                    vars_file = templar.template(vars_file)
                    data = loader.load_from_file(vars_file)
                    if data is None:
                        data = dict()
                    all_vars = self._combine_vars(all_vars, data)
                except:
                    # FIXME: get_vars should probably be taking a flag to determine
                    #        whether or not vars files errors should be fatal at this
                    #        stage, or just base it on whether a host was specified?
                    pass
            for role in play.get_roles():
                all_vars = self._combine_vars(all_vars, role.get_vars())

        if host:
            all_vars = self._combine_vars(
                all_vars, self._vars_cache.get(host.get_name(), dict()))

        if task:
            if task._role:
                all_vars = self._combine_vars(all_vars, task._role.get_vars())
            all_vars = self._combine_vars(all_vars, task.get_vars())

        all_vars = self._combine_vars(all_vars, self._extra_vars)

        # FIXME: make sure all special vars are here
        # Finally, we create special vars

        all_vars['playbook_dir'] = loader.get_basedir()

        if host:
            all_vars['groups'] = [group.name for group in host.get_groups()]

            if self._inventory is not None:
                hostvars = HostVars(vars_manager=self,
                                    play=play,
                                    inventory=self._inventory,
                                    loader=loader)
                all_vars['hostvars'] = hostvars
                all_vars['groups'] = self._inventory.groups_list()

        if task:
            if task._role:
                all_vars['role_path'] = task._role._role_path

        if self._inventory is not None:
            all_vars['inventory_dir'] = self._inventory.basedir()
            if play:
                # add the list of hosts in the play, as adjusted for limit/filters
                # FIXME: play_hosts should be deprecated in favor of ansible_play_hosts,
                #        however this would take work in the templating engine, so for now
                #        we'll add both so we can give users something transitional to use
                host_list = [x.name for x in self._inventory.get_hosts()]
                all_vars['play_hosts'] = host_list
                all_vars['ansible_play_hosts'] = host_list

        # the 'omit' value allows params to be left out if the variable they are based on is undefined
        all_vars['omit'] = self._omit_token

        #CACHED_VARS[cache_entry] = all_vars

        debug("done with get_vars()")
        return all_vars
Example #9
    def run(self, play):
        '''
        Iterates over the roles/tasks in a play, using the given (or default)
        strategy for queueing tasks. The default is the linear strategy, which
        operates like classic Ansible by keeping all hosts in lock-step with
        a given task (meaning no hosts move on to the next task until all hosts
        are done with the current task).
        '''

        if not self._callbacks_loaded:
            self.load_callbacks()

        all_vars = self._variable_manager.get_vars(loader=self._loader,
                                                   play=play)
        templar = Templar(loader=self._loader, variables=all_vars)

        new_play = play.copy()
        new_play.post_validate(templar)

        self.hostvars = HostVars(
            inventory=self._inventory,
            variable_manager=self._variable_manager,
            loader=self._loader,
        )

        # Fork # of forks, # of hosts or serial, whichever is lowest
        contenders = [
            self._options.forks, play.serial,
            len(self._inventory.get_hosts(new_play.hosts))
        ]
        contenders = [v for v in contenders if v is not None and v > 0]
        self._initialize_processes(min(contenders))

        play_context = PlayContext(new_play, self._options, self.passwords,
                                   self._connection_lockfile.fileno())
        for callback_plugin in self._callback_plugins:
            if hasattr(callback_plugin, 'set_play_context'):
                callback_plugin.set_play_context(play_context)

        self.send_callback('v2_playbook_on_play_start', new_play)

        # initialize the shared dictionary containing the notified handlers
        self._initialize_notified_handlers(new_play.handlers)

        # load the specified strategy (or the default linear one)
        strategy = strategy_loader.get(new_play.strategy, self)
        if strategy is None:
            raise AnsibleError("Invalid play strategy specified: %s" %
                               new_play.strategy,
                               obj=play._ds)

        # build the iterator
        iterator = PlayIterator(
            inventory=self._inventory,
            play=new_play,
            play_context=play_context,
            variable_manager=self._variable_manager,
            all_vars=all_vars,
            start_at_done=self._start_at_done,
        )

        # during initialization, the PlayContext will clear the start_at_task
        # field to signal that a matching task was found, so check that here
        # and remember it so we don't try to skip tasks on future plays
        if getattr(self._options, 'start_at_task',
                   None) is not None and play_context.start_at_task is None:
            self._start_at_done = True

        # and run the play using the strategy and cleanup on way out
        play_return = strategy.run(iterator, play_context)
        self._cleanup_processes()
        return play_return
Example #10
    def get_vars(self,
                 loader,
                 play=None,
                 host=None,
                 task=None,
                 include_hostvars=True,
                 include_delegate_to=True,
                 use_cache=True):
        '''
        Returns the variables, with optional "context" given via the parameters
        for the play, host, and task (which could possibly result in different
        sets of variables being returned due to the additional context).

        The order of precedence is:
        - play->roles->get_default_vars (if there is a play context)
        - group_vars_files[host] (if there is a host context)
        - host_vars_files[host] (if there is a host context)
        - host->get_vars (if there is a host context)
        - fact_cache[host] (if there is a host context)
        - play vars (if there is a play context)
        - play vars_files (if there's no host context, ignore
          file names that cannot be templated)
        - task->get_vars (if there is a task context)
        - vars_cache[host] (if there is a host context)
        - extra vars
        '''

        debug("in VariableManager get_vars()")
        cache_entry = self._get_cache_entry(play=play, host=host, task=task)
        if cache_entry in VARIABLE_CACHE and use_cache:
            debug("vars are cached, returning them now")
            return VARIABLE_CACHE[cache_entry]

        all_vars = defaultdict(dict)

        if play:
            # first we compile any vars specified in defaults/main.yml
            # for all roles within the specified play
            for role in play.get_roles():
                all_vars = combine_vars(all_vars, role.get_default_vars())

            # if we have a task in this context, and that task has a role, make
            # sure it sees its defaults above any other roles, as we previously
            # (v1) made sure each task had a copy of its roles default vars
            if task and task._role is not None:
                all_vars = combine_vars(all_vars,
                                        task._role.get_default_vars())

        if host:
            # next, if a host is specified, we load any vars from group_vars
            # files and then any vars from host_vars files which may apply to
            # this host or the groups it belongs to

            # we merge in vars from groups specified in the inventory (INI or script)
            all_vars = combine_vars(all_vars, host.get_group_vars())

            # then we merge in the special 'all' group_vars first, if they exist
            if 'all' in self._group_vars_files:
                data = preprocess_vars(self._group_vars_files['all'])
                for item in data:
                    all_vars = combine_vars(all_vars, item)

            for group in host.get_groups():
                if group.name in self._group_vars_files and group.name != 'all':
                    for data in self._group_vars_files[group.name]:
                        data = preprocess_vars(data)
                        for item in data:
                            all_vars = combine_vars(all_vars, item)

            # then we merge in vars from the host specified in the inventory (INI or script)
            all_vars = combine_vars(all_vars, host.get_vars())

            # then we merge in the host_vars/<hostname> file, if it exists
            host_name = host.get_name()
            if host_name in self._host_vars_files:
                for data in self._host_vars_files[host_name]:
                    data = preprocess_vars(data)
                    for item in data:
                        all_vars = combine_vars(all_vars, item)

            # finally, the facts cache for this host, if it exists
            try:
                host_facts = self._fact_cache.get(host.name, dict())
                for k in host_facts.keys():
                    if host_facts[k] is not None and not isinstance(
                            host_facts[k], UnsafeProxy):
                        host_facts[k] = UnsafeProxy(host_facts[k])
                all_vars = combine_vars(all_vars, host_facts)
            except KeyError:
                pass

        if play:
            all_vars = combine_vars(all_vars, play.get_vars())

            for vars_file_item in play.get_vars_files():
                # create a set of temporary vars here, which incorporate the
                # extra vars so we can properly template the vars_files entries
                temp_vars = combine_vars(all_vars, self._extra_vars)
                templar = Templar(loader=loader, variables=temp_vars)

                # we assume each item in the list is itself a list, as we
                # support "conditional includes" for vars_files, which mimics
                # the with_first_found mechanism.
                #vars_file_list = templar.template(vars_file_item)
                vars_file_list = vars_file_item
                if not isinstance(vars_file_list, list):
                    vars_file_list = [vars_file_list]

                # now we iterate through the (potential) files, and break out
                # as soon as we read one from the list. If none are found, we
                # raise an error, which is silently ignored at this point.
                try:
                    for vars_file in vars_file_list:
                        vars_file = templar.template(vars_file)
                        try:
                            data = preprocess_vars(
                                loader.load_from_file(vars_file))
                            if data is not None:
                                for item in data:
                                    all_vars = combine_vars(all_vars, item)
                            break
                        except AnsibleFileNotFound as e:
                            # we continue on loader failures
                            continue
                        except AnsibleParserError as e:
                            raise
                    else:
                        raise AnsibleFileNotFound(
                            "vars file %s was not found" % vars_file_item)
                except (UndefinedError, AnsibleUndefinedVariable):
                    if host is not None and self._fact_cache.get(
                            host.name,
                            dict()).get('module_setup') and task is not None:
                        raise AnsibleUndefinedVariable(
                            "an undefined variable was found when attempting to template the vars_files item '%s'"
                            % vars_file_item,
                            obj=vars_file_item)
                    else:
                        # we do not have a full context here, and the missing variable could be
                        # because of that, so just show a warning and continue
                        display.vvv(
                            "skipping vars_file '%s' due to an undefined variable"
                            % vars_file_item)
                        continue

            if not C.DEFAULT_PRIVATE_ROLE_VARS:
                for role in play.get_roles():
                    all_vars = combine_vars(
                        all_vars, role.get_vars(include_params=False))

        if task:
            if task._role:
                all_vars = combine_vars(all_vars, task._role.get_vars())
            all_vars = combine_vars(all_vars, task.get_vars())

        if host:
            all_vars = combine_vars(
                all_vars, self._vars_cache.get(host.get_name(), dict()))
            all_vars = combine_vars(
                all_vars,
                self._nonpersistent_fact_cache.get(host.name, dict()))

        all_vars = combine_vars(all_vars, self._extra_vars)

        # FIXME: make sure all special vars are here
        # Finally, we create special vars

        all_vars['playbook_dir'] = loader.get_basedir()

        if host:
            all_vars['group_names'] = [
                group.name for group in host.get_groups()
            ]

            if self._inventory is not None:
                all_vars['groups'] = dict()
                for (group_name, group) in iteritems(self._inventory.groups):
                    all_vars['groups'][group_name] = [
                        h.name for h in group.get_hosts()
                    ]

                if include_hostvars:
                    hostvars_cache_entry = self._get_cache_entry(play=play)
                    if hostvars_cache_entry in HOSTVARS_CACHE:
                        hostvars = HOSTVARS_CACHE[hostvars_cache_entry]
                    else:
                        hostvars = HostVars(play=play,
                                            inventory=self._inventory,
                                            loader=loader,
                                            variable_manager=self)
                        HOSTVARS_CACHE[hostvars_cache_entry] = hostvars
                    all_vars['hostvars'] = hostvars

        if task:
            if task._role:
                all_vars['role_path'] = task._role._role_path

            # if we have a task and we're delegating to another host, figure out the
            # variables for that host now so we don't have to rely on hostvars later
            if task.delegate_to is not None and include_delegate_to:
                # we unfortunately need to template the delegate_to field here,
                # as we're fetching vars before post_validate has been called on
                # the task that has been passed in
                templar = Templar(loader=loader, variables=all_vars)

                items = []
                if task.loop is not None:
                    if task.loop in lookup_loader:
                        #TODO: remove convert_bare true and deprecate this in with_
                        try:
                            loop_terms = listify_lookup_plugin_terms(
                                terms=task.loop_args,
                                templar=templar,
                                loader=loader,
                                fail_on_undefined=True,
                                convert_bare=True)
                        except AnsibleUndefinedVariable as e:
                            if 'has no attribute' in str(e):
                                loop_terms = []
                                self._display.deprecated(
                                    "Skipping task due to undefined attribute, in the future this will be a fatal error."
                                )
                            else:
                                raise
                        items = lookup_loader.get(task.loop,
                                                  loader=loader,
                                                  templar=templar).run(
                                                      terms=loop_terms,
                                                      variables=all_vars)
                    else:
                        raise AnsibleError(
                            "Unexpected failure in finding the lookup named '%s' in the available lookup plugins"
                            % task.loop)
                else:
                    items = [None]

                vars_copy = all_vars.copy()
                delegated_host_vars = dict()
                for item in items:
                    # update the variables with the item value for templating, in case we need it
                    if item is not None:
                        vars_copy['item'] = item

                    templar.set_available_variables(vars_copy)
                    delegated_host_name = templar.template(
                        task.delegate_to, fail_on_undefined=False)
                    if delegated_host_name in delegated_host_vars:
                        # no need to repeat ourselves, as the delegate_to value
                        # does not appear to be tied to the loop item variable
                        continue

                    # a dictionary of variables to use if we have to create a new host below
                    new_delegated_host_vars = dict(
                        ansible_host=delegated_host_name,
                        ansible_user=C.DEFAULT_REMOTE_USER,
                        ansible_connection=C.DEFAULT_TRANSPORT,
                    )

                    # now try to find the delegated-to host in inventory, or failing that,
                    # create a new host on the fly so we can fetch variables for it
                    delegated_host = None
                    if self._inventory is not None:
                        delegated_host = self._inventory.get_host(
                            delegated_host_name)
                        # try looking it up based on the address field, and finally
                        # fall back to creating a host on the fly to use for the var lookup
                        if delegated_host is None:
                            for h in self._inventory.get_hosts(
                                    ignore_limits_and_restrictions=True):
                                # check if the address matches, or if both the delegated_to host
                                # and the current host are in the list of localhost aliases
                                if h.address == delegated_host_name or h.name in C.LOCALHOST and delegated_host_name in C.LOCALHOST:
                                    delegated_host = h
                                    break
                            else:
                                delegated_host = Host(name=delegated_host_name)
                                delegated_host.vars.update(
                                    new_delegated_host_vars)
                    else:
                        delegated_host = Host(name=delegated_host_name)
                        delegated_host.vars.update(new_delegated_host_vars)

                    # now we go fetch the vars for the delegated-to host and save them in our
                    # master dictionary of variables to be used later in the TaskExecutor/PlayContext
                    delegated_host_vars[delegated_host_name] = self.get_vars(
                        loader=loader,
                        play=play,
                        host=delegated_host,
                        task=task,
                        include_delegate_to=False,
                        include_hostvars=False,
                    )
                all_vars['ansible_delegated_vars'] = delegated_host_vars

        if self._inventory is not None:
            all_vars['inventory_dir'] = self._inventory.basedir()
            if play:
                # add the list of hosts in the play, as adjusted for limit/filters
                # DEPRECATED: play_hosts should be deprecated in favor of ansible_play_hosts,
                #             however this would take work in the templating engine, so for now
                #             we'll add both so we can give users something transitional to use
                host_list = [x.name for x in self._inventory.get_hosts()]
                all_vars['play_hosts'] = host_list
                all_vars['ansible_play_hosts'] = host_list

        # the 'omit' value allows params to be left out if the variable they are based on is undefined
        all_vars['omit'] = self._omit_token
        all_vars['ansible_version'] = CLI.version_info(gitinfo=False)

        if 'hostvars' in all_vars and host:
            all_vars['vars'] = all_vars['hostvars'][host.get_name()]

        #VARIABLE_CACHE[cache_entry] = all_vars

        debug("done with get_vars()")
        return all_vars
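The delegate_to handling above stores its results under all_vars['ansible_delegated_vars'], keyed by the templated delegate_to host name, and fetches each target's variables with include_delegate_to=False so the lookup cannot recurse. An illustrative shape only; the host name and values are invented, and the keys shown are a small subset of a full get_vars() result:

ansible_delegated_vars = {
    'proxy01': {
        'ansible_host': '192.0.2.10',
        'ansible_user': 'deploy',
        'ansible_connection': 'ssh',
        'omit': '__omit_place_holder__<random sha1>',
        'playbook_dir': '/path/to/playbook',
    },
}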
Example #11
def get_ansible_variables(self):
    from ansible.vars.hostvars import HostVars

    (options, loader, inventory, variable_manager) = self.get_ansible_variablemanager()
    hostvars = HostVars(inventory=inventory, variable_manager=variable_manager, loader=loader)
    return hostvars[self.uid]
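get_ansible_variablemanager() is this example's own helper, not a public Ansible API. A minimal sketch of what such a helper might return using the Ansible 2.4+ module paths (the inventory path is a placeholder, and the 'options' slot is returned as None because the snippet above never uses it):

from ansible.parsing.dataloader import DataLoader
from ansible.inventory.manager import InventoryManager
from ansible.vars.manager import VariableManager


def get_ansible_variablemanager(inventory_source='/etc/ansible/hosts'):
    # Hypothetical stand-in for the helper used above.
    loader = DataLoader()
    inventory = InventoryManager(loader=loader, sources=[inventory_source])
    variable_manager = VariableManager(loader=loader, inventory=inventory)
    return (None, loader, inventory, variable_manager)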
Example #12
                all_vars = self._combine_vars(all_vars, task._role.get_vars())
            all_vars = self._combine_vars(all_vars, task.get_vars())

        all_vars = self._combine_vars(all_vars, self._extra_vars)

        # FIXME: make sure all special vars are here
        # Finally, we create special vars

        all_vars['playbook_dir'] = loader.get_basedir()

        if host:
            all_vars['groups'] = [group.name for group in host.get_groups()]

            if self._inventory is not None:
                hostvars = HostVars(vars_manager=self,
                                    play=play,
                                    inventory=self._inventory,
                                    loader=loader)
                all_vars['hostvars'] = hostvars
                all_vars['groups'] = self._inventory.groups_list()

        if task:
            if task._role:
                all_vars['role_path'] = task._role._role_path

        if self._inventory is not None:
            all_vars['inventory_dir'] = self._inventory.basedir()
            if play:
                # add the list of hosts in the play, as adjusted for limit/filters
                # FIXME: play_hosts should be deprecated in favor of ansible_play_hosts,
                #        however this would take work in the templating engine, so for now
                #        we'll add both so we can give users something transitional to use
    def run(self, play):
        '''
        Iterates over the roles/tasks in a play, using the given (or default)
        strategy for queueing tasks. The default is the linear strategy, which
        operates like classic Ansible by keeping all hosts in lock-step with
        a given task (meaning no hosts move on to the next task until all hosts
        are done with the current task).
        '''

        if not self._callbacks_loaded:
            self.load_callbacks()

        all_vars = self._variable_manager.get_vars(loader=self._loader,
                                                   play=play)
        templar = Templar(loader=self._loader, variables=all_vars)

        new_play = play.copy()
        new_play.post_validate(templar)

        class HostVarsManager(SyncManager):
            pass

        hostvars = HostVars(
            play=new_play,
            inventory=self._inventory,
            variable_manager=self._variable_manager,
            loader=self._loader,
        )

        HostVarsManager.register(
            'hostvars',
            callable=lambda: hostvars,
            # FIXME: this is the list of exposed methods to the DictProxy object, plus our
            #        special ones (set_variable_manager/set_inventory). There's probably a better way
            #        to do this with a proper BaseProxy/DictProxy derivative
            exposed=('set_variable_manager', 'set_inventory', '__contains__',
                     '__delitem__', '__getitem__', '__len__', '__setitem__',
                     'clear', 'copy', 'get', 'has_key', 'items', 'keys', 'pop',
                     'popitem', 'setdefault', 'update', 'values'),
        )
        self._hostvars_manager = HostVarsManager()
        self._hostvars_manager.start()

        # Fork # of forks, # of hosts or serial, whichever is lowest
        contenders = [
            self._options.forks, play.serial,
            len(self._inventory.get_hosts(new_play.hosts))
        ]
        contenders = [v for v in contenders if v is not None and v > 0]
        self._initialize_processes(min(contenders))

        play_context = PlayContext(new_play, self._options, self.passwords,
                                   self._connection_lockfile.fileno())
        for callback_plugin in self._callback_plugins:
            if hasattr(callback_plugin, 'set_play_context'):
                callback_plugin.set_play_context(play_context)

        self.send_callback('v2_playbook_on_play_start', new_play)

        # initialize the shared dictionary containing the notified handlers
        self._initialize_notified_handlers(new_play.handlers)

        # load the specified strategy (or the default linear one)
        strategy = strategy_loader.get(new_play.strategy, self)
        if strategy is None:
            raise AnsibleError("Invalid play strategy specified: %s" %
                               new_play.strategy,
                               obj=play._ds)

        # build the iterator
        iterator = PlayIterator(
            inventory=self._inventory,
            play=new_play,
            play_context=play_context,
            variable_manager=self._variable_manager,
            all_vars=all_vars,
            start_at_done=self._start_at_done,
        )

        # during initialization, the PlayContext will clear the start_at_task
        # field to signal that a matching task was found, so check that here
        # and remember it so we don't try to skip tasks on future plays
        if getattr(self._options, 'start_at_task',
                   None) is not None and play_context.start_at_task is None:
            self._start_at_done = True

        # and run the play using the strategy and cleanup on way out
        play_return = strategy.run(iterator, play_context)
        self._cleanup_processes()
        self._hostvars_manager.shutdown()
        return play_return
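HostVarsManager above is a multiprocessing SyncManager subclass that serves the hostvars object to worker processes through a proxy, exposing only the listed method names. A small self-contained sketch of the same register()/proxy pattern, using a plain dict in place of HostVars:

from multiprocessing.managers import SyncManager


def make_hostvars():
    # Runs inside the manager's server process and builds the shared object.
    return {'web01': {'ansible_host': '192.0.2.10'}}


class DemoManager(SyncManager):
    pass


# Only the listed methods are callable on the proxy returned by manager.hostvars().
DemoManager.register('hostvars', callable=make_hostvars,
                     exposed=('__contains__', '__getitem__', 'get', 'keys'))


if __name__ == '__main__':
    manager = DemoManager()
    manager.start()
    try:
        proxy = manager.hostvars()
        print(proxy.get('web01'))   # -> {'ansible_host': '192.0.2.10'}
    finally:
        manager.shutdown()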