Example #1
    def test_action_base__compute_environment_string(self):
        fake_loader = DictDataLoader({})

        # create our fake task
        mock_task = MagicMock()
        mock_task.action = "copy"
        mock_task.args = dict(a=1)

        # create a mock connection, so we don't actually try and connect to things
        def env_prefix(**args):
            return ' '.join([
                '%s=%s' % (k, pipes.quote(text_type(v)))
                for k, v in args.items()
            ])

        mock_connection = MagicMock()
        mock_connection._shell.env_prefix.side_effect = env_prefix

        # we're using a real play context here
        play_context = PlayContext()

        # and we're using a real templar here too
        templar = Templar(loader=fake_loader)

        # our test class
        action_base = DerivedActionBase(
            task=mock_task,
            connection=mock_connection,
            play_context=play_context,
            loader=fake_loader,
            templar=templar,
            shared_loader_obj=None,
        )

        # test standard environment setup
        mock_task.environment = [dict(FOO='foo'), None]
        env_string = action_base._compute_environment_string()
        self.assertEqual(env_string, "FOO=foo")

        # test where environment is not a list
        mock_task.environment = dict(FOO='foo')
        env_string = action_base._compute_environment_string()
        self.assertEqual(env_string, "FOO=foo")

        # test environment with a variable in it
        templar.set_available_variables(variables=dict(the_var='bar'))
        mock_task.environment = [dict(FOO='{{the_var}}')]
        env_string = action_base._compute_environment_string()
        self.assertEqual(env_string, "FOO=bar")

        # test with a bad environment set
        mock_task.environment = dict(FOO='foo')
        mock_task.environment = ['hi there']
        self.assertRaises(AnsibleError,
                          action_base._compute_environment_string)
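
For reference, here is a minimal sketch (not Ansible code; Python 3 assumed) of what the mocked env_prefix above produces: shell-quoted KEY=value pairs joined into a single prefix string, which is why the test expects plain "FOO=foo".

from shlex import quote

def env_prefix(**kwargs):
    # join shell-quoted KEY=value pairs, mirroring the side_effect mocked above
    return ' '.join('%s=%s' % (k, quote(str(v))) for k, v in kwargs.items())

print(env_prefix(FOO='foo', PATH='/tmp/my dir'))  # FOO=foo PATH='/tmp/my dir'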
Example #2
    def test_action_base__compute_environment_string(self):
        fake_loader = DictDataLoader({
        })

        # create our fake task
        mock_task = MagicMock()
        mock_task.action = "copy"
        mock_task.args = dict(a=1)

        # create a mock connection, so we don't actually try and connect to things
        def env_prefix(**args):
            return ' '.join(['%s=%s' % (k, shlex_quote(text_type(v))) for k, v in args.items()])
        mock_connection = MagicMock()
        mock_connection._shell.env_prefix.side_effect = env_prefix

        # we're using a real play context here
        play_context = PlayContext()

        # and we're using a real templar here too
        templar = Templar(loader=fake_loader)

        # our test class
        action_base = DerivedActionBase(
            task=mock_task,
            connection=mock_connection,
            play_context=play_context,
            loader=fake_loader,
            templar=templar,
            shared_loader_obj=None,
        )

        # test standard environment setup
        mock_task.environment = [dict(FOO='foo'), None]
        env_string = action_base._compute_environment_string()
        self.assertEqual(env_string, "FOO=foo")

        # test where environment is not a list
        mock_task.environment = dict(FOO='foo')
        env_string = action_base._compute_environment_string()
        self.assertEqual(env_string, "FOO=foo")

        # test environment with a variable in it
        templar.set_available_variables(variables=dict(the_var='bar'))
        mock_task.environment = [dict(FOO='{{the_var}}')]
        env_string = action_base._compute_environment_string()
        self.assertEqual(env_string, "FOO=bar")

        # test with a bad environment set
        mock_task.environment = dict(FOO='foo')
        mock_task.environment = ['hi there']
        self.assertRaises(AnsibleError, action_base._compute_environment_string)
Example #3
    def test_templar_simple(self):
        fake_loader = DictDataLoader({
          "/path/to/my_file.txt": "foo\n",
        })
        shared_loader = SharedPluginLoaderObj()
        variables = dict(
            foo="bar",
            bam="{{foo}}",
            num=1,
            var_true=True,
            var_false=False,
            var_dict=dict(a="b"),
            bad_dict="{a='b'",
            var_list=[1],
            recursive="{{recursive}}",
        )
        templar = Templar(loader=fake_loader, variables=variables)

        # test some basic templating
        self.assertEqual(templar.template("{{foo}}"), "bar")
        self.assertEqual(templar.template("{{foo}}\n"), "bar")
        self.assertEqual(templar.template("{{foo}}\n", preserve_trailing_newlines=True), "bar\n")
        self.assertEqual(templar.template("foo", convert_bare=True), "bar")
        self.assertEqual(templar.template("{{bam}}"), "bar")
        self.assertEqual(templar.template("{{num}}"), 1)
        self.assertEqual(templar.template("{{var_true}}"), True)
        self.assertEqual(templar.template("{{var_false}}"), False)
        self.assertEqual(templar.template("{{var_dict}}"), dict(a="b"))
        self.assertEqual(templar.template("{{bad_dict}}"), "{a='b'")
        self.assertEqual(templar.template("{{var_list}}"), [1])
        self.assertEqual(templar.template(1, convert_bare=True), 1)
        #FIXME: lookup ignores fake file and returns error
        #self.assertEqual(templar.template("{{lookup('file', '/path/to/my_file.txt')}}"), "foo")

        # force errors
        self.assertRaises(UndefinedError, templar.template, "{{bad_var}}")
        self.assertRaises(UndefinedError, templar.template, "{{lookup('file', bad_var)}}")
        self.assertRaises(AnsibleError, templar.template, "{{lookup('bad_lookup')}}")
        self.assertRaises(AnsibleError, templar.template, "{{recursive}}")
        self.assertRaises(AnsibleUndefinedVariable, templar.template, "{{foo-bar}}")

        # test with fail_on_undefined=False
        self.assertEqual(templar.template("{{bad_var}}", fail_on_undefined=False), "{{bad_var}}")

        # test set_available_variables()
        templar.set_available_variables(variables=dict(foo="bam"))
        self.assertEqual(templar.template("{{foo}}"), "bam")
        # variables must be a dict() for set_available_variables()
        self.assertRaises(AssertionError, templar.set_available_variables, "foo=bam")
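
The core pattern exercised by this test, shown standalone as a minimal sketch. It assumes an Ansible release where Templar still exposes set_available_variables() (newer releases replace it with the available_variables property) and where DataLoader is importable from ansible.parsing.dataloader:

from ansible.parsing.dataloader import DataLoader
from ansible.template import Templar

# build a templar, hand it a variable dict, then render a template string
templar = Templar(loader=DataLoader())
templar.set_available_variables(dict(greeting="hello"))
print(templar.template("{{ greeting }}, world"))  # -> hello, world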
Example #4
def make_and_run_plugin(task_args, task_vars):
    """
    Build and run an instance of the action plugin configured with the passed
    vars and args

    """
    templar = Templar(loader=DataLoader())
    templar.set_available_variables(task_vars)
    task = mock.MagicMock(args=task_args)
    plugin = MV(
        task=task,
        connection=None,
        play_context=None,
        loader=None,
        templar=templar,
        shared_loader_obj=None,
    )
    return plugin.run(tmp=None, task_vars=task_vars)
Example #5
    def _process_task_loops(self, task):
        results = []
        if task.loop_with or task.loop:
            task_vars = self._variable_manager.get_vars(task=task)
            templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=self._all_vars)
            items = self._get_loop_items(task, templar, task_vars)
            if items:
                for item in items:
                    try:
                        task_vars['item'] = item
                        templar.set_available_variables(task_vars)
                        tmp_task = task.copy(exclude_parent=True, exclude_tasks=True)
                        tmp_task._parent = task._parent
                        tmp_task.post_validate(templar=templar)
                        results.append(self._task_data_for_template(tmp_task))
                        del task_vars['item']
                    except AnsibleParserError:
                        display.warning('%s caused an AnsibleParserError; ignoring' % tmp_task)
                        results.clear()
        return results
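
A minimal sketch of the per-item pattern used above (same Ansible version assumption as earlier; the variable names here are illustrative only): push the current loop item into the variable dict, refresh the templar with set_available_variables(), template, then drop the item again.

from ansible.parsing.dataloader import DataLoader
from ansible.template import Templar

templar = Templar(loader=DataLoader())
task_vars = {'base_path': '/tmp'}
for item in ['a.conf', 'b.conf']:
    task_vars['item'] = item
    templar.set_available_variables(task_vars)  # refresh vars for this iteration
    print(templar.template("{{ base_path }}/{{ item }}"))
    del task_vars['item']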
Example #6
    def parse(self, inventory, loader, path):
        ''' parses the inventory file '''

        super(InventoryModule, self).parse(inventory, loader, path)

        try:
            data = self.loader.load_from_file(path)
        except Exception as e:
            raise AnsibleParserError("Unable to parse %s: %s" %
                                     (to_native(path), to_native(e)))

        if not data or data.get('plugin') != self.NAME:
            raise AnsibleParserError(
                "%s is empty or not a constructed groups config file" %
                (to_native(path)))

        try:
            templar = Templar(loader=loader)

            # Go over hosts (less var copies)
            for host in inventory.hosts:

                # get available variables to templar
                hostvars = host.get_vars()
                if host.name in inventory.cache:  # adds facts if cache is active
                    hostvars = combine_vars(hostvars,
                                            inventory.cache[host.name])
                templar.set_available_variables(hostvars)

                # process each 'group entry'
                for group_name, expression in data.get('groups', {}).items():
                    conditional = u"{%% if %s %%} True {%% else %%} False {%% endif %%}" % expression
                    result = templar.template(conditional)
                    if result and bool(result):
                        # ensure group exists
                        inventory.add_group(group_name)
                        # add host to group
                        inventory.add_child(group_name, host.name)
        except Exception as e:
            raise AnsibleParserError("failed to parse %s: %s " %
                                     (to_native(path), to_native(e)))
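
A minimal sketch of the conditional trick used in the plugin above (same version assumption; the example expression is made up): a bare Jinja2 expression is wrapped in an if/else so the rendered result can be read as a truth value when deciding group membership.

from ansible.parsing.dataloader import DataLoader
from ansible.template import Templar

templar = Templar(loader=DataLoader())
templar.set_available_variables({'ansible_os_family': 'Debian'})
expression = "ansible_os_family == 'Debian'"
conditional = "{%% if %s %%} True {%% else %%} False {%% endif %%}" % expression
print(templar.template(conditional))  # renders the 'True' branch for this host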
Example #7
    def _get_delegated_vars(self, play, task, existing_variables):
        # we unfortunately need to template the delegate_to field here,
        # as we're fetching vars before post_validate has been called on
        # the task that has been passed in
        vars_copy = existing_variables.copy()
        templar = Templar(loader=self._loader, variables=vars_copy)

        items = []
        if task.loop is not None:
            if task.loop in lookup_loader:
                try:
                    loop_terms = listify_lookup_plugin_terms(terms=task.loop_args, templar=templar,
                                                             loader=self._loader, fail_on_undefined=True, convert_bare=False)
                    items = lookup_loader.get(task.loop, loader=self._loader, templar=templar).run(terms=loop_terms, variables=vars_copy)
                except AnsibleUndefinedVariable:
                    # This task will be skipped later due to this, so we just setup
                    # a dummy array for the later code so it doesn't fail
                    items = [None]
            else:
                raise AnsibleError("Unexpected failure in finding the lookup named '%s' in the available lookup plugins" % task.loop)
        else:
            items = [None]

        delegated_host_vars = dict()
        for item in items:
            # update the variables with the item value for templating, in case we need it
            if item is not None:
                vars_copy['item'] = item

            templar.set_available_variables(vars_copy)
            delegated_host_name = templar.template(task.delegate_to, fail_on_undefined=False)
            if delegated_host_name is None:
                raise AnsibleError(message="Undefined delegate_to host for task:", obj=task._ds)
            if delegated_host_name in delegated_host_vars:
                # no need to repeat ourselves, as the delegate_to value
                # does not appear to be tied to the loop item variable
                continue

            # a dictionary of variables to use if we have to create a new host below
            # we set the default port based on the default transport here, to make sure
            # we use the proper default for windows
            new_port = C.DEFAULT_REMOTE_PORT
            if C.DEFAULT_TRANSPORT == 'winrm':
                new_port = 5986

            new_delegated_host_vars = dict(
                ansible_delegated_host=delegated_host_name,
                ansible_host=delegated_host_name,  # not redundant as other sources can change ansible_host
                ansible_port=new_port,
                ansible_user=C.DEFAULT_REMOTE_USER,
                ansible_connection=C.DEFAULT_TRANSPORT,
            )

            # now try to find the delegated-to host in inventory, or failing that,
            # create a new host on the fly so we can fetch variables for it
            delegated_host = None
            if self._inventory is not None:
                delegated_host = self._inventory.get_host(delegated_host_name)
                # try looking it up based on the address field, and finally
                # fall back to creating a host on the fly to use for the var lookup
                if delegated_host is None:
                    if delegated_host_name in C.LOCALHOST:
                        delegated_host = self._inventory.localhost
                    else:
                        for h in self._inventory.get_hosts(ignore_limits=True, ignore_restrictions=True):
                            # check if the address matches, or if both the delegated_to host
                            # and the current host are in the list of localhost aliases
                            if h.address == delegated_host_name:
                                delegated_host = h
                                break
                        else:
                            delegated_host = Host(name=delegated_host_name)
                            delegated_host.vars = combine_vars(delegated_host.vars, new_delegated_host_vars)
            else:
                delegated_host = Host(name=delegated_host_name)
                delegated_host.vars = combine_vars(delegated_host.vars, new_delegated_host_vars)

            # now we go fetch the vars for the delegated-to host and save them in our
            # master dictionary of variables to be used later in the TaskExecutor/PlayContext
            delegated_host_vars[delegated_host_name] = self.get_vars(
                play=play,
                host=delegated_host,
                task=task,
                include_delegate_to=False,
                include_hostvars=False,
            )
        return delegated_host_vars
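
A minimal sketch of the fail_on_undefined=False behaviour the delegate_to templating relies on (same version assumption; the variable names are illustrative): a defined variable renders normally, while an undefined one is returned untouched instead of raising AnsibleUndefinedVariable, matching the assertion in Example #3 above.

from ansible.parsing.dataloader import DataLoader
from ansible.template import Templar

templar = Templar(loader=DataLoader())
templar.set_available_variables({'target': 'web01'})
print(templar.template("{{ target }}", fail_on_undefined=False))   # web01
print(templar.template("{{ missing }}", fail_on_undefined=False))  # {{ missing }}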
Example #8
    def _run_loop(self, items):
        '''
        Runs the task with the loop items specified and collates the result
        into an array named 'results' which is inserted into the final result
        along with the item for which the loop ran.
        '''

        results = []

        # make copies of the job vars and task so we can add the item to
        # the variables and re-validate the task with the item variable
        # task_vars = self._job_vars.copy()
        task_vars = self._job_vars

        loop_var = 'item'
        index_var = None
        label = None
        loop_pause = 0
        templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=self._job_vars)

        # FIXME: move this to the object itself to allow post_validate to take care of templating (loop_control.post_validate)
        if self._task.loop_control:
            loop_var = templar.template(self._task.loop_control.loop_var)
            index_var = templar.template(self._task.loop_control.index_var)
            loop_pause = templar.template(self._task.loop_control.pause)

            # This may be 'None', so it is templated below after we ensure a value and an item is assigned
            label = self._task.loop_control.label

        # ensure we always have a label
        if label is None:
            label = '{{' + loop_var + '}}'

        if loop_var in task_vars:
            display.warning(u"The loop variable '%s' is already in use. "
                            u"You should set the `loop_var` value in the `loop_control` option for the task"
                            u" to something else to avoid variable collisions and unexpected behavior." % loop_var)

        ran_once = False
        if self._task.loop_with:
            # Only squash with 'with_:' not with the 'loop:', 'magic' squashing can be removed once with_ loops are
            items = self._squash_items(items, loop_var, task_vars)

        no_log = False
        for item_index, item in enumerate(items):
            task_vars[loop_var] = item
            if index_var:
                task_vars[index_var] = item_index

            # Update template vars to reflect current loop iteration
            templar.set_available_variables(task_vars)

            # pause between loop iterations
            if loop_pause and ran_once:
                try:
                    time.sleep(float(loop_pause))
                except ValueError as e:
                    raise AnsibleError('Invalid pause value: %s, produced error: %s' % (loop_pause, to_native(e)))
            else:
                ran_once = True

            try:
                tmp_task = self._task.copy(exclude_parent=True, exclude_tasks=True)
                tmp_task._parent = self._task._parent
                tmp_play_context = self._play_context.copy()
            except AnsibleParserError as e:
                results.append(dict(failed=True, msg=to_text(e)))
                continue

            # now we swap the internal task and play context with their copies,
            # execute, and swap them back so we can do the next iteration cleanly
            (self._task, tmp_task) = (tmp_task, self._task)
            (self._play_context, tmp_play_context) = (tmp_play_context, self._play_context)
            res = self._execute(variables=task_vars)
            task_fields = self._task.dump_attrs()
            (self._task, tmp_task) = (tmp_task, self._task)
            (self._play_context, tmp_play_context) = (tmp_play_context, self._play_context)

            # update 'general no_log' based on specific no_log
            no_log = no_log or tmp_task.no_log

            # now update the result with the item info, and append the result
            # to the list of results
            res[loop_var] = item
            if index_var:
                res[index_var] = item_index
            res['_ansible_item_result'] = True
            res['_ansible_ignore_errors'] = task_fields.get('ignore_errors')

            # gets templated here unlike rest of loop_control fields, depends on loop_var above
            res['_ansible_item_label'] = templar.template(label, cache=False)

            self._rslt_q.put(
                TaskResult(
                    self._host.name,
                    self._task._uuid,
                    res,
                    task_fields=task_fields,
                ),
                block=False,
            )
            results.append(res)
            del task_vars[loop_var]

        self._task.no_log = no_log

        return results
Example #9
    def test_templar_simple(self):
        fake_loader = DictDataLoader({
            "/path/to/my_file.txt": "foo\n",
        })
        shared_loader = SharedPluginLoaderObj()
        variables = dict(
            foo="bar",
            bam="{{foo}}",
            num=1,
            var_true=True,
            var_false=False,
            var_dict=dict(a="b"),
            bad_dict="{a='b'",
            var_list=[1],
            recursive="{{recursive}}",
        )
        templar = Templar(loader=fake_loader, variables=variables)

        # test some basic templating
        self.assertEqual(templar.template("{{foo}}"), "bar")
        self.assertEqual(templar.template("{{foo}}\n"), "bar\n")
        self.assertEqual(
            templar.template("{{foo}}\n", preserve_trailing_newlines=True),
            "bar\n")
        self.assertEqual(
            templar.template("{{foo}}\n", preserve_trailing_newlines=False),
            "bar")
        self.assertEqual(templar.template("foo", convert_bare=True), "bar")
        self.assertEqual(templar.template("{{bam}}"), "bar")
        self.assertEqual(templar.template("{{num}}"), 1)
        self.assertEqual(templar.template("{{var_true}}"), True)
        self.assertEqual(templar.template("{{var_false}}"), False)
        self.assertEqual(templar.template("{{var_dict}}"), dict(a="b"))
        self.assertEqual(templar.template("{{bad_dict}}"), "{a='b'")
        self.assertEqual(templar.template("{{var_list}}"), [1])
        self.assertEqual(templar.template(1, convert_bare=True), 1)
        #FIXME: lookup ignores fake file and returns error
        #self.assertEqual(templar.template("{{lookup('file', '/path/to/my_file.txt')}}"), "foo")

        # force errors
        self.assertRaises(AnsibleUndefinedVariable, templar.template,
                          "{{bad_var}}")
        self.assertRaises(AnsibleUndefinedVariable, templar.template,
                          "{{lookup('file', bad_var)}}")
        self.assertRaises(AnsibleError, templar.template,
                          "{{lookup('bad_lookup')}}")
        self.assertRaises(AnsibleError, templar.template, "{{recursive}}")
        self.assertRaises(AnsibleUndefinedVariable, templar.template,
                          "{{foo-bar}}")

        # test with fail_on_undefined=False
        self.assertEqual(
            templar.template("{{bad_var}}", fail_on_undefined=False),
            "{{bad_var}}")

        # test set_available_variables()
        templar.set_available_variables(variables=dict(foo="bam"))
        self.assertEqual(templar.template("{{foo}}"), "bam")
        # variables must be a dict() for set_available_variables()
        self.assertRaises(AssertionError, templar.set_available_variables,
                          "foo=bam")
Example #10
class BaseInventoryPlugin(object):
    """ Parses an Inventory Source"""

    TYPE = 'generator'

    def __init__(self):

        self.inventory = None
        self.display = display
        self._cache = {}

    def parse(self, inventory, loader, path, cache=True):
        ''' Populates self.groups from the given data. Raises an error on any parse failure.  '''

        self.loader = loader
        self.inventory = inventory
        self.templar = Templar(loader=loader)

    def verify_file(self, path):
        ''' Verify if file is usable by this plugin, base does minimal accessibility check '''

        b_path = to_bytes(path, errors='surrogate_or_strict')
        return (os.path.exists(b_path) and os.access(b_path, os.R_OK))

    def get_cache_prefix(self, path):
        ''' create predictable unique prefix for plugin/inventory '''

        m = hashlib.sha1()
        m.update(to_bytes(self.NAME, errors='surrogate_or_strict'))
        d1 = m.hexdigest()

        n = hashlib.sha1()
        n.update(to_bytes(path, errors='surrogate_or_strict'))
        d2 = n.hexdigest()

        return 's_'.join([d1[:5], d2[:5]])

    def clear_cache(self):
        pass

    def populate_host_vars(self, hosts, variables, group=None, port=None):
        if not isinstance(variables, MutableMapping):
            raise AnsibleParserError(
                "Invalid data from file, expected dictionary and got:\n\n%s" %
                to_native(variables))

        for host in hosts:
            self.inventory.add_host(host, group=group, port=port)
            for k in variables:
                self.inventory.set_variable(host, k, variables[k])

    def _compose(self, template, variables):
        ''' helper method for plugins to compose variables for Ansible based on jinja2 expression and inventory vars'''
        t = self.templar
        t.set_available_variables(variables)
        return t.do_template('%s%s%s' %
                             (t.environment.variable_start_string, template,
                              t.environment.variable_end_string),
                             disable_lookups=True)

    def _set_composite_vars(self, compose, variables, host, strict=False):
        ''' loops over compose entries to create vars for hosts '''
        if compose and isinstance(compose, dict):
            for varname in compose:
                try:
                    composite = self._compose(compose[varname], variables)
                except Exception as e:
                    if strict:
                        raise AnsibleOptionsError("Could set %s: %s" %
                                                  (varname, to_native(e)))
                    continue
                self.inventory.set_variable(host, varname, composite)

    def _add_host_to_composed_groups(self,
                                     groups,
                                     variables,
                                     host,
                                     strict=False):
        ''' helper to create complex groups for plugins based on jinja2 conditionals, hosts that meet the conditional are added to group'''
        # process each 'group entry'
        if groups and isinstance(groups, dict):
            self.templar.set_available_variables(variables)
            for group_name in groups:
                conditional = "{%% if %s %%} True {%% else %%} False {%% endif %%}" % groups[
                    group_name]
                try:
                    result = boolean(self.templar.template(conditional))
                except Exception as e:
                    if strict:
                        raise AnsibleOptionsError(
                            "Could not add to group %s: %s" %
                            (group_name, to_native(e)))
                    continue
                if result:
                    # ensure group exists
                    self.inventory.add_group(group_name)
                    # add host to group
                    self.inventory.add_child(group_name, host)

    def _add_host_to_keyed_groups(self, keys, variables, host, strict=False):
        ''' helper to create groups for plugins based on variable values and add the corresponding hosts to it'''
        if keys and isinstance(keys, list):
            for keyed in keys:
                if keyed and isinstance(keyed, dict):
                    prefix = keyed.get('prefix', '')
                    key = keyed.get('key')
                    if key is not None:
                        try:
                            groups = to_safe_group_name(
                                '%s_%s' %
                                (prefix, self._compose(key, variables)))
                        except Exception as e:
                            if strict:
                                raise AnsibleOptionsError(
                                    "Could not generate group on %s: %s" %
                                    (key, to_native(e)))
                            continue
                        if isinstance(groups, string_types):
                            groups = [groups]
                        if isinstance(groups, list):
                            for group_name in groups:
                                if group_name not in self.inventory.groups:
                                    self.inventory.add_group(group_name)
                                self.inventory.add_child(group_name, host)
                        else:
                            raise AnsibleOptionsError(
                                "Invalid group name format, expected string or list of strings, got: %s"
                                % type(groups))
                    else:
                        raise AnsibleOptionsError(
                            "No key supplied, invalid entry")
                else:
                    raise AnsibleOptionsError(
                        "Invalid keyed group entry, it must be a dictionary: %s "
                        % keyed)
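
A minimal sketch of the _compose() idea from the class above (same version assumption; templar.environment's delimiters and template() are the assumed API, the expression is illustrative): wrap a bare expression in the templar's variable delimiters and render it against the supplied inventory variables. The plugin itself calls the lower-level do_template() with disable_lookups=True, which this sketch does not reproduce.

from ansible.parsing.dataloader import DataLoader
from ansible.template import Templar

templar = Templar(loader=DataLoader())
templar.set_available_variables({'ansible_host': 'db01', 'ansible_port': 22})
expression = "ansible_host ~ ':' ~ ansible_port"
wrapped = '%s%s%s' % (templar.environment.variable_start_string,
                      expression,
                      templar.environment.variable_end_string)
print(templar.template(wrapped))  # db01:22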
Example #11
    def _get_delegated_vars(self, loader, play, task, existing_variables):
        # we unfortunately need to template the delegate_to field here,
        # as we're fetching vars before post_validate has been called on
        # the task that has been passed in
        vars_copy = existing_variables.copy()
        templar = Templar(loader=loader, variables=vars_copy)

        items = []
        if task.loop is not None:
            if task.loop in lookup_loader:
                #TODO: remove convert_bare true and deprecate this in with_
                try:
                    loop_terms = listify_lookup_plugin_terms(terms=task.loop_args, templar=templar, loader=loader, fail_on_undefined=True, convert_bare=True)
                except AnsibleUndefinedVariable as e:
                    if 'has no attribute' in str(e):
                        loop_terms = []
                        display.deprecated("Skipping task due to undefined attribute, in the future this will be a fatal error.")
                    else:
                        raise
                items = lookup_loader.get(task.loop, loader=loader, templar=templar).run(terms=loop_terms, variables=vars_copy)
            else:
                raise AnsibleError("Unexpected failure in finding the lookup named '%s' in the available lookup plugins" % task.loop)
        else:
            items = [None]

        delegated_host_vars = dict()
        for item in items:
            # update the variables with the item value for templating, in case we need it
            if item is not None:
                vars_copy['item'] = item

            templar.set_available_variables(vars_copy)
            delegated_host_name = templar.template(task.delegate_to, fail_on_undefined=False)
            if delegated_host_name in delegated_host_vars:
                # no need to repeat ourselves, as the delegate_to value
                # does not appear to be tied to the loop item variable
                continue

            # a dictionary of variables to use if we have to create a new host below
            # we set the default port based on the default transport here, to make sure
            # we use the proper default for windows
            new_port = C.DEFAULT_REMOTE_PORT
            if C.DEFAULT_TRANSPORT == 'winrm':
                new_port = 5986

            new_delegated_host_vars = dict(
                ansible_host=delegated_host_name,
                ansible_port=new_port,
                ansible_user=C.DEFAULT_REMOTE_USER,
                ansible_connection=C.DEFAULT_TRANSPORT,
            )

            # now try to find the delegated-to host in inventory, or failing that,
            # create a new host on the fly so we can fetch variables for it
            delegated_host = None
            if self._inventory is not None:
                delegated_host = self._inventory.get_host(delegated_host_name)
                # try looking it up based on the address field, and finally
                # fall back to creating a host on the fly to use for the var lookup
                if delegated_host is None:
                    for h in self._inventory.get_hosts(ignore_limits_and_restrictions=True):
                        # check if the address matches, or if both the delegated_to host
                        # and the current host are in the list of localhost aliases
                        if h.address == delegated_host_name or h.name in C.LOCALHOST and delegated_host_name in C.LOCALHOST:
                            delegated_host = h
                            break
                    else:
                        delegated_host = Host(name=delegated_host_name)
                        delegated_host.vars.update(new_delegated_host_vars)
            else:
                delegated_host = Host(name=delegated_host_name)
                delegated_host.vars.update(new_delegated_host_vars)

            # now we go fetch the vars for the delegated-to host and save them in our
            # master dictionary of variables to be used later in the TaskExecutor/PlayContext
            delegated_host_vars[delegated_host_name] = self.get_vars(
                loader=loader,
                play=play,
                host=delegated_host,
                task=task,
                include_delegate_to=False,
                include_hostvars=False,
            )

        return delegated_host_vars
Example #12
class BaseInventoryPlugin(object):
    """ Parses an Inventory Source"""

    TYPE = 'generator'

    def __init__(self, cache=None):

        self.inventory = None
        self.display = display
        self.cache = cache

    def parse(self, inventory, loader, path, cache=True):
        ''' Populates self.groups from the given data. Raises an error on any parse failure.  '''

        self.loader = loader
        self.inventory = inventory
        self.templar = Templar(loader=loader)

    def verify_file(self, path):
        ''' Verify if file is usable by this plugin, base does minimal accessibility check '''

        b_path = to_bytes(path)
        return (os.path.exists(b_path) and os.access(b_path, os.R_OK))

    def get_cache_prefix(self, path):
        ''' create predictable unique prefix for plugin/inventory '''

        m = hashlib.sha1()
        m.update(to_bytes(self.NAME))
        d1 = m.hexdigest()

        n = hashlib.sha1()
        n.update(to_bytes(path))
        d2 = n.hexdigest()

        return 's_'.join([d1[:5], d2[:5]])

    def clear_cache(self):
        pass

    def populate_host_vars(self, hosts, variables, group=None, port=None):
        for host in hosts:
            self.inventory.add_host(host, group=group, port=port)
            for k in variables:
                self.inventory.set_variable(host, k, variables[k])

    def _compose(self, template, variables):
        ''' helper method for plugins to compose variables for Ansible based on jinja2 expression and inventory vars'''
        t = self.templar
        t.set_available_variables(variables)
        return t.do_template('%s%s%s' %
                             (t.environment.variable_start_string, template,
                              t.environment.variable_end_string),
                             disable_lookups=True)

    def _set_composite_vars(self, compose, variables, host):
        ''' loops over compose entries to create vars for hosts '''
        if compose and isinstance(compose, dict):
            for varname in compose:
                composite = self._compose(compose[varname], variables)
                self.inventory.set_variable(host, varname, composite)

    def _add_host_to_composed_groups(self, groups, variables, host):
        ''' helper to create complex groups for plugins based on jinja2 conditionals, hosts that meet the conditional are added to group'''
        # process each 'group entry'
        if groups and isinstance(groups, dict):
            self.templar.set_available_variables(variables)
            for group_name in groups:
                conditional = "{%% if %s %%} True {%% else %%} False {%% endif %%}" % groups[
                    group_name]
                result = self.templar.template(conditional)
                if result and bool(result):
                    # ensure group exists
                    self.inventory.add_group(group_name)
                    # add host to group
                    self.inventory.add_child(group_name, host)
Example #13
    def _get_delegated_vars(self, loader, play, task, existing_variables):
        # we unfortunately need to template the delegate_to field here,
        # as we're fetching vars before post_validate has been called on
        # the task that has been passed in
        vars_copy = existing_variables.copy()
        templar = Templar(loader=loader, variables=vars_copy)

        items = []
        if task.loop is not None:
            if task.loop in lookup_loader:
                #TODO: remove convert_bare true and deprecate this in with_
                try:
                    loop_terms = listify_lookup_plugin_terms(
                        terms=task.loop_args,
                        templar=templar,
                        loader=loader,
                        fail_on_undefined=True,
                        convert_bare=True)
                except AnsibleUndefinedVariable as e:
                    if 'has no attribute' in str(e):
                        loop_terms = []
                        self._display.deprecated(
                            "Skipping task due to undefined attribute, in the future this will be a fatal error."
                        )
                    else:
                        raise
                items = lookup_loader.get(task.loop,
                                          loader=loader,
                                          templar=templar).run(
                                              terms=loop_terms,
                                              variables=vars_copy)
            else:
                raise AnsibleError(
                    "Unexpected failure in finding the lookup named '%s' in the available lookup plugins"
                    % task.loop)
        else:
            items = [None]

        delegated_host_vars = dict()
        for item in items:
            # update the variables with the item value for templating, in case we need it
            if item is not None:
                vars_copy['item'] = item

            templar.set_available_variables(vars_copy)
            delegated_host_name = templar.template(task.delegate_to,
                                                   fail_on_undefined=False)
            if delegated_host_name in delegated_host_vars:
                # no need to repeat ourselves, as the delegate_to value
                # does not appear to be tied to the loop item variable
                continue

            # a dictionary of variables to use if we have to create a new host below
            new_delegated_host_vars = dict(
                ansible_host=delegated_host_name,
                ansible_user=C.DEFAULT_REMOTE_USER,
                ansible_connection=C.DEFAULT_TRANSPORT,
            )

            # now try to find the delegated-to host in inventory, or failing that,
            # create a new host on the fly so we can fetch variables for it
            delegated_host = None
            if self._inventory is not None:
                delegated_host = self._inventory.get_host(delegated_host_name)
                # try looking it up based on the address field, and finally
                # fall back to creating a host on the fly to use for the var lookup
                if delegated_host is None:
                    for h in self._inventory.get_hosts(
                            ignore_limits_and_restrictions=True):
                        # check if the address matches, or if both the delegated_to host
                        # and the current host are in the list of localhost aliases
                        if h.address == delegated_host_name or h.name in C.LOCALHOST and delegated_host_name in C.LOCALHOST:
                            delegated_host = h
                            break
                    else:
                        delegated_host = Host(name=delegated_host_name)
                        delegated_host.vars.update(new_delegated_host_vars)
            else:
                delegated_host = Host(name=delegated_host_name)
                delegated_host.vars.update(new_delegated_host_vars)

            # now we go fetch the vars for the delegated-to host and save them in our
            # master dictionary of variables to be used later in the TaskExecutor/PlayContext
            delegated_host_vars[delegated_host_name] = self.get_vars(
                loader=loader,
                play=play,
                host=delegated_host,
                task=task,
                include_delegate_to=False,
                include_hostvars=False,
            )

        return delegated_host_vars
Example #14
    def _process_pending_results(self,
                                 iterator,
                                 one_pass=False,
                                 max_passes=None):
        '''
        Reads results off the final queue and takes appropriate action
        based on the result (executing callbacks, updating state, etc.).
        '''

        ret_results = []

        def get_original_host(host_name):
            host_name = to_text(host_name)
            if host_name in self._inventory._hosts_cache:
                return self._inventory._hosts_cache[host_name]
            else:
                return self._inventory.get_host(host_name)

        def search_handler_blocks_by_name(handler_name, handler_blocks):
            for handler_block in handler_blocks:
                for handler_task in handler_block.block:
                    if handler_task.name:
                        handler_vars = self._variable_manager.get_vars(
                            loader=self._loader,
                            play=iterator._play,
                            task=handler_task)
                        templar = Templar(loader=self._loader,
                                          variables=handler_vars)
                        try:
                            # first we check with the full result of get_name(), which may
                            # include the role name (if the handler is from a role). If that
                            # is not found, we resort to the simple name field, which doesn't
                            # have anything extra added to it.
                            target_handler_name = templar.template(
                                handler_task.name)
                            if target_handler_name == handler_name:
                                return handler_task
                            else:
                                target_handler_name = templar.template(
                                    handler_task.get_name())
                                if target_handler_name == handler_name:
                                    return handler_task
                        except (UndefinedError, AnsibleUndefinedVariable):
                            # We skip this handler due to the fact that it may be using
                            # a variable in the name that was conditionally included via
                            # set_fact or some other method, and we don't want to error
                            # out unnecessarily
                            continue
            return None

        def search_handler_blocks_by_uuid(handler_uuid, handler_blocks):
            for handler_block in handler_blocks:
                for handler_task in handler_block.block:
                    if handler_uuid == handler_task._uuid:
                        return handler_task
            return None

        def parent_handler_match(target_handler, handler_name):
            if target_handler:
                if isinstance(target_handler, (TaskInclude, IncludeRole)):
                    try:
                        handler_vars = self._variable_manager.get_vars(
                            loader=self._loader,
                            play=iterator._play,
                            task=target_handler)
                        templar = Templar(loader=self._loader,
                                          variables=handler_vars)
                        target_handler_name = templar.template(
                            target_handler.name)
                        if target_handler_name == handler_name:
                            return True
                        else:
                            target_handler_name = templar.template(
                                target_handler.get_name())
                            if target_handler_name == handler_name:
                                return True
                    except (UndefinedError, AnsibleUndefinedVariable):
                        pass
                return parent_handler_match(target_handler._parent,
                                            handler_name)
            else:
                return False

        # a Templar class to use for templating things later, as we're using
        # original/non-validated objects here on the manager side. We set the
        # variables in use later inside the loop below
        templar = Templar(loader=self._loader)

        cur_pass = 0
        while True:
            try:
                self._results_lock.acquire()
                task_result = self._results.pop()
            except IndexError:
                break
            finally:
                self._results_lock.release()

            # get the original host and task.  We then assign them to the TaskResult for use in callbacks/etc.
            original_host = get_original_host(task_result._host)
            original_task = iterator.get_original_task(original_host,
                                                       task_result._task)

            task_result._host = original_host
            task_result._task = original_task

            # get the correct loop var for use later
            if original_task.loop_control:
                loop_var = original_task.loop_control.loop_var or 'item'
            else:
                loop_var = 'item'

            # get the vars for this task/host pair, make them the active set of vars for our templar above
            task_vars = self._variable_manager.get_vars(loader=self._loader,
                                                        play=iterator._play,
                                                        host=original_host,
                                                        task=original_task)
            self.add_tqm_variables(task_vars, play=iterator._play)
            templar.set_available_variables(task_vars)

            # send callbacks for 'non final' results
            if '_ansible_retry' in task_result._result:
                self._tqm.send_callback('v2_runner_retry', task_result)
                continue
            elif '_ansible_item_result' in task_result._result:
                if task_result.is_failed() or task_result.is_unreachable():
                    self._tqm.send_callback('v2_runner_item_on_failed',
                                            task_result)
                elif task_result.is_skipped():
                    self._tqm.send_callback('v2_runner_item_on_skipped',
                                            task_result)
                else:
                    if 'diff' in task_result._result:
                        if self._diff:
                            self._tqm.send_callback('v2_on_file_diff',
                                                    task_result)
                    self._tqm.send_callback('v2_runner_item_on_ok',
                                            task_result)
                continue

            run_once = templar.template(original_task.run_once)
            if original_task.register:
                if run_once:
                    host_list = [
                        host for host in self._inventory.get_hosts(
                            iterator._play.hosts)
                        if host.name not in self._tqm._unreachable_hosts
                    ]
                else:
                    host_list = [original_host]

                clean_copy = strip_internal_keys(task_result._result)
                if 'invocation' in clean_copy:
                    del clean_copy['invocation']

                for target_host in host_list:
                    self._variable_manager.set_nonpersistent_facts(
                        target_host, {original_task.register: clean_copy})

            # all host status messages contain 2 entries: (msg, task_result)
            role_ran = False
            if task_result.is_failed():
                role_ran = True
                ignore_errors = templar.template(original_task.ignore_errors)
                if not ignore_errors:
                    display.debug("marking %s as failed" % original_host.name)
                    if run_once:
                        # if we're using run_once, we have to fail every host here
                        for h in self._inventory.get_hosts(
                                iterator._play.hosts):
                            if h.name not in self._tqm._unreachable_hosts:
                                state, _ = iterator.get_next_task_for_host(
                                    h, peek=True)
                                iterator.mark_host_failed(h)
                                state, new_task = iterator.get_next_task_for_host(
                                    h, peek=True)
                    else:
                        iterator.mark_host_failed(original_host)

                    # increment the failed count for this host
                    self._tqm._stats.increment('failures', original_host.name)

                    # grab the current state and if we're iterating on the rescue portion
                    # of a block then we save the failed task in a special var for use
                    # within the rescue/always
                    state, _ = iterator.get_next_task_for_host(original_host,
                                                               peek=True)

                    if iterator.is_failed(
                            original_host
                    ) and state and state.run_state == iterator.ITERATING_COMPLETE:
                        self._tqm._failed_hosts[original_host.name] = True

                    if state and state.run_state == iterator.ITERATING_RESCUE:
                        self._variable_manager.set_nonpersistent_facts(
                            original_host,
                            dict(
                                ansible_failed_task=original_task.serialize(),
                                ansible_failed_result=task_result._result,
                            ),
                        )
                else:
                    self._tqm._stats.increment('ok', original_host.name)
                    if 'changed' in task_result._result and task_result._result[
                            'changed']:
                        self._tqm._stats.increment('changed',
                                                   original_host.name)
                self._tqm.send_callback('v2_runner_on_failed',
                                        task_result,
                                        ignore_errors=ignore_errors)
            elif task_result.is_unreachable():
                self._tqm._unreachable_hosts[original_host.name] = True
                iterator._play._removed_hosts.append(original_host.name)
                self._tqm._stats.increment('dark', original_host.name)
                self._tqm.send_callback('v2_runner_on_unreachable',
                                        task_result)
            elif task_result.is_skipped():
                self._tqm._stats.increment('skipped', original_host.name)
                self._tqm.send_callback('v2_runner_on_skipped', task_result)
            else:
                role_ran = True

                if original_task.loop:
                    # this task had a loop, and has more than one result, so
                    # loop over all of them instead of a single result
                    result_items = task_result._result.get('results', [])
                else:
                    result_items = [task_result._result]

                for result_item in result_items:
                    if '_ansible_notify' in result_item:
                        print("GOT A NOTIFY")
                        if task_result.is_changed():
                            # The shared dictionary for notified handlers is a proxy, which
                            # does not detect when sub-objects within the proxy are modified.
                            # So, per the docs, we reassign the list so the proxy picks up and
                            # notifies all other threads
                            for handler_name in result_item['_ansible_notify']:
                                print(
                                    "TRYING TO SEND NOTIFICATION TO HANDLER: %s"
                                    % handler_name)
                                found = False
                                # Find the handler using the above helper.  First we look up the
                                # dependency chain of the current task (if it's from a role), otherwise
                                # we just look through the list of handlers in the current play/all
                                # roles and use the first one that matches the notify name
                                target_handler = search_handler_blocks_by_name(
                                    handler_name, iterator._play.handlers)
                                if target_handler is not None:
                                    found = True
                                    if original_host._uuid not in self._notified_handlers[
                                            target_handler._uuid]:
                                        self._notified_handlers[
                                            target_handler._uuid].append(
                                                original_host)
                                        # FIXME: should this be a callback?
                                        display.vv("NOTIFIED HANDLER %s" %
                                                   (handler_name, ))
                                else:
                                    # As there may be more than one handler with the notified name as the
                                    # parent, so we just keep track of whether or not we found one at all
                                    for target_handler_uuid in self._notified_handlers:
                                        target_handler = search_handler_blocks_by_uuid(
                                            target_handler_uuid,
                                            iterator._play.handlers)
                                        if target_handler and parent_handler_match(
                                                target_handler, handler_name):
                                            self._notified_handlers[
                                                target_handler._uuid].append(
                                                    original_host)
                                            display.vv(
                                                "NOTIFIED HANDLER %s" %
                                                (target_handler.get_name(), ))
                                            found = True

                                if handler_name in self._listening_handlers:
                                    for listening_handler_uuid in self._listening_handlers[
                                            handler_name]:
                                        listening_handler = search_handler_blocks_by_uuid(
                                            listening_handler_uuid,
                                            iterator._play.handlers)
                                        if listening_handler is not None:
                                            found = True
                                        else:
                                            continue
                                        if original_host not in self._notified_handlers[
                                                listening_handler._uuid]:
                                            self._notified_handlers[
                                                listening_handler.
                                                _uuid].append(original_host)
                                            display.vv(
                                                "NOTIFIED HANDLER %s" %
                                                (listening_handler.get_name(),
                                                 ))

                                # and if none were found, then we raise an error
                                if not found:
                                    msg = "The requested handler '%s' was not found in either the main handlers list nor in the listening handlers list" % handler_name
                                    if C.ERROR_ON_MISSING_HANDLER:
                                        raise AnsibleError(msg)
                                    else:
                                        display.warning(msg)

                    if 'add_host' in result_item:
                        # this task added a new host (add_host module)
                        new_host_info = result_item.get('add_host', dict())
                        self._add_host(new_host_info, iterator)

                    elif 'add_group' in result_item:
                        # this task added a new group (group_by module)
                        self._add_group(original_host, result_item)

                    elif 'ansible_facts' in result_item:

                        # if delegated fact and we are delegating facts, we need to change target host for them
                        if original_task.delegate_to is not None and original_task.delegate_facts:
                            item = result_item.get(loop_var, None)
                            if item is not None:
                                task_vars[loop_var] = item
                            host_name = templar.template(
                                original_task.delegate_to)
                            actual_host = self._inventory.get_host(host_name)
                            if actual_host is None:
                                actual_host = Host(name=host_name)
                        else:
                            actual_host = original_host

                        if original_task.action == 'include_vars':
                            for (var_name, var_value) in iteritems(
                                    result_item['ansible_facts']):
                                # find the host we're actually referring to here, which may
                                # be a host that is not really in inventory at all

                                if run_once:
                                    host_list = [
                                        host
                                        for host in self._inventory.get_hosts(
                                            iterator._play.hosts) if host.name
                                        not in self._tqm._unreachable_hosts
                                    ]
                                else:
                                    host_list = [actual_host]

                                for target_host in host_list:
                                    self._variable_manager.set_host_variable(
                                        target_host, var_name, var_value)
                        else:
                            if run_once:
                                host_list = [
                                    host for host in self._inventory.get_hosts(
                                        iterator._play.hosts) if host.name
                                    not in self._tqm._unreachable_hosts
                                ]
                            else:
                                host_list = [actual_host]

                            for target_host in host_list:
                                if original_task.action == 'set_fact':
                                    self._variable_manager.set_nonpersistent_facts(
                                        target_host,
                                        result_item['ansible_facts'].copy())
                                else:
                                    self._variable_manager.set_host_facts(
                                        target_host,
                                        result_item['ansible_facts'].copy())

                if 'diff' in task_result._result:
                    if self._diff:
                        self._tqm.send_callback('v2_on_file_diff', task_result)

                if original_task.action not in ['include', 'include_role']:
                    self._tqm._stats.increment('ok', original_host.name)
                    if 'changed' in task_result._result and task_result._result[
                            'changed']:
                        self._tqm._stats.increment('changed',
                                                   original_host.name)

                # finally, send the ok for this task
                self._tqm.send_callback('v2_runner_on_ok', task_result)

            self._pending_results -= 1
            if original_host.name in self._blocked_hosts:
                del self._blocked_hosts[original_host.name]

            # If this is a role task, mark the parent role as being run (if
            # the task was ok or failed, but not skipped or unreachable)
            if original_task._role is not None and role_ran:  #TODO:  and original_task.action != 'include_role':?
                # lookup the role in the ROLE_CACHE to make sure we're dealing
                # with the correct object and mark it as executed
                for (entry, role_obj) in iteritems(iterator._play.ROLE_CACHE[
                        original_task._role._role_name]):
                    if role_obj._uuid == original_task._role._uuid:
                        role_obj._had_task_run[original_host.name] = True

            ret_results.append(task_result)

            if one_pass or max_passes is not None and (cur_pass + 1) >= max_passes:
                break

            cur_pass += 1

        return ret_results
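
The block above keys all of its bookkeeping off handler UUIDs: self._notified_handlers maps each handler's _uuid to the list of hosts that notified it, a found flag records whether any handler (direct, parent, or listening) matched, and an unknown handler name becomes either a warning or a hard error depending on C.ERROR_ON_MISSING_HANDLER. A minimal, self-contained sketch of that bookkeeping, with illustrative names rather than the real strategy-plugin API:

ERROR_ON_MISSING_HANDLER = True

class HandlerTracker(object):
    def __init__(self, handlers_by_uuid):
        # {uuid: handler_name}
        self._handlers = handlers_by_uuid
        # {uuid: [notifying_host, ...]}
        self._notified = {uuid: [] for uuid in handlers_by_uuid}

    def notify(self, handler_name, host):
        found = False
        for uuid, name in self._handlers.items():
            if name == handler_name:
                self._notified[uuid].append(host)
                found = True
        if not found:
            msg = "The requested handler '%s' was not found" % handler_name
            if ERROR_ON_MISSING_HANDLER:
                raise RuntimeError(msg)
            print("WARNING: %s" % msg)

tracker = HandlerTracker({'uuid-1': 'restart nginx'})
tracker.notify('restart nginx', 'web01')
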
Example #15
    def get_vars(self,
                 loader,
                 play=None,
                 host=None,
                 task=None,
                 include_hostvars=True,
                 include_delegate_to=True,
                 use_cache=True):
        '''
        Returns the variables, with optional "context" given via the parameters
        for the play, host, and task (which could possibly result in different
        sets of variables being returned due to the additional context).

        The order of precedence is:
        - play->roles->get_default_vars (if there is a play context)
        - group_vars_files[host] (if there is a host context)
        - host_vars_files[host] (if there is a host context)
        - host->get_vars (if there is a host context)
        - fact_cache[host] (if there is a host context)
        - play vars (if there is a play context)
        - play vars_files (if there's no host context, ignore
          file names that cannot be templated)
        - task->get_vars (if there is a task context)
        - vars_cache[host] (if there is a host context)
        - extra vars
        '''

        debug("in VariableManager get_vars()")
        cache_entry = self._get_cache_entry(play=play, host=host, task=task)
        if cache_entry in VARIABLE_CACHE and use_cache:
            debug("vars are cached, returning them now")
            return VARIABLE_CACHE[cache_entry]

        all_vars = defaultdict(dict)

        if play:
            # first we compile any vars specified in defaults/main.yml
            # for all roles within the specified play
            for role in play.get_roles():
                all_vars = combine_vars(all_vars, role.get_default_vars())

            # if we have a task in this context, and that task has a role, make
            # sure it sees its defaults above any other roles, as we previously
            # (v1) made sure each task had a copy of its role's default vars
            if task and task._role is not None:
                all_vars = combine_vars(all_vars,
                                        task._role.get_default_vars())

        if host:
            # next, if a host is specified, we load any vars from group_vars
            # files and then any vars from host_vars files which may apply to
            # this host or the groups it belongs to

            # we merge in vars from groups specified in the inventory (INI or script)
            all_vars = combine_vars(all_vars, host.get_group_vars())

            # then we merge in the special 'all' group_vars first, if they exist
            if 'all' in self._group_vars_files:
                data = preprocess_vars(self._group_vars_files['all'])
                for item in data:
                    all_vars = combine_vars(all_vars, item)

            for group in host.get_groups():
                if group.name in self._group_vars_files and group.name != 'all':
                    for data in self._group_vars_files[group.name]:
                        data = preprocess_vars(data)
                        for item in data:
                            all_vars = combine_vars(all_vars, item)

            # then we merge in vars from the host specified in the inventory (INI or script)
            all_vars = combine_vars(all_vars, host.get_vars())

            # then we merge in the host_vars/<hostname> file, if it exists
            host_name = host.get_name()
            if host_name in self._host_vars_files:
                for data in self._host_vars_files[host_name]:
                    data = preprocess_vars(data)
                    for item in data:
                        all_vars = combine_vars(all_vars, item)

            # finally, the facts caches for this host, if it exists
            try:
                host_facts = self._fact_cache.get(host.name, dict())
                for k in host_facts.keys():
                    if host_facts[k] is not None and not isinstance(
                            host_facts[k], UnsafeProxy):
                        host_facts[k] = UnsafeProxy(host_facts[k])
                all_vars = combine_vars(all_vars, host_facts)
            except KeyError:
                pass

        if play:
            all_vars = combine_vars(all_vars, play.get_vars())

            for vars_file_item in play.get_vars_files():
                # create a set of temporary vars here, which incorporate the
                # extra vars so we can properly template the vars_files entries
                temp_vars = combine_vars(all_vars, self._extra_vars)
                templar = Templar(loader=loader, variables=temp_vars)

                # we assume each item in the list is itself a list, as we
                # support "conditional includes" for vars_files, which mimics
                # the with_first_found mechanism.
                #vars_file_list = templar.template(vars_file_item)
                vars_file_list = vars_file_item
                if not isinstance(vars_file_list, list):
                    vars_file_list = [vars_file_list]

                # now we iterate through the (potential) files, and break out
                # as soon as we read one from the list. If none are found, we
                # raise an error, which is silently ignored at this point.
                try:
                    for vars_file in vars_file_list:
                        vars_file = templar.template(vars_file)
                        try:
                            data = preprocess_vars(
                                loader.load_from_file(vars_file))
                            if data is not None:
                                for item in data:
                                    all_vars = combine_vars(all_vars, item)
                            break
                        except AnsibleFileNotFound as e:
                            # we continue on loader failures
                            continue
                        except AnsibleParserError as e:
                            raise
                    else:
                        raise AnsibleFileNotFound(
                            "vars file %s was not found" % vars_file_item)
                except (UndefinedError, AnsibleUndefinedVariable):
                    if host is not None and self._fact_cache.get(
                            host.name,
                            dict()).get('module_setup') and task is not None:
                        raise AnsibleUndefinedVariable(
                            "an undefined variable was found when attempting to template the vars_files item '%s'"
                            % vars_file_item,
                            obj=vars_file_item)
                    else:
                        # we do not have a full context here, and the missing variable could be
                        # because of that, so just show a warning and continue
                        display.vvv(
                            "skipping vars_file '%s' due to an undefined variable"
                            % vars_file_item)
                        continue

            if not C.DEFAULT_PRIVATE_ROLE_VARS:
                for role in play.get_roles():
                    all_vars = combine_vars(
                        all_vars, role.get_vars(include_params=False))

        if task:
            if task._role:
                all_vars = combine_vars(all_vars, task._role.get_vars())
            all_vars = combine_vars(all_vars, task.get_vars())

        if host:
            all_vars = combine_vars(
                all_vars, self._vars_cache.get(host.get_name(), dict()))
            all_vars = combine_vars(
                all_vars,
                self._nonpersistent_fact_cache.get(host.name, dict()))

        all_vars = combine_vars(all_vars, self._extra_vars)

        # FIXME: make sure all special vars are here
        # Finally, we create special vars

        all_vars['playbook_dir'] = loader.get_basedir()

        if host:
            all_vars['group_names'] = [
                group.name for group in host.get_groups()
            ]

            if self._inventory is not None:
                all_vars['groups'] = dict()
                for (group_name, group) in iteritems(self._inventory.groups):
                    all_vars['groups'][group_name] = [
                        h.name for h in group.get_hosts()
                    ]

                if include_hostvars:
                    hostvars_cache_entry = self._get_cache_entry(play=play)
                    if hostvars_cache_entry in HOSTVARS_CACHE:
                        hostvars = HOSTVARS_CACHE[hostvars_cache_entry]
                    else:
                        hostvars = HostVars(play=play,
                                            inventory=self._inventory,
                                            loader=loader,
                                            variable_manager=self)
                        HOSTVARS_CACHE[hostvars_cache_entry] = hostvars
                    all_vars['hostvars'] = hostvars

        if task:
            if task._role:
                all_vars['role_path'] = task._role._role_path

            # if we have a task and we're delegating to another host, figure out the
            # variables for that host now so we don't have to rely on hostvars later
            if task.delegate_to is not None and include_delegate_to:
                # we unfortunately need to template the delegate_to field here,
                # as we're fetching vars before post_validate has been called on
                # the task that has been passed in
                templar = Templar(loader=loader, variables=all_vars)

                items = []
                if task.loop is not None:
                    if task.loop in lookup_loader:
                        #TODO: remove convert_bare true and deprecate this in with_
                        try:
                            loop_terms = listify_lookup_plugin_terms(
                                terms=task.loop_args,
                                templar=templar,
                                loader=loader,
                                fail_on_undefined=True,
                                convert_bare=True)
                        except AnsibleUndefinedVariable as e:
                            if 'has no attribute' in str(e):
                                loop_terms = []
                                self._display.deprecated(
                                    "Skipping task due to undefined attribute, in the future this will be a fatal error."
                                )
                            else:
                                raise
                        items = lookup_loader.get(task.loop,
                                                  loader=loader,
                                                  templar=templar).run(
                                                      terms=loop_terms,
                                                      variables=all_vars)
                    else:
                        raise AnsibleError(
                            "Unexpected failure in finding the lookup named '%s' in the available lookup plugins"
                            % task.loop)
                else:
                    items = [None]

                vars_copy = all_vars.copy()
                delegated_host_vars = dict()
                for item in items:
                    # update the variables with the item value for templating, in case we need it
                    if item is not None:
                        vars_copy['item'] = item

                    templar.set_available_variables(vars_copy)
                    delegated_host_name = templar.template(
                        task.delegate_to, fail_on_undefined=False)
                    if delegated_host_name in delegated_host_vars:
                        # no need to repeat ourselves, as the delegate_to value
                        # does not appear to be tied to the loop item variable
                        continue

                    # a dictionary of variables to use if we have to create a new host below
                    new_delegated_host_vars = dict(
                        ansible_host=delegated_host_name,
                        ansible_user=C.DEFAULT_REMOTE_USER,
                        ansible_connection=C.DEFAULT_TRANSPORT,
                    )

                    # now try to find the delegated-to host in inventory, or failing that,
                    # create a new host on the fly so we can fetch variables for it
                    delegated_host = None
                    if self._inventory is not None:
                        delegated_host = self._inventory.get_host(
                            delegated_host_name)
                        # try looking it up based on the address field, and finally
                        # fall back to creating a host on the fly to use for the var lookup
                        if delegated_host is None:
                            for h in self._inventory.get_hosts(
                                    ignore_limits_and_restrictions=True):
                                # check if the address matches, or if both the delegated_to host
                                # and the current host are in the list of localhost aliases
                                if h.address == delegated_host_name or h.name in C.LOCALHOST and delegated_host_name in C.LOCALHOST:
                                    delegated_host = h
                                    break
                            else:
                                delegated_host = Host(name=delegated_host_name)
                                delegated_host.vars.update(
                                    new_delegated_host_vars)
                    else:
                        delegated_host = Host(name=delegated_host_name)
                        delegated_host.vars.update(new_delegated_host_vars)

                    # now we go fetch the vars for the delegated-to host and save them in our
                    # master dictionary of variables to be used later in the TaskExecutor/PlayContext
                    delegated_host_vars[delegated_host_name] = self.get_vars(
                        loader=loader,
                        play=play,
                        host=delegated_host,
                        task=task,
                        include_delegate_to=False,
                        include_hostvars=False,
                    )
                all_vars['ansible_delegated_vars'] = delegated_host_vars

        if self._inventory is not None:
            all_vars['inventory_dir'] = self._inventory.basedir()
            if play:
                # add the list of hosts in the play, as adjusted for limit/filters
                # DEPRECATED: play_hosts should be deprecated in favor of ansible_play_hosts,
                #             however this would take work in the templating engine, so for now
                #             we'll add both so we can give users something transitional to use
                host_list = [x.name for x in self._inventory.get_hosts()]
                all_vars['play_hosts'] = host_list
                all_vars['ansible_play_hosts'] = host_list

        # the 'omit' value allows params to be left out if the variable they are based on is undefined
        all_vars['omit'] = self._omit_token
        all_vars['ansible_version'] = CLI.version_info(gitinfo=False)

        if 'hostvars' in all_vars and host:
            all_vars['vars'] = all_vars['hostvars'][host.get_name()]

        #VARIABLE_CACHE[cache_entry] = all_vars

        debug("done with get_vars()")
        return all_vars
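
The precedence list in the docstring falls directly out of the order of the combine_vars() calls above: each layer is merged on top of the previous one, so later layers win and extra vars, merged last, always take priority. A tiny illustration of that layering with plain dicts, assuming a combine_vars that behaves like the default 'replace' hash behaviour (a shallow, right-biased merge):

def combine_vars(a, b):
    # shallow, right-biased merge: the right-hand side wins on conflicts
    result = a.copy()
    result.update(b)
    return result

role_defaults = {'port': 80, 'user': 'www'}
host_vars = {'port': 8080}
extra_vars = {'user': 'deploy'}

all_vars = {}
for layer in (role_defaults, host_vars, extra_vars):  # lowest to highest precedence
    all_vars = combine_vars(all_vars, layer)

print(all_vars)  # {'port': 8080, 'user': 'deploy'}
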
Example #16
class AnsibleContainerConductorConfig(Mapping):
    _config = None

    @container.conductor_only
    def __init__(self, container_config):
        self._config = container_config
        self._templar = Templar(loader=None, variables={})
        self._process_defaults()
        self._process_top_level_sections()
        self._process_services()

    def _process_section(self, section_value, callback=None, templar=None):
        if not templar:
            templar = self._templar
        processed = yaml.compat.ordereddict()
        for key, value in section_value.items():
            if isinstance(value, basestring):
                # strings can be templated
                processed[key] = templar.template(value)
                if isinstance(processed[key], AnsibleUnsafeText):
                    processed[key] = str(processed[key])
            elif isinstance(value, (list, dict)):
                # if it's a dimensional structure, it's cheaper just to serialize
                # it, treat it like a template, and then deserialize it again
                buffer = BytesIO() # use bytes explicitly, not unicode
                yaml.round_trip_dump(value, buffer)
                processed[key] = yaml.round_trip_load(
                    templar.template(buffer.getvalue())
                )
            else:
                # ints, booleans, etc.
                processed[key] = value
            if callback:
                callback(processed)
        return processed

    def _process_defaults(self):
        logger.debug('Processing defaults section...')
        self.defaults = self._process_section(
            self._config.get('defaults', yaml.compat.ordereddict()),
            callback=lambda processed: self._templar.set_available_variables(
                dict(processed)))

    def _process_top_level_sections(self):
        self._config['settings'] = self._config.get('settings', yaml.compat.ordereddict())
        for section in ['volumes', 'registries']:
            logger.debug('Processing section...', section=section)
            setattr(self, section, self._process_section(self._config.get(section, yaml.compat.ordereddict())))

    def _process_services(self):
        services = yaml.compat.ordereddict()
        for service, service_data in self._config.get('services', yaml.compat.ordereddict()).items():
            logger.debug('Processing service...', service=service, service_data=service_data)
            processed = yaml.compat.ordereddict()
            service_defaults = self.defaults.copy()
            for idx in range(len(service_data.get('volumes', []))):
                # To mount the project directory, let users specify `$PWD` and
                # have that filled in with the project path
                service_data['volumes'][idx] = re.sub(r'\$(PWD|\{PWD\})', self._config['settings'].get('pwd'),
                                                      service_data['volumes'][idx])
            for role_spec in service_data.get('roles', []):
                if isinstance(role_spec, dict):
                    # A role with parameters to run it with
                    role_spec_copy = copy.deepcopy(role_spec)
                    role_name = role_spec_copy.pop('role')
                    role_args = role_spec_copy
                else:
                    role_name = role_spec
                    role_args = {}
                role_metadata = get_metadata_from_role(role_name)
                processed.update(role_metadata, relax=True)
                service_defaults.update(get_defaults_from_role(role_name),
                                        relax=True)
                service_defaults.update(role_args, relax=True)
            processed.update(service_data, relax=True)
            logger.debug('Rendering service keys from defaults', service=service, defaults=service_defaults)
            services[service] = self._process_section(
                processed,
                templar=Templar(loader=None, variables=service_defaults)
            )
            services[service]['defaults'] = service_defaults
        self.services = services

    def __getitem__(self, key):
        if key.startswith('_'):
            raise KeyError(key)
        try:
            return getattr(self, key)
        except AttributeError:
            raise KeyError(key)

    def __len__(self):
        # volumes, registries, services, and defaults
        return 4

    def __iter__(self):
        yield self.defaults
        yield self.registries
        yield self.volumes
        yield self.services
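
_process_section() templates nested values by round-tripping them through YAML: dump the list or dict to text, run the whole text through the templar once, then parse the result back into a structure. A rough sketch of that trick, using PyYAML (assumed to be available) and a trivial string renderer standing in for Ansible's Templar:

import yaml

def render(text, variables):
    # stand-in for Templar.template(); the original relies on real Jinja2 rendering
    for name, value in variables.items():
        text = text.replace('{{ %s }}' % name, str(value))
    return text

value = {'image': '{{ base_image }}', 'ports': ['{{ web_port }}:80']}
variables = {'base_image': 'centos:7', 'web_port': 8080}

templated = yaml.safe_load(render(yaml.safe_dump(value), variables))
print(templated)  # {'image': 'centos:7', 'ports': ['8080:80']}
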
Example #17
class VariableManager:

    _ALLOWED = frozenset(['plugins_by_group', 'groups_plugins_play', 'groups_plugins_inventory', 'groups_inventory',
                          'all_plugins_play', 'all_plugins_inventory', 'all_inventory'])

    def __init__(self, loader=None, inventory=None):
        self._nonpersistent_fact_cache = defaultdict(dict)
        self._vars_cache = defaultdict(dict)
        self._extra_vars = defaultdict(dict)
        self._host_vars_files = defaultdict(dict)
        self._group_vars_files = defaultdict(dict)
        self._inventory = inventory
        self._loader = loader
        self._hostvars = None
        self._omit_token = '__omit_place_holder__%s' % sha1(os.urandom(64)).hexdigest()
        self._options_vars = defaultdict(dict)
        self.safe_basedir = False
        self._templar = Templar(loader=self._loader)

        # bad cache plugin is not fatal error
        try:
            self._fact_cache = FactCache()
        except AnsibleError as e:
            display.warning(to_native(e))
            # fallback to a dict as in memory cache
            self._fact_cache = {}

    def __getstate__(self):
        data = dict(
            fact_cache=self._fact_cache,
            np_fact_cache=self._nonpersistent_fact_cache,
            vars_cache=self._vars_cache,
            extra_vars=self._extra_vars,
            host_vars_files=self._host_vars_files,
            group_vars_files=self._group_vars_files,
            omit_token=self._omit_token,
            options_vars=self._options_vars,
            inventory=self._inventory,
            safe_basedir=self.safe_basedir,
        )
        return data

    def __setstate__(self, data):
        self._fact_cache = data.get('fact_cache', defaultdict(dict))
        self._nonpersistent_fact_cache = data.get('np_fact_cache', defaultdict(dict))
        self._vars_cache = data.get('vars_cache', defaultdict(dict))
        self._extra_vars = data.get('extra_vars', dict())
        self._host_vars_files = data.get('host_vars_files', defaultdict(dict))
        self._group_vars_files = data.get('group_vars_files', defaultdict(dict))
        self._omit_token = data.get('omit_token', '__omit_place_holder__%s' % sha1(os.urandom(64)).hexdigest())
        self._inventory = data.get('inventory', None)
        self._options_vars = data.get('options_vars', dict())
        self.safe_basedir = data.get('safe_basedir', False)

    @property
    def extra_vars(self):
        ''' ensures a clean copy of the extra_vars are made '''
        return self._extra_vars.copy()

    @extra_vars.setter
    def extra_vars(self, value):
        ''' ensures a clean copy of the extra_vars are used to set the value '''
        if not isinstance(value, MutableMapping):
            raise AnsibleAssertionError("the type of 'value' for extra_vars should be a MutableMapping, but is a %s" % type(value))
        self._extra_vars = value.copy()

    def set_inventory(self, inventory):
        self._inventory = inventory

    @property
    def options_vars(self):
        ''' ensures a clean copy of the options_vars are made '''
        return self._options_vars.copy()

    @options_vars.setter
    def options_vars(self, value):
        ''' ensures a clean copy of the options_vars are used to set the value '''
        if not isinstance(value, dict):
            raise AnsibleAssertionError("the type of 'value' for options_vars should be a dict, but is a %s" % type(value))
        self._options_vars = value.copy()

    def get_vars(self, play=None, host=None, task=None, include_hostvars=True, include_delegate_to=True, use_cache=True):
        '''
        Returns the variables, with optional "context" given via the parameters
        for the play, host, and task (which could possibly result in different
        sets of variables being returned due to the additional context).

        The order of precedence is:
        - play->roles->get_default_vars (if there is a play context)
        - group_vars_files[host] (if there is a host context)
        - host_vars_files[host] (if there is a host context)
        - host->get_vars (if there is a host context)
        - fact_cache[host] (if there is a host context)
        - play vars (if there is a play context)
        - play vars_files (if there's no host context, ignore
          file names that cannot be templated)
        - task->get_vars (if there is a task context)
        - vars_cache[host] (if there is a host context)
        - extra vars
        '''

        display.debug("in VariableManager get_vars()")

        all_vars = dict()
        magic_variables = self._get_magic_variables(
            play=play,
            host=host,
            task=task,
            include_hostvars=include_hostvars,
            include_delegate_to=include_delegate_to,
        )

        # default for all cases
        basedirs = []
        if self.safe_basedir:  # avoid adhoc/console loading cwd
            basedirs = [self._loader.get_basedir()]

        if play:
            # first we compile any vars specified in defaults/main.yml
            # for all roles within the specified play
            for role in play.get_roles():
                all_vars = combine_vars(all_vars, role.get_default_vars())

        if task:
            # set basedirs
            if C.PLAYBOOK_VARS_ROOT == 'all':  # should be default
                basedirs = task.get_search_path()
            elif C.PLAYBOOK_VARS_ROOT in ('bottom', 'playbook_dir'):  # only option in 2.4.0
                basedirs = [task.get_search_path()[0]]
            elif C.PLAYBOOK_VARS_ROOT != 'top':
                # preserves default basedirs, only option pre 2.3
                raise AnsibleError('Unknown playbook vars logic: %s' % C.PLAYBOOK_VARS_ROOT)

            # if we have a task in this context, and that task has a role, make
            # sure it sees its defaults above any other roles, as we previously
            # (v1) made sure each task had a copy of its role's default vars
            if task._role is not None and (play or task.action == 'include_role'):
                all_vars = combine_vars(all_vars, task._role.get_default_vars(dep_chain=task.get_dep_chain()))

        if host:
            # THE 'all' group and the rest of groups for a host, used below
            all_group = self._inventory.groups.get('all')
            host_groups = sort_groups([g for g in host.get_groups() if g.name not in ['all']])

            def _get_plugin_vars(plugin, path, entities):
                data = {}
                try:
                    data = plugin.get_vars(self._loader, path, entities)
                except AttributeError:
                    try:
                        for entity in entities:
                            if isinstance(entity, Host):
                                data.update(plugin.get_host_vars(entity.name))
                            else:
                                data.update(plugin.get_group_vars(entity.name))
                    except AttributeError:
                        if hasattr(plugin, 'run'):
                            raise AnsibleError("Cannot use v1 type vars plugin %s from %s" % (plugin._load_name, plugin._original_path))
                        else:
                            raise AnsibleError("Invalid vars plugin %s from %s" % (plugin._load_name, plugin._original_path))
                return data

            # internal functions that actually do the work
            def _plugins_inventory(entities):
                ''' merges all entities by inventory source '''
                data = {}
                for inventory_dir in self._inventory._sources:
                    if ',' in inventory_dir and not os.path.exists(inventory_dir):  # skip host lists
                        continue
                    elif not os.path.isdir(inventory_dir):  # always pass 'inventory directory'
                        inventory_dir = os.path.dirname(inventory_dir)

                    for plugin in vars_loader.all():

                        data = combine_vars(data, _get_plugin_vars(plugin, inventory_dir, entities))
                return data

            def _plugins_play(entities):
                ''' merges all entities adjacent to play '''
                data = {}
                for plugin in vars_loader.all():

                    for path in basedirs:
                        data = combine_vars(data, _get_plugin_vars(plugin, path, entities))
                return data

            # configurable functions that are sortable via config, remember to add to _ALLOWED if expanding this list
            def all_inventory():
                return all_group.get_vars()

            def all_plugins_inventory():
                return _plugins_inventory([all_group])

            def all_plugins_play():
                return _plugins_play([all_group])

            def groups_inventory():
                ''' gets group vars from inventory '''
                return get_group_vars(host_groups)

            def groups_plugins_inventory():
                ''' gets plugin sources from inventory for groups '''
                return _plugins_inventory(host_groups)

            def groups_plugins_play():
                ''' gets plugin sources from play for groups '''
                return _plugins_play(host_groups)

            def plugins_by_groups():
                '''
                    merges all plugin sources by group;
                    this should be used instead of, NOT in combination with, the other groups_plugins* functions
                '''
                data = {}
                for group in host_groups:
                    # use .get() here so the first merge for a group does not raise a KeyError
                    data[group] = combine_vars(data.get(group, {}), _plugins_inventory([group]))
                    data[group] = combine_vars(data[group], _plugins_play([group]))
                return data

            # Merge groups as per precedence config
            # only allow to call the functions we want exposed
            for entry in C.VARIABLE_PRECEDENCE:
                if entry in self._ALLOWED:
                    display.debug('Calling %s to load vars for %s' % (entry, host.name))
                    all_vars = combine_vars(all_vars, locals()[entry]())
                else:
                    display.warning('Ignoring unknown variable precedence entry: %s' % (entry))

            # host vars, from inventory, inventory adjacent and play adjacent via plugins
            all_vars = combine_vars(all_vars, host.get_vars())
            all_vars = combine_vars(all_vars, _plugins_inventory([host]))
            all_vars = combine_vars(all_vars, _plugins_play([host]))

            # finally, the facts caches for this host, if it exists
            # TODO: cleaning of facts should eventually become part of taskresults instead of vars
            try:
                facts = wrap_var(self._fact_cache.get(host.name, {}))
                all_vars.update(namespace_facts(facts))

                # push facts to main namespace
                if C.INJECT_FACTS_AS_VARS:
                    all_vars = combine_vars(all_vars, wrap_var(clean_facts(facts)))
                else:
                    # always 'promote' ansible_local
                    all_vars = combine_vars(all_vars, wrap_var({'ansible_local': facts.get('ansible_local', {})}))
            except KeyError:
                pass

        if play:
            all_vars = combine_vars(all_vars, play.get_vars())

            vars_files = play.get_vars_files()
            try:
                for vars_file_item in vars_files:
                    # create a set of temporary vars here, which incorporate the extra
                    # and magic vars so we can properly template the vars_files entries
                    temp_vars = combine_vars(all_vars, self._extra_vars)
                    temp_vars = combine_vars(temp_vars, magic_variables)
                    self._templar.set_available_variables(temp_vars)

                    # we assume each item in the list is itself a list, as we
                    # support "conditional includes" for vars_files, which mimics
                    # the with_first_found mechanism.
                    vars_file_list = vars_file_item
                    if not isinstance(vars_file_list, list):
                        vars_file_list = [vars_file_list]

                    # now we iterate through the (potential) files, and break out
                    # as soon as we read one from the list. If none are found, we
                    # raise an error, which is silently ignored at this point.
                    try:
                        for vars_file in vars_file_list:
                            vars_file = self._templar.template(vars_file)
                            if not isinstance(vars_file, Sequence):
                                raise AnsibleError(
                                    "Invalid vars_files entry found: %r\n"
                                    "vars_files entries should be either a string type or "
                                    "a list of string types after template expansion" % vars_file
                                )
                            try:
                                data = preprocess_vars(self._loader.load_from_file(vars_file, unsafe=True))
                                if data is not None:
                                    for item in data:
                                        all_vars = combine_vars(all_vars, item)
                                break
                            except AnsibleFileNotFound:
                                # we continue on loader failures
                                continue
                            except AnsibleParserError:
                                raise
                        else:
                            # if include_delegate_to is set to False, we ignore the missing
                            # vars file here because we're working on a delegated host
                            if include_delegate_to:
                                raise AnsibleFileNotFound("vars file %s was not found" % vars_file_item)
                    except (UndefinedError, AnsibleUndefinedVariable):
                        if host is not None and self._fact_cache.get(host.name, dict()).get('module_setup') and task is not None:
                            raise AnsibleUndefinedVariable("an undefined variable was found when attempting to template the vars_files item '%s'"
                                                           % vars_file_item, obj=vars_file_item)
                        else:
                            # we do not have a full context here, and the missing variable could be because of that
                            # so just show a warning and continue
                            display.vvv("skipping vars_file '%s' due to an undefined variable" % vars_file_item)
                            continue

                    display.vvv("Read vars_file '%s'" % vars_file_item)
            except TypeError:
                raise AnsibleParserError("Error while reading vars files - please supply a list of file names. "
                                         "Got '%s' of type %s" % (vars_files, type(vars_files)))

            # By default, we now merge in all vars from all roles in the play,
            # unless the user has disabled this via a config option
            if not C.DEFAULT_PRIVATE_ROLE_VARS:
                for role in play.get_roles():
                    all_vars = combine_vars(all_vars, role.get_vars(include_params=False))

        # next, we merge in the vars from the role, which will specifically
        # follow the role dependency chain, and then we merge in the tasks
        # vars (which will look at parent blocks/task includes)
        if task:
            if task._role:
                all_vars = combine_vars(all_vars, task._role.get_vars(task.get_dep_chain(), include_params=False))
            all_vars = combine_vars(all_vars, task.get_vars())

        # next, we merge in the vars cache (include vars) and nonpersistent
        # facts cache (set_fact/register), in that order
        if host:
            # include_vars non-persistent cache
            all_vars = combine_vars(all_vars, self._vars_cache.get(host.get_name(), dict()))
            # fact non-persistent cache
            all_vars = combine_vars(all_vars, self._nonpersistent_fact_cache.get(host.name, dict()))

        # next, we merge in role params and task include params
        if task:
            if task._role:
                all_vars = combine_vars(all_vars, task._role.get_role_params(task.get_dep_chain()))

            # special case for include tasks, where the include params
            # may be specified in the vars field for the task, which should
            # have higher precedence than the vars/np facts above
            all_vars = combine_vars(all_vars, task.get_include_params())

        # extra vars
        all_vars = combine_vars(all_vars, self._extra_vars)

        # magic variables
        all_vars = combine_vars(all_vars, magic_variables)

        # special case for the 'environment' magic variable, as someone
        # may have set it as a variable and we don't want to stomp on it
        if task:
            all_vars['environment'] = task.environment

        # if we have a task and we're delegating to another host, figure out the
        # variables for that host now so we don't have to rely on hostvars later
        if task and task.delegate_to is not None and include_delegate_to:
            all_vars['ansible_delegated_vars'] = self._get_delegated_vars(play, task, all_vars)

        # 'vars' magic var
        if task or play:
            # has to be copy, otherwise recursive ref
            all_vars['vars'] = all_vars.copy()

        display.debug("done with get_vars()")
        return all_vars

    def _get_magic_variables(self, play, host, task, include_hostvars, include_delegate_to):
        '''
        Returns a dictionary of so-called "magic" variables in Ansible,
        which are special variables we set internally for use.
        '''

        variables = {}
        variables['playbook_dir'] = os.path.abspath(self._loader.get_basedir())
        variables['ansible_playbook_python'] = sys.executable

        if play:
            variables['role_names'] = [r._role_name for r in play.roles]

        if task:
            if task._role:
                variables['role_name'] = task._role.get_name()
                variables['role_path'] = task._role._role_path
                variables['role_uuid'] = text_type(task._role._uuid)

        if self._inventory is not None:
            variables['groups'] = self._inventory.get_groups_dict()
            if play:
                if self._templar.is_template(play.hosts):
                    pattern = 'all'
                else:
                    pattern = play.hosts or 'all'
                # add the list of hosts in the play, as adjusted for limit/filters
                variables['ansible_play_hosts_all'] = [x.name for x in self._inventory.get_hosts(pattern=pattern, ignore_restrictions=True)]
                variables['ansible_play_hosts'] = [x for x in variables['ansible_play_hosts_all'] if x not in play._removed_hosts]
                variables['ansible_play_batch'] = [x.name for x in self._inventory.get_hosts() if x.name not in play._removed_hosts]

                # DEPRECATED: play_hosts should be deprecated in favor of ansible_play_batch,
                # however this would take work in the templating engine, so for now we'll add both
                variables['play_hosts'] = variables['ansible_play_batch']

        # the 'omit' value allows params to be left out if the variable they are based on is undefined
        variables['omit'] = self._omit_token
        # Set options vars
        for option, option_value in iteritems(self._options_vars):
            variables[option] = option_value

        if self._hostvars is not None and include_hostvars:
            variables['hostvars'] = self._hostvars

        return variables

    def _get_delegated_vars(self, play, task, existing_variables):
        if not hasattr(task, 'loop'):
            # This "task" is not a Task, so we need to skip it
            return {}

        # we unfortunately need to template the delegate_to field here,
        # as we're fetching vars before post_validate has been called on
        # the task that has been passed in
        vars_copy = existing_variables.copy()
        self._templar.set_available_variables(vars_copy)

        items = []
        has_loop = True
        if task.loop_with is not None:
            if task.loop_with in lookup_loader:
                try:
                    loop_terms = listify_lookup_plugin_terms(terms=task.loop, templar=self._templar,
                                                             loader=self._loader, fail_on_undefined=True, convert_bare=False)
                    items = lookup_loader.get(task.loop_with, loader=self._loader, templar=self._templar).run(terms=loop_terms, variables=vars_copy)
                except AnsibleUndefinedVariable:
                    # This task will be skipped later due to this, so we just setup
                    # a dummy array for the later code so it doesn't fail
                    items = [None]
            else:
                raise AnsibleError("Failed to find the lookup named '%s' in the available lookup plugins" % task.loop_with)
        elif task.loop is not None:
            try:
                items = self._templar.template(task.loop)
            except AnsibleUndefinedVariable:
                # This task will be skipped later due to this, so we just setup
                # a dummy array for the later code so it doesn't fail
                items = [None]
        else:
            has_loop = False
            items = [None]

        delegated_host_vars = dict()
        item_var = getattr(task.loop_control, 'loop_var', 'item')
        cache_items = False
        for item in items:
            # update the variables with the item value for templating, in case we need it
            if item is not None:
                vars_copy[item_var] = item

            self._templar.set_available_variables(vars_copy)
            delegated_host_name = self._templar.template(task.delegate_to, fail_on_undefined=False)
            if delegated_host_name != task.delegate_to:
                cache_items = True
            if delegated_host_name is None:
                raise AnsibleError(message="Undefined delegate_to host for task:", obj=task._ds)
            if not isinstance(delegated_host_name, string_types):
                raise AnsibleError(message="the field 'delegate_to' has an invalid type (%s), and could not be"
                                           " converted to a string type." % type(delegated_host_name),
                                   obj=task._ds)
            if delegated_host_name in delegated_host_vars:
                # no need to repeat ourselves, as the delegate_to value
                # does not appear to be tied to the loop item variable
                continue

            # a dictionary of variables to use if we have to create a new host below
            # we set the default port based on the default transport here, to make sure
            # we use the proper default for windows
            new_port = C.DEFAULT_REMOTE_PORT
            if C.DEFAULT_TRANSPORT == 'winrm':
                new_port = 5986

            new_delegated_host_vars = dict(
                ansible_delegated_host=delegated_host_name,
                ansible_host=delegated_host_name,  # not redundant as other sources can change ansible_host
                ansible_port=new_port,
                ansible_user=C.DEFAULT_REMOTE_USER,
                ansible_connection=C.DEFAULT_TRANSPORT,
            )

            # now try to find the delegated-to host in inventory, or failing that,
            # create a new host on the fly so we can fetch variables for it
            delegated_host = None
            if self._inventory is not None:
                delegated_host = self._inventory.get_host(delegated_host_name)
                # try looking it up based on the address field, and finally
                # fall back to creating a host on the fly to use for the var lookup
                if delegated_host is None:
                    if delegated_host_name in C.LOCALHOST:
                        delegated_host = self._inventory.localhost
                    else:
                        for h in self._inventory.get_hosts(ignore_limits=True, ignore_restrictions=True):
                            # check if the address matches, or if both the delegated_to host
                            # and the current host are in the list of localhost aliases
                            if h.address == delegated_host_name:
                                delegated_host = h
                                break
                        else:
                            delegated_host = Host(name=delegated_host_name)
                            delegated_host.vars = combine_vars(delegated_host.vars, new_delegated_host_vars)
            else:
                delegated_host = Host(name=delegated_host_name)
                delegated_host.vars = combine_vars(delegated_host.vars, new_delegated_host_vars)

            # now we go fetch the vars for the delegated-to host and save them in our
            # master dictionary of variables to be used later in the TaskExecutor/PlayContext
            delegated_host_vars[delegated_host_name] = self.get_vars(
                play=play,
                host=delegated_host,
                task=task,
                include_delegate_to=False,
                include_hostvars=False,
            )

        if has_loop and cache_items:
            # delegate_to templating produced a change, update task.loop with templated items,
            # this ensures that delegate_to+loop doesn't produce different results than TaskExecutor
            # which may reprocess the loop
            # Set loop_with to None, so we don't do extra unexpected processing on the cached items later
            # in TaskExecutor
            task.loop_with = None
            task.loop = items

        return delegated_host_vars

    def clear_facts(self, hostname):
        '''
        Clears the facts for a host
        '''
        if hostname in self._fact_cache:
            del self._fact_cache[hostname]

    def set_host_facts(self, host, facts):
        '''
        Sets or updates the given facts for a host in the fact cache.
        '''

        if not isinstance(facts, dict):
            raise AnsibleAssertionError("the type of 'facts' to set for host_facts should be a dict but is a %s" % type(facts))

        if host.name not in self._fact_cache:
            self._fact_cache[host.name] = facts
        else:
            try:
                self._fact_cache.update(host.name, facts)
            except KeyError:
                self._fact_cache[host.name] = facts

    def set_nonpersistent_facts(self, host, facts):
        '''
        Sets or updates the given facts for a host in the fact cache.
        '''

        if not isinstance(facts, dict):
            raise AnsibleAssertionError("the type of 'facts' to set for nonpersistent_facts should be a dict but is a %s" % type(facts))

        if host.name not in self._nonpersistent_fact_cache:
            self._nonpersistent_fact_cache[host.name] = facts
        else:
            try:
                self._nonpersistent_fact_cache[host.name].update(facts)
            except KeyError:
                self._nonpersistent_fact_cache[host.name] = facts

    def set_host_variable(self, host, varname, value):
        '''
        Sets a value in the vars_cache for a host.
        '''
        host_name = host.get_name()
        if host_name not in self._vars_cache:
            self._vars_cache[host_name] = dict()
        if varname in self._vars_cache[host_name] and isinstance(self._vars_cache[host_name][varname], MutableMapping) and isinstance(value, MutableMapping):
            self._vars_cache[host_name] = combine_vars(self._vars_cache[host_name], {varname: value})
        else:
            self._vars_cache[host_name][varname] = value
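
set_host_variable() above only routes the new value through combine_vars() when both the cached value and the incoming value are mappings (so a configured hash_behaviour of 'merge' can apply); anything else is a plain overwrite. A small sketch of that rule in isolation, with a simple recursive merge standing in for combine_vars:

from collections.abc import MutableMapping

def merge(a, b):
    # recursive, right-biased merge used here as a stand-in for combine_vars
    result = dict(a)
    for key, value in b.items():
        if key in result and isinstance(result[key], MutableMapping) and isinstance(value, MutableMapping):
            result[key] = merge(result[key], value)
        else:
            result[key] = value
    return result

vars_cache = {'web01': {'app': {'port': 80}}}

def set_host_variable(host, varname, value):
    host_cache = vars_cache.setdefault(host, {})
    if isinstance(host_cache.get(varname), MutableMapping) and isinstance(value, MutableMapping):
        vars_cache[host] = merge(host_cache, {varname: value})
    else:
        host_cache[varname] = value

set_host_variable('web01', 'app', {'debug': True})   # merged into the existing dict
set_host_variable('web01', 'app_name', 'frontend')   # plain overwrite/insert
print(vars_cache['web01'])  # {'app': {'port': 80, 'debug': True}, 'app_name': 'frontend'}
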
Example #18
    def _run_loop(self, items):
        '''
        Runs the task with the loop items specified and collates the result
        into an array named 'results' which is inserted into the final result
        along with the item for which the loop ran.
        '''

        results = []

        # make copies of the job vars and task so we can add the item to
        # the variables and re-validate the task with the item variable
        # task_vars = self._job_vars.copy()
        task_vars = self._job_vars

        loop_var = 'item'
        index_var = None
        label = None
        loop_pause = 0
        templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=self._job_vars)
        if self._task.loop_control:
            # FIXME: move this to the object itself to allow post_validate to take care of templating
            loop_var = templar.template(self._task.loop_control.loop_var)
            index_var = templar.template(self._task.loop_control.index_var)
            loop_pause = templar.template(self._task.loop_control.pause)
            # these may be 'None', so we still need to default to something useful
            # this is templated below after an item is assigned
            label = (self._task.loop_control.label or ('{{' + loop_var + '}}'))

        if loop_var in task_vars:
            display.warning(u"The loop variable '%s' is already in use. "
                            u"You should set the `loop_var` value in the `loop_control` option for the task"
                            u" to something else to avoid variable collisions and unexpected behavior." % loop_var)

        ran_once = False
        if self._task.loop_with:
            # Only squash with 'with_:', not with 'loop:'; 'magic' squashing can be removed once with_ loops are deprecated
            items = self._squash_items(items, loop_var, task_vars)

        for item_index, item in enumerate(items):
            task_vars[loop_var] = item
            if index_var:
                task_vars[index_var] = item_index

            # Update template vars to reflect current loop iteration
            templar.set_available_variables(task_vars)

            # pause between loop iterations
            if loop_pause and ran_once:
                try:
                    time.sleep(float(loop_pause))
                except ValueError as e:
                    raise AnsibleError('Invalid pause value: %s, produced error: %s' % (loop_pause, to_native(e)))
            else:
                ran_once = True

            try:
                tmp_task = self._task.copy(exclude_parent=True, exclude_tasks=True)
                tmp_task._parent = self._task._parent
                tmp_play_context = self._play_context.copy()
            except AnsibleParserError as e:
                results.append(dict(failed=True, msg=to_text(e)))
                continue

            # now we swap the internal task and play context with their copies,
            # execute, and swap them back so we can do the next iteration cleanly
            (self._task, tmp_task) = (tmp_task, self._task)
            (self._play_context, tmp_play_context) = (tmp_play_context, self._play_context)
            res = self._execute(variables=task_vars)
            task_fields = self._task.dump_attrs()
            (self._task, tmp_task) = (tmp_task, self._task)
            (self._play_context, tmp_play_context) = (tmp_play_context, self._play_context)

            # now update the result with the item info, and append the result
            # to the list of results
            res[loop_var] = item
            if index_var:
                res[index_var] = item_index
            res['_ansible_item_result'] = True
            res['_ansible_ignore_errors'] = task_fields.get('ignore_errors')

            if label is not None:
                res['_ansible_item_label'] = templar.template(label, cache=False)

            self._rslt_q.put(
                TaskResult(
                    self._host.name,
                    self._task._uuid,
                    res,
                    task_fields=task_fields,
                ),
                block=False,
            )
            results.append(res)
            del task_vars[loop_var]

        return results
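The default label handling above can be exercised on its own. A minimal sketch, assuming only the Templar API already used in this method (the item values 'alpha' and 'beta' are illustrative):

from ansible.parsing.dataloader import DataLoader
from ansible.template import Templar

loop_var = 'item'
label = '{{' + loop_var + '}}'      # the default label built in the loop code above
templar = Templar(loader=DataLoader())
for item in ['alpha', 'beta']:
    # mirrors the per-iteration set_available_variables() call above
    templar.set_available_variables({loop_var: item})
    print(templar.template(label, cache=False))   # prints 'alpha', then 'beta'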
Example #19
0
def _main():
  parser = argparse.ArgumentParser(
    description="template Ansible style YAML files that only contain variables, using Ansible's codebase",
  )
  parser.add_argument(
    "yaml_files_dir",
    help="folder where the YAML files to template are stored",
  )
  parser.add_argument(
    "--output-as-yaml",
    dest="output_as_yaml",
    action="store_true",
    help="Output resulting variables as YAML instead of JSON",
  )
  args = parser.parse_args()

  # Load variables from the YAML files
  yaml_files_dir = os.path.join(args.yaml_files_dir)
  var_files = [
    os.path.join(yaml_files_dir, file_name)
      for file_name in os.listdir(yaml_files_dir)
  ]
  dl = DataLoader()
  vars_to_template = dict()
  for var_file in var_files:
    vars_to_template.update(dl.load_from_file(var_file))

  templar = Templar(loader=dl)
  result_vars = dict()
  # because some variables depend on the value of other variables and we don't
  # want to spend the effort to do a topological sort on them, we adopt the
  # following strategy:
  #
  # 1. maintain a dict of all successfully templated variables in `result_vars`
  # 2. until the `vars_to_template` dict is empty, do the following:
  #
  #    Try templating each variable using `ansible.template.Templar.template`.
  #
  #    If we get an `AnsibleUndefinedVariable` error, this means that the current
  #    variable depends on another variable. We ignore the error and keep the
  #    variable around for a future round of templating.
  #
  #    Otherwise, we have successfully templated the variable and add it to the
  #    `result_vars` variable. We also add the variable name to the
  #    `successfully_templated_vars` list.
  #
  #    At the end of each templating round, remove all variables in
  #    `successfully_templated_vars` from `vars_to_template`.
  #
  #
  # Note that the above algorithm only terminates if every variable required for
  # interpolation is present and the variables contain no circular references;
  # otherwise it will be stuck in an infinite loop. (A standalone, in-memory
  # sketch of this strategy follows the function below.)
  while vars_to_template:
    successfully_templated_vars = []
    for var_name, value in vars_to_template.items():
      try:
        templated_value = templar.template(value)
        result_vars[var_name] = templated_value
        successfully_templated_vars.append(var_name)
        templar.set_available_variables(result_vars.copy())
      except AnsibleUndefinedVariable:
        pass
    for var_name in successfully_templated_vars:
      del vars_to_template[var_name]

  if args.output_as_yaml:
    # NOTE: Although printing `result_vars` makes the leaf values look like plain
    #       strings, they are actually `ansible.parsing.yaml.objects.AnsibleUnicode`
    #       objects, so `yaml.dump` serializes them with verbose Python type tags.
    #       The intimidating-looking output is expected and not an error.
    import yaml
    print(yaml.dump(result_vars))
  else:
    import json
    print(json.dumps(result_vars))
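As referenced in the comment block above, the same retry-until-empty strategy can be exercised without reading any files from disk. A minimal sketch; the variable names and values are made up for illustration:

from ansible.errors import AnsibleUndefinedVariable
from ansible.parsing.dataloader import DataLoader
from ansible.template import Templar

vars_to_template = {
    'message': '{{ greeting }}, world',   # depends on `greeting`
    'greeting': 'hello',
}
templar = Templar(loader=DataLoader())
result_vars = {}
while vars_to_template:
    successfully_templated_vars = []
    for var_name, value in vars_to_template.items():
        try:
            result_vars[var_name] = templar.template(value)
            successfully_templated_vars.append(var_name)
            templar.set_available_variables(result_vars.copy())
        except AnsibleUndefinedVariable:
            pass  # depends on a variable that has not been templated yet
    for var_name in successfully_templated_vars:
        del vars_to_template[var_name]

print(result_vars)   # {'greeting': 'hello', 'message': 'hello, world'}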
Example #20
0
class AnsibleContainerConductorConfig(Mapping):
    _config = None

    @container.conductor_only
    def __init__(self, container_config):
        self._config = container_config
        self._templar = Templar(loader=None, variables={})
        self._process_defaults()
        self._process_top_level_sections()
        self._process_services()

    def _process_section(self, section_value, callback=None, templar=None):
        if not templar:
            templar = self._templar
        processed = yaml.compat.ordereddict()
        for key, value in section_value.items():
            if isinstance(value, basestring):
                # strings can be templated
                processed[key] = templar.template(value)
            elif isinstance(value, (list, dict)):
                # if it's a dimensional structure, it's cheaper just to serialize
                # it, treat it like a template, and then deserialize it again
                buffer = BytesIO() # use bytes explicitly, not unicode
                yaml.round_trip_dump(value, buffer)
                processed[key] = yaml.round_trip_load(
                    templar.template(buffer.getvalue())
                )
            else:
                # ints, booleans, etc.
                processed[key] = value
            if callback:
                callback(processed)
        return processed

    def _process_defaults(self):
        logger.debug('Processing defaults section...')
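        # the callback republishes each processed default back into the Templar,
        # so later defaults can reference earlier ones while being templated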
        self.defaults = self._process_section(
            self._config.get('defaults', yaml.compat.ordereddict()),
            callback=lambda processed: self._templar.set_available_variables(
                dict(processed)))

    def _process_top_level_sections(self):
        self._config['settings'] = self._config.get('settings', yaml.compat.ordereddict())
        for section in ['volumes', 'registries']:
            logger.debug('Processing section...', section=section)
            setattr(self, section,
                    self._process_section(self._config.get(
                        section, yaml.compat.ordereddict())))

    def _process_services(self):
        services = yaml.compat.ordereddict()
        for service, service_data in self._config.get(
                'services', yaml.compat.ordereddict()).items():
            logger.debug('Processing service...', service=service)
            processed = yaml.compat.ordereddict()
            service_defaults = self.defaults.copy()

            for idx in range(len(service_data.get('volumes', []))):
                # To mount the project directory, let users specify `$PWD` and
                # have that filled in with the project path
                service_data['volumes'][idx] = service_data['volumes'][idx].replace(
                    '$PWD', self._config['settings'].get('pwd'))

            for role_spec in service_data.get('roles', []):
                if isinstance(role_spec, dict):
                    # A role with parameters to run it with
                    role_spec_copy = role_spec.copy()
                    role_name = role_spec_copy.pop('role')
                    role_args = role_spec_copy
                else:
                    role_name = role_spec
                    role_args = {}
                role_metadata = get_metadata_from_role(role_name)
                processed.update(role_metadata, relax=True)
                service_defaults.update(get_defaults_from_role(role_name),
                                        relax=True)
                service_defaults.update(role_args, relax=True)
            processed.update(service_data, relax=True)
            logger.debug('Rendering service keys from defaults',
                service=service, defaults=service_defaults)
            services[service] = self._process_section(
                processed,
                templar=Templar(loader=None, variables=service_defaults)
            )
            services[service]['defaults'] = service_defaults
        self.services = services

    def __getitem__(self, key):
        if key.startswith('_'):
            raise KeyError(key)
        try:
            return getattr(self, key)
        except AttributeError:
            raise KeyError(key)

    def __len__(self):
        # volumes, registries, services, and defaults
        return 4

    def __iter__(self):
        # as a Mapping, iteration yields the key names (so e.g. dict(config)
        # works), consistent with __getitem__ and __len__ above
        yield 'defaults'
        yield 'registries'
        yield 'volumes'
        yield 'services'
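
The most distinctive part of _process_section above is the serialize/template/deserialize round-trip for nested values. A minimal sketch of the same trick using plain PyYAML instead of the yaml.compat round-trip helpers (the variable names and values are illustrative):

import yaml  # plain PyYAML here, rather than the ruamel-based helpers used above

from ansible.template import Templar

templar = Templar(loader=None, variables={'web_port': 8080})
value = {'ports': ['{{ web_port }}:80'], 'replicas': 2}

# dump the nested structure to text, template it as one string,
# then load it back into a Python structure
rendered = yaml.safe_load(templar.template(yaml.safe_dump(value)))
print(rendered)   # {'ports': ['8080:80'], 'replicas': 2}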