Example #1
def test_undefined_variable():
    fake_loader = DictDataLoader({})
    variables = {}
    templar = Templar(loader=fake_loader, variables=variables)

    with pytest.raises(AssibleUndefinedVariable):
        templar.template("{{ missing }}")
Example #2
    def test_template_jinja2_extensions(self):
        fake_loader = DictDataLoader({})
        templar = Templar(loader=fake_loader)

        old_exts = C.DEFAULT_JINJA2_EXTENSIONS
        try:
            C.DEFAULT_JINJA2_EXTENSIONS = "foo,bar"
            self.assertEqual(templar._get_extensions(), ['foo', 'bar'])
        finally:
            C.DEFAULT_JINJA2_EXTENSIONS = old_exts
Example #3
    def test_action_base__compute_environment_string(self):
        fake_loader = DictDataLoader({})

        # create our fake task
        mock_task = MagicMock()
        mock_task.action = "copy"
        mock_task.args = dict(a=1)

        # create a mock connection, so we don't actually try and connect to things
        def env_prefix(**args):
            return ' '.join([
                '%s=%s' % (k, shlex_quote(text_type(v)))
                for k, v in args.items()
            ])

        mock_connection = MagicMock()
        mock_connection._shell.env_prefix.side_effect = env_prefix

        # we're using a real play context here
        play_context = PlayContext()

        # and we're using a real templar here too
        templar = Templar(loader=fake_loader)

        # our test class
        action_base = DerivedActionBase(
            task=mock_task,
            connection=mock_connection,
            play_context=play_context,
            loader=fake_loader,
            templar=templar,
            shared_loader_obj=None,
        )

        # test standard environment setup
        mock_task.environment = [dict(FOO='foo'), None]
        env_string = action_base._compute_environment_string()
        self.assertEqual(env_string, "FOO=foo")

        # test where environment is not a list
        mock_task.environment = dict(FOO='foo')
        env_string = action_base._compute_environment_string()
        self.assertEqual(env_string, "FOO=foo")

        # test environment with a variable in it
        templar.available_variables = dict(the_var='bar')
        mock_task.environment = [dict(FOO='{{the_var}}')]
        env_string = action_base._compute_environment_string()
        self.assertEqual(env_string, "FOO=bar")

        # test with a bad environment set (a bare string instead of a dict)
        mock_task.environment = ['hi there']
        self.assertRaises(AssibleError,
                          action_base._compute_environment_string)
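
The env_prefix side effect above is what turns a task's environment dict into the string the assertions check. A standalone sketch of that helper, using Python 3's shlex.quote and str in place of the shlex_quote/text_type imports from the test module (illustrative only):

import shlex

def env_prefix(**args):
    # build KEY=value pairs, shell-quoting each value
    return ' '.join('%s=%s' % (k, shlex.quote(str(v))) for k, v in args.items())

print(env_prefix(FOO='foo bar'))  # FOO='foo bar'
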
Example #4
    def _normalize_parameters(self, thing, action=None, additional_args=None):
        '''
        arguments can be fuzzy.  Deal with all the forms.
        '''

        additional_args = {} if additional_args is None else additional_args

        # final args are the ones we'll eventually return, so first update
        # them with any additional args specified, which have lower priority
        # than those which may be parsed/normalized next
        final_args = dict()
        if additional_args:
            if isinstance(additional_args, string_types):
                templar = Templar(loader=None)
                if templar.is_template(additional_args):
                    final_args['_variable_params'] = additional_args
                else:
                    raise AssibleParserError("Complex args containing variables cannot use bare variables (without Jinja2 delimiters), "
                                             "and must use the full variable style ('{{var_name}}')")
            elif isinstance(additional_args, dict):
                final_args.update(additional_args)
            else:
                raise AssibleParserError('Complex args must be a dictionary or variable string ("{{var}}").')

        # how we normalize depends if we figured out what the module name is
        # yet.  If we have already figured it out, it's a 'new style' invocation.
        # otherwise, it's not

        if action is not None:
            args = self._normalize_new_style_args(thing, action)
        else:
            (action, args) = self._normalize_old_style_args(thing)

            # this can occasionally happen, simplify
            if args and 'args' in args:
                tmp_args = args.pop('args')
                if isinstance(tmp_args, string_types):
                    tmp_args = parse_kv(tmp_args)
                args.update(tmp_args)

        # only internal variables can start with an underscore, so
        # we don't allow users to set them directly in arguments
        if args and action not in FREEFORM_ACTIONS:
            for arg in args:
                arg = to_text(arg)
                if arg.startswith('_assible_'):
                    raise AssibleError("invalid parameter specified for action '%s': '%s'" % (action, arg))

        # finally, update the args we're going to return with the ones
        # which were normalized above
        if args:
            final_args.update(args)

        return (action, final_args)
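
The "fuzzy" forms this method accepts include a key=value string and an explicit dict, which both normalize to the same args mapping. A small sketch of the string case, assuming the parse_kv helper already used in the code above (the paths are made up):

raw = "src=/tmp/a dest=/tmp/b"
print(parse_kv(raw))  # {'src': '/tmp/a', 'dest': '/tmp/b'}
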
Example #5
    def setUp(self):
        self.test_vars = dict(
            foo="bar",
            bam="{{foo}}",
            num=1,
            var_true=True,
            var_false=False,
            var_dict=dict(a="b"),
            bad_dict="{a='b'",
            var_list=[1],
            recursive="{{recursive}}",
            some_var="blip",
            some_static_var="static_blip",
            some_keyword="{{ foo }}",
            some_unsafe_var=wrap_var("unsafe_blip"),
            some_static_unsafe_var=wrap_var("static_unsafe_blip"),
            some_unsafe_keyword=wrap_var("{{ foo }}"),
            str_with_error="{{ 'str' | from_json }}",
        )
        self.fake_loader = DictDataLoader({
            "/path/to/my_file.txt": "foo\n",
        })
        self.templar = Templar(loader=self.fake_loader,
                               variables=self.test_vars)
        self._assible_context = AssibleContext(self.templar.environment, {},
                                               {}, {})
Example #6
    def test_get_validated_value_string_rewrap_unsafe(self):
        attribute = FieldAttribute(isa='string')
        value = AssibleUnsafeText(u'bar')
        templar = Templar(None)
        bsc = self.ClassUnderTest()
        result = bsc.get_validated_value('foo', attribute, value, templar)
        self.assertIsInstance(result, AssibleUnsafeText)
        self.assertEqual(result, AssibleUnsafeText(u'bar'))
Example #7
    def evaluate_tags(self, only_tags, skip_tags, all_vars):
        ''' this checks if the current item should be executed depending on tag options '''

        if self.tags:
            templar = Templar(loader=self._loader, variables=all_vars)
            tags = templar.template(self.tags)

            _temp_tags = set()
            for tag in tags:
                if isinstance(tag, list):
                    _temp_tags.update(tag)
                else:
                    _temp_tags.add(tag)
            tags = _temp_tags
            self.tags = list(tags)
        else:
            # this makes isdisjoint work for untagged
            tags = self.untagged

        should_run = True  # default, tasks to run

        if only_tags:
            if 'always' in tags:
                should_run = True
            elif ('all' in only_tags and 'never' not in tags):
                should_run = True
            elif not tags.isdisjoint(only_tags):
                should_run = True
            elif 'tagged' in only_tags and tags != self.untagged and 'never' not in tags:
                should_run = True
            else:
                should_run = False

        if should_run and skip_tags:

            # Check for tags that we need to skip
            if 'all' in skip_tags:
                if 'always' not in tags or 'always' in skip_tags:
                    should_run = False
            elif not tags.isdisjoint(skip_tags):
                should_run = False
            elif 'tagged' in skip_tags and tags != self.untagged:
                should_run = False

        return should_run
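
A plain-Python illustration of the set arithmetic evaluate_tags relies on; the tag values here are invented for the example:

task_tags = {'deploy', 'web'}
only_tags = {'web'}
skip_tags = {'debug'}

# run the task if a requested tag matches and no skip tag matches
should_run = not task_tags.isdisjoint(only_tags) and task_tags.isdisjoint(skip_tags)
print(should_run)  # True
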
Example #8
    def test_attr_list_required_empty_string(self):
        string_list = [""]
        ds = {'test_attr_list_required': string_list}
        bsc = self.ClassUnderTest()
        bsc.load_data(ds)
        fake_loader = DictDataLoader({})
        templar = Templar(loader=fake_loader)
        self.assertRaisesRegexp(AssibleParserError, 'cannot have empty values',
                                bsc.post_validate, templar)
Example #9
    def test_attr_list_required(self):
        string_list = ['foo', 'bar']
        ds = {'test_attr_list_required': string_list}
        bsc = self.ClassUnderTest()
        bsc.load_data(ds)
        fake_loader = DictDataLoader({})
        templar = Templar(loader=fake_loader)
        bsc.post_validate(templar)
        self.assertEqual(string_list,
                         bsc._attributes['test_attr_list_required'])
Example #10
    def _base_validate(self, ds):
        bsc = self.ClassUnderTest()
        parent = ExampleParentBaseSubClass()
        bsc._parent = parent
        bsc._dep_chain = [parent]
        parent._dep_chain = None
        bsc.load_data(ds)
        fake_loader = DictDataLoader({})
        templar = Templar(loader=fake_loader)
        bsc.post_validate(templar)
        return bsc
Example #11
    def _load_role_name(self, ds):
        '''
        Returns the role name (either the role: or name: field) from
        the role definition, or (when the role definition is a simple
        string), just that string
        '''

        if isinstance(ds, string_types):
            return ds

        role_name = ds.get('role', ds.get('name'))
        if not role_name or not isinstance(role_name, string_types):
            raise AssibleError('role definitions must contain a role name',
                               obj=ds)

        # if we have the required datastructures, and if the role_name
        # contains a variable, try and template it now
        if self._variable_manager:
            all_vars = self._variable_manager.get_vars(play=self._play)
            templar = Templar(loader=self._loader, variables=all_vars)
            role_name = templar.template(role_name)

        return role_name
Example #12
    def parse(self, inventory, loader, path, cache=True):
        ''' Populates inventory from the given data. Raises an error on any parse failure
            :arg inventory: a copy of the previously accumulated inventory data,
                 to be updated with any new data this plugin provides.
                 The inventory can be empty if no other source/plugin ran successfully.
            :arg loader: a reference to the DataLoader, which can read in YAML and JSON files,
                 it also has Vault support to automatically decrypt files.
            :arg path: the string that represents the 'inventory source',
                 normally a path to a configuration file for this inventory,
                 but it can also be a raw string for this plugin to consume
            :arg cache: a boolean that indicates if the plugin should use the cache or not
                 you can ignore if this plugin does not implement caching.
        '''

        self.loader = loader
        self.inventory = inventory
        self.templar = Templar(loader=loader)
        self._vars = load_extra_vars(loader)
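
A minimal sketch of a parse() implementation following this pattern, assuming the BaseInventoryPlugin base class from the same codebase; the plugin name, import path, and one-hostname-per-line file format are hypothetical:

from assible.plugins.inventory import BaseInventoryPlugin  # assumed import path


class HostListSketch(BaseInventoryPlugin):

    NAME = 'hostlist_sketch'  # hypothetical plugin name

    def parse(self, inventory, loader, path, cache=True):
        # the base class stores the inventory/loader references and sets up a Templar
        super(HostListSketch, self).parse(inventory, loader, path, cache)
        with open(path) as f:
            for name in (line.strip() for line in f):
                if name:
                    self.inventory.add_host(name)
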
Example #13
    def process_include_results(results, iterator, loader, variable_manager):
        included_files = []
        task_vars_cache = {}

        for res in results:

            original_host = res._host
            original_task = res._task

            if original_task.action in ('include', 'include_tasks',
                                        'include_role'):
                if original_task.loop:
                    if 'results' not in res._result:
                        continue
                    include_results = res._result['results']
                else:
                    include_results = [res._result]

                for include_result in include_results:
                    # if the task result was skipped or failed, continue
                    if include_result.get('skipped', False) or include_result.get('failed', False):
                        continue

                    cache_key = (iterator._play, original_host, original_task)
                    try:
                        task_vars = task_vars_cache[cache_key]
                    except KeyError:
                        task_vars = task_vars_cache[
                            cache_key] = variable_manager.get_vars(
                                play=iterator._play,
                                host=original_host,
                                task=original_task)

                    include_args = include_result.get('include_args', dict())
                    special_vars = {}
                    loop_var = include_result.get('assible_loop_var', 'item')
                    index_var = include_result.get('assible_index_var')
                    if loop_var in include_result:
                        task_vars[loop_var] = special_vars[
                            loop_var] = include_result[loop_var]
                    if index_var and index_var in include_result:
                        task_vars[index_var] = special_vars[
                            index_var] = include_result[index_var]
                    if '_assible_item_label' in include_result:
                        task_vars['_assible_item_label'] = special_vars[
                            '_assible_item_label'] = include_result[
                                '_assible_item_label']
                    if 'assible_loop' in include_result:
                        task_vars['assible_loop'] = special_vars[
                            'assible_loop'] = include_result['assible_loop']
                    if original_task.no_log and '_assible_no_log' not in include_args:
                        task_vars['_assible_no_log'] = special_vars[
                            '_assible_no_log'] = original_task.no_log

                    # get search path for this task to pass to lookup plugins that may be used in pathing to
                    # the included file
                    task_vars['assible_search_path'] = original_task.get_search_path()

                    # ensure basedir is always in (dwim already searches here but we need to display it)
                    if loader.get_basedir() not in task_vars['assible_search_path']:
                        task_vars['assible_search_path'].append(loader.get_basedir())

                    templar = Templar(loader=loader, variables=task_vars)

                    if original_task.action in ('include', 'include_tasks'):
                        include_file = None
                        if original_task:
                            if original_task.static:
                                continue

                            if original_task._parent:
                                # handle relative includes by walking up the list of parent include
                                # tasks and checking the relative result to see if it exists
                                parent_include = original_task._parent
                                cumulative_path = None
                                while parent_include is not None:
                                    if not isinstance(parent_include,
                                                      TaskInclude):
                                        parent_include = parent_include._parent
                                        continue
                                    if isinstance(parent_include, IncludeRole):
                                        parent_include_dir = parent_include._role_path
                                    else:
                                        try:
                                            parent_include_dir = os.path.dirname(
                                                templar.template(
                                                    parent_include.args.get(
                                                        '_raw_params')))
                                        except AssibleError as e:
                                            parent_include_dir = ''
                                            display.warning(
                                                'Templating the path of the parent %s failed. The path to the '
                                                'included file may not be found. '
                                                'The error was: %s.' %
                                                (original_task.action,
                                                 to_text(e)))
                                    if cumulative_path is not None and not os.path.isabs(
                                            cumulative_path):
                                        cumulative_path = os.path.join(
                                            parent_include_dir,
                                            cumulative_path)
                                    else:
                                        cumulative_path = parent_include_dir
                                    include_target = templar.template(
                                        include_result['include'])
                                    if original_task._role:
                                        new_basedir = os.path.join(
                                            original_task._role._role_path,
                                            'tasks', cumulative_path)
                                        candidates = [
                                            loader.path_dwim_relative(
                                                original_task._role._role_path,
                                                'tasks', include_target),
                                            loader.path_dwim_relative(
                                                new_basedir, 'tasks',
                                                include_target)
                                        ]
                                        for include_file in candidates:
                                            try:
                                                # may throw OSError
                                                os.stat(include_file)
                                                # or select the task file if it exists
                                                break
                                            except OSError:
                                                pass
                                    else:
                                        include_file = loader.path_dwim_relative(
                                            loader.get_basedir(),
                                            cumulative_path, include_target)

                                    if os.path.exists(include_file):
                                        break
                                    else:
                                        parent_include = parent_include._parent

                        if include_file is None:
                            if original_task._role:
                                include_target = templar.template(
                                    include_result['include'])
                                include_file = loader.path_dwim_relative(
                                    original_task._role._role_path, 'tasks',
                                    include_target)
                            else:
                                include_file = loader.path_dwim(
                                    include_result['include'])

                        include_file = templar.template(include_file)
                        inc_file = IncludedFile(include_file, include_args,
                                                special_vars, original_task)
                    else:
                        # template the included role's name here
                        role_name = include_args.pop(
                            'name', include_args.pop('role', None))
                        if role_name is not None:
                            role_name = templar.template(role_name)

                        new_task = original_task.copy()
                        new_task._role_name = role_name
                        for from_arg in new_task.FROM_ARGS:
                            if from_arg in include_args:
                                from_key = from_arg.replace('_from', '')
                                new_task._from_files[
                                    from_key] = templar.template(
                                        include_args.pop(from_arg))

                        inc_file = IncludedFile(role_name,
                                                include_args,
                                                special_vars,
                                                new_task,
                                                is_role=True)

                    idx = 0
                    orig_inc_file = inc_file
                    while 1:
                        try:
                            pos = included_files[idx:].index(orig_inc_file)
                            # pos is relative to idx since we are slicing
                            # use idx + pos due to relative indexing
                            inc_file = included_files[idx + pos]
                        except ValueError:
                            included_files.append(orig_inc_file)
                            inc_file = orig_inc_file

                        try:
                            inc_file.add_host(original_host)
                        except ValueError:
                            # The host already exists for this include, advance forward, this is a new include
                            idx += pos + 1
                        else:
                            break

        return included_files
Example #14
    def test_post_validate_empty(self):
        fake_loader = DictDataLoader({})
        templar = Templar(loader=fake_loader)
        ret = self.b.post_validate(templar)
        self.assertIsNone(ret)
Example #15
    def parse(self, skip_action_validation=False):
        '''
        Given a task in one of the supported forms, parses and returns
        the action, arguments, and delegate_to values for the
        task, dealing with all sorts of levels of fuzziness.
        '''

        thing = None

        action = None
        delegate_to = self._task_ds.get('delegate_to', Sentinel)
        args = dict()

        self.internal_redirect_list = []

        # This is the standard YAML form for command-type modules. We grab
        # the args and pass them in as additional arguments, which can/will
        # be overwritten via dict updates from the other arg sources below
        additional_args = self._task_ds.get('args', dict())

        # We can have one of action, local_action, or module specified
        # action
        if 'action' in self._task_ds:
            # an old school 'action' statement
            thing = self._task_ds['action']
            action, args = self._normalize_parameters(thing, action=action, additional_args=additional_args)

        # local_action
        if 'local_action' in self._task_ds:
            # local_action is similar but also implies a delegate_to
            if action is not None:
                raise AssibleParserError("action and local_action are mutually exclusive", obj=self._task_ds)
            thing = self._task_ds.get('local_action', '')
            delegate_to = 'localhost'
            action, args = self._normalize_parameters(thing, action=action, additional_args=additional_args)

        # module: <stuff> is the more new-style invocation

        # filter out task attributes so we're only querying unrecognized keys as actions/modules
        non_task_ds = dict((k, v) for k, v in iteritems(self._task_ds) if (k not in self._task_attrs) and (not k.startswith('with_')))

        # walk the filtered input dictionary to see if we recognize a module name
        for item, value in iteritems(non_task_ds):
            is_action_candidate = False
            if item in BUILTIN_TASKS:
                is_action_candidate = True
            elif skip_action_validation:
                is_action_candidate = True
            else:
                # If the plugin is resolved and redirected smuggle the list of candidate names via the task attribute 'internal_redirect_list'
                context = action_loader.find_plugin_with_context(item, collection_list=self._collection_list)
                if not context.resolved:
                    context = module_loader.find_plugin_with_context(item, collection_list=self._collection_list)
                    if context.resolved and context.redirect_list:
                        self.internal_redirect_list = context.redirect_list
                elif context.redirect_list:
                    self.internal_redirect_list = context.redirect_list

                is_action_candidate = bool(self.internal_redirect_list)

            if is_action_candidate:
                # finding more than one module name is a problem
                if action is not None:
                    raise AssibleParserError("conflicting action statements: %s, %s" % (action, item), obj=self._task_ds)
                action = item
                thing = value
                action, args = self._normalize_parameters(thing, action=action, additional_args=additional_args)

        # if we didn't see any module in the task at all, it's not a task really
        if action is None:
            if non_task_ds:  # there was one non-task action, but we couldn't find it
                bad_action = list(non_task_ds.keys())[0]
                raise AssibleParserError("couldn't resolve module/action '{0}'. This often indicates a "
                                         "misspelling, missing collection, or incorrect module path.".format(bad_action),
                                         obj=self._task_ds)
            else:
                raise AssibleParserError("no module/action detected in task.",
                                         obj=self._task_ds)
        elif args.get('_raw_params', '') != '' and action not in RAW_PARAM_MODULES:
            templar = Templar(loader=None)
            raw_params = args.pop('_raw_params')
            if templar.is_template(raw_params):
                args['_variable_params'] = raw_params
            else:
                raise AssibleParserError("this task '%s' has extra params, which is only allowed in the following modules: %s" % (action,
                                                                                                                                  ", ".join(RAW_PARAM_MODULES)),
                                         obj=self._task_ds)

        return (action, args, delegate_to)
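
The _raw_params branch above hinges on Templar.is_template() telling a Jinja2 expression apart from a plain string. A quick sketch reusing the same Templar(loader=None) construction:

templar = Templar(loader=None)
print(templar.is_template('{{ some_var }}'))   # True
print(templar.is_template('just plain text'))  # False
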
Example #16
    def _get_delegated_vars(self, play, task, existing_variables):
        # This method has a lot of code copied from ``TaskExecutor._get_loop_items``
        # if this is failing, and ``TaskExecutor._get_loop_items`` is not
        # then more will have to be copied here.
        # TODO: dedupe code here and with ``TaskExecutor._get_loop_items``
        #       this may be possible once we move pre-processing pre fork

        if not hasattr(task, 'loop'):
            # This "task" is not a Task, so we need to skip it
            return {}, None

        # we unfortunately need to template the delegate_to field here,
        # as we're fetching vars before post_validate has been called on
        # the task that has been passed in
        vars_copy = existing_variables.copy()

        # get search path for this task to pass to lookup plugins
        vars_copy['assible_search_path'] = task.get_search_path()

        # ensure basedir is always in (dwim already searches here but we need to display it)
        if self._loader.get_basedir() not in vars_copy['assible_search_path']:
            vars_copy['assible_search_path'].append(self._loader.get_basedir())

        templar = Templar(loader=self._loader, variables=vars_copy)

        items = []
        has_loop = True
        if task.loop_with is not None:
            if task.loop_with in lookup_loader:
                fail = True
                if task.loop_with == 'first_found':
                    # first_found loops are special. If the item is undefined then we want to fall through to the next
                    fail = False
                try:
                    loop_terms = listify_lookup_plugin_terms(
                        terms=task.loop,
                        templar=templar,
                        loader=self._loader,
                        fail_on_undefined=fail,
                        convert_bare=False)

                    if not fail:
                        loop_terms = [
                            t for t in loop_terms if not templar.is_template(t)
                        ]

                    mylookup = lookup_loader.get(task.loop_with,
                                                 loader=self._loader,
                                                 templar=templar)

                    # give lookup task 'context' for subdir (mostly needed for first_found)
                    for subdir in ['template', 'var',
                                   'file']:  # TODO: move this to constants?
                        if subdir in task.action:
                            break
                    setattr(mylookup, '_subdir', subdir + 's')

                    items = wrap_var(
                        mylookup.run(terms=loop_terms, variables=vars_copy))

                except AssibleTemplateError:
                    # This task will be skipped later due to this, so we just setup
                    # a dummy array for the later code so it doesn't fail
                    items = [None]
            else:
                raise AssibleError(
                    "Failed to find the lookup named '%s' in the available lookup plugins"
                    % task.loop_with)
        elif task.loop is not None:
            try:
                items = templar.template(task.loop)
            except AssibleTemplateError:
                # This task will be skipped later due to this, so we just setup
                # a dummy array for the later code so it doesn't fail
                items = [None]
        else:
            has_loop = False
            items = [None]

        # since host can change per loop, we keep dict per host name resolved
        delegated_host_vars = dict()
        item_var = getattr(task.loop_control, 'loop_var', 'item')
        cache_items = False
        for item in items:
            # update the variables with the item value for templating, in case we need it
            if item is not None:
                vars_copy[item_var] = item

            templar.available_variables = vars_copy
            delegated_host_name = templar.template(task.delegate_to,
                                                   fail_on_undefined=False)
            if delegated_host_name != task.delegate_to:
                cache_items = True
            if delegated_host_name is None:
                raise AssibleError(
                    message="Undefined delegate_to host for task:",
                    obj=task._ds)
            if not isinstance(delegated_host_name, string_types):
                raise AssibleError(
                    message=
                    "the field 'delegate_to' has an invalid type (%s), and could not be"
                    " converted to a string type." % type(delegated_host_name),
                    obj=task._ds)

            if delegated_host_name in delegated_host_vars:
                # no need to repeat ourselves, as the delegate_to value
                # does not appear to be tied to the loop item variable
                continue

            # now try to find the delegated-to host in inventory, or failing that,
            # create a new host on the fly so we can fetch variables for it
            delegated_host = None
            if self._inventory is not None:
                delegated_host = self._inventory.get_host(delegated_host_name)
                # try looking it up based on the address field, and finally
                # fall back to creating a host on the fly to use for the var lookup
                if delegated_host is None:
                    for h in self._inventory.get_hosts(
                            ignore_limits=True, ignore_restrictions=True):
                        # check if the address matches, or if both the delegated_to host
                        # and the current host are in the list of localhost aliases
                        if h.address == delegated_host_name:
                            delegated_host = h
                            break
                    else:
                        delegated_host = Host(name=delegated_host_name)
            else:
                delegated_host = Host(name=delegated_host_name)

            # now we go fetch the vars for the delegated-to host and save them in our
            # master dictionary of variables to be used later in the TaskExecutor/PlayContext
            delegated_host_vars[delegated_host_name] = self.get_vars(
                play=play,
                host=delegated_host,
                task=task,
                include_delegate_to=False,
                include_hostvars=True,
            )
            delegated_host_vars[delegated_host_name][
                'inventory_hostname'] = vars_copy.get('inventory_hostname')

        _assible_loop_cache = None
        if has_loop and cache_items:
            # delegate_to templating produced a change, so we will cache the templated items
            # in a special private hostvar
            # this ensures that delegate_to+loop doesn't produce different results than TaskExecutor
            # which may reprocess the loop
            _assible_loop_cache = items

        return delegated_host_vars, _assible_loop_cache
Example #17
    def load_data(self, ds, basedir, variable_manager=None, loader=None):
        '''
        Overrides the base load_data(), as we're actually going to return a new
        Playbook() object rather than a PlaybookInclude object
        '''

        # import here to avoid a dependency loop
        from assible.playbook import Playbook
        from assible.playbook.play import Play

        # first, we use the original parent method to correctly load the object
        # via the load_data/preprocess_data system we normally use for other
        # playbook objects
        new_obj = super(PlaybookInclude,
                        self).load_data(ds, variable_manager, loader)

        all_vars = self.vars.copy()
        if variable_manager:
            all_vars.update(variable_manager.get_vars())

        templar = Templar(loader=loader, variables=all_vars)

        # then we use the object to load a Playbook
        pb = Playbook(loader=loader)

        file_name = templar.template(new_obj.import_playbook)
        if not os.path.isabs(file_name):
            file_name = os.path.join(basedir, file_name)

        pb._load_playbook_data(file_name=file_name,
                               variable_manager=variable_manager,
                               vars=self.vars.copy())

        # finally, update each loaded playbook entry with any variables specified
        # on the included playbook and/or any tags which may have been set
        for entry in pb._entries:

            # conditional includes on a playbook need a marker to skip gathering
            if new_obj.when and isinstance(entry, Play):
                entry._included_conditional = new_obj.when[:]

            temp_vars = entry.vars.copy()
            temp_vars.update(new_obj.vars)
            param_tags = temp_vars.pop('tags', None)
            if param_tags is not None:
                entry.tags.extend(param_tags.split(','))
            entry.vars = temp_vars
            entry.tags = list(set(entry.tags).union(new_obj.tags))
            if entry._included_path is None:
                entry._included_path = os.path.dirname(file_name)

            # Check to see if we need to forward the conditionals on to the included
            # plays. If so, we can take a shortcut here and simply prepend them to
            # those attached to each block (if any)
            if new_obj.when:
                for task_block in (entry.pre_tasks + entry.roles +
                                   entry.tasks + entry.post_tasks):
                    task_block._attributes[
                        'when'] = new_obj.when[:] + task_block.when[:]

        return pb
Example #18
    def get_vars(self,
                 play=None,
                 host=None,
                 task=None,
                 include_hostvars=True,
                 include_delegate_to=True,
                 use_cache=True,
                 _hosts=None,
                 _hosts_all=None,
                 stage='task'):
        '''
        Returns the variables, with optional "context" given via the parameters
        for the play, host, and task (which could possibly result in different
        sets of variables being returned due to the additional context).

        The order of precedence is:
        - play->roles->get_default_vars (if there is a play context)
        - group_vars_files[host] (if there is a host context)
        - host_vars_files[host] (if there is a host context)
        - host->get_vars (if there is a host context)
        - fact_cache[host] (if there is a host context)
        - play vars (if there is a play context)
        - play vars_files (if there's no host context, ignore
          file names that cannot be templated)
        - task->get_vars (if there is a task context)
        - vars_cache[host] (if there is a host context)
        - extra vars

        ``_hosts`` and ``_hosts_all`` should be considered private args, with only internal trusted callers relying
        on the functionality they provide. These arguments may be removed at a later date without a deprecation
        period and without warning.
        '''

        display.debug("in VariableManager get_vars()")

        all_vars = dict()
        magic_variables = self._get_magic_variables(
            play=play,
            host=host,
            task=task,
            include_hostvars=include_hostvars,
            include_delegate_to=include_delegate_to,
            _hosts=_hosts,
            _hosts_all=_hosts_all,
        )

        _vars_sources = {}

        def _combine_and_track(data, new_data, source):
            '''
            Wrapper function to update var sources dict and call combine_vars()

            See notes in the VarsWithSources docstring for caveats and limitations of the source tracking
            '''
            if C.DEFAULT_DEBUG:
                # Populate var sources dict
                for key in new_data:
                    _vars_sources[key] = source
            return combine_vars(data, new_data)

        # default for all cases
        basedirs = []
        if self.safe_basedir:  # avoid adhoc/console loading cwd
            basedirs = [self._loader.get_basedir()]

        if play:
            # first we compile any vars specified in defaults/main.yml
            # for all roles within the specified play
            for role in play.get_roles():
                all_vars = _combine_and_track(all_vars,
                                              role.get_default_vars(),
                                              "role '%s' defaults" % role.name)

        if task:
            # set basedirs
            if C.PLAYBOOK_VARS_ROOT == 'all':  # should be default
                basedirs = task.get_search_path()
            elif C.PLAYBOOK_VARS_ROOT in (
                    'bottom', 'playbook_dir'):  # only option in 2.4.0
                basedirs = [task.get_search_path()[0]]
            elif C.PLAYBOOK_VARS_ROOT != 'top':
                # preserves default basedirs, only option pre 2.3
                raise AssibleError('Unknown playbook vars logic: %s' %
                                   C.PLAYBOOK_VARS_ROOT)

            # if we have a task in this context, and that task has a role, make
            # sure it sees its defaults above any other roles, as we previously
            # (v1) made sure each task had a copy of its roles default vars
            if task._role is not None and (play
                                           or task.action == 'include_role'):
                all_vars = _combine_and_track(
                    all_vars,
                    task._role.get_default_vars(
                        dep_chain=task.get_dep_chain()),
                    "role '%s' defaults" % task._role.name)

        if host:
            # THE 'all' group and the rest of groups for a host, used below
            all_group = self._inventory.groups.get('all')
            host_groups = sort_groups(
                [g for g in host.get_groups() if g.name not in ['all']])

            def _get_plugin_vars(plugin, path, entities):
                data = {}
                try:
                    data = plugin.get_vars(self._loader, path, entities)
                except AttributeError:
                    try:
                        for entity in entities:
                            if isinstance(entity, Host):
                                data.update(plugin.get_host_vars(entity.name))
                            else:
                                data.update(plugin.get_group_vars(entity.name))
                    except AttributeError:
                        if hasattr(plugin, 'run'):
                            raise AssibleError(
                                "Cannot use v1 type vars plugin %s from %s" %
                                (plugin._load_name, plugin._original_path))
                        else:
                            raise AssibleError(
                                "Invalid vars plugin %s from %s" %
                                (plugin._load_name, plugin._original_path))
                return data

            # internal functions that actually do the work
            def _plugins_inventory(entities):
                ''' merges all entities by inventory source '''
                return get_vars_from_inventory_sources(
                    self._loader, self._inventory._sources, entities, stage)

            def _plugins_play(entities):
                ''' merges all entities adjacent to play '''
                data = {}
                for path in basedirs:
                    data = _combine_and_track(
                        data,
                        get_vars_from_path(self._loader, path, entities,
                                           stage), "path '%s'" % path)
                return data

            # configurable functions that are sortable via config, remember to add to _ALLOWED if expanding this list
            def all_inventory():
                return all_group.get_vars()

            def all_plugins_inventory():
                return _plugins_inventory([all_group])

            def all_plugins_play():
                return _plugins_play([all_group])

            def groups_inventory():
                ''' gets group vars from inventory '''
                return get_group_vars(host_groups)

            def groups_plugins_inventory():
                ''' gets plugin sources from inventory for groups '''
                return _plugins_inventory(host_groups)

            def groups_plugins_play():
                ''' gets plugin sources from play for groups '''
                return _plugins_play(host_groups)

            def plugins_by_groups():
                '''
                    merges all plugin sources by group,
                    This should be used instead, NOT in combination with the other groups_plugins* functions
                '''
                data = {}
                for group in host_groups:
                    data[group] = _combine_and_track(
                        data[group], _plugins_inventory(group),
                        "inventory group_vars for '%s'" % group)
                    data[group] = _combine_and_track(
                        data[group], _plugins_play(group),
                        "playbook group_vars for '%s'" % group)
                return data

            # Merge groups as per precedence config
            # only allow to call the functions we want exposed
            for entry in C.VARIABLE_PRECEDENCE:
                if entry in self._ALLOWED:
                    display.debug('Calling %s to load vars for %s' %
                                  (entry, host.name))
                    all_vars = _combine_and_track(
                        all_vars,
                        locals()[entry](),
                        "group vars, precedence entry '%s'" % entry)
                else:
                    display.warning(
                        'Ignoring unknown variable precedence entry: %s' %
                        (entry))

            # host vars, from inventory, inventory adjacent and play adjacent via plugins
            all_vars = _combine_and_track(all_vars, host.get_vars(),
                                          "host vars for '%s'" % host)
            all_vars = _combine_and_track(
                all_vars, _plugins_inventory([host]),
                "inventory host_vars for '%s'" % host)
            all_vars = _combine_and_track(all_vars, _plugins_play([host]),
                                          "playbook host_vars for '%s'" % host)

            # finally, the facts caches for this host, if it exists
            # TODO: cleaning of facts should eventually become part of taskresults instead of vars
            try:
                facts = wrap_var(self._fact_cache.get(host.name, {}))
                all_vars.update(namespace_facts(facts))

                # push facts to main namespace
                if C.INJECT_FACTS_AS_VARS:
                    all_vars = _combine_and_track(all_vars,
                                                  wrap_var(clean_facts(facts)),
                                                  "facts")
                else:
                    # always 'promote' assible_local
                    all_vars = _combine_and_track(
                        all_vars,
                        wrap_var(
                            {'assible_local': facts.get('assible_local', {})}),
                        "facts")
            except KeyError:
                pass

        if play:
            all_vars = _combine_and_track(all_vars, play.get_vars(),
                                          "play vars")

            vars_files = play.get_vars_files()
            try:
                for vars_file_item in vars_files:
                    # create a set of temporary vars here, which incorporate the extra
                    # and magic vars so we can properly template the vars_files entries
                    temp_vars = combine_vars(all_vars, self._extra_vars)
                    temp_vars = combine_vars(temp_vars, magic_variables)
                    templar = Templar(loader=self._loader, variables=temp_vars)

                    # we assume each item in the list is itself a list, as we
                    # support "conditional includes" for vars_files, which mimics
                    # the with_first_found mechanism.
                    vars_file_list = vars_file_item
                    if not isinstance(vars_file_list, list):
                        vars_file_list = [vars_file_list]

                    # now we iterate through the (potential) files, and break out
                    # as soon as we read one from the list. If none are found, we
                    # raise an error, which is silently ignored at this point.
                    try:
                        for vars_file in vars_file_list:
                            vars_file = templar.template(vars_file)
                            if not (isinstance(vars_file, Sequence)):
                                raise AssibleError(
                                    "Invalid vars_files entry found: %r\n"
                                    "vars_files entries should be either a string type or "
                                    "a list of string types after template expansion"
                                    % vars_file)
                            try:
                                data = preprocess_vars(
                                    self._loader.load_from_file(vars_file,
                                                                unsafe=True))
                                if data is not None:
                                    for item in data:
                                        all_vars = _combine_and_track(
                                            all_vars, item,
                                            "play vars_files from '%s'" %
                                            vars_file)
                                break
                            except AssibleFileNotFound:
                                # we continue on loader failures
                                continue
                            except AssibleParserError:
                                raise
                        else:
                            # if include_delegate_to is set to False, we ignore the missing
                            # vars file here because we're working on a delegated host
                            if include_delegate_to:
                                raise AssibleFileNotFound(
                                    "vars file %s was not found" %
                                    vars_file_item)
                    except (UndefinedError, AssibleUndefinedVariable):
                        if host is not None and self._fact_cache.get(
                                host.name, dict()).get(
                                    'module_setup') and task is not None:
                            raise AssibleUndefinedVariable(
                                "an undefined variable was found when attempting to template the vars_files item '%s'"
                                % vars_file_item,
                                obj=vars_file_item)
                        else:
                            # we do not have a full context here, and the missing variable could be because of that
                            # so just show a warning and continue
                            display.vvv(
                                "skipping vars_file '%s' due to an undefined variable"
                                % vars_file_item)
                            continue

                    display.vvv("Read vars_file '%s'" % vars_file_item)
            except TypeError:
                raise AssibleParserError(
                    "Error while reading vars files - please supply a list of file names. "
                    "Got '%s' of type %s" % (vars_files, type(vars_files)))

            # By default, we now merge in all vars from all roles in the play,
            # unless the user has disabled this via a config option
            if not C.DEFAULT_PRIVATE_ROLE_VARS:
                for role in play.get_roles():
                    all_vars = _combine_and_track(
                        all_vars, role.get_vars(include_params=False),
                        "role '%s' vars" % role.name)

        # next, we merge in the vars from the role, which will specifically
        # follow the role dependency chain, and then we merge in the tasks
        # vars (which will look at parent blocks/task includes)
        if task:
            if task._role:
                all_vars = _combine_and_track(
                    all_vars,
                    task._role.get_vars(task.get_dep_chain(),
                                        include_params=False),
                    "role '%s' vars" % task._role.name)
            all_vars = _combine_and_track(all_vars, task.get_vars(),
                                          "task vars")

        # next, we merge in the vars cache (include vars) and nonpersistent
        # facts cache (set_fact/register), in that order
        if host:
            # include_vars non-persistent cache
            all_vars = _combine_and_track(
                all_vars, self._vars_cache.get(host.get_name(), dict()),
                "include_vars")
            # fact non-persistent cache
            all_vars = _combine_and_track(
                all_vars,
                self._nonpersistent_fact_cache.get(host.name,
                                                   dict()), "set_fact")

        # next, we merge in role params and task include params
        if task:
            if task._role:
                all_vars = _combine_and_track(
                    all_vars, task._role.get_role_params(task.get_dep_chain()),
                    "role '%s' params" % task._role.name)

            # special case for include tasks, where the include params
            # may be specified in the vars field for the task, which should
            # have higher precedence than the vars/np facts above
            all_vars = _combine_and_track(all_vars, task.get_include_params(),
                                          "include params")

        # extra vars
        all_vars = _combine_and_track(all_vars, self._extra_vars, "extra vars")

        # magic variables
        all_vars = _combine_and_track(all_vars, magic_variables, "magic vars")

        # special case for the 'environment' magic variable, as someone
        # may have set it as a variable and we don't want to stomp on it
        if task:
            all_vars['environment'] = task.environment

        # 'vars' magic var
        if task or play:
            # has to be copy, otherwise recursive ref
            all_vars['vars'] = all_vars.copy()

        # if we have a task and we're delegating to another host, figure out the
        # variables for that host now so we don't have to rely on hostvars later
        if task and task.delegate_to is not None and include_delegate_to:
            all_vars['assible_delegated_vars'], all_vars[
                '_assible_loop_cache'] = self._get_delegated_vars(
                    play, task, all_vars)

        display.debug("done with get_vars()")
        if C.DEFAULT_DEBUG:
            # Use VarsWithSources wrapper class to display var sources
            return VarsWithSources.new_vars_with_sources(
                all_vars, _vars_sources)
        else:
            return all_vars
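
A plain-dict illustration of the precedence idea behind get_vars(): later sources override earlier ones, much as combine_vars() does with the default 'replace' hash behaviour (the variable names and values here are made up):

role_defaults = {'port': 80, 'user': 'www'}
play_vars = {'port': 8080}
extra_vars = {'user': 'deploy'}

merged = {}
for source in (role_defaults, play_vars, extra_vars):
    merged.update(source)  # later sources win on key conflicts
print(merged)  # {'port': 8080, 'user': 'deploy'}
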
Example #19
    def _get_magic_variables(self,
                             play,
                             host,
                             task,
                             include_hostvars,
                             include_delegate_to,
                             _hosts=None,
                             _hosts_all=None):
        '''
        Returns a dictionary of so-called "magic" variables in Assible,
        which are special variables we set internally for use.
        '''

        variables = {}
        variables['playbook_dir'] = os.path.abspath(self._loader.get_basedir())
        variables['assible_playbook_python'] = sys.executable
        variables['assible_config_file'] = C.CONFIG_FILE

        if play:
            # This is a list of all role names of all dependencies for all roles for this play
            dependency_role_names = list(
                set([
                    d.get_name() for r in play.roles
                    for d in r.get_all_dependencies()
                ]))
            # This is a list of all role names of all roles for this play
            play_role_names = [r.get_name() for r in play.roles]

            # assible_role_names includes all role names, dependent or directly referenced by the play
            variables['assible_role_names'] = list(
                set(dependency_role_names + play_role_names))
            # assible_play_role_names includes the names of all roles directly referenced by this play
            # roles that are implicitly referenced via dependencies are not listed.
            variables['assible_play_role_names'] = play_role_names
            # assible_dependent_role_names includes the names of all roles that are referenced via dependencies
            # dependencies that are also explicitly named as roles are included in this list
            variables['assible_dependent_role_names'] = dependency_role_names

            # DEPRECATED: role_names should be deprecated in favor of assible_role_names or assible_play_role_names
            variables['role_names'] = variables['assible_play_role_names']

            variables['assible_play_name'] = play.get_name()

        if task:
            if task._role:
                variables['role_name'] = task._role.get_name(
                    include_role_fqcn=False)
                variables['role_path'] = task._role._role_path
                variables['role_uuid'] = text_type(task._role._uuid)
                variables[
                    'assible_collection_name'] = task._role._role_collection
                variables['assible_role_name'] = task._role.get_name()

        if self._inventory is not None:
            variables['groups'] = self._inventory.get_groups_dict()
            if play:
                templar = Templar(loader=self._loader)
                if templar.is_template(play.hosts):
                    pattern = 'all'
                else:
                    pattern = play.hosts or 'all'
                # add the list of hosts in the play, as adjusted for limit/filters
                if not _hosts_all:
                    _hosts_all = [
                        h.name for h in self._inventory.get_hosts(
                            pattern=pattern, ignore_restrictions=True)
                    ]
                if not _hosts:
                    _hosts = [h.name for h in self._inventory.get_hosts()]

                variables['assible_play_hosts_all'] = _hosts_all[:]
                variables['assible_play_hosts'] = [
                    x for x in variables['assible_play_hosts_all']
                    if x not in play._removed_hosts
                ]
                variables['assible_play_batch'] = [
                    x for x in _hosts if x not in play._removed_hosts
                ]

                # DEPRECATED: play_hosts should be deprecated in favor of assible_play_batch,
                # however this would take work in the templating engine, so for now we'll add both
                variables['play_hosts'] = variables['assible_play_batch']

        # the 'omit' value allows params to be left out if the variable they are based on is undefined
        variables['omit'] = self._omit_token
        # Set options vars
        for option, option_value in iteritems(self._options_vars):
            variables[option] = option_value

        if self._hostvars is not None and include_hostvars:
            variables['hostvars'] = self._hostvars

        return variables
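One Templar-specific detail in the example above: when play.hosts is itself a template, the host pattern falls back to 'all', since the pattern cannot be resolved before variables are available. The short sketch below illustrates that decision with a naive delimiter check standing in for Templar.is_template (the helper and sample patterns are illustrative, not part of the example):

# Naive stand-in for Templar.is_template: treat anything containing
# Jinja2 delimiters as a template that cannot yet serve as a host pattern.
def looks_like_template(value):
    return isinstance(value, str) and ('{{' in value or '{%' in value)

def host_pattern(play_hosts):
    if looks_like_template(play_hosts):
        return 'all'
    return play_hosts or 'all'

print(host_pattern('{{ target_group }}'))  # 'all'
print(host_pattern('webservers'))          # 'webservers'
print(host_pattern(None))                  # 'all'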
Example No. 20
    def run(self):
        '''
        Run the given playbook, based on the settings in the play which
        may limit the runs to serialized groups, etc.
        '''

        result = 0
        entrylist = []
        entry = {}
        try:
            # preload become/connection/shell to set config defs cached
            list(connection_loader.all(class_only=True))
            list(shell_loader.all(class_only=True))
            list(become_loader.all(class_only=True))

            for playbook_path in self._playbooks:
                pb = Playbook.load(playbook_path,
                                   variable_manager=self._variable_manager,
                                   loader=self._loader)
                # FIXME: move out of inventory self._inventory.set_playbook_basedir(os.path.realpath(os.path.dirname(playbook_path)))

                if self._tqm is None:  # we are doing a listing
                    entry = {'playbook': playbook_path}
                    entry['plays'] = []
                else:
                    # make sure the tqm has callbacks loaded
                    self._tqm.load_callbacks()
                    self._tqm.send_callback('v2_playbook_on_start', pb)

                i = 1
                plays = pb.get_plays()
                display.vv(u'%d plays in %s' %
                           (len(plays), to_text(playbook_path)))

                for play in plays:
                    if play._included_path is not None:
                        self._loader.set_basedir(play._included_path)
                    else:
                        self._loader.set_basedir(pb._basedir)

                    # clear any filters which may have been applied to the inventory
                    self._inventory.remove_restriction()

                    # Allow variables to be used in vars_prompt fields.
                    all_vars = self._variable_manager.get_vars(play=play)
                    templar = Templar(loader=self._loader, variables=all_vars)
                    setattr(play, 'vars_prompt',
                            templar.template(play.vars_prompt))

                    # FIXME: this should be a play 'sub object' like loop_control
                    if play.vars_prompt:
                        for var in play.vars_prompt:
                            vname = var['name']
                            prompt = var.get("prompt", vname)
                            default = var.get("default", None)
                            private = boolean(var.get("private", True))
                            confirm = boolean(var.get("confirm", False))
                            encrypt = var.get("encrypt", None)
                            salt_size = var.get("salt_size", None)
                            salt = var.get("salt", None)
                            unsafe = var.get("unsafe", None)

                            if vname not in self._variable_manager.extra_vars:
                                if self._tqm:
                                    self._tqm.send_callback(
                                        'v2_playbook_on_vars_prompt', vname,
                                        private, prompt, encrypt, confirm,
                                        salt_size, salt, default, unsafe)
                                    play.vars[vname] = display.do_var_prompt(
                                        vname, private, prompt, encrypt,
                                        confirm, salt_size, salt, default,
                                        unsafe)
                                else:  # we are either in --list-<option> or syntax check
                                    play.vars[vname] = default

                    # Post validate so any play level variables are templated
                    all_vars = self._variable_manager.get_vars(play=play)
                    templar = Templar(loader=self._loader, variables=all_vars)
                    play.post_validate(templar)

                    if context.CLIARGS['syntax']:
                        continue

                    if self._tqm is None:
                        # we are just doing a listing
                        entry['plays'].append(play)

                    else:
                        self._tqm._unreachable_hosts.update(
                            self._unreachable_hosts)

                        previously_failed = len(self._tqm._failed_hosts)
                        previously_unreachable = len(
                            self._tqm._unreachable_hosts)

                        break_play = False
                        # we are actually running plays
                        batches = self._get_serialized_batches(play)
                        if len(batches) == 0:
                            self._tqm.send_callback(
                                'v2_playbook_on_play_start', play)
                            self._tqm.send_callback(
                                'v2_playbook_on_no_hosts_matched')
                        for batch in batches:
                            # restrict the inventory to the hosts in the serialized batch
                            self._inventory.restrict_to_hosts(batch)
                            # and run it...
                            result = self._tqm.run(play=play)

                            # break the play if the result equals the special return code
                            if result & self._tqm.RUN_FAILED_BREAK_PLAY != 0:
                                result = self._tqm.RUN_FAILED_HOSTS
                                break_play = True

                            # check the number of failures here, to see if they're above the maximum
                            # failure percentage allowed, or if any errors are fatal. If either of those
                            # conditions is met, we break out, otherwise we only break out if the entire
                            # batch failed
                            failed_hosts_count = len(self._tqm._failed_hosts) + len(self._tqm._unreachable_hosts) - \
                                (previously_failed + previously_unreachable)

                            if len(batch) == failed_hosts_count:
                                break_play = True
                                break

                            # update the previous counts so they don't accumulate incorrectly
                            # over multiple serial batches
                            previously_failed += len(
                                self._tqm._failed_hosts) - previously_failed
                            previously_unreachable += len(
                                self._tqm._unreachable_hosts
                            ) - previously_unreachable

                            # save the unreachable hosts from this batch
                            self._unreachable_hosts.update(
                                self._tqm._unreachable_hosts)

                        if break_play:
                            break

                    i = i + 1  # per play

                if entry:
                    entrylist.append(entry)  # per playbook

                # send the stats callback for this playbook
                if self._tqm is not None:
                    if C.RETRY_FILES_ENABLED:
                        retries = set(self._tqm._failed_hosts.keys())
                        retries.update(self._tqm._unreachable_hosts.keys())
                        retries = sorted(retries)
                        if len(retries) > 0:
                            if C.RETRY_FILES_SAVE_PATH:
                                basedir = C.RETRY_FILES_SAVE_PATH
                            elif playbook_path:
                                basedir = os.path.dirname(
                                    os.path.abspath(playbook_path))
                            else:
                                basedir = '~/'

                            (retry_name, _) = os.path.splitext(
                                os.path.basename(playbook_path))
                            filename = os.path.join(basedir,
                                                    "%s.retry" % retry_name)
                            if self._generate_retry_inventory(
                                    filename, retries):
                                display.display(
                                    "\tto retry, use: --limit @%s\n" %
                                    filename)

                    self._tqm.send_callback('v2_playbook_on_stats',
                                            self._tqm._stats)

                # if the last result wasn't zero, break out of the playbook file name loop
                if result != 0:
                    break

            if entrylist:
                return entrylist

        finally:
            if self._tqm is not None:
                self._tqm.cleanup()
            if self._loader:
                self._loader.cleanup_all_tmp_files()

        if context.CLIARGS['syntax']:
            display.display("No issues encountered")
            return result

        if context.CLIARGS['start_at_task'] and not self._tqm._start_at_done:
            display.error(
                "No matching task \"%s\" found."
                " Note: --start-at-task can only follow static includes." %
                context.CLIARGS['start_at_task'])

        return result
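Within run(), the interesting Templar usage is that play.vars_prompt is templated against the play's variables before any prompting happens, so prompt text can reference other play vars. A simplified stand-in using plain Jinja2 instead of the Templar class (the variable and prompt names below are made up for illustration):

# Simplified stand-in for templating vars_prompt entries before prompting,
# using plain Jinja2 instead of Assible's Templar.
from jinja2 import Template

all_vars = {'app_name': 'billing'}
vars_prompt = [{'name': 'db_password',
                'prompt': 'Password for the {{ app_name }} database'}]

for entry in vars_prompt:
    rendered = Template(entry['prompt']).render(**all_vars)
    print(rendered)  # Password for the billing database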
Example No. 21
    def run(self, iterator, play_context):
        '''
        The "free" strategy is a bit more complex, in that it allows tasks to
        be sent to hosts as quickly as they can be processed. This means that
        some hosts may finish very quickly if the tasks they run result in little
        or no work compared to other systems.

        The algorithm used here also tries to be more "fair" when iterating
        through hosts by remembering the last host in the list to be given a task
        and starting the search from there as opposed to the top of the hosts
        list again, which would end up favoring hosts near the beginning of the
        list.
        '''

        # the last host to be given a task
        last_host = 0

        result = self._tqm.RUN_OK

        # start with all workers being counted as being free
        workers_free = len(self._workers)

        self._set_hosts_cache(iterator._play)

        work_to_do = True
        while work_to_do and not self._tqm._terminated:

            hosts_left = self.get_hosts_left(iterator)

            if len(hosts_left) == 0:
                self._tqm.send_callback('v2_playbook_on_no_hosts_remaining')
                result = False
                break

            work_to_do = False  # assume we have no more work to do
            starting_host = last_host  # save current position so we know when we've looped back around and need to break

            # try and find an unblocked host with a task to run
            host_results = []
            while True:
                host = hosts_left[last_host]
                display.debug("next free host: %s" % host)
                host_name = host.get_name()

                # peek at the next task for the host, to see if there's
                # anything to do for this host
                (state, task) = iterator.get_next_task_for_host(host,
                                                                peek=True)
                display.debug("free host state: %s" % state, host=host_name)
                display.debug("free host task: %s" % task, host=host_name)
                if host_name not in self._tqm._unreachable_hosts and task:

                    # set the flag so the outer loop knows we've still found
                    # some work which needs to be done
                    work_to_do = True

                    display.debug("this host has work to do", host=host_name)

                    # check to see if this host is blocked (still executing a previous task)
                    if (host_name not in self._blocked_hosts
                            or not self._blocked_hosts[host_name]):

                        display.debug("getting variables", host=host_name)
                        task_vars = self._variable_manager.get_vars(
                            play=iterator._play,
                            host=host,
                            task=task,
                            _hosts=self._hosts_cache,
                            _hosts_all=self._hosts_cache_all)
                        self.add_tqm_variables(task_vars, play=iterator._play)
                        templar = Templar(loader=self._loader,
                                          variables=task_vars)
                        display.debug("done getting variables", host=host_name)

                        try:
                            throttle = int(templar.template(task.throttle))
                        except Exception as e:
                            raise AssibleError(
                                "Failed to convert the throttle value to an integer.",
                                obj=task._ds,
                                orig_exc=e)

                        if throttle > 0:
                            same_tasks = 0
                            for worker in self._workers:
                                if worker and worker.is_alive() and worker._task._uuid == task._uuid:
                                    same_tasks += 1

                            display.debug("task: %s, same_tasks: %d" %
                                          (task.get_name(), same_tasks))
                            if same_tasks >= throttle:
                                break

                        # pop the task, mark the host blocked, and queue it
                        self._blocked_hosts[host_name] = True
                        (state, task) = iterator.get_next_task_for_host(host)

                        try:
                            action = action_loader.get(
                                task.action,
                                class_only=True,
                                collection_list=task.collections)
                        except KeyError:
                            # we don't care here, because the action may simply not have a
                            # corresponding action plugin
                            action = None

                        try:
                            task.name = to_text(templar.template(
                                task.name, fail_on_undefined=False),
                                                nonstring='empty')
                            display.debug("done templating", host=host_name)
                        except Exception:
                            # just ignore any errors during task name templating,
                            # we don't care if it just shows the raw name
                            display.debug("templating failed for some reason",
                                          host=host_name)

                        run_once = templar.template(task.run_once) or (
                            action and getattr(action, 'BYPASS_HOST_LOOP', False))
                        if run_once:
                            if action and getattr(action, 'BYPASS_HOST_LOOP',
                                                  False):
                                raise AssibleError(
                                    "The '%s' module bypasses the host loop, which is currently not supported in the free strategy "
                                    "and would instead execute for every host in the inventory list."
                                    % task.action,
                                    obj=task._ds)
                            else:
                                display.warning(
                                    "Using run_once with the free strategy is not currently supported. This task will still be "
                                    "executed for every host in the inventory list."
                                )

                        # check to see if this task should be skipped, due to it being a member of a
                        # role which has already run (and whether that role allows duplicate execution)
                        if task._role and task._role.has_run(host):
                            # If there is no metadata, the default behavior is to not allow duplicates,
                            # if there is metadata, check to see if the allow_duplicates flag was set to true
                            if task._role._metadata is None or task._role._metadata and not task._role._metadata.allow_duplicates:
                                display.debug(
                                    "'%s' skipped because role has already run"
                                    % task,
                                    host=host_name)
                                del self._blocked_hosts[host_name]
                                continue

                        if task.action == 'meta':
                            self._execute_meta(task,
                                               play_context,
                                               iterator,
                                               target_host=host)
                            self._blocked_hosts[host_name] = False
                        else:
                            # handle step if needed, skip meta actions as they are used internally
                            if not self._step or self._take_step(
                                    task, host_name):
                                if task.any_errors_fatal:
                                    display.warning(
                                        "Using any_errors_fatal with the free strategy is not supported, "
                                        "as tasks are executed independently on each host"
                                    )
                                self._tqm.send_callback(
                                    'v2_playbook_on_task_start',
                                    task,
                                    is_conditional=False)
                                self._queue_task(host, task, task_vars,
                                                 play_context)
                                # each task is counted as a worker being busy
                                workers_free -= 1
                                del task_vars
                    else:
                        display.debug("%s is blocked, skipping for now" %
                                      host_name)

                # all workers have tasks to do (and the current host isn't done with the play).
                # loop back to starting host and break out
                if self._host_pinned and workers_free == 0 and work_to_do:
                    last_host = starting_host
                    break

                # move on to the next host and make sure we
                # haven't gone past the end of our hosts list
                last_host += 1
                if last_host > len(hosts_left) - 1:
                    last_host = 0

                # if we've looped around back to the start, break out
                if last_host == starting_host:
                    break

            results = self._process_pending_results(iterator)
            host_results.extend(results)

            # each result is counted as a worker being free again
            workers_free += len(results)

            self.update_active_connections(results)

            included_files = IncludedFile.process_include_results(
                host_results,
                iterator=iterator,
                loader=self._loader,
                variable_manager=self._variable_manager)

            if len(included_files) > 0:
                all_blocks = dict((host, []) for host in hosts_left)
                for included_file in included_files:
                    display.debug("collecting new blocks for %s" %
                                  included_file)
                    try:
                        if included_file._is_role:
                            new_ir = self._copy_included_file(included_file)

                            new_blocks, handler_blocks = new_ir.get_block_list(
                                play=iterator._play,
                                variable_manager=self._variable_manager,
                                loader=self._loader,
                            )
                        else:
                            new_blocks = self._load_included_file(
                                included_file, iterator=iterator)
                    except AssibleError as e:
                        for host in included_file._hosts:
                            iterator.mark_host_failed(host)
                        display.warning(to_text(e))
                        continue

                    for new_block in new_blocks:
                        task_vars = self._variable_manager.get_vars(
                            play=iterator._play,
                            task=new_block._parent,
                            _hosts=self._hosts_cache,
                            _hosts_all=self._hosts_cache_all)
                        final_block = new_block.filter_tagged_tasks(task_vars)
                        for host in hosts_left:
                            if host in included_file._hosts:
                                all_blocks[host].append(final_block)
                    display.debug("done collecting new blocks for %s" %
                                  included_file)

                display.debug(
                    "adding all collected blocks from %d included file(s) to iterator"
                    % len(included_files))
                for host in hosts_left:
                    iterator.add_tasks(host, all_blocks[host])
                display.debug("done adding collected blocks to iterator")

            # pause briefly so we don't spin lock
            time.sleep(C.DEFAULT_INTERNAL_POLL_INTERVAL)

        # collect all the final results
        results = self._wait_on_pending_results(iterator)

        # run the base class run() method, which executes the cleanup function
        # and runs any outstanding handlers which have been triggered
        return super(StrategyModule, self).run(iterator, play_context, result)
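A detail worth isolating from the loop above is the throttle gate: task.throttle may itself be a template, so it is rendered and converted to an integer before being compared against the number of workers already running the same task. A rough standalone sketch of that comparison, using hypothetical (alive, task_uuid) tuples instead of the real worker objects:

# Rough sketch of the throttle gate: hold a host back when the number of
# live workers already running this task has reached the throttle value.
# Workers are hypothetical (alive, task_uuid) tuples, not WorkerProcess objects.
def over_throttle(task_uuid, throttle, workers):
    if throttle <= 0:
        return False
    same_tasks = sum(1 for alive, uuid in workers if alive and uuid == task_uuid)
    return same_tasks >= throttle

workers = [(True, 'task-1'), (True, 'task-1'), (False, 'task-1'), (True, 'task-2')]
print(over_throttle('task-1', 2, workers))  # True  -> wait before queuing
print(over_throttle('task-2', 2, workers))  # False -> safe to queue now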
Example No. 22
def inventory_module():
    r = InventoryModule()
    r.inventory = InventoryData()
    r.templar = Templar(None)
    return r
Example No. 23
 def __getitem__(self, var):
     templar = Templar(variables=self._vars, loader=self._loader)
     foo = templar.template(self._vars[var],
                            fail_on_undefined=False,
                            static_vars=STATIC_VARS)
     return foo
Example No. 24
def load_list_of_tasks(ds,
                       play,
                       block=None,
                       role=None,
                       task_include=None,
                       use_handlers=False,
                       variable_manager=None,
                       loader=None):
    '''
    Given a list of task datastructures (parsed from YAML),
    return a list of Task() or TaskInclude() objects.
    '''

    # we import here to prevent a circular dependency with imports
    from assible.playbook.block import Block
    from assible.playbook.handler import Handler
    from assible.playbook.task import Task
    from assible.playbook.task_include import TaskInclude
    from assible.playbook.role_include import IncludeRole
    from assible.playbook.handler_task_include import HandlerTaskInclude
    from assible.template import Templar

    if not isinstance(ds, list):
        raise AssibleAssertionError(
            'The ds (%s) should be a list but was a %s' % (ds, type(ds)))

    task_list = []
    for task_ds in ds:
        if not isinstance(task_ds, dict):
            raise AssibleAssertionError(
                'The task entry (%s) should be a dict but was a %s' %
                (task_ds, type(task_ds)))

        if 'block' in task_ds:
            t = Block.load(
                task_ds,
                play=play,
                parent_block=block,
                role=role,
                task_include=task_include,
                use_handlers=use_handlers,
                variable_manager=variable_manager,
                loader=loader,
            )
            task_list.append(t)
        else:
            args_parser = ModuleArgsParser(task_ds)
            try:
                (action, args,
                 delegate_to) = args_parser.parse(skip_action_validation=True)
            except AssibleParserError as e:
                # if the raised exception was created with obj=ds args, it already includes
                # the detail, so we don't need to add it and can simply re-raise.
                if e._obj:
                    raise
                # But if it wasn't, we can add the yaml object now to get more detail
                raise AssibleParserError(to_native(e), obj=task_ds, orig_exc=e)

            if action in ('include', 'import_tasks', 'include_tasks'):

                if use_handlers:
                    include_class = HandlerTaskInclude
                else:
                    include_class = TaskInclude

                t = include_class.load(task_ds,
                                       block=block,
                                       role=role,
                                       task_include=None,
                                       variable_manager=variable_manager,
                                       loader=loader)

                all_vars = variable_manager.get_vars(play=play, task=t)
                templar = Templar(loader=loader, variables=all_vars)

                # check to see if this include is dynamic or static:
                # 1. the user has set the 'static' option to false or true
                # 2. one of the appropriate config options was set
                if action == 'include_tasks':
                    is_static = False
                elif action == 'import_tasks':
                    is_static = True
                elif t.static is not None:
                    display.deprecated(
                        "The use of 'static' has been deprecated. "
                        "Use 'import_tasks' for static inclusion, or 'include_tasks' for dynamic inclusion",
                        version='2.12',
                        collection_name='assible.builtin')
                    is_static = t.static
                else:
                    is_static = C.DEFAULT_TASK_INCLUDES_STATIC or \
                        (use_handlers and C.DEFAULT_HANDLER_INCLUDES_STATIC) or \
                        (not templar.is_template(t.args['_raw_params']) and t.all_parents_static() and not t.loop)

                if is_static:
                    if t.loop is not None:
                        if action == 'import_tasks':
                            raise AssibleParserError(
                                "You cannot use loops on 'import_tasks' statements. You should use 'include_tasks' instead.",
                                obj=task_ds)
                        else:
                            raise AssibleParserError(
                                "You cannot use 'static' on an include with a loop",
                                obj=task_ds)

                    # we set a flag to indicate this include was static
                    t.statically_loaded = True

                    # handle relative includes by walking up the list of parent include
                    # tasks and checking the relative result to see if it exists
                    parent_include = block
                    cumulative_path = None

                    found = False
                    subdir = 'tasks'
                    if use_handlers:
                        subdir = 'handlers'
                    while parent_include is not None:
                        if not isinstance(parent_include, TaskInclude):
                            parent_include = parent_include._parent
                            continue
                        try:
                            parent_include_dir = os.path.dirname(
                                templar.template(
                                    parent_include.args.get('_raw_params')))
                        except AssibleUndefinedVariable as e:
                            if not parent_include.statically_loaded:
                                raise AssibleParserError(
                                    "Error when evaluating variable in dynamic parent include path: %s. "
                                    "When using static imports, the parent dynamic include cannot utilize host facts "
                                    "or variables from inventory" %
                                    parent_include.args.get('_raw_params'),
                                    obj=task_ds,
                                    suppress_extended_error=True,
                                    orig_exc=e)
                            raise
                        if cumulative_path is None:
                            cumulative_path = parent_include_dir
                        elif not os.path.isabs(cumulative_path):
                            cumulative_path = os.path.join(
                                parent_include_dir, cumulative_path)
                        include_target = templar.template(
                            t.args['_raw_params'])
                        if t._role:
                            new_basedir = os.path.join(t._role._role_path,
                                                       subdir, cumulative_path)
                            include_file = loader.path_dwim_relative(
                                new_basedir, subdir, include_target)
                        else:
                            include_file = loader.path_dwim_relative(
                                loader.get_basedir(), cumulative_path,
                                include_target)

                        if os.path.exists(include_file):
                            found = True
                            break
                        else:
                            parent_include = parent_include._parent

                    if not found:
                        try:
                            include_target = templar.template(
                                t.args['_raw_params'])
                        except AssibleUndefinedVariable as e:
                            raise AssibleParserError(
                                "Error when evaluating variable in import path: %s.\n\n"
                                "When using static imports, ensure that any variables used in their names are defined in vars/vars_files\n"
                                "or extra-vars passed in from the command line. Static imports cannot use variables from facts or inventory\n"
                                "sources like group or host vars." %
                                t.args['_raw_params'],
                                obj=task_ds,
                                suppress_extended_error=True,
                                orig_exc=e)
                        if t._role:
                            include_file = loader.path_dwim_relative(
                                t._role._role_path, subdir, include_target)
                        else:
                            include_file = loader.path_dwim(include_target)

                    try:
                        data = loader.load_from_file(include_file)
                        if data is None:
                            display.warning(
                                'file %s is empty and had no tasks to include'
                                % include_file)
                            continue
                        elif not isinstance(data, list):
                            raise AssibleParserError(
                                "included task files must contain a list of tasks",
                                obj=data)

                        # since we can't send callbacks here, we display a message directly in
                        # the same fashion used by the on_include callback. We also do it here,
                        # because the recursive nature of helper methods means we may be loading
                        # nested includes, and we want the include order printed correctly
                        display.vv("statically imported: %s" % include_file)
                    except AssibleFileNotFound:
                        if action != 'include' or t.static or \
                           C.DEFAULT_TASK_INCLUDES_STATIC or \
                           C.DEFAULT_HANDLER_INCLUDES_STATIC and use_handlers:
                            raise
                        display.deprecated(
                            "Included file '%s' not found, however since this include is not "
                            "explicitly marked as 'static: yes', we will try and include it dynamically "
                            "later. In the future, this will be an error unless 'static: no' is used "
                            "on the include task. If you do not want missing includes to be considered "
                            "dynamic, use 'static: yes' on the include or set the global assible.cfg "
                            "options to make all includes static for tasks and/or handlers"
                            % include_file,
                            version="2.12",
                            collection_name='assible.builtin')
                        task_list.append(t)
                        continue

                    ti_copy = t.copy(exclude_parent=True)
                    ti_copy._parent = block
                    included_blocks = load_list_of_blocks(
                        data,
                        play=play,
                        parent_block=None,
                        task_include=ti_copy,
                        role=role,
                        use_handlers=use_handlers,
                        loader=loader,
                        variable_manager=variable_manager,
                    )

                    # FIXME: remove once 'include' is removed
                    # pop tags out of the include args, if they were specified there, and assign
                    # them to the include. If the include already had tags specified, we raise an
                    # error so that users know not to specify them both ways
                    tags = ti_copy.vars.pop('tags', [])
                    if isinstance(tags, string_types):
                        tags = tags.split(',')

                    if len(tags) > 0:
                        if action in ('include_tasks', 'import_tasks'):
                            raise AssibleParserError(
                                'You cannot specify "tags" inline to the task, it is a task keyword'
                            )
                        if len(ti_copy.tags) > 0:
                            raise AssibleParserError(
                                "Include tasks should not specify tags in more than one way (both via args and directly on the task). "
                                "Mixing styles in which tags are specified is prohibited for whole import hierarchy, not only for single import statement",
                                obj=task_ds,
                                suppress_extended_error=True,
                            )
                        display.deprecated(
                            "You should not specify tags in the include parameters. All tags should be specified using the task-level option",
                            version="2.12",
                            collection_name='assible.builtin')
                    else:
                        tags = ti_copy.tags[:]

                    # now we extend the tags on each of the included blocks
                    for b in included_blocks:
                        b.tags = list(set(b.tags).union(tags))
                    # END FIXME

                    # FIXME: handlers shouldn't need this special handling, but do
                    #        right now because they don't iterate blocks correctly
                    if use_handlers:
                        for b in included_blocks:
                            task_list.extend(b.block)
                    else:
                        task_list.extend(included_blocks)
                else:
                    t.is_static = False
                    task_list.append(t)

            elif action in ('include_role', 'import_role'):
                ir = IncludeRole.load(
                    task_ds,
                    block=block,
                    role=role,
                    task_include=None,
                    variable_manager=variable_manager,
                    loader=loader,
                )

                # check to see if this role include is dynamic or static:
                #   1. the user has set the 'static' option to false or true
                #   2. one of the appropriate config options was set
                is_static = False
                if action == 'import_role':
                    is_static = True

                elif ir.static is not None:
                    display.deprecated(
                        "The use of 'static' for 'include_role' has been deprecated. "
                        "Use 'import_role' for static inclusion, or 'include_role' for dynamic inclusion",
                        version='2.12',
                        collection_name='assible.builtin')
                    is_static = ir.static

                if is_static:
                    if ir.loop is not None:
                        if action == 'import_role':
                            raise AssibleParserError(
                                "You cannot use loops on 'import_role' statements. You should use 'include_role' instead.",
                                obj=task_ds)
                        else:
                            raise AssibleParserError(
                                "You cannot use 'static' on an include_role with a loop",
                                obj=task_ds)

                    # we set a flag to indicate this include was static
                    ir.statically_loaded = True

                    # template the role name now, if needed
                    all_vars = variable_manager.get_vars(play=play, task=ir)
                    templar = Templar(loader=loader, variables=all_vars)
                    ir._role_name = templar.template(ir._role_name)

                    # uses compiled list from object
                    blocks, _ = ir.get_block_list(
                        variable_manager=variable_manager, loader=loader)
                    task_list.extend(blocks)
                else:
                    # passes task object itself for latter generation of list
                    task_list.append(ir)
            else:
                if use_handlers:
                    t = Handler.load(task_ds,
                                     block=block,
                                     role=role,
                                     task_include=task_include,
                                     variable_manager=variable_manager,
                                     loader=loader)
                else:
                    t = Task.load(task_ds,
                                  block=block,
                                  role=role,
                                  task_include=task_include,
                                  variable_manager=variable_manager,
                                  loader=loader)

                task_list.append(t)

    return task_list
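For include/import actions, the code above decides between static and dynamic loading from the action name, an explicit 'static' flag, the configured defaults, and whether the include target is itself a template or sits under a loop. A condensed sketch of that decision, with the config flags passed in as plain arguments instead of being read from Assible's configuration:

# Condensed sketch of the static-vs-dynamic include decision made above.
# The config defaults are plain arguments here instead of C.DEFAULT_* values.
def include_is_static(action, explicit_static, raw_params_is_template,
                      parents_static, has_loop,
                      task_includes_static=False,
                      handler_includes_static=False,
                      use_handlers=False):
    if action == 'include_tasks':
        return False
    if action == 'import_tasks':
        return True
    if explicit_static is not None:
        return explicit_static
    return (task_includes_static or
            (use_handlers and handler_includes_static) or
            (not raw_params_is_template and parents_static and not has_loop))

print(include_is_static('import_tasks', None, False, True, False))  # True
print(include_is_static('include', None, True, True, False))        # False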
Example No. 25
    def test_action_base__configure_module(self):
        fake_loader = DictDataLoader({})

        # create our fake task
        mock_task = MagicMock()
        mock_task.action = "copy"
        mock_task.async_val = 0
        mock_task.delegate_to = None

        # create a mock connection, so we don't actually try and connect to things
        mock_connection = MagicMock()

        # create a mock shared loader object
        def mock_find_plugin_with_context(name, options, collection_list=None):
            mockctx = MagicMock()
            if name == 'badmodule':
                mockctx.resolved = False
                mockctx.plugin_resolved_path = None
            elif '.ps1' in options:
                mockctx.resolved = True
                mockctx.plugin_resolved_path = '/fake/path/to/%s.ps1' % name
            else:
                mockctx.resolved = True
                mockctx.plugin_resolved_path = '/fake/path/to/%s' % name
            return mockctx

        mock_module_loader = MagicMock()
        mock_module_loader.find_plugin_with_context.side_effect = mock_find_plugin_with_context
        mock_shared_obj_loader = MagicMock()
        mock_shared_obj_loader.module_loader = mock_module_loader

        # we're using a real play context here
        play_context = PlayContext()

        # our test class
        action_base = DerivedActionBase(
            task=mock_task,
            connection=mock_connection,
            play_context=play_context,
            loader=fake_loader,
            templar=Templar(loader=fake_loader),
            shared_loader_obj=mock_shared_obj_loader,
        )

        # test python module formatting
        with patch.object(
                builtins, 'open',
                mock_open(read_data=to_bytes(python_module_replacers.strip(),
                                             encoding='utf-8'))):
            with patch.object(os, 'rename'):
                mock_task.args = dict(a=1, foo='fö〩')
                mock_connection.module_implementation_preferences = ('', )
                (style, shebang, data, path) = action_base._configure_module(
                    mock_task.action,
                    mock_task.args,
                    task_vars=dict(
                        assible_python_interpreter='/usr/bin/python'))
                self.assertEqual(style, "new")
                self.assertEqual(shebang, u"#!/usr/bin/python")

                # test module not found
                self.assertRaises(AssibleError, action_base._configure_module,
                                  'badmodule', mock_task.args, {})

        # test powershell module formatting
        with patch.object(
                builtins, 'open',
                mock_open(read_data=to_bytes(
                    powershell_module_replacers.strip(), encoding='utf-8'))):
            mock_task.action = 'win_copy'
            mock_task.args = dict(b=2)
            mock_connection.module_implementation_preferences = ('.ps1', )
            (style, shebang, data,
             path) = action_base._configure_module('stat', mock_task.args, {})
            self.assertEqual(style, "new")
            self.assertEqual(shebang, u'#!powershell')

            # test module not found
            self.assertRaises(AssibleError, action_base._configure_module,
                              'badmodule', mock_task.args, {})
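The mocked loader in this test encodes the resolution rule being exercised: an unknown module name fails to resolve, and the connection's preferred extensions ('.ps1' for the PowerShell case) steer which concrete path is returned. A condensed version of just that rule (illustrative only; the real loader walks actual plugin paths):

# Condensed version of the resolution rule the mocked loader implements.
def resolve(name, preferences):
    if name == 'badmodule':
        return None
    if '.ps1' in preferences:
        return '/fake/path/to/%s.ps1' % name
    return '/fake/path/to/%s' % name

print(resolve('copy', ('',)))       # /fake/path/to/copy
print(resolve('stat', ('.ps1',)))   # /fake/path/to/stat.ps1
print(resolve('badmodule', ('',)))  # None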
Example No. 26
 def __repr__(self):
     templar = Templar(variables=self._vars, loader=self._loader)
     return repr(
         templar.template(self._vars,
                          fail_on_undefined=False,
                          static_vars=STATIC_VARS))
Example No. 27
 def setUp(self):
     self.loader = DictDataLoader({})
     self.cond = conditional.Conditional(loader=self.loader)
     self.templar = Templar(loader=self.loader, variables={})
Example No. 28
    def test_get_serialized_batches(self):
        fake_loader = DictDataLoader({
            'no_serial.yml':
            '''
            - hosts: all
              gather_facts: no
              tasks:
              - debug: var=inventory_hostname
            ''',
            'serial_int.yml':
            '''
            - hosts: all
              gather_facts: no
              serial: 2
              tasks:
              - debug: var=inventory_hostname
            ''',
            'serial_pct.yml':
            '''
            - hosts: all
              gather_facts: no
              serial: 20%
              tasks:
              - debug: var=inventory_hostname
            ''',
            'serial_list.yml':
            '''
            - hosts: all
              gather_facts: no
              serial: [1, 2, 3]
              tasks:
              - debug: var=inventory_hostname
            ''',
            'serial_list_mixed.yml':
            '''
            - hosts: all
              gather_facts: no
              serial: [1, "20%", -1]
              tasks:
              - debug: var=inventory_hostname
            ''',
        })

        mock_inventory = MagicMock()
        mock_var_manager = MagicMock()

        templar = Templar(loader=fake_loader)

        pbe = PlaybookExecutor(
            playbooks=[
                'no_serial.yml', 'serial_int.yml', 'serial_pct.yml',
                'serial_list.yml', 'serial_list_mixed.yml'
            ],
            inventory=mock_inventory,
            variable_manager=mock_var_manager,
            loader=fake_loader,
            passwords=[],
        )

        playbook = Playbook.load(pbe._playbooks[0],
                                 variable_manager=mock_var_manager,
                                 loader=fake_loader)
        play = playbook.get_plays()[0]
        play.post_validate(templar)
        mock_inventory.get_hosts.return_value = [
            'host0', 'host1', 'host2', 'host3', 'host4', 'host5', 'host6',
            'host7', 'host8', 'host9'
        ]
        self.assertEqual(pbe._get_serialized_batches(play), [[
            'host0', 'host1', 'host2', 'host3', 'host4', 'host5', 'host6',
            'host7', 'host8', 'host9'
        ]])

        playbook = Playbook.load(pbe._playbooks[1],
                                 variable_manager=mock_var_manager,
                                 loader=fake_loader)
        play = playbook.get_plays()[0]
        play.post_validate(templar)
        mock_inventory.get_hosts.return_value = [
            'host0', 'host1', 'host2', 'host3', 'host4', 'host5', 'host6',
            'host7', 'host8', 'host9'
        ]
        self.assertEqual(
            pbe._get_serialized_batches(play),
            [['host0', 'host1'], ['host2', 'host3'], ['host4', 'host5'],
             ['host6', 'host7'], ['host8', 'host9']])

        playbook = Playbook.load(pbe._playbooks[2],
                                 variable_manager=mock_var_manager,
                                 loader=fake_loader)
        play = playbook.get_plays()[0]
        play.post_validate(templar)
        mock_inventory.get_hosts.return_value = [
            'host0', 'host1', 'host2', 'host3', 'host4', 'host5', 'host6',
            'host7', 'host8', 'host9'
        ]
        self.assertEqual(
            pbe._get_serialized_batches(play),
            [['host0', 'host1'], ['host2', 'host3'], ['host4', 'host5'],
             ['host6', 'host7'], ['host8', 'host9']])

        playbook = Playbook.load(pbe._playbooks[3],
                                 variable_manager=mock_var_manager,
                                 loader=fake_loader)
        play = playbook.get_plays()[0]
        play.post_validate(templar)
        mock_inventory.get_hosts.return_value = [
            'host0', 'host1', 'host2', 'host3', 'host4', 'host5', 'host6',
            'host7', 'host8', 'host9'
        ]
        self.assertEqual(
            pbe._get_serialized_batches(play),
            [['host0'], ['host1', 'host2'], ['host3', 'host4', 'host5'],
             ['host6', 'host7', 'host8'], ['host9']])

        playbook = Playbook.load(pbe._playbooks[4],
                                 variable_manager=mock_var_manager,
                                 loader=fake_loader)
        play = playbook.get_plays()[0]
        play.post_validate(templar)
        mock_inventory.get_hosts.return_value = [
            'host0', 'host1', 'host2', 'host3', 'host4', 'host5', 'host6',
            'host7', 'host8', 'host9'
        ]
        self.assertEqual(
            pbe._get_serialized_batches(play),
            [['host0'], ['host1', 'host2'],
             ['host3', 'host4', 'host5', 'host6', 'host7', 'host8', 'host9']])

        # Test when serial percent is under 1.0
        playbook = Playbook.load(pbe._playbooks[2],
                                 variable_manager=mock_var_manager,
                                 loader=fake_loader)
        play = playbook.get_plays()[0]
        play.post_validate(templar)
        mock_inventory.get_hosts.return_value = ['host0', 'host1', 'host2']
        self.assertEqual(pbe._get_serialized_batches(play),
                         [['host0'], ['host1'], ['host2']])

        # Test when there is a remainder for serial as a percent
        playbook = Playbook.load(pbe._playbooks[2],
                                 variable_manager=mock_var_manager,
                                 loader=fake_loader)
        play = playbook.get_plays()[0]
        play.post_validate(templar)
        mock_inventory.get_hosts.return_value = [
            'host0', 'host1', 'host2', 'host3', 'host4', 'host5', 'host6',
            'host7', 'host8', 'host9', 'host10'
        ]
        self.assertEqual(
            pbe._get_serialized_batches(play),
            [['host0', 'host1'], ['host2', 'host3'], ['host4', 'host5'],
             ['host6', 'host7'], ['host8', 'host9'], ['host10']])
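The assertions above pin down how serial values split the host list: an integer gives fixed-size batches, a percentage is taken of the total host count (never dropping below one host), and list values apply each entry in turn, repeating the last one, with -1 meaning all remaining hosts. A minimal sketch that reproduces the integer and percentage splits asserted here (not the real _get_serialized_batches):

# Minimal sketch reproducing the integer and percentage batch splits
# asserted in the test above; list-valued serial additionally applies
# each entry in turn.
def batch_size(serial, total):
    if isinstance(serial, str) and serial.endswith('%'):
        size = int(total * (float(serial[:-1]) / 100.0))
        return max(size, 1)  # a percentage below one host still yields 1
    size = int(serial)
    return total if size <= 0 else size

def serialize(hosts, serial):
    size = batch_size(serial, len(hosts))
    return [hosts[i:i + size] for i in range(0, len(hosts), size)]

hosts = ['host%d' % i for i in range(10)]
print(serialize(hosts, 2))      # five batches of two
print(serialize(hosts, '20%'))  # same result: 20% of 10 hosts is 2
print(serialize(['host0', 'host1', 'host2'], '20%'))  # one host per batch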
Example No. 29
    def _load_role_path(self, role_name):
        '''
        the 'role', as specified in the ds (or as a bare string), can either
        be a simple name or a full path. If it is a full path, we use the
        basename as the role name, otherwise we take the name as-given and
        append it to the default role path
        '''

        # create a templar class to template the dependency names, in
        # case they contain variables
        if self._variable_manager is not None:
            all_vars = self._variable_manager.get_vars(play=self._play)
        else:
            all_vars = dict()

        templar = Templar(loader=self._loader, variables=all_vars)
        role_name = templar.template(role_name)

        role_tuple = None

        # try to load as a collection-based role first
        if self._collection_list or AssibleCollectionRef.is_valid_fqcr(
                role_name):
            role_tuple = _get_collection_role_path(role_name,
                                                   self._collection_list)

        if role_tuple:
            # we found it, stash collection data and return the name/path tuple
            self._role_collection = role_tuple[2]
            return role_tuple[0:2]

        # We didn't find a collection role, look in defined role paths
        # FUTURE: refactor this to be callable from internal so we can properly order
        # assible.legacy searches with the collections keyword

        # we always start the search for roles in the base directory of the playbook
        role_search_paths = [
            os.path.join(self._loader.get_basedir(), u'roles'),
        ]

        # also search in the configured roles path
        if C.DEFAULT_ROLES_PATH:
            role_search_paths.extend(C.DEFAULT_ROLES_PATH)

        # next, append the roles basedir, if it was set, so we can
        # search relative to that directory for dependent roles
        if self._role_basedir:
            role_search_paths.append(self._role_basedir)

        # finally as a last resort we look in the current basedir as set
        # in the loader (which should be the playbook dir itself) but without
        # the roles/ dir appended
        role_search_paths.append(self._loader.get_basedir())

        # now iterate through the possible paths and return the first one we find
        for path in role_search_paths:
            path = templar.template(path)
            role_path = unfrackpath(os.path.join(path, role_name))
            if self._loader.path_exists(role_path):
                return (role_name, role_path)

        # if not found elsewhere try to extract path from name
        role_path = unfrackpath(role_name)
        if self._loader.path_exists(role_path):
            role_name = os.path.basename(role_name)
            return (role_name, role_path)

        searches = (self._collection_list or []) + role_search_paths
        raise AssibleError("the role '%s' was not found in %s" %
                           (role_name, ":".join(searches)),
                           obj=self._ds)
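The search order implemented above is: the playbook's roles/ directory, any configured roles paths, the role basedir (for dependent roles), and finally the playbook directory itself, with each candidate templated before the existence check and collection roles tried first. A compressed standalone version of that walk, using plain os.path in place of the loader and templar (all paths below are made up):

# Compressed sketch of the role path search order. All paths are
# illustrative only.
import os

def find_role(role_name, playbook_dir, roles_path=(), role_basedir=None,
              path_exists=os.path.isdir):
    search = [os.path.join(playbook_dir, 'roles')]  # <playbook>/roles first
    search.extend(roles_path)                       # configured roles paths
    if role_basedir:
        search.append(role_basedir)                 # relative to dependent roles
    search.append(playbook_dir)                     # last resort: playbook dir
    for base in search:
        candidate = os.path.join(base, role_name)
        if path_exists(candidate):
            return role_name, candidate
    return None

# Pretend only /home/user/.assible/roles/common exists.
hit = find_role('common', '/srv/playbooks',
                roles_path=['/home/user/.assible/roles'],
                path_exists=lambda p: p == '/home/user/.assible/roles/common')
print(hit)  # ('common', '/home/user/.assible/roles/common')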
Example No. 30
    def run(self, iterator, play_context):
        '''
        The linear strategy is simple - get the next task and queue
        it for all hosts, then wait for the queue to drain before
        moving on to the next task
        '''

        # iterate over each task, while there is one left to run
        result = self._tqm.RUN_OK
        work_to_do = True

        self._set_hosts_cache(iterator._play)

        while work_to_do and not self._tqm._terminated:

            try:
                display.debug("getting the remaining hosts for this loop")
                hosts_left = self.get_hosts_left(iterator)
                display.debug("done getting the remaining hosts for this loop")

                # queue up this task for each host in the inventory
                callback_sent = False
                work_to_do = False

                host_results = []
                host_tasks = self._get_next_task_lockstep(hosts_left, iterator)

                # skip control
                skip_rest = False
                choose_step = True

                # flag set if task is set to any_errors_fatal
                any_errors_fatal = False

                results = []
                for (host, task) in host_tasks:
                    if not task:
                        continue

                    if self._tqm._terminated:
                        break

                    run_once = False
                    work_to_do = True

                    # check to see if this task should be skipped, due to it being a member of a
                    # role which has already run (and whether that role allows duplicate execution)
                    if task._role and task._role.has_run(host):
                        # If there is no metadata, the default behavior is to not allow duplicates;
                        # if there is metadata, check whether the allow_duplicates flag was set to true
                        if task._role._metadata is None or task._role._metadata and not task._role._metadata.allow_duplicates:
                            display.debug(
                                "'%s' skipped because role has already run" %
                                task)
                            continue

                    display.debug("getting variables")
                    task_vars = self._variable_manager.get_vars(
                        play=iterator._play,
                        host=host,
                        task=task,
                        _hosts=self._hosts_cache,
                        _hosts_all=self._hosts_cache_all)
                    self.add_tqm_variables(task_vars, play=iterator._play)
                    templar = Templar(loader=self._loader, variables=task_vars)
                    display.debug("done getting variables")

                    # test to see if the task across all hosts points to an action plugin which
                    # sets BYPASS_HOST_LOOP to true, or if it has run_once enabled. If so, we
                    # will only send this task to the first host in the list.

                    task.action = templar.template(task.action)

                    try:
                        action = action_loader.get(
                            task.action,
                            class_only=True,
                            collection_list=task.collections)
                    except KeyError:
                        # we don't care here, because the action may simply not have a
                        # corresponding action plugin
                        action = None

                    if task.action == 'meta':
                        # for the linear strategy, we run meta tasks just once, for
                        # all hosts currently being iterated over, rather than once per host
                        results.extend(
                            self._execute_meta(task, play_context, iterator,
                                               host))
                        if task.args.get('_raw_params', None) not in (
                                'noop', 'reset_connection', 'end_host'):
                            run_once = True
                        if (task.any_errors_fatal
                                or run_once) and not task.ignore_errors:
                            any_errors_fatal = True
                    else:
                        # handle step if needed, skip meta actions as they are used internally
                        if self._step and choose_step:
                            if self._take_step(task):
                                choose_step = False
                            else:
                                skip_rest = True
                                break

                        run_once = templar.template(task.run_once) or (
                            action and getattr(action, 'BYPASS_HOST_LOOP', False))

                        if (task.any_errors_fatal
                                or run_once) and not task.ignore_errors:
                            any_errors_fatal = True

                        if not callback_sent:
                            display.debug(
                                "sending task start callback, copying the task so we can template it temporarily"
                            )
                            saved_name = task.name
                            display.debug(
                                "done copying, going to template now")
                            try:
                                task.name = to_text(templar.template(
                                    task.name, fail_on_undefined=False),
                                                    nonstring='empty')
                                display.debug("done templating")
                            except Exception:
                                # just ignore any errors during task name templating;
                                # we don't care if it just shows the raw name
                                display.debug(
                                    "templating failed for some reason")
                            display.debug("here goes the callback...")
                            self._tqm.send_callback(
                                'v2_playbook_on_task_start',
                                task,
                                is_conditional=False)
                            task.name = saved_name
                            callback_sent = True
                            display.debug("sending task start callback")

                        self._blocked_hosts[host.get_name()] = True
                        self._queue_task(host, task, task_vars, play_context)
                        del task_vars

                    # if we're bypassing the host loop, break out now
                    if run_once:
                        break

                    results += self._process_pending_results(
                        iterator,
                        max_passes=max(1, int(len(self._tqm._workers) * 0.1)))

                # go to next host/task group
                if skip_rest:
                    continue

                display.debug(
                    "done queuing things up, now waiting for results queue to drain"
                )
                if self._pending_results > 0:
                    results += self._wait_on_pending_results(iterator)

                host_results.extend(results)

                self.update_active_connections(results)

                included_files = IncludedFile.process_include_results(
                    host_results,
                    iterator=iterator,
                    loader=self._loader,
                    variable_manager=self._variable_manager)

                include_failure = False
                if len(included_files) > 0:
                    display.debug("we have included files to process")

                    display.debug("generating all_blocks data")
                    all_blocks = dict((host, []) for host in hosts_left)
                    display.debug("done generating all_blocks data")
                    for included_file in included_files:
                        display.debug("processing included file: %s" %
                                      included_file._filename)
                        # included hosts get the task list while those excluded get an equal-length
                        # list of noop tasks, to make sure that they continue running in lock-step
                        try:
                            if included_file._is_role:
                                new_ir = self._copy_included_file(
                                    included_file)

                                new_blocks, handler_blocks = new_ir.get_block_list(
                                    play=iterator._play,
                                    variable_manager=self._variable_manager,
                                    loader=self._loader,
                                )
                            else:
                                new_blocks = self._load_included_file(
                                    included_file, iterator=iterator)

                            display.debug(
                                "iterating over new_blocks loaded from include file"
                            )
                            for new_block in new_blocks:
                                task_vars = self._variable_manager.get_vars(
                                    play=iterator._play,
                                    task=new_block._parent,
                                    _hosts=self._hosts_cache,
                                    _hosts_all=self._hosts_cache_all,
                                )
                                display.debug("filtering new block on tags")
                                final_block = new_block.filter_tagged_tasks(
                                    task_vars)
                                display.debug(
                                    "done filtering new block on tags")

                                noop_block = self._prepare_and_create_noop_block_from(
                                    final_block, task._parent, iterator)

                                for host in hosts_left:
                                    if host in included_file._hosts:
                                        all_blocks[host].append(final_block)
                                    else:
                                        all_blocks[host].append(noop_block)
                            display.debug(
                                "done iterating over new_blocks loaded from include file"
                            )

                        except AssibleError as e:
                            for host in included_file._hosts:
                                self._tqm._failed_hosts[host.name] = True
                                iterator.mark_host_failed(host)
                            display.error(to_text(e), wrap_text=False)
                            include_failure = True
                            continue

                    # finally go through all of the hosts and append the
                    # accumulated blocks to their list of tasks
                    display.debug(
                        "extending task lists for all hosts with included blocks"
                    )

                    for host in hosts_left:
                        iterator.add_tasks(host, all_blocks[host])

                    display.debug("done extending task lists")
                    display.debug("done processing included files")

                display.debug("results queue empty")

                display.debug("checking for any_errors_fatal")
                failed_hosts = []
                unreachable_hosts = []
                for res in results:
                    # execute_meta() does not set 'failed' in the TaskResult
                    # so we skip checking it with the meta tasks and look just at the iterator
                    if ((res.is_failed() or res._task.action == 'meta')
                            and iterator.is_failed(res._host)):
                        failed_hosts.append(res._host.name)
                    elif res.is_unreachable():
                        unreachable_hosts.append(res._host.name)

                # if any_errors_fatal and we had an error, mark all hosts as failed
                if any_errors_fatal and (len(failed_hosts) > 0
                                         or len(unreachable_hosts) > 0):
                    dont_fail_states = frozenset(
                        [iterator.ITERATING_RESCUE, iterator.ITERATING_ALWAYS])
                    for host in hosts_left:
                        (s, _) = iterator.get_next_task_for_host(host,
                                                                 peek=True)
                        # the state may actually be in a child state, use the get_active_state()
                        # method in the iterator to figure out the true active state
                        s = iterator.get_active_state(s)
                        if s.run_state not in dont_fail_states or \
                           s.run_state == iterator.ITERATING_RESCUE and s.fail_state & iterator.FAILED_RESCUE != 0:
                            self._tqm._failed_hosts[host.name] = True
                            result |= self._tqm.RUN_FAILED_BREAK_PLAY
                display.debug("done checking for any_errors_fatal")

                display.debug("checking for max_fail_percentage")
                if iterator._play.max_fail_percentage is not None and len(
                        results) > 0:
                    percentage = iterator._play.max_fail_percentage / 100.0
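                    # worked example: with max_fail_percentage=30 and a batch of
                    # 10 hosts, 3 failed hosts (0.3) are tolerated, while a 4th
                    # (0.4 > 0.3) marks the remaining hosts as failed below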

                    if (len(self._tqm._failed_hosts) /
                            iterator.batch_size) > percentage:
                        for host in hosts_left:
                            # don't double-mark hosts, or the iterator will potentially
                            # fail them out of the rescue/always states
                            if host.name not in failed_hosts:
                                self._tqm._failed_hosts[host.name] = True
                                iterator.mark_host_failed(host)
                        self._tqm.send_callback(
                            'v2_playbook_on_no_hosts_remaining')
                        result |= self._tqm.RUN_FAILED_BREAK_PLAY
                    display.debug('(%s failed / %s total )> %s max fail' %
                                  (len(self._tqm._failed_hosts),
                                   iterator.batch_size, percentage))
                display.debug("done checking for max_fail_percentage")

                display.debug(
                    "checking to see if all hosts have failed and the running result is not ok"
                )
                if result != self._tqm.RUN_OK and len(
                        self._tqm._failed_hosts) >= len(hosts_left):
                    display.debug("^ not ok, so returning result now")
                    self._tqm.send_callback(
                        'v2_playbook_on_no_hosts_remaining')
                    return result
                display.debug("done checking to see if all hosts have failed")

            except (IOError, EOFError) as e:
                display.debug("got IOError/EOFError in task loop: %s" % e)
                # most likely an abort, return failed
                return self._tqm.RUN_UNKNOWN_ERROR

        # run the base class run() method, which executes the cleanup function
        # and runs any outstanding handlers which have been triggered

        return super(StrategyModule, self).run(iterator, play_context, result)
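
The run() method above boils down to a lockstep loop: queue the current task for every host, drain the results queue, then move on to the next task. A rough, hypothetical sketch of that pattern follows; run_lockstep, queue_task, and drain_results are stand-in names, not part of the strategy API.

def run_lockstep(hosts, tasks, queue_task, drain_results):
    # simplified view of the loop above: the same task is queued for every
    # host, then the results queue is drained before the next task starts
    all_results = []
    for task in tasks:
        for host in hosts:
            queue_task(host, task)
        # wait for every queued task in this step to report back
        all_results.extend(drain_results())
    return all_results


# usage with trivial stand-in callables
pending = []
print(run_lockstep(
    hosts=['web1', 'web2'],
    tasks=['ping', 'setup'],
    queue_task=lambda host, task: pending.append((host, task)),
    drain_results=lambda: [pending.pop(0) for _ in range(len(pending))],
))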