Example #1
    def _load_role_data(self, role_include, parent_role=None):
        self._role_name        = role_include.role
        self._role_path        = role_include.get_role_path()
        self._role_params      = role_include.get_role_params()
        self._variable_manager = role_include.get_variable_manager()
        self._loader           = role_include.get_loader()

        if parent_role:
            self.add_parent(parent_role)

        # copy over all field attributes, except for when and tags, which
        # are special cases and need to preserve pre-existing values
        for (attr_name, _) in iteritems(self._get_base_attributes()):
            if attr_name not in ('when', 'tags'):
                setattr(self, attr_name, getattr(role_include, attr_name))

        current_when = getattr(self, 'when')[:]
        current_when.extend(role_include.when)
        setattr(self, 'when', current_when)
        
        current_tags = getattr(self, 'tags')[:]
        current_tags.extend(role_include.tags)
        setattr(self, 'tags', current_tags)

        # dynamically load any plugins from the role directory
        for name, obj in get_all_plugin_loaders():
            if obj.subdir:
                plugin_path = os.path.join(self._role_path, obj.subdir)
                if os.path.isdir(plugin_path):
                    obj.add_directory(plugin_path)

        # load the role's other files, if they exist
        metadata = self._load_role_yaml('meta')
        if metadata:
            self._metadata = RoleMetadata.load(metadata, owner=self, loader=self._loader)
            self._dependencies = self._load_dependencies()
        else:
            self._metadata = RoleMetadata()

        task_data = self._load_role_yaml('tasks')
        if task_data:
            self._task_blocks = load_list_of_blocks(task_data, play=self._play, role=self, loader=self._loader)

        handler_data = self._load_role_yaml('handlers')
        if handler_data:
            self._handler_blocks = load_list_of_blocks(handler_data, play=self._play, role=self, use_handlers=True, loader=self._loader)

        # vars and default vars are regular dictionaries
        self._role_vars  = self._load_role_yaml('vars')
        if self._role_vars is None:
            self._role_vars = dict()
        elif not isinstance(self._role_vars, dict):
            raise AnsibleParserError("The vars/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name)

        self._default_vars = self._load_role_yaml('defaults')
        if self._default_vars is None:
            self._default_vars = dict()
        elif not isinstance(self._default_vars, dict):
            raise AnsibleParserError("The default/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name)
Example #2
    def _load_role_data(self, role_include, parent_role=None):
        self._role_name        = role_include.role
        self._role_path        = role_include.get_role_path()
        self._role_params      = role_include.get_role_params()
        self._variable_manager = role_include.get_variable_manager()
        self._loader           = role_include.get_loader()

        if parent_role:
            self.add_parent(parent_role)

        current_when = getattr(self, 'when')[:]
        current_when.extend(role_include.when)
        setattr(self, 'when', current_when)
        
        current_tags = getattr(self, 'tags')[:]
        current_tags.extend(role_include.tags)
        setattr(self, 'tags', current_tags)

        # save the current base directory for the loader and set it to the current role path
        #cur_basedir = self._loader.get_basedir()
        #self._loader.set_basedir(self._role_path)

        # load the role's files, if they exist
        library = os.path.join(self._role_path, 'library')
        if os.path.isdir(library):
            module_loader.add_directory(library)

        metadata = self._load_role_yaml('meta')
        if metadata:
            self._metadata = RoleMetadata.load(metadata, owner=self, loader=self._loader)
            self._dependencies = self._load_dependencies()

        task_data = self._load_role_yaml('tasks')
        if task_data:
            self._task_blocks = load_list_of_blocks(task_data, role=self, loader=self._loader)

        handler_data = self._load_role_yaml('handlers')
        if handler_data:
            self._handler_blocks = load_list_of_blocks(handler_data, role=self, loader=self._loader)

        # vars and default vars are regular dictionaries
        self._role_vars  = self._load_role_yaml('vars')
        if not isinstance(self._role_vars, (dict, NoneType)):
            raise AnsibleParserError("The vars/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name, obj=ds)
        elif self._role_vars is None:
            self._role_vars = dict()

        self._default_vars = self._load_role_yaml('defaults')
        if not isinstance(self._default_vars, (dict, NoneType)):
            raise AnsibleParserError("The default/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name, obj=ds)
        elif self._default_vars is None:
            self._default_vars = dict()
Example #3
    def _load_included_file(self, included_file):
        '''
        Loads an included YAML file of tasks, applying the optional set of variables.
        '''

        data = self._loader.load_from_file(included_file._filename)
        if not isinstance(data, list):
            raise AnsibleParserError("included task files must contain a list of tasks", obj=included_file._task._ds)

        is_handler = isinstance(included_file._task, Handler)
        block_list = load_list_of_blocks(
            data,
            play=included_file._task._block._play,
            parent_block=included_file._task._block,
            task_include=included_file._task,
            role=included_file._task._role,
            use_handlers=is_handler,
            loader=self._loader
        )

        # set the vars for this task from those specified as params to the include
        for b in block_list:
            b._vars = included_file._args.copy()

        return block_list
Example #4
 def _load_post_tasks(self, attr, ds):
     '''
     Loads a list of blocks from a list which may be mixed tasks/blocks.
     Bare tasks outside of a block are given an implicit block.
     '''
     try:
         return load_list_of_blocks(ds=ds, play=self, variable_manager=self._variable_manager, loader=self._loader)
     except AssertionError as e:
         raise AnsibleParserError("A malformed block was encountered while loading post_tasks", obj=self._ds, orig_exc=e)
Example #5
    def test_block_unknown_action(self):
        ds = [{'action': 'foo'}]
        mock_play = MagicMock(name='MockPlay')
        res = helpers.load_list_of_blocks(ds, mock_play, parent_block=None, role=None, task_include=None, use_handlers=False, variable_manager=None,
                                          loader=None)

        self.assertIsInstance(res, list)
        for block in res:
            self.assertIsInstance(block, Block)
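
The unit test above is also the easiest way to call `load_list_of_blocks` outside of a play or role. A standalone sketch along the same lines, assuming an Ansible 2.x installation is importable; as in the test, a `MagicMock` stands in for the play and the loader is left as `None`:

    from unittest.mock import MagicMock

    from ansible.playbook.block import Block
    from ansible.playbook.helpers import load_list_of_blocks

    # a bare task dict, as in the test above; it gets wrapped in an implicit Block
    ds = [{'action': 'foo'}]
    mock_play = MagicMock(name='MockPlay')

    blocks = load_list_of_blocks(ds, mock_play, parent_block=None, role=None,
                                 task_include=None, use_handlers=False,
                                 variable_manager=None, loader=None)
    assert all(isinstance(b, Block) for b in blocks)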
Example #6
 def _load_handlers(self, attr, ds):
     '''
     Loads a list of blocks from a list which may be mixed handlers/blocks.
     Bare handlers outside of a block are given an implicit block.
     '''
     try:
         return load_list_of_blocks(ds=ds, play=self, use_handlers=True, variable_manager=self._variable_manager, loader=self._loader)
     except AssertionError:
         raise AnsibleParserError("A malformed block was encountered.", obj=self._ds)
Example #7
    def _load_role_data(self, role_include, parent_role=None):
        self._role_name   = role_include.role
        self._role_path   = role_include.get_role_path()
        self._role_params = role_include.get_role_params()
        self._loader      = role_include.get_loader()

        if parent_role:
            self.add_parent(parent_role)

        # save the current base directory for the loader and set it to the current role path
        cur_basedir = self._loader.get_basedir()
        self._loader.set_basedir(self._role_path)

        # load the role's files, if they exist
        metadata = self._load_role_yaml('meta')
        if metadata:
            self._metadata = RoleMetadata.load(metadata, owner=self, loader=self._loader)
            self._dependencies = self._load_dependencies()

        task_data = self._load_role_yaml('tasks')
        if task_data:
            self._task_blocks = load_list_of_blocks(task_data, role=self, loader=self._loader)

        handler_data = self._load_role_yaml('handlers')
        if handler_data:
            self._handler_blocks = load_list_of_blocks(handler_data, role=self, loader=self._loader)

        # vars and default vars are regular dictionaries
        self._role_vars    = self._load_role_yaml('vars')
        if not isinstance(self._role_vars, (dict, NoneType)):
            raise AnsibleParserError("The vars/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name, obj=ds)

        self._default_vars = self._load_role_yaml('defaults')
        if not isinstance(self._default_vars, (dict, NoneType)):
            raise AnsibleParserError("The default/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name, obj=ds)

        # and finally restore the previous base directory
        self._loader.set_basedir(cur_basedir)
Example #8
    def _load_include(self, attr, ds):
        ''' loads the file name specified in the ds and returns a list of blocks '''

        data = self._loader.load_from_file(ds)
        if not isinstance(data, list):
            raise AnsibleParserError("included task files must contain a list of tasks", obj=ds)

        self._task_blocks = load_list_of_blocks(
                                data,
                                parent_block=self._block,
                                task_include=self,
                                role=self._role,
                                loader=self._loader
                            )
        return ds
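
Example #8 rejects any included file whose top-level structure is not a list of tasks. A rough standalone equivalent using PyYAML directly, with a plain `ValueError` standing in for the Ansible parser error (the function name and sample YAML are illustrative):

    import yaml

    def load_task_list(text, source='tasks file'):
        data = yaml.safe_load(text)
        if data is None:
            # an empty include file simply contributes no tasks
            return []
        if not isinstance(data, list):
            raise ValueError("included task files must contain a list of tasks (%s)" % source)
        return data

    print(load_task_list("- debug:\n    msg: hello\n"))
    # -> [{'debug': {'msg': 'hello'}}]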
Example #9
    def _load_included_file(self, included_file, iterator, is_handler=False):
        '''
        Loads an included YAML file of tasks, applying the optional set of variables.
        '''

        try:
            data = self._loader.load_from_file(included_file._filename)
            if data is None:
                return []
        except AnsibleError as e:
            for host in included_file._hosts:
                tr = TaskResult(host=host, task=included_file._task, return_data=dict(failed=True, reason=str(e)))
                iterator.mark_host_failed(host)
                self._tqm._failed_hosts[host.name] = True
                self._tqm._stats.increment('failures', host.name)
                self._tqm.send_callback('v2_runner_on_failed', tr)
            return []

        if not isinstance(data, list):
            raise AnsibleParserError("included task files must contain a list of tasks", obj=included_file._task._ds)

        block_list = load_list_of_blocks(
            data,
            play=included_file._task._block._play,
            parent_block=included_file._task._block,
            task_include=included_file._task,
            role=included_file._task._role,
            use_handlers=is_handler,
            loader=self._loader
        )

        # set the vars for this task from those specified as params to the include
        for b in block_list:
            # first make a copy of the including task, so that each has a unique copy to modify
            # FIXME: not sure if this is the best way to fix this, as we might be losing
            #        information in the copy. Previously we assigned the include params to
            #        the block variables directly, which caused other problems, so we may
            #        need to figure out a third option if this also presents problems.
            b._task_include = b._task_include.copy(exclude_block=True)
            # then we create a temporary set of vars to ensure the variable reference is unique
            temp_vars = b._task_include.vars.copy()
            temp_vars.update(included_file._args.copy())
            b._task_include.vars = temp_vars

        return block_list
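
The loop above gives every included block its own variable dict: it copies the include task's vars and then lets the include arguments win on overlapping keys, so no shared dict is mutated in place. A small self-contained sketch of that non-destructive merge (names and values are illustrative):

    def merge_include_vars(task_include_vars, include_args):
        # copy first so neither input is mutated, then let the include
        # arguments override any overlapping keys
        merged = task_include_vars.copy()
        merged.update(include_args)
        return merged

    base_vars = {'pkg_state': 'present', 'port': 80}
    include_args = {'port': 8080}
    print(merge_include_vars(base_vars, include_args))  # {'pkg_state': 'present', 'port': 8080}
    print(base_vars)                                    # unchanged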
Example #10
    def _load_included_file(self, included_file, iterator, is_handler=False):
        '''
        Loads an included YAML file of tasks, applying the optional set of variables.
        '''

        try:
            data = self._loader.load_from_file(included_file._filename)
            if data is None:
                return []
            elif not isinstance(data, list):
                raise AnsibleError("included task files must contain a list of tasks")

            block_list = load_list_of_blocks(
                data,
                play=included_file._task._block._play,
                parent_block=included_file._task._block,
                task_include=included_file._task,
                role=included_file._task._role,
                use_handlers=is_handler,
                loader=self._loader
            )

            # since we skip incrementing the stats when the task result is
            # first processed, we do so now for each host in the list
            for host in included_file._hosts:
                self._tqm._stats.increment('ok', host.name)

        except AnsibleError as e:
            # mark all of the hosts including this file as failed, send callbacks,
            # and increment the stats for this host
            for host in included_file._hosts:
                tr = TaskResult(host=host, task=included_file._task, return_data=dict(failed=True, reason=str(e)))
                iterator.mark_host_failed(host)
                self._tqm._failed_hosts[host.name] = True
                self._tqm._stats.increment('failures', host.name)
                self._tqm.send_callback('v2_runner_on_failed', tr)
            return []

        # set the vars for this task from those specified as params to the include
        for b in block_list:
            # first make a copy of the including task, so that each has a unique copy to modify
            # FIXME: not sure if this is the best way to fix this, as we might be losing
            #        information in the copy. Previously we assigned the include params to
            #        the block variables directly, which caused other problems, so we may
            #        need to figure out a third option if this also presents problems.
            b._task_include = b._task_include.copy(exclude_block=True)
            # then we create a temporary set of vars to ensure the variable reference is unique
            temp_vars = b._task_include.vars.copy()
            temp_vars.update(included_file._args.copy())
            # pop tags out of the include args, if they were specified there, and assign
            # them to the include. If the include already had tags specified, we raise an
            # error so that users know not to specify them both ways
            tags = temp_vars.pop('tags', [])
            if isinstance(tags, string_types):
                tags = [ tags ]
            if len(tags) > 0:
                if len(b._task_include.tags) > 0:
                    raise AnsibleParserError("Include tasks should not specify tags in more than one way (both via args and directly on the task)", obj=included_file._task._ds)
                self._display.deprecated("You should not specify tags in the include parameters. All tags should be specified using the task-level option")
                b._task_include.tags = tags
            b._task_include.vars = temp_vars

        # finally, send the callback and return the list of blocks loaded
        self._tqm.send_callback('v2_playbook_on_include', included_file)
        return block_list
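
Example #10 also pops `tags` out of the include parameters, normalizes a bare string into a list, and raises if tags were specified both as include arguments and directly on the task (later revisions, such as Example #13, split a comma-separated string instead of wrapping it). A simplified standalone sketch of that normalization and conflict check, with a plain `ValueError` in place of `AnsibleParserError`:

    def extract_include_tags(include_vars, task_tags):
        tags = include_vars.pop('tags', [])
        if isinstance(tags, str):
            # treat a comma-separated string as a list of tags
            tags = tags.split(',')
        if tags and task_tags:
            raise ValueError("Include tasks should not specify tags in more than one way "
                             "(both via args and directly on the task)")
        return tags or task_tags

    print(extract_include_tags({'tags': 'web,db', 'x': 1}, []))  # ['web', 'db']
    print(extract_include_tags({'x': 1}, ['setup']))             # ['setup']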
Example #11
    def _load_role_data(self, role_include, parent_role=None):
        self._role_name = role_include.role
        self._role_path = role_include.get_role_path()
        self._role_params = role_include.get_role_params()
        self._variable_manager = role_include.get_variable_manager()
        self._loader = role_include.get_loader()

        if parent_role:
            self.add_parent(parent_role)

        current_when = getattr(self, 'when')[:]
        current_when.extend(role_include.when)
        setattr(self, 'when', current_when)

        current_tags = getattr(self, 'tags')[:]
        current_tags.extend(role_include.tags)
        setattr(self, 'tags', current_tags)

        # save the current base directory for the loader and set it to the current role path
        #cur_basedir = self._loader.get_basedir()
        #self._loader.set_basedir(self._role_path)

        # load the role's files, if they exist
        library = os.path.join(self._role_path, 'library')
        if os.path.isdir(library):
            module_loader.add_directory(library)

        metadata = self._load_role_yaml('meta')
        if metadata:
            self._metadata = RoleMetadata.load(metadata,
                                               owner=self,
                                               loader=self._loader)
            self._dependencies = self._load_dependencies()

        task_data = self._load_role_yaml('tasks')
        if task_data:
            self._task_blocks = load_list_of_blocks(task_data,
                                                    role=self,
                                                    loader=self._loader)

        handler_data = self._load_role_yaml('handlers')
        if handler_data:
            self._handler_blocks = load_list_of_blocks(handler_data,
                                                       role=self,
                                                       loader=self._loader)

        # vars and default vars are regular dictionaries
        self._role_vars = self._load_role_yaml('vars')
        if not isinstance(self._role_vars, (dict, NoneType)):
            raise AnsibleParserError(
                "The vars/main.yml file for role '%s' must contain a dictionary of variables"
                % self._role_name)
        elif self._role_vars is None:
            self._role_vars = dict()

        self._default_vars = self._load_role_yaml('defaults')
        if not isinstance(self._default_vars, (dict, NoneType)):
            raise AnsibleParserError(
                "The default/main.yml file for role '%s' must contain a dictionary of variables"
                % self._role_name)
        elif self._default_vars is None:
            self._default_vars = dict()
Example #12
 def _load_handlers(self, attr, ds):
     '''
     Loads a list of blocks from a list which may be mixed handlers/blocks.
     Bare handlers outside of a block are given an implicit block.
     '''
     return load_list_of_blocks(ds=ds, play=self, use_handlers=True, variable_manager=self._variable_manager, loader=self._loader)
Example #13
 def _load_post_tasks(self, attr, ds):
     '''
     Loads a list of blocks from a list which may be mixed tasks/blocks.
     Bare tasks outside of a block are given an implicit block.
     '''
     return load_list_of_blocks(ds=ds, play=self, variable_manager=self._variable_manager, loader=self._loader)
Example #14
    def _load_included_file(self, included_file, iterator, is_handler=False):
        '''
        Loads an included YAML file of tasks, applying the optional set of variables.
        '''

        display.debug("loading included file: %s" % included_file._filename)
        try:
            data = self._loader.load_from_file(included_file._filename)
            if data is None:
                return []
            elif not isinstance(data, list):
                raise AnsibleError("included task files must contain a list of tasks")

            block_list = load_list_of_blocks(
                data,
                play=included_file._task._block._play,
                parent_block=None,
                task_include=included_file._task,
                role=included_file._task._role,
                use_handlers=is_handler,
                loader=self._loader,
                variable_manager=self._variable_manager,
            )

            # since we skip incrementing the stats when the task result is
            # first processed, we do so now for each host in the list
            for host in included_file._hosts:
                self._tqm._stats.increment('ok', host.name)

        except AnsibleError as e:
            # mark all of the hosts including this file as failed, send callbacks,
            # and increment the stats for this host
            for host in included_file._hosts:
                tr = TaskResult(host=host, task=included_file._task, return_data=dict(failed=True, reason=to_unicode(e)))
                iterator.mark_host_failed(host)
                self._tqm._failed_hosts[host.name] = True
                self._tqm._stats.increment('failures', host.name)
                self._tqm.send_callback('v2_runner_on_failed', tr)
            return []

        # set the vars for this task from those specified as params to the include
        for b in block_list:
            # first make a copy of the including task, so that each has a unique copy to modify
            b._task_include = b._task_include.copy()
            # then we create a temporary set of vars to ensure the variable reference is unique
            temp_vars = b._task_include.vars.copy()
            temp_vars.update(included_file._args.copy())
            # pop tags out of the include args, if they were specified there, and assign
            # them to the include. If the include already had tags specified, we raise an
            # error so that users know not to specify them both ways
            tags = temp_vars.pop('tags', [])
            if isinstance(tags, string_types):
                tags = tags.split(',')
            if len(tags) > 0:
                if len(b._task_include.tags) > 0:
                    raise AnsibleParserError("Include tasks should not specify tags in more than one way (both via args and directly on the task). Mixing tag specify styles is prohibited for whole import hierarchy, not only for single import statement",
                            obj=included_file._task._ds)
                display.deprecated("You should not specify tags in the include parameters. All tags should be specified using the task-level option")
                b._task_include.tags = tags
            b._task_include.vars = temp_vars

        # finally, send the callback and return the list of blocks loaded
        self._tqm.send_callback('v2_playbook_on_include', included_file)
        display.debug("done processing included file")
        return block_list
Example #15
    def _load_role_data(self, role_include, parent_role=None):
        self._role_name = role_include.role
        self._role_path = role_include.get_role_path()
        self._role_collection = role_include._role_collection
        self._role_params = role_include.get_role_params()
        self._variable_manager = role_include.get_variable_manager()
        self._loader = role_include.get_loader()

        if parent_role:
            self.add_parent(parent_role)

        # copy over all field attributes from the RoleInclude
        # update self._attributes directly, to avoid squashing
        for (attr_name, dump) in iteritems(self._valid_attrs):
            if attr_name in ('when', 'tags'):
                self._attributes[attr_name] = self._extend_value(
                    self._attributes[attr_name],
                    role_include._attributes[attr_name],
                )
            else:
                self._attributes[attr_name] = role_include._attributes[
                    attr_name]

        # vars and default vars are regular dictionaries
        self._role_vars = self._load_role_yaml(
            'vars', main=self._from_files.get('vars'), allow_dir=True)
        if self._role_vars is None:
            self._role_vars = {}
        elif not isinstance(self._role_vars, Mapping):
            raise AnsibleParserError(
                "The vars/main.yml file for role '%s' must contain a dictionary of variables"
                % self._role_name)

        self._default_vars = self._load_role_yaml(
            'defaults', main=self._from_files.get('defaults'), allow_dir=True)
        if self._default_vars is None:
            self._default_vars = {}
        elif not isinstance(self._default_vars, Mapping):
            raise AnsibleParserError(
                "The defaults/main.yml file for role '%s' must contain a dictionary of variables"
                % self._role_name)

        # load the role's other files, if they exist
        metadata = self._load_role_yaml('meta')
        if metadata:
            self._metadata = RoleMetadata.load(
                metadata,
                owner=self,
                variable_manager=self._variable_manager,
                loader=self._loader)
            self._dependencies = self._load_dependencies()
        else:
            self._metadata = RoleMetadata()

        # reset collections list; roles do not inherit collections from parents, just use the defaults
        # FUTURE: use a private config default for this so we can allow it to be overridden later
        self.collections = []

        # configure plugin/collection loading; either prepend the current role's collection or configure legacy plugin loading
        # FIXME: need exception for explicit ansible.legacy?
        if self._role_collection:  # this is a collection-hosted role
            self.collections.insert(0, self._role_collection)
        else:  # this is a legacy role, but set the default collection if there is one
            default_collection = AnsibleCollectionConfig.default_collection
            if default_collection:
                self.collections.insert(0, default_collection)
            # legacy role, ensure all plugin dirs under the role are added to plugin search path
            add_all_plugin_dirs(self._role_path)

        # collections can be specified in metadata for legacy or collection-hosted roles
        if self._metadata.collections:
            self.collections.extend((c for c in self._metadata.collections
                                     if c not in self.collections))

        # if any collections were specified, ensure that core or legacy synthetic collections are always included
        if self.collections:
            # default append collection is core for collection-hosted roles, legacy for others
            default_append_collection = 'ansible.builtin' if self._role_collection else 'ansible.legacy'
            if 'ansible.builtin' not in self.collections and 'ansible.legacy' not in self.collections:
                self.collections.append(default_append_collection)

        task_data = self._load_role_yaml('tasks',
                                         main=self._from_files.get('tasks'))

        if self._should_validate:
            role_argspecs = self._get_role_argspecs()
            task_data = self._prepend_validation_task(task_data, role_argspecs)

        if task_data:
            try:
                self._task_blocks = load_list_of_blocks(
                    task_data,
                    play=self._play,
                    role=self,
                    loader=self._loader,
                    variable_manager=self._variable_manager)
            except AssertionError as e:
                raise AnsibleParserError(
                    "The tasks/main.yml file for role '%s' must contain a list of tasks"
                    % self._role_name,
                    obj=task_data,
                    orig_exc=e)

        handler_data = self._load_role_yaml(
            'handlers', main=self._from_files.get('handlers'))
        if handler_data:
            try:
                self._handler_blocks = load_list_of_blocks(
                    handler_data,
                    play=self._play,
                    role=self,
                    use_handlers=True,
                    loader=self._loader,
                    variable_manager=self._variable_manager)
            except AssertionError as e:
                raise AnsibleParserError(
                    "The handlers/main.yml file for role '%s' must contain a list of tasks"
                    % self._role_name,
                    obj=handler_data,
                    orig_exc=e)
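
Example #15 builds the role's collection search list in a fixed order: the hosting collection (or default collection) first, then metadata collections that are not already present, and finally `ansible.builtin` or `ansible.legacy` as a synthetic fallback when neither was listed. A simplified sketch of just that ordering logic (the helper name is illustrative):

    def build_collection_search_list(role_collection, metadata_collections):
        collections = []
        if role_collection:
            # collection-hosted roles always search their own collection first
            collections.insert(0, role_collection)
        # keep metadata entries in order, skipping duplicates
        collections.extend(c for c in metadata_collections if c not in collections)
        if collections and not {'ansible.builtin', 'ansible.legacy'} & set(collections):
            # core for collection-hosted roles, legacy for everything else
            collections.append('ansible.builtin' if role_collection else 'ansible.legacy')
        return collections

    print(build_collection_search_list('acme.web', ['community.general', 'acme.web']))
    # -> ['acme.web', 'community.general', 'ansible.builtin']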
Example #16
    def _include_tasks_in_blocks(self,
                                 current_play,
                                 graph,
                                 parent_node_name,
                                 parent_node_id,
                                 block,
                                 color,
                                 current_counter,
                                 play_vars=None,
                                 node_name_prefix=""):
        """
        Recursively read all the tasks of the block and add it to the graph
        FIXME: This function needs some refactoring. Thinking of a BlockGrapher to handle this
        :param current_play:
        :type current_play: ansible.playbook.play.Play
        :param graph:
        :type graph:
        :param parent_node_name:
        :type parent_node_name: str
        :param parent_node_id:
        :type parent_node_id: str
        :param block:
        :type block: Union[Block,TaskInclude]
        :param color:
        :type color: str
        :param current_counter:
        :type current_counter: int
        :param play_vars:
        :type play_vars: dict
        :param node_name_prefix:
        :type node_name_prefix: str
        :return:
        :rtype:
        """

        loop_counter = current_counter
        # loop through the tasks
        for counter, task_or_block in enumerate(block.block, 1):
            if isinstance(task_or_block, Block):
                loop_counter = self._include_tasks_in_blocks(
                    current_play=current_play,
                    graph=graph,
                    parent_node_name=parent_node_name,
                    parent_node_id=parent_node_id,
                    block=task_or_block,
                    color=color,
                    current_counter=loop_counter,
                    play_vars=play_vars,
                    node_name_prefix=node_name_prefix)
            elif isinstance(
                    task_or_block, TaskInclude
            ):  # include, include_tasks, include_role are dynamic
                # So we need to process it explicitly because Ansible does it during the execution of the playbook

                task_vars = self.variable_manager.get_vars(play=current_play,
                                                           task=task_or_block)

                if isinstance(task_or_block, IncludeRole):

                    self.display.v(
                        "An 'include_role' found. Including tasks from '{}'".
                        format(task_or_block.args["name"]))
                    # here we have an include_role. The class IncludeRole is a subclass of TaskInclude.
                    # We do this because the management of an include_role is different.
                    # See :func:`~ansible.playbook.included_file.IncludedFile.process_include_results` from line 155
                    my_blocks, _ = task_or_block.get_block_list(
                        play=current_play,
                        loader=self.data_loader,
                        variable_manager=self.variable_manager)
                else:
                    self.display.v(
                        "An 'include_tasks' found. Including tasks from '{}'".
                        format(task_or_block.get_name()))
                    templar = Templar(loader=self.data_loader,
                                      variables=task_vars)
                    try:
                        include_file = handle_include_path(
                            original_task=task_or_block,
                            loader=self.data_loader,
                            templar=templar)
                    except AnsibleUndefinedVariable as e:
                        # TODO: mark this task with some special shape or color
                        self.display.warning(
                            "Unable to translate the include task '{}' due to an undefined variable: {}. "
                            "Some variables are available only during the real execution."
                            .format(task_or_block.get_name(), str(e)))
                        loop_counter += 1
                        self._include_task(task_or_block, loop_counter,
                                           task_vars, graph, node_name_prefix,
                                           color, parent_node_id,
                                           parent_node_name)
                        continue

                    data = self.data_loader.load_from_file(include_file)
                    if data is None:
                        self.display.warning(
                            "file %s is empty and had no tasks to include" %
                            include_file)
                        continue
                    elif not isinstance(data, list):
                        raise AnsibleParserError(
                            "included task files must contain a list of tasks",
                            obj=data)

                    # get the blocks from the include_tasks
                    my_blocks = load_list_of_blocks(
                        data,
                        play=current_play,
                        variable_manager=self.variable_manager,
                        role=task_or_block._role,
                        loader=self.data_loader,
                        parent_block=task_or_block)

                for b in my_blocks:  # loop through the blocks inside the included tasks or role
                    loop_counter = self._include_tasks_in_blocks(
                        current_play=current_play,
                        graph=graph,
                        parent_node_name=parent_node_name,
                        parent_node_id=parent_node_id,
                        block=b,
                        color=color,
                        current_counter=loop_counter,
                        play_vars=task_vars,
                        node_name_prefix=node_name_prefix)
            else:
                # check if this task comes from a role and we don't want to include the role's tasks
                if has_role_parent(
                        task_or_block) and not self.options.include_role_tasks:
                    # skip role's task
                    self.display.vv(
                        "The task '{}' has a role as parent and include_role_tasks is false. "
                        "It will be skipped.".format(task_or_block.get_name()))
                    continue

                self._include_task(task_or_block=task_or_block,
                                   loop_counter=loop_counter + 1,
                                   play_vars=play_vars,
                                   graph=graph,
                                   node_name_prefix=node_name_prefix,
                                   color=color,
                                   parent_node_id=parent_node_id,
                                   parent_node_name=parent_node_name)

                loop_counter += 1

        return loop_counter
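
Example #16 walks each block recursively, threading a single counter through nested blocks and dynamic includes so every task gets a sequential number. A bare-bones sketch of that traversal over plain nested lists, where a list stands in for a Block and a string for a Task (both stand-ins are illustrative):

    def walk_block(block, counter=0):
        # recurse into nested "blocks" (lists) and number the leaf "tasks" (strings)
        for entry in block:
            if isinstance(entry, list):
                counter = walk_block(entry, counter)
            else:
                counter += 1
                print("%d: %s" % (counter, entry))
        return counter

    play_tasks = ['setup', ['install nginx', 'template config', ['restart nginx']], 'verify']
    print("total tasks:", walk_block(play_tasks))  # total tasks: 5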
Example #17
 def _load_handlers(self, attr, ds):
     '''
     Loads a list of blocks from a list which may be mixed handlers/blocks.
     Bare handlers outside of a block are given an implicit block.
     '''
     return load_list_of_blocks(ds, loader=self._loader)
Example #18
    def _load_role_data(self, role_include, parent_role=None):
        self._role_name = role_include.role
        self._role_path = role_include.get_role_path()
        self._role_params = role_include.get_role_params()
        self._variable_manager = role_include.get_variable_manager()
        self._loader = role_include.get_loader()

        push_basedir(self._role_path)

        if parent_role:
            self.add_parent(parent_role)

        # copy over all field attributes, except for when and tags, which
        # are special cases and need to preserve pre-existing values
        for (attr_name, _) in iteritems(self._get_base_attributes()):
            if attr_name not in ('when', 'tags'):
                setattr(self, attr_name, getattr(role_include, attr_name))

        current_when = getattr(self, 'when')[:]
        current_when.extend(role_include.when)
        setattr(self, 'when', current_when)

        current_tags = getattr(self, 'tags')[:]
        current_tags.extend(role_include.tags)
        setattr(self, 'tags', current_tags)

        # dynamically load any plugins from the role directory
        for name, obj in get_all_plugin_loaders():
            if obj.subdir:
                plugin_path = os.path.join(self._role_path, obj.subdir)
                if os.path.isdir(plugin_path):
                    obj.add_directory(plugin_path)

        # load the role's other files, if they exist
        metadata = self._load_role_yaml('meta')
        if metadata:
            self._metadata = RoleMetadata.load(metadata,
                                               owner=self,
                                               loader=self._loader)
            self._dependencies = self._load_dependencies()

        task_data = self._load_role_yaml('tasks')
        if task_data:
            self._task_blocks = load_list_of_blocks(task_data,
                                                    play=None,
                                                    role=self,
                                                    loader=self._loader)

        handler_data = self._load_role_yaml('handlers')
        if handler_data:
            self._handler_blocks = load_list_of_blocks(handler_data,
                                                       play=None,
                                                       role=self,
                                                       use_handlers=True,
                                                       loader=self._loader)

        # vars and default vars are regular dictionaries
        self._role_vars = self._load_role_yaml('vars')
        if not isinstance(self._role_vars, (dict, NoneType)):
            raise AnsibleParserError(
                "The vars/main.yml file for role '%s' must contain a dictionary of variables"
                % self._role_name)
        elif self._role_vars is None:
            self._role_vars = dict()

        self._default_vars = self._load_role_yaml('defaults')
        if not isinstance(self._default_vars, (dict, NoneType)):
            raise AnsibleParserError(
                "The default/main.yml file for role '%s' must contain a dictionary of variables"
                % self._role_name)
        elif self._default_vars is None:
            self._default_vars = dict()
Example #19
    def _load_included_file(self, included_file, iterator, is_handler=False):
        '''
        Loads an included YAML file of tasks, applying the optional set of variables.
        '''

        display.debug("loading included file: %s" % included_file._filename)
        try:
            data = self._loader.load_from_file(included_file._filename)
            if data is None:
                return []
            elif not isinstance(data, list):
                raise AnsibleError(
                    "included task files must contain a list of tasks")

            ti_copy = included_file._task.copy()
            temp_vars = ti_copy.vars.copy()
            temp_vars.update(included_file._args)
            # pop tags out of the include args, if they were specified there, and assign
            # them to the include. If the include already had tags specified, we raise an
            # error so that users know not to specify them both ways
            tags = included_file._task.vars.pop('tags', [])
            if isinstance(tags, string_types):
                tags = tags.split(',')
            if len(tags) > 0:
                if len(included_file._task.tags) > 0:
                    raise AnsibleParserError(
                        "Include tasks should not specify tags in more than one way (both via args and directly on the task). Mixing tag specify styles is prohibited for whole import hierarchy, not only for single import statement",
                        obj=included_file._task._ds)
                display.deprecated(
                    "You should not specify tags in the include parameters. All tags should be specified using the task-level option"
                )
                included_file._task.tags = tags

            ti_copy.vars = temp_vars

            block_list = load_list_of_blocks(
                data,
                play=iterator._play,
                parent_block=None,
                task_include=ti_copy,
                role=included_file._task._role,
                use_handlers=is_handler,
                loader=self._loader,
                variable_manager=self._variable_manager,
            )

            # since we skip incrementing the stats when the task result is
            # first processed, we do so now for each host in the list
            for host in included_file._hosts:
                self._tqm._stats.increment('ok', host.name)

        except AnsibleError as e:
            # mark all of the hosts including this file as failed, send callbacks,
            # and increment the stats for this host
            for host in included_file._hosts:
                tr = TaskResult(host=host,
                                task=included_file._task,
                                return_data=dict(failed=True,
                                                 reason=to_text(e)))
                iterator.mark_host_failed(host)
                self._tqm._failed_hosts[host.name] = True
                self._tqm._stats.increment('failures', host.name)
                self._tqm.send_callback('v2_runner_on_failed', tr)
            return []

        # finally, send the callback and return the list of blocks loaded
        self._tqm.send_callback('v2_playbook_on_include', included_file)
        display.debug("done processing included file")
        return block_list
Example #20
 def _load_handlers(self, attr, ds):
     '''
     Loads a list of blocks from a list which may be mixed handlers/blocks.
     Bare handlers outside of a block are given an implicit block.
     '''
     return load_list_of_blocks(ds, loader=self._loader)
Example #21
class StrategyBase:
    '''
    This is the base class for strategy plugins, which contains some common
    code useful to all strategies like running handlers, cleanup actions, etc.
    '''
    def __init__(self, tqm):
        self._tqm = tqm
        self._inventory = tqm.get_inventory()
        self._workers = tqm.get_workers()
        self._notified_handlers = tqm.get_notified_handlers()
        self._variable_manager = tqm.get_variable_manager()
        self._loader = tqm.get_loader()
        self._final_q = tqm._final_q
        self._step = getattr(tqm._options, 'step', False)
        self._diff = getattr(tqm._options, 'diff', False)
        self._display = display

        # internal counters
        self._pending_results = 0
        self._cur_worker = 0

        # this dictionary is used to keep track of hosts that have
        # outstanding tasks still in queue
        self._blocked_hosts = dict()

    def run(self, iterator, play_context, result=True):
        # save the failed/unreachable hosts, as the run_handlers()
        # method will clear that information during its execution
        failed_hosts = self._tqm._failed_hosts.keys()
        unreachable_hosts = self._tqm._unreachable_hosts.keys()

        self._display.debug("running handlers")
        result &= self.run_handlers(iterator, play_context)

        # now update with the hosts (if any) that failed or were
        # unreachable during the handler execution phase
        failed_hosts = set(failed_hosts).union(self._tqm._failed_hosts.keys())
        unreachable_hosts = set(unreachable_hosts).union(
            self._tqm._unreachable_hosts.keys())

        # send the stats callback
        self._tqm.send_callback('v2_playbook_on_stats', self._tqm._stats)

        if len(unreachable_hosts) > 0:
            return 3
        elif len(failed_hosts) > 0:
            return 2
        elif not result:
            return 1
        else:
            return 0

    def get_hosts_remaining(self, play):
        return [
            host for host in self._inventory.get_hosts(play.hosts)
            if host.name not in self._tqm._failed_hosts
            and host.name not in self._tqm._unreachable_hosts
        ]

    def get_failed_hosts(self, play):
        return [
            host for host in self._inventory.get_hosts(play.hosts)
            if host.name in self._tqm._failed_hosts
        ]

    def add_tqm_variables(self, vars, play):
        '''
        Base class method to add extra variables/information to the list of task
        vars sent through the executor engine regarding the task queue manager state.
        '''

        new_vars = vars.copy()
        new_vars['ansible_current_hosts'] = self.get_hosts_remaining(play)
        new_vars['ansible_failed_hosts'] = self.get_failed_hosts(play)
        return new_vars

    def _queue_task(self, host, task, task_vars, play_context):
        ''' handles queueing the task up to be sent to a worker '''

        self._display.debug("entering _queue_task() for %s/%s" % (host, task))

        # and then queue the new task
        self._display.debug("%s - putting task (%s) in queue" % (host, task))
        try:
            self._display.debug("worker is %d (out of %d available)" %
                                (self._cur_worker + 1, len(self._workers)))

            (worker_prc, main_q, rslt_q) = self._workers[self._cur_worker]
            self._cur_worker += 1
            if self._cur_worker >= len(self._workers):
                self._cur_worker = 0

            # create a dummy object with plugin loaders set as an easier
            # way to share them with the forked processes
            shared_loader_obj = SharedPluginLoaderObj()

            main_q.put((host, task, self._loader.get_basedir(), task_vars,
                        play_context, shared_loader_obj),
                       block=False)
            self._pending_results += 1
        except (EOFError, IOError, AssertionError) as e:
            # most likely an abort
            self._display.debug("got an error while queuing: %s" % e)
            return
        self._display.debug("exiting _queue_task() for %s/%s" % (host, task))

    def _process_pending_results(self, iterator):
        '''
        Reads results off the final queue and takes appropriate action
        based on the result (executing callbacks, updating state, etc.).
        '''

        ret_results = []

        while not self._final_q.empty() and not self._tqm._terminated:
            try:
                result = self._final_q.get(block=False)
                self._display.debug("got result from result worker: %s" %
                                    ([unicode(x) for x in result], ))

                # all host status messages contain 2 entries: (msg, task_result)
                if result[0] in ('host_task_ok', 'host_task_failed',
                                 'host_task_skipped', 'host_unreachable'):
                    task_result = result[1]
                    host = task_result._host
                    task = task_result._task
                    if result[0] == 'host_task_failed' or task_result.is_failed(
                    ):
                        if not task.ignore_errors:
                            self._display.debug("marking %s as failed" %
                                                host.name)
                            iterator.mark_host_failed(host)
                            self._tqm._failed_hosts[host.name] = True
                            self._tqm._stats.increment('failures', host.name)
                        else:
                            self._tqm._stats.increment('ok', host.name)
                        self._tqm.send_callback(
                            'v2_runner_on_failed',
                            task_result,
                            ignore_errors=task.ignore_errors)
                    elif result[0] == 'host_unreachable':
                        self._tqm._unreachable_hosts[host.name] = True
                        self._tqm._stats.increment('dark', host.name)
                        self._tqm.send_callback('v2_runner_on_unreachable',
                                                task_result)
                    elif result[0] == 'host_task_skipped':
                        self._tqm._stats.increment('skipped', host.name)
                        self._tqm.send_callback('v2_runner_on_skipped',
                                                task_result)
                    elif result[0] == 'host_task_ok':
                        self._tqm._stats.increment('ok', host.name)
                        if 'changed' in task_result._result and task_result._result[
                                'changed']:
                            self._tqm._stats.increment('changed', host.name)
                        self._tqm.send_callback('v2_runner_on_ok', task_result)

                        if self._diff and 'diff' in task_result._result:
                            self._tqm.send_callback('v2_on_file_diff',
                                                    task_result)

                    self._pending_results -= 1
                    if host.name in self._blocked_hosts:
                        del self._blocked_hosts[host.name]

                    # If this is a role task, mark the parent role as being run (if
                    # the task was ok or failed, but not skipped or unreachable)
                    if task_result._task._role is not None and result[0] in (
                            'host_task_ok', 'host_task_failed'):
                        # lookup the role in the ROLE_CACHE to make sure we're dealing
                        # with the correct object and mark it as executed
                        for (entry, role_obj) in iterator._play.ROLE_CACHE[
                                task_result._task._role._role_name].iteritems(
                                ):
                            if role_obj._uuid == task_result._task._role._uuid:
                                role_obj._had_task_run[host.name] = True

                    ret_results.append(task_result)

                elif result[0] == 'add_host':
                    task_result = result[1]
                    new_host_info = task_result.get('add_host', dict())

                    self._add_host(new_host_info)

                elif result[0] == 'add_group':
                    task = result[1]
                    self._add_group(task, iterator)

                elif result[0] == 'notify_handler':
                    task_result = result[1]
                    handler_name = result[2]

                    original_task = iterator.get_original_task(
                        task_result._host, task_result._task)
                    if handler_name not in self._notified_handlers:
                        self._notified_handlers[handler_name] = []

                    if task_result._host not in self._notified_handlers[
                            handler_name]:
                        self._notified_handlers[handler_name].append(
                            task_result._host)

                elif result[0] == 'register_host_var':
                    # essentially the same as 'set_host_var' below, however we
                    # never follow the delegate_to value for registered vars
                    host = result[1]
                    var_name = result[2]
                    var_value = result[3]
                    self._variable_manager.set_host_variable(
                        host, var_name, var_value)

                elif result[0] in ('set_host_var', 'set_host_facts'):
                    host = result[1]
                    task = result[2]
                    item = result[3]

                    if task.delegate_to is not None:
                        task_vars = self._variable_manager.get_vars(
                            loader=self._loader,
                            play=iterator._play,
                            host=host,
                            task=task)
                        task_vars = self.add_tqm_variables(task_vars,
                                                           play=iterator._play)
                        if item is not None:
                            task_vars['item'] = item
                        templar = Templar(loader=self._loader,
                                          variables=task_vars)
                        host_name = templar.template(task.delegate_to)
                        target_host = self._inventory.get_host(host_name)
                        if target_host is None:
                            target_host = Host(name=host_name)
                    else:
                        target_host = host

                    if result[0] == 'set_host_var':
                        var_name = result[4]
                        var_value = result[5]
                        self._variable_manager.set_host_variable(
                            target_host, var_name, var_value)
                    elif result[0] == 'set_host_facts':
                        facts = result[4]
                        self._variable_manager.set_host_facts(
                            target_host, facts)

                else:
                    raise AnsibleError("unknown result message received: %s" %
                                       result[0])
            except Queue.Empty:
                pass

        return ret_results

    def _wait_on_pending_results(self, iterator):
        '''
        Wait for the shared counter to drop to zero, using a short sleep
        between checks to ensure we don't spin lock
        '''

        ret_results = []

        self._display.debug("waiting for pending results...")
        while self._pending_results > 0 and not self._tqm._terminated:
            results = self._process_pending_results(iterator)
            ret_results.extend(results)
            time.sleep(0.01)
        self._display.debug("no more pending results, returning what we have")

        return ret_results

    def _add_host(self, host_info):
        '''
        Helper function to add a new host to inventory based on a task result.
        '''

        host_name = host_info.get('host_name')

        # Check if host in cache, add if not
        if host_name in self._inventory._hosts_cache:
            new_host = self._inventory._hosts_cache[host_name]
        else:
            new_host = Host(name=host_name)
            self._inventory._hosts_cache[host_name] = new_host

            allgroup = self._inventory.get_group('all')
            allgroup.add_host(new_host)

        # Set/update the vars for this host
        # FIXME: probably should have a set vars method for the host?
        new_vars = host_info.get('host_vars', dict())
        new_host.vars.update(new_vars)

        new_groups = host_info.get('groups', [])
        for group_name in new_groups:
            if not self._inventory.get_group(group_name):
                new_group = Group(group_name)
                self._inventory.add_group(new_group)
                new_group.vars = self._inventory.get_group_variables(
                    group_name)
            else:
                new_group = self._inventory.get_group(group_name)

            new_group.add_host(new_host)

            # add this host to the group cache
            if self._inventory._groups_list is not None:
                if group_name in self._inventory._groups_list:
                    if new_host.name not in self._inventory._groups_list[
                            group_name]:
                        self._inventory._groups_list[group_name].append(
                            new_host.name)

        # clear pattern caching completely since it's unpredictable what
        # patterns may have referenced the group
        # FIXME: is this still required?
        self._inventory.clear_pattern_cache()

    def _add_group(self, task, iterator):
        '''
        Helper function to add a group (if it does not exist) and to assign
        each matching host to that group.
        '''

        # the host here is from the executor side, which means it was a
        # serialized/cloned copy and we'll need to look up the proper
        # host object from the master inventory
        groups = {}
        changed = False

        for host in self._inventory.get_hosts():
            original_task = iterator.get_original_task(host, task)
            all_vars = self._variable_manager.get_vars(loader=self._loader,
                                                       play=iterator._play,
                                                       host=host,
                                                       task=original_task)
            templar = Templar(loader=self._loader, variables=all_vars)
            group_name = templar.template(original_task.args.get('key'))
            if task.evaluate_conditional(templar=templar, all_vars=all_vars):
                if group_name not in groups:
                    groups[group_name] = []
                groups[group_name].append(host)

        for group_name, hosts in groups.items():
            new_group = self._inventory.get_group(group_name)
            if not new_group:
                # create the new group and add it to inventory
                new_group = Group(name=group_name)
                self._inventory.add_group(new_group)

                # and add the group to the proper hierarchy
                allgroup = self._inventory.get_group('all')
                allgroup.add_child_group(new_group)
                changed = True
            for host in hosts:
                if group_name not in host.get_groups():
                    new_group.add_host(host)
                    changed = True

        return changed
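
This method backs the group_by action: for every host, the task's 'key' argument is templated with that host's variables and the rendered string becomes the group the host is assigned to. The grouping step itself, separated from the inventory and Templar objects, is sketched below; str.format stands in for the real Jinja2 templating and the data is made up.

def group_hosts_by_key(host_vars, key_template):
    """Map each rendered key to the list of host names that produced it."""
    groups = {}
    for host, variables in host_vars.items():
        group_name = key_template.format(**variables)   # stand-in for Templar.template()
        groups.setdefault(group_name, []).append(host)
    return groups

# group_hosts_by_key({'db01': {'os': 'debian'}, 'db02': {'os': 'rhel'}}, 'os_{os}')
# -> {'os_debian': ['db01'], 'os_rhel': ['db02']}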

    def _load_included_file(self, included_file, iterator, is_handler=False):
        '''
        Loads an included YAML file of tasks, applying the optional set of variables.
        '''

        try:
            data = self._loader.load_from_file(included_file._filename)
            if data is None:
                return []
        except AnsibleError as e:
            for host in included_file._hosts:
                tr = TaskResult(host=host,
                                task=included_file._task,
                                return_data=dict(failed=True, reason=str(e)))
                iterator.mark_host_failed(host)
                self._tqm._failed_hosts[host.name] = True
                self._tqm._stats.increment('failures', host.name)
                self._tqm.send_callback('v2_runner_on_failed', tr)
            return []

        if not isinstance(data, list):
            raise AnsibleParserError(
                "included task files must contain a list of tasks",
                obj=included_file._task._ds)

        block_list = load_list_of_blocks(
            data,
            play=included_file._task._block._play,
            parent_block=included_file._task._block,
            task_include=included_file._task,
            role=included_file._task._role,
            use_handlers=is_handler,
            loader=self._loader)

        # set the vars for this task from those specified as params to the include
        for b in block_list:
            temp_vars = b._task_include.vars.copy()
            temp_vars.update(included_file._args.copy())
            b._task_include.vars = temp_vars

        return block_list
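
The final loop decides variable precedence for included tasks: each block's task-include vars are copied first and then updated with the parameters passed to the include itself, so include parameters win on any conflict. In isolation, and with made-up values, the merge is just:

task_include_vars = {'pkg': 'nginx', 'state': 'present'}   # vars already on the include task
include_args = {'state': 'absent'}                         # params passed to the include

temp_vars = task_include_vars.copy()
temp_vars.update(include_args)
# temp_vars == {'pkg': 'nginx', 'state': 'absent'}  (include params override)
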
Exemple #22
0
    def _load_role_data(self, role_include, parent_role=None):
        self._role_name = role_include.role
        self._role_path = role_include.get_role_path()
        self._role_params = role_include.get_role_params()
        self._variable_manager = role_include.get_variable_manager()
        self._loader = role_include.get_loader()

        if parent_role:
            self.add_parent(parent_role)

        # copy over all field attributes from the RoleInclude
        # update self._attributes directly, to avoid squashing
        for (attr_name, _) in iteritems(self._valid_attrs):
            if attr_name in ('when', 'tags'):
                self._attributes[attr_name] = self._extend_value(
                    self._attributes[attr_name],
                    role_include._attributes[attr_name],
                )
            else:
                self._attributes[attr_name] = role_include._attributes[
                    attr_name]

        # ensure all plugins dirs for this role are added to plugin search path
        add_all_plugin_dirs(self._role_path)

        # vars and default vars are regular dictionaries
        self._role_vars = self._load_role_yaml(
            'vars', main=self._from_files.get('vars'), allow_dir=True)
        if self._role_vars is None:
            self._role_vars = dict()
        elif not isinstance(self._role_vars, dict):
            raise AnsibleParserError(
                "The vars/main.yml file for role '%s' must contain a dictionary of variables"
                % self._role_name)

        self._default_vars = self._load_role_yaml(
            'defaults', main=self._from_files.get('defaults'), allow_dir=True)
        if self._default_vars is None:
            self._default_vars = dict()
        elif not isinstance(self._default_vars, dict):
            raise AnsibleParserError(
                "The defaults/main.yml file for role '%s' must contain a dictionary of variables"
                % self._role_name)

        # load the role's other files, if they exist
        metadata = self._load_role_yaml('meta')
        if metadata:
            self._metadata = RoleMetadata.load(
                metadata,
                owner=self,
                variable_manager=self._variable_manager,
                loader=self._loader)
            self._dependencies = self._load_dependencies()
        else:
            self._metadata = RoleMetadata()

        task_data = self._load_role_yaml('tasks',
                                         main=self._from_files.get('tasks'))
        if task_data:
            try:
                self._task_blocks = load_list_of_blocks(
                    task_data,
                    play=self._play,
                    role=self,
                    loader=self._loader,
                    variable_manager=self._variable_manager)
            except AssertionError as e:
                raise AnsibleParserError(
                    "The tasks/main.yml file for role '%s' must contain a list of tasks"
                    % self._role_name,
                    obj=task_data,
                    orig_exc=e)

        handler_data = self._load_role_yaml(
            'handlers', main=self._from_files.get('handlers'))
        if handler_data:
            try:
                self._handler_blocks = load_list_of_blocks(
                    handler_data,
                    play=self._play,
                    role=self,
                    use_handlers=True,
                    loader=self._loader,
                    variable_manager=self._variable_manager)
            except AssertionError as e:
                raise AnsibleParserError(
                    "The handlers/main.yml file for role '%s' must contain a list of tasks"
                    % self._role_name,
                    obj=handler_data,
                    orig_exc=e)
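
Taken together, the calls above define the directory layout this version expects a role to provide: meta/, vars/, defaults/, tasks/ and handlers/, each normally read from its main.yml (the error messages name those files, and vars/ and defaults/ may also be loaded as whole directories via allow_dir=True). The small sketch below lists the paths implied by the method, using a hypothetical role path.

import os

# Subdirectories _load_role_data() asks _load_role_yaml() for, each typically
# resolved to <subdir>/main.yml inside the role.
ROLE_SUBDIRS = ('meta', 'vars', 'defaults', 'tasks', 'handlers')

def expected_role_files(role_path):
    """Return the main.yml paths a conventional role layout would provide."""
    return [os.path.join(role_path, subdir, 'main.yml') for subdir in ROLE_SUBDIRS]

# expected_role_files('/etc/ansible/roles/webserver')
# -> ['/etc/ansible/roles/webserver/meta/main.yml', ..., '.../handlers/main.yml']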