Example #1
    def _create_noop_block_from(self, original_block, parent):
        noop_block = Block(parent_block=parent)
        noop_block.block = self._replace_with_noop(original_block.block)
        noop_block.always = self._replace_with_noop(original_block.always)
        noop_block.rescue = self._replace_with_noop(original_block.rescue)

        return noop_block
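The _replace_with_noop helper is not shown; a minimal sketch consistent with this usage (self._noop_task is a hypothetical pre-built meta 'noop' Task) could look like:

    def _replace_with_noop(self, target):
        # sketch: swap every task for a shared noop task, recursing into
        # nested blocks, so excluded hosts stay in lock-step with the rest
        result = []
        for el in target:
            if isinstance(el, Task):
                result.append(self._noop_task)  # assumed shared noop Task
            elif isinstance(el, Block):
                result.append(self._create_noop_block_from(el, el._parent))
        return result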
Example #2
    def deserialize(self, data):

        # import is here to avoid import loops
        #from ansible.playbook.task_include import TaskInclude

        block_data = data.get('block')

        if block_data:
            b = Block()
            b.deserialize(block_data)
            self._block = b
            del data['block']

        role_data = data.get('role')
        if role_data:
            r = Role()
            r.deserialize(role_data)
            self._role = r
            del data['role']

        ti_data = data.get('task_include')
        if ti_data:
            #ti = TaskInclude()
            ti = Task()
            ti.deserialize(ti_data)
            self._task_include = ti
            del data['task_include']

        super(Task, self).deserialize(data)
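A hedged round-trip sketch (assuming the matching serialize() produces the dict consumed above):

    # hypothetical usage: rebuild a Task from its serialized form
    data = original_task.serialize()
    restored = Task()
    restored.deserialize(data)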
Example #3
    def deserialize(self, data):

        # import is here to avoid import loops
        from ansible.playbook.task_include import TaskInclude
        from ansible.playbook.handler_task_include import HandlerTaskInclude

        parent_data = data.get('parent', None)
        if parent_data:
            parent_type = data.get('parent_type')
            if parent_type == 'Block':
                p = Block()
            elif parent_type == 'TaskInclude':
                p = TaskInclude()
            elif parent_type == 'HandlerTaskInclude':
                p = HandlerTaskInclude()
            p.deserialize(parent_data)
            self._parent = p
            del data['parent']

        role_data = data.get('role')
        if role_data:
            r = Role()
            r.deserialize(role_data)
            self._role = r
            del data['role']

        super(Task, self).deserialize(data)
Example #4
 def test_block__load_list_of_tasks(self):
     task = dict(action='test')
     b = Block()
     self.assertEqual(b._load_list_of_tasks([]), [])
     res = b._load_list_of_tasks([task])
     self.assertEqual(len(res), 1)
     assert isinstance(res[0], Task)
     res = b._load_list_of_tasks([task, task, task])
     self.assertEqual(len(res), 3)
Example #5
    def __init__(self, inventory, play, play_context, variable_manager, all_vars, start_at_done=False):
        self._play = play
        self._blocks = []

        setup_block = Block(play=self._play)
        setup_task = Task(block=setup_block)
        setup_task.action = 'setup'
        setup_task.tags   = ['always']
        setup_task.args   = {}
        setup_task.set_loader(self._play._loader)
        setup_block.block = [setup_task]

        setup_block = setup_block.filter_tagged_tasks(play_context, all_vars)
        self._blocks.append(setup_block)

        for block in self._play.compile():
            new_block = block.filter_tagged_tasks(play_context, all_vars)
            if new_block.has_tasks():
                self._blocks.append(new_block)

        self._host_states = {}
        start_at_matched = False
        for host in inventory.get_hosts(self._play.hosts):
            self._host_states[host.name] = HostState(blocks=self._blocks)
            # if the host's name is in the variable manager's fact cache, then set
            # its _gathered_facts flag to true for smart gathering tests later
            if host.name in variable_manager._fact_cache:
                host._gathered_facts = True
            # if we're looking to start at a specific task, iterate through
            # the tasks for this host until we find the specified task
            if play_context.start_at_task is not None and not start_at_done:
                while True:
                    (s, task) = self.get_next_task_for_host(host, peek=True)
                    if s.run_state == self.ITERATING_COMPLETE:
                        break
                    if task.name == play_context.start_at_task or fnmatch.fnmatch(task.name, play_context.start_at_task) or \
                       task.get_name() == play_context.start_at_task or fnmatch.fnmatch(task.get_name(), play_context.start_at_task):
                        start_at_matched = True
                        break
                    else:
                        self.get_next_task_for_host(host)

                # finally, reset the host's state to ITERATING_SETUP
                if start_at_matched:
                    self._host_states[host.name].did_start_at_task = True
                    self._host_states[host.name].run_state = self.ITERATING_SETUP

        if start_at_matched:
            # we have our match, so clear the start_at_task field on the
            # play context to flag that we've started at a task (and future
            # plays won't try to advance)
            play_context.start_at_task = None

        # Extend the play handlers list to include the handlers defined in roles
        self._play.handlers.extend(play.compile_roles_handlers())
Example #6
def load_list_of_blocks(ds, play, parent_block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
    '''
    Given a list of mixed task/block data (parsed from YAML),
    return a list of Block() objects, where implicit blocks
    are created for each bare Task.
    '''
 
    # we import here to prevent a circular dependency with imports
    from ansible.playbook.block import Block

    assert ds is None or isinstance(ds, list), 'block has bad type: %s' % type(ds)

    block_list = []
    if ds:
        for block in ds:
            b = Block.load(
                block,
                play=play,
                parent_block=parent_block,
                role=role,
                task_include=task_include,
                use_handlers=use_handlers,
                variable_manager=variable_manager,
                loader=loader
            )
            block_list.append(b)

    return block_list
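A hedged usage sketch (my_play, my_vm and my_loader are assumed to exist):

    # hypothetical usage: one explicit block plus one bare task; the bare
    # task is wrapped in an implicit Block
    ds = [
        {'block': [{'debug': {'msg': 'inside a block'}}]},
        {'debug': {'msg': 'bare task'}},
    ]
    blocks = load_list_of_blocks(ds, play=my_play, variable_manager=my_vm, loader=my_loader)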
Example #7
    def compile(self):
        '''
        Compiles and returns the task list for this play, compiled from the
        roles (which are themselves compiled recursively) and/or the list of
        tasks specified in the play.
        '''

        # create a block containing a single flush handlers meta
        # task, so we can be sure to run handlers at certain points
        # of the playbook execution
        flush_block = Block.load(
            data={'meta': 'flush_handlers'},
            play=self,
            variable_manager=self._variable_manager,
            loader=self._loader
        )

        block_list = []

        block_list.extend(self.pre_tasks)
        block_list.append(flush_block)
        block_list.extend(self._compile_roles())
        block_list.extend(self.tasks)
        block_list.append(flush_block)
        block_list.extend(self.post_tasks)
        block_list.append(flush_block)

        return block_list
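Schematically, the compiled task list comes out in this order (a sketch, not output of this code):

    # pre_tasks
    # meta: flush_handlers
    # roles (compiled recursively)
    # tasks
    # meta: flush_handlers
    # post_tasks
    # meta: flush_handlers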
Example #8
def load_list_of_blocks(ds, play, parent_block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
    '''
    Given a list of mixed task/block data (parsed from YAML),
    return a list of Block() objects, where implicit blocks
    are created for each bare Task.
    '''
 
    # we import here to prevent a circular dependency with imports
    from ansible.playbook.block import Block

    if not isinstance(ds, (list, type(None))):
        raise AnsibleParserError('block has bad type: "%s". Expecting "list"' % type(ds).__name__, obj=ds)

    block_list = []
    if ds:
        for block in ds:
            b = Block.load(
                block,
                play=play,
                parent_block=parent_block,
                role=role,
                task_include=task_include,
                use_handlers=use_handlers,
                variable_manager=variable_manager,
                loader=loader
            )
            block_list.append(b)

    return block_list
Example #9
def load_list_of_blocks(ds, play, parent_block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
    '''
    Given a list of mixed task/block data (parsed from YAML),
    return a list of Block() objects, where implicit blocks
    are created for each bare Task.
    '''
 
    # we import here to prevent a circular dependency with imports
    from ansible.playbook.block import Block

    if not isinstance(ds, (list, type(None))):
        raise AnsibleParserError('block has bad type: "%s". Expecting "list"' % type(ds).__name__, obj=ds)

    block_list = []
    if ds:
        for block in ds:
            b = Block.load(
                block,
                play=play,
                parent_block=parent_block,
                role=role,
                task_include=task_include,
                use_handlers=use_handlers,
                variable_manager=variable_manager,
                loader=loader
            )
            # Implicit blocks are created by bare tasks listed in a play without
            # an explicit block statement. If we have two implicit blocks in a row,
            # squash them down to a single block to save processing time later.
            if b._implicit and len(block_list) > 0 and block_list[-1]._implicit:
                block_list[-1].block.extend(b.block)
            else:
                block_list.append(b)

    return block_list
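A hedged illustration of the squashing (hypothetical data and helper objects):

    # two bare tasks yield two implicit blocks in a row, squashed into one
    ds = [{'debug': {'msg': 'one'}}, {'debug': {'msg': 'two'}}]
    blocks = load_list_of_blocks(ds, play=my_play, loader=my_loader)
    # expected: len(blocks) == 1 and len(blocks[0].block) == 2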
Example #10
 def test_deserialize(self):
     ds = dict(
         block=[dict(action='block')],
         rescue=[dict(action='rescue')],
         always=[dict(action='always')],
     )
     b = Block.load(ds)
     data = dict(parent=ds, parent_type='Block')
     b.deserialize(data)
     self.assertIsInstance(b._parent, Block)
Example #11
 def test_load_block_simple(self):
     ds = dict(
        block = [],
        rescue = [],
        always = [],
        #otherwise = [],
     )
     b = Block.load(ds)
     self.assertEqual(b.block, [])
     self.assertEqual(b.rescue, [])
     self.assertEqual(b.always, [])
Example #12
 def test_load_block_simple(self):
     ds = dict(
        begin = [],
        rescue = [],
        end = [],
        otherwise = [],
     )
     b = Block.load(ds)
     self.assertEqual(b.begin, [])
     self.assertEqual(b.rescue, [])
     self.assertEqual(b.end, [])
     self.assertEqual(b.otherwise, [])
Example #13
 def test_load_block_with_tasks(self):
     ds = dict(
        block = [dict(action='block')],
        rescue = [dict(action='rescue')],
        always = [dict(action='always')],
        #otherwise = [dict(action='otherwise')],
     )
     b = Block.load(ds)
     self.assertEqual(len(b.block), 1)
     self.assertIsInstance(b.block[0], Task)
     self.assertEqual(len(b.rescue), 1)
     self.assertIsInstance(b.rescue[0], Task)
     self.assertEqual(len(b.always), 1)
     self.assertIsInstance(b.always[0], Task)
Example #14
 def test_load_block_with_tasks(self):
     ds = dict(
        begin = [dict(action='begin')],
        rescue = [dict(action='rescue')],
        end = [dict(action='end')],
        otherwise = [dict(action='otherwise')],
     )
     b = Block.load(ds)
     self.assertEqual(len(b.begin), 1)
     assert isinstance(b.begin[0], Task)
     self.assertEqual(len(b.rescue), 1)
     assert isinstance(b.rescue[0], Task)
     self.assertEqual(len(b.end), 1)
     assert isinstance(b.end[0], Task)
     self.assertEqual(len(b.otherwise), 1)
     assert isinstance(b.otherwise[0], Task)
Example #15
def load_list_of_blocks(ds, parent_block=None, role=None, task_include=None, loader=None):
    """
    Given a list of mixed task/block data (parsed from YAML),
    return a list of Block() objects, where implicit blocks
    are created for each bare Task.
    """

    # we import here to prevent a circular dependency with imports
    from ansible.playbook.block import Block

    assert type(ds) in (list, NoneType)

    block_list = []
    if ds:
        for block in ds:
            b = Block.load(block, parent_block=parent_block, role=role, task_include=task_include, loader=loader)
            block_list.append(b)

    return block_list
Example #16
def load_list_of_blocks(ds, play, parent_block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
    '''
    Given a list of mixed task/block data (parsed from YAML),
    return a list of Block() objects, where implicit blocks
    are created for each bare Task.
    '''

    # we import here to prevent a circular dependency with imports
    from ansible.playbook.block import Block
    from ansible.playbook.task_include import TaskInclude
    from ansible.playbook.role_include import IncludeRole

    assert isinstance(ds, (list, type(None)))

    block_list = []
    if ds:
        for block_ds in ds:
            b = Block.load(
                block_ds,
                play=play,
                parent_block=parent_block,
                role=role,
                task_include=task_include,
                use_handlers=use_handlers,
                variable_manager=variable_manager,
                loader=loader,
            )
            # Implicit blocks are created by bare tasks listed in a play without
            # an explicit block statement. If we have two implicit blocks in a row,
            # squash them down to a single block to save processing time later.
            if b._implicit and len(block_list) > 0 and block_list[-1]._implicit:
                for t in b.block:
                    if isinstance(t._parent, (TaskInclude, IncludeRole)):
                        t._parent._parent = block_list[-1]
                    else:
                        t._parent = block_list[-1]
                block_list[-1].block.extend(b.block)
            else:
                block_list.append(b)

    return block_list
Example #17
def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
    '''
    Given a list of task datastructures (parsed from YAML),
    return a list of Task() or TaskInclude() objects.
    '''

    # we import here to prevent a circular dependency with imports
    from ansible.playbook.block import Block
    from ansible.playbook.handler import Handler
    from ansible.playbook.task import Task

    assert isinstance(ds, list)

    task_list = []
    for task in ds:
        assert isinstance(task, dict)

        if 'block' in task:
            t = Block.load(
                task,
                play=play,
                parent_block=block,
                role=role,
                task_include=task_include,
                use_handlers=use_handlers,
                variable_manager=variable_manager,
                loader=loader,
            )
        else:
            if use_handlers:
                t = Handler.load(task, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader, play=play)
            else:
                t = Task.load(task, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)

        if isinstance(t, list):
            task_list.extend(t)
        else:
            task_list.append(t)

    return task_list
Example #18
def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
    '''
    Given a list of task datastructures (parsed from YAML),
    return a list of Task() or TaskInclude() objects.
    '''

    # we import here to prevent a circular dependency with imports
    from ansible.playbook.block import Block
    from ansible.playbook.handler import Handler
    from ansible.playbook.task import Task

    if not isinstance(ds, list):
        raise AnsibleParserError('task has bad type: "%s". Expected "list"' % type(ds).__name__, obj=ds)

    task_list = []
    for task in ds:
        if not isinstance(task, dict):
            raise AnsibleParserError('task/handler has bad type: "%s". Expected "dict"' % type(task).__name__, obj=task)

        if 'block' in task:
            t = Block.load(
                task,
                play=play,
                parent_block=block,
                role=role,
                task_include=task_include,
                use_handlers=use_handlers,
                variable_manager=variable_manager,
                loader=loader,
            )
        else:
            if use_handlers:
                t = Handler.load(task, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)
            else:
                t = Task.load(task, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)

        task_list.append(t)

    return task_list
Example #19
 def test_construct_empty_block(self):
     b = Block()
Example #20
def load_list_of_tasks(ds,
                       play,
                       block=None,
                       role=None,
                       task_include=None,
                       use_handlers=False,
                       variable_manager=None,
                       loader=None):
    '''
    Given a list of task datastructures (parsed from YAML),
    return a list of Task() or TaskInclude() objects.
    '''

    # we import here to prevent a circular dependency with imports
    from ansible.playbook.block import Block
    from ansible.playbook.handler import Handler
    from ansible.playbook.task import Task
    from ansible.playbook.task_include import TaskInclude
    from ansible.playbook.role_include import IncludeRole
    from ansible.playbook.handler_task_include import HandlerTaskInclude
    from ansible.template import Templar

    if not isinstance(ds, list):
        raise AnsibleAssertionError(
            'The ds (%s) should be a list but was a %s' % (ds, type(ds)))

    task_list = []
    for task_ds in ds:
        if not isinstance(task_ds, dict):
            raise AnsibleAssertionError(
                'The ds (%s) should be a dict but was a %s' % (task_ds, type(task_ds)))

        if 'block' in task_ds:
            t = Block.load(
                task_ds,
                play=play,
                parent_block=block,
                role=role,
                task_include=task_include,
                use_handlers=use_handlers,
                variable_manager=variable_manager,
                loader=loader,
            )
            task_list.append(t)
        else:
            args_parser = ModuleArgsParser(task_ds)
            try:
                (action, args,
                 delegate_to) = args_parser.parse(skip_action_validation=True)
            except AnsibleParserError as e:
                # if the raised exception was created with obj=ds args, it already
                # includes the detail, so we can just re-raise it here
                if e.obj:
                    raise
                # But if it wasn't, we can add the yaml object now to get more detail
                raise AnsibleParserError(to_native(e), obj=task_ds, orig_exc=e)

            if action in C._ACTION_ALL_INCLUDE_IMPORT_TASKS:

                if use_handlers:
                    include_class = HandlerTaskInclude
                else:
                    include_class = TaskInclude

                t = include_class.load(task_ds,
                                       block=block,
                                       role=role,
                                       task_include=None,
                                       variable_manager=variable_manager,
                                       loader=loader)

                all_vars = variable_manager.get_vars(play=play, task=t)
                templar = Templar(loader=loader, variables=all_vars)

                # check to see if this include is dynamic or static:
                # 1. the user has set the 'static' option to false or true
                # 2. one of the appropriate config options was set
                if action in C._ACTION_INCLUDE_TASKS:
                    is_static = False
                elif action in C._ACTION_IMPORT_TASKS:
                    is_static = True
                else:
                    display.deprecated(
                        '"include" is deprecated, use include_tasks/import_tasks instead',
                        "2.16")
                    is_static = not templar.is_template(
                        t.args['_raw_params']) and t.all_parents_static(
                        ) and not t.loop

                if is_static:
                    if t.loop is not None:
                        if action in C._ACTION_IMPORT_TASKS:
                            raise AnsibleParserError(
                                "You cannot use loops on 'import_tasks' statements. You should use 'include_tasks' instead.",
                                obj=task_ds)
                        else:
                            raise AnsibleParserError(
                                "You cannot use 'static' on an include with a loop",
                                obj=task_ds)

                    # we set a flag to indicate this include was static
                    t.statically_loaded = True

                    # handle relative includes by walking up the list of parent include
                    # tasks and checking the relative result to see if it exists
                    parent_include = block
                    cumulative_path = None

                    found = False
                    subdir = 'tasks'
                    if use_handlers:
                        subdir = 'handlers'
                    while parent_include is not None:
                        if not isinstance(parent_include, TaskInclude):
                            parent_include = parent_include._parent
                            continue
                        try:
                            parent_include_dir = os.path.dirname(
                                templar.template(
                                    parent_include.args.get('_raw_params')))
                        except AnsibleUndefinedVariable as e:
                            if not parent_include.statically_loaded:
                                raise AnsibleParserError(
                                    "Error when evaluating variable in dynamic parent include path: %s. "
                                    "When using static imports, the parent dynamic include cannot utilize host facts "
                                    "or variables from inventory" %
                                    parent_include.args.get('_raw_params'),
                                    obj=task_ds,
                                    suppress_extended_error=True,
                                    orig_exc=e)
                            raise
                        if cumulative_path is None:
                            cumulative_path = parent_include_dir
                        elif not os.path.isabs(cumulative_path):
                            cumulative_path = os.path.join(
                                parent_include_dir, cumulative_path)
                        include_target = templar.template(
                            t.args['_raw_params'])
                        if t._role:
                            new_basedir = os.path.join(t._role._role_path,
                                                       subdir, cumulative_path)
                            include_file = loader.path_dwim_relative(
                                new_basedir, subdir, include_target)
                        else:
                            include_file = loader.path_dwim_relative(
                                loader.get_basedir(), cumulative_path,
                                include_target)

                        if os.path.exists(include_file):
                            found = True
                            break
                        else:
                            parent_include = parent_include._parent

                    if not found:
                        try:
                            include_target = templar.template(
                                t.args['_raw_params'])
                        except AnsibleUndefinedVariable as e:
                            raise AnsibleParserError(
                                "Error when evaluating variable in import path: %s.\n\n"
                                "When using static imports, ensure that any variables used in their names are defined in vars/vars_files\n"
                                "or extra-vars passed in from the command line. Static imports cannot use variables from facts or inventory\n"
                                "sources like group or host vars." %
                                t.args['_raw_params'],
                                obj=task_ds,
                                suppress_extended_error=True,
                                orig_exc=e)
                        if t._role:
                            include_file = loader.path_dwim_relative(
                                t._role._role_path, subdir, include_target)
                        else:
                            include_file = loader.path_dwim(include_target)

                    data = loader.load_from_file(include_file)
                    if not data:
                        display.warning(
                            'file %s is empty and had no tasks to include' %
                            include_file)
                        continue
                    elif not isinstance(data, list):
                        raise AnsibleParserError(
                            "included task files must contain a list of tasks",
                            obj=data)

                    # since we can't send callbacks here, we display a message directly in
                    # the same fashion used by the on_include callback. We also do it here,
                    # because the recursive nature of helper methods means we may be loading
                    # nested includes, and we want the include order printed correctly
                    display.vv("statically imported: %s" % include_file)

                    ti_copy = t.copy(exclude_parent=True)
                    ti_copy._parent = block
                    included_blocks = load_list_of_blocks(
                        data,
                        play=play,
                        parent_block=None,
                        task_include=ti_copy,
                        role=role,
                        use_handlers=use_handlers,
                        loader=loader,
                        variable_manager=variable_manager,
                    )

                    tags = ti_copy.tags[:]

                    # now we extend the tags on each of the included blocks
                    for b in included_blocks:
                        b.tags = list(set(b.tags).union(tags))
                    # END FIXME

                    # FIXME: handlers shouldn't need this special handling, but do
                    #        right now because they don't iterate blocks correctly
                    if use_handlers:
                        for b in included_blocks:
                            task_list.extend(b.block)
                    else:
                        task_list.extend(included_blocks)
                else:
                    t.is_static = False
                    task_list.append(t)

            elif action in C._ACTION_ALL_PROPER_INCLUDE_IMPORT_ROLES:
                ir = IncludeRole.load(
                    task_ds,
                    block=block,
                    role=role,
                    task_include=None,
                    variable_manager=variable_manager,
                    loader=loader,
                )

                #   1. the user has set the 'static' option to false or true
                #   2. one of the appropriate config options was set
                is_static = False
                if action in C._ACTION_IMPORT_ROLE:
                    is_static = True

                if is_static:
                    if ir.loop is not None:
                        if action in C._ACTION_IMPORT_ROLE:
                            raise AnsibleParserError(
                                "You cannot use loops on 'import_role' statements. You should use 'include_role' instead.",
                                obj=task_ds)
                        else:
                            raise AnsibleParserError(
                                "You cannot use 'static' on an include_role with a loop",
                                obj=task_ds)

                    # we set a flag to indicate this include was static
                    ir.statically_loaded = True

                    # template the role name now, if needed
                    all_vars = variable_manager.get_vars(play=play, task=ir)
                    templar = Templar(loader=loader, variables=all_vars)
                    ir._role_name = templar.template(ir._role_name)

                    # uses compiled list from object
                    blocks, _ = ir.get_block_list(
                        variable_manager=variable_manager, loader=loader)
                    task_list.extend(blocks)
                else:
                    # passes task object itself for latter generation of list
                    task_list.append(ir)
            else:
                if use_handlers:
                    t = Handler.load(task_ds,
                                     block=block,
                                     role=role,
                                     task_include=task_include,
                                     variable_manager=variable_manager,
                                     loader=loader)
                else:
                    t = Task.load(task_ds,
                                  block=block,
                                  role=role,
                                  task_include=task_include,
                                  variable_manager=variable_manager,
                                  loader=loader)

                task_list.append(t)

    return task_list
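A hedged illustration of the relative-include walk above (hypothetical layout):

    # tasks/main.yml    -> include_tasks: sub/one.yml   (dynamic parent)
    # tasks/sub/one.yml -> import_tasks: two.yml        (static child)
    # walking the parent chain sets cumulative_path to 'sub', so 'two.yml'
    # is first probed as tasks/sub/two.yml before falling back to path_dwim()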
Example #21
    def run(self, iterator, connection_info):
        '''
        The linear strategy is simple - get the next task and queue
        it for all hosts, then wait for the queue to drain before
        moving on to the next task
        '''

        # iterate over each task, while there is one left to run
        result = True
        work_to_do = True
        while work_to_do and not self._tqm._terminated:

            try:
                debug("getting the remaining hosts for this loop")
                self._tqm._failed_hosts = iterator.get_failed_hosts()
                hosts_left = self.get_hosts_remaining(iterator._play)
                debug("done getting the remaining hosts for this loop")
                if len(hosts_left) == 0:
                    debug("out of hosts to run on")
                    self._tqm.send_callback(
                        'v2_playbook_on_no_hosts_remaining')
                    result = False
                    break

                # queue up this task for each host in the inventory
                callback_sent = False
                work_to_do = False

                host_results = []
                host_tasks = self._get_next_task_lockstep(hosts_left, iterator)
                for (host, task) in host_tasks:
                    if not task:
                        continue

                    run_once = False
                    work_to_do = True

                    # test to see if the task across all hosts points to an action plugin which
                    # sets BYPASS_HOST_LOOP to true, or if it has run_once enabled. If so, we
                    # will only send this task to the first host in the list.

                    try:
                        action = action_loader.get(task.action,
                                                   class_only=True)
                        if task.run_once or getattr(action, 'BYPASS_HOST_LOOP',
                                                    False):
                            run_once = True
                    except KeyError:
                        # we don't care here, because the action may simply not have a
                        # corresponding action plugin
                        pass

                    # check to see if this task should be skipped, due to it being a member of a
                    # role which has already run (and whether that role allows duplicate execution)
                    if task._role and task._role.has_run():
                        # If there is no metadata, the default behavior is to not allow duplicates,
                        # if there is metadata, check to see if the allow_duplicates flag was set to true
                        if task._role._metadata is None or task._role._metadata and not task._role._metadata.allow_duplicates:
                            debug("'%s' skipped because role has already run" %
                                  task)
                            continue

                    if task.action == 'meta':
                        # meta tasks store their args in the _raw_params field of args,
                        # since they do not use k=v pairs, so get that
                        meta_action = task.args.get('_raw_params')
                        if meta_action == 'noop':
                            # FIXME: issue a callback for the noop here?
                            continue
                        elif meta_action == 'flush_handlers':
                            self.run_handlers(iterator, connection_info)
                        else:
                            raise AnsibleError(
                                "invalid meta action requested: %s" %
                                meta_action,
                                obj=task._ds)
                    else:
                        debug("getting variables")
                        task_vars = self._variable_manager.get_vars(
                            loader=self._loader,
                            play=iterator._play,
                            host=host,
                            task=task)
                        task_vars = self.add_tqm_variables(task_vars,
                                                           play=iterator._play)
                        templar = Templar(loader=self._loader,
                                          variables=task_vars)
                        debug("done getting variables")

                        if not callback_sent:
                            temp_task = task.copy()
                            temp_task.name = templar.template(
                                temp_task.get_name(), fail_on_undefined=False)
                            self._tqm.send_callback(
                                'v2_playbook_on_task_start',
                                temp_task,
                                is_conditional=False)
                            callback_sent = True

                        self._blocked_hosts[host.get_name()] = True
                        self._queue_task(host, task, task_vars,
                                         connection_info)

                    results = self._process_pending_results(iterator)
                    host_results.extend(results)

                    # if we're bypassing the host loop, break out now
                    if run_once:
                        break

                debug(
                    "done queuing things up, now waiting for results queue to drain"
                )
                results = self._wait_on_pending_results(iterator)
                host_results.extend(results)

                try:
                    included_files = IncludedFile.process_include_results(
                        host_results,
                        self._tqm,
                        iterator=iterator,
                        loader=self._loader)
                except AnsibleError as e:
                    return False

                if len(included_files) > 0:
                    noop_task = Task()
                    noop_task.action = 'meta'
                    noop_task.args['_raw_params'] = 'noop'
                    noop_task.set_loader(iterator._play._loader)

                    all_blocks = dict((host, []) for host in hosts_left)
                    for included_file in included_files:
                        # included hosts get the task list while those excluded get an equal-length
                        # list of noop tasks, to make sure that they continue running in lock-step
                        try:
                            new_blocks = self._load_included_file(
                                included_file, iterator=iterator)
                        except AnsibleError as e:
                            for host in included_file._hosts:
                                iterator.mark_host_failed(host)
                            # FIXME: callback here?
                            print(e)
                            continue

                        for new_block in new_blocks:
                            noop_block = Block(parent_block=task._block)
                            noop_block.block = [
                                noop_task for t in new_block.block
                            ]
                            noop_block.always = [
                                noop_task for t in new_block.always
                            ]
                            noop_block.rescue = [
                                noop_task for t in new_block.rescue
                            ]
                            for host in hosts_left:
                                if host in included_file._hosts:
                                    task_vars = self._variable_manager.get_vars(
                                        loader=self._loader,
                                        play=iterator._play,
                                        host=host,
                                        task=included_file._task)
                                    final_block = new_block.filter_tagged_tasks(
                                        connection_info, task_vars)
                                    all_blocks[host].append(final_block)
                                else:
                                    all_blocks[host].append(noop_block)

                    for host in hosts_left:
                        iterator.add_tasks(host, all_blocks[host])

                debug("results queue empty")
Example #22
    def __init__(self, inventory, play, play_context, variable_manager, all_vars, start_at_done=False):
        self._play = play
        self._blocks = []

        self._task_uuid_cache = dict()

        # Default options to gather
        gather_subset = play_context.gather_subset
        gather_timeout = play_context.gather_timeout
        fact_path = play_context.fact_path

        # Retrieve subset to gather
        if self._play.gather_subset is not None:
            gather_subset = self._play.gather_subset
        # Retrieve timeout for gather
        if self._play.gather_timeout is not None:
            gather_timeout = self._play.gather_timeout
        # Retrieve fact_path
        if self._play.fact_path is not None:
            fact_path = self._play.fact_path

        setup_block = Block(play=self._play)
        setup_task = Task(block=setup_block)
        setup_task.action = 'setup'
        setup_task.name = 'Gathering Facts'
        setup_task.tags   = ['always']
        setup_task.args   = {
            'gather_subset': gather_subset,
        }
        if gather_timeout:
            setup_task.args['gather_timeout'] = gather_timeout
        if fact_path:
            setup_task.args['fact_path'] = fact_path
        setup_task.set_loader(self._play._loader)
        # short circuit fact gathering if the entire playbook is conditional
        if self._play._included_conditional is not None:
            setup_task.when = self._play._included_conditional[:]
        setup_block.block = [setup_task]

        setup_block = setup_block.filter_tagged_tasks(play_context, all_vars)
        self._blocks.append(setup_block)
        self.cache_block_tasks(setup_block)

        for block in self._play.compile():
            new_block = block.filter_tagged_tasks(play_context, all_vars)
            if new_block.has_tasks():
                self.cache_block_tasks(new_block)
                self._blocks.append(new_block)

        for handler_block in self._play.handlers:
            self.cache_block_tasks(handler_block)

        self._host_states = {}
        start_at_matched = False
        for host in inventory.get_hosts(self._play.hosts):
            self._host_states[host.name] = HostState(blocks=self._blocks)
            # if the host's name is in the variable manager's fact cache, then set
            # its _gathered_facts flag to true for smart gathering tests later
            if host.name in variable_manager._fact_cache and variable_manager._fact_cache.get(host.name).get('module_setup', False):
                host._gathered_facts = True
            # if we're looking to start at a specific task, iterate through
            # the tasks for this host until we find the specified task
            if play_context.start_at_task is not None and not start_at_done:
                while True:
                    (s, task) = self.get_next_task_for_host(host, peek=True)
                    if s.run_state == self.ITERATING_COMPLETE:
                        break
                    if task.name == play_context.start_at_task or fnmatch.fnmatch(task.name, play_context.start_at_task) or \
                       task.get_name() == play_context.start_at_task or fnmatch.fnmatch(task.get_name(), play_context.start_at_task):
                        start_at_matched = True
                        break
                    else:
                        self.get_next_task_for_host(host)

                # finally, reset the host's state to ITERATING_SETUP
                if start_at_matched:
                    self._host_states[host.name].did_start_at_task = True
                    self._host_states[host.name].run_state = self.ITERATING_SETUP

        if start_at_matched:
            # we have our match, so clear the start_at_task field on the
            # play context to flag that we've started at a task (and future
            # plays won't try to advance)
            play_context.start_at_task = None
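A hedged YAML sketch of the play-level overrides consulted above (hypothetical values):

    # - hosts: all
    #   gather_subset: ['!all', 'min']
    #   gather_timeout: 10
    #   fact_path: /etc/custom/facts.d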
Example #23
def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
    '''
    Given a list of task datastructures (parsed from YAML),
    return a list of Task() or TaskInclude() objects.
    '''

    # we import here to prevent a circular dependency with imports
    from ansible.playbook.block import Block
    from ansible.playbook.handler import Handler
    from ansible.playbook.task import Task
    from ansible.playbook.task_include import TaskInclude
    from ansible.playbook.role_include import IncludeRole
    from ansible.playbook.handler_task_include import HandlerTaskInclude
    from ansible.template import Templar

    assert isinstance(ds, list)

    task_list = []
    for task_ds in ds:
        assert isinstance(task_ds, dict)

        if 'block' in task_ds:
            t = Block.load(
                task_ds,
                play=play,
                parent_block=block,
                role=role,
                task_include=task_include,
                use_handlers=use_handlers,
                variable_manager=variable_manager,
                loader=loader,
            )
            task_list.append(t)
        else:
            if 'include' in task_ds:
                if use_handlers:
                    include_class = HandlerTaskInclude
                else:
                    include_class = TaskInclude

                t = include_class.load(
                    task_ds,
                    block=block,
                    role=role,
                    task_include=None,
                    variable_manager=variable_manager,
                    loader=loader
                )

                all_vars = variable_manager.get_vars(loader=loader, play=play, task=t)
                templar = Templar(loader=loader, variables=all_vars)

                # check to see if this include is dynamic or static:
                # 1. the user has set the 'static' option to false or true
                # 2. one of the appropriate config options was set
                if t.static is not None:
                    is_static = t.static
                else:
                    is_static = C.DEFAULT_TASK_INCLUDES_STATIC or \
                        (use_handlers and C.DEFAULT_HANDLER_INCLUDES_STATIC) or \
                        (not templar._contains_vars(t.args['_raw_params']) and t.all_parents_static() and not t.loop)

                if is_static:
                    if t.loop is not None:
                        raise AnsibleParserError("You cannot use 'static' on an include with a loop", obj=task_ds)

                    # we set a flag to indicate this include was static
                    t.statically_loaded = True

                    # handle relative includes by walking up the list of parent include
                    # tasks and checking the relative result to see if it exists
                    parent_include = block
                    cumulative_path = None

                    found = False
                    subdir = 'tasks'
                    if use_handlers:
                        subdir = 'handlers'
                    while parent_include is not None:
                        if not isinstance(parent_include, TaskInclude):
                            parent_include = parent_include._parent
                            continue
                        parent_include_dir = templar.template(os.path.dirname(parent_include.args.get('_raw_params')))
                        if cumulative_path is None:
                            cumulative_path = parent_include_dir
                        elif not os.path.isabs(cumulative_path):
                            cumulative_path = os.path.join(parent_include_dir, cumulative_path)
                        include_target = templar.template(t.args['_raw_params'])
                        if t._role:
                            new_basedir = os.path.join(t._role._role_path, subdir, cumulative_path)
                            include_file = loader.path_dwim_relative(new_basedir, subdir, include_target)
                        else:
                            include_file = loader.path_dwim_relative(loader.get_basedir(), cumulative_path, include_target)

                        if os.path.exists(include_file):
                            found = True
                            break
                        else:
                            parent_include = parent_include._parent

                    if not found:
                        try:
                            include_target = templar.template(t.args['_raw_params'])
                        except AnsibleUndefinedVariable:
                            raise AnsibleParserError(
                                "Error when evaluating variable in include name: %s.\n\n" \
                                "When using static includes, ensure that any variables used in their names are defined in vars/vars_files\n" \
                                "or extra-vars passed in from the command line. Static includes cannot use variables from inventory\n" \
                                "sources like group or host vars." % t.args['_raw_params'],
                                obj=task_ds,
                                suppress_extended_error=True,
                                )
                        if t._role:
                            include_file = loader.path_dwim_relative(t._role._role_path, subdir, include_target)
                        else:
                            include_file = loader.path_dwim(include_target)

                    try:
                        data = loader.load_from_file(include_file)
                        if data is None:
                            return []
                        elif not isinstance(data, list):
                            raise AnsibleParserError("included task files must contain a list of tasks", obj=data)

                        # since we can't send callbacks here, we display a message directly in
                        # the same fashion used by the on_include callback. We also do it here,
                        # because the recursive nature of helper methods means we may be loading
                        # nested includes, and we want the include order printed correctly
                        display.vv("statically included: %s" % include_file)
                    except AnsibleFileNotFound:
                        if t.static or \
                           C.DEFAULT_TASK_INCLUDES_STATIC or \
                           C.DEFAULT_HANDLER_INCLUDES_STATIC and use_handlers:
                            raise
                        display.deprecated(
                            "Included file '%s' not found, however since this include is not " \
                            "explicitly marked as 'static: yes', we will try and include it dynamically " \
                            "later. In the future, this will be an error unless 'static: no' is used " \
                            "on the include task. If you do not want missing includes to be considered " \
                            "dynamic, use 'static: yes' on the include or set the global ansible.cfg " \
                            "options to make all inclues static for tasks and/or handlers" % include_file, version="2.7"
                        )
                        task_list.append(t)
                        continue

                    ti_copy = t.copy(exclude_parent=True)
                    ti_copy._parent = block
                    included_blocks = load_list_of_blocks(
                        data,
                        play=play,
                        parent_block=None,
                        task_include=ti_copy,
                        role=role,
                        use_handlers=use_handlers,
                        loader=loader,
                        variable_manager=variable_manager,
                    )

                    # pop tags out of the include args, if they were specified there, and assign
                    # them to the include. If the include already had tags specified, we raise an
                    # error so that users know not to specify them both ways
                    tags = ti_copy.vars.pop('tags', [])
                    if isinstance(tags, string_types):
                        tags = tags.split(',')

                    if len(tags) > 0:
                        if len(ti_copy.tags) > 0:
                            raise AnsibleParserError(
                                "Include tasks should not specify tags in more than one way (both via args and directly on the task). " \
                                "Mixing styles in which tags are specified is prohibited for whole import hierarchy, not only for single import statement",
                                obj=task_ds,
                                suppress_extended_error=True,
                            )
                        display.deprecated("You should not specify tags in the include parameters. All tags should be specified using the task-level option",
                                           version="2.7")
                    else:
                        tags = ti_copy.tags[:]

                    # now we extend the tags on each of the included blocks
                    for b in included_blocks:
                        b.tags = list(set(b.tags).union(tags))
                    # END FIXME

                    # FIXME: handlers shouldn't need this special handling, but do
                    #        right now because they don't iterate blocks correctly
                    if use_handlers:
                        for b in included_blocks:
                            task_list.extend(b.block)
                    else:
                        task_list.extend(included_blocks)
                else:
                    task_list.append(t)

            elif 'include_role' in task_ds:

                ir = IncludeRole.load(
                    task_ds,
                    block=block,
                    role=role,
                    task_include=None,
                    variable_manager=variable_manager,
                    loader=loader
                    )

                #   1. the user has set the 'static' option to false or true
                #   2. one of the appropriate config options was set
                if ir.static is not None:
                    is_static = ir.static
                else:
                    display.debug('Determine if include_role is static')
                    # Check to see if this include is dynamic or static:
                    all_vars = variable_manager.get_vars(loader=loader, play=play, task=ir)
                    templar = Templar(loader=loader, variables=all_vars)
                    needs_templating = False
                    for param in ir.args:
                        if templar._contains_vars(ir.args[param]):
                            if not templar.is_template(ir.args[param]):
                                needs_templating = True
                                break
                    is_static = C.DEFAULT_TASK_INCLUDES_STATIC or \
                                (use_handlers and C.DEFAULT_HANDLER_INCLUDES_STATIC) or \
                                (not needs_templating and ir.all_parents_static() and not ir.loop)
                    display.debug('Determined that if include_role static is %s' % str(is_static))
                if is_static:
                    # uses compiled list from object
                    t = task_list.extend(ir.get_block_list(variable_manager=variable_manager, loader=loader))
                else:
                    # passes task object itself for latter generation of list
                    t = task_list.append(ir)
            else:
                if use_handlers:
                    t = Handler.load(task_ds, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)
                else:
                    t = Task.load(task_ds, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)

                task_list.append(t)

    return task_list
Example #24
def test_process_include_simulate_free_block_role_tasks(
        mock_iterator, mock_variable_manager):
    """Test loading the same role returns different included files

    In the case of free, we may end up with included files from roles that
    have the same parent but are different tasks. Previously the comparison
    for equality did not check if the tasks were the same and only checked
    that the parents were the same. This led to some tasks being run
    incorrectly and some tasks being silently dropped."""

    fake_loader = DictDataLoader({
        'include_test.yml':
        "",
        '/etc/ansible/roles/foo_role/tasks/task1.yml':
        """
            - debug: msg=task1
        """,
        '/etc/ansible/roles/foo_role/tasks/task2.yml':
        """
            - debug: msg=task2
        """,
    })

    hostname = "testhost1"
    hostname2 = "testhost2"

    role1_ds = {
        'name': 'task1 include',
        'include_role': {
            'name': 'foo_role',
            'tasks_from': 'task1.yml'
        }
    }
    role2_ds = {
        'name': 'task2 include',
        'include_role': {
            'name': 'foo_role',
            'tasks_from': 'task2.yml'
        }
    }
    parent_task_ds = {'block': [role1_ds, role2_ds]}
    parent_block = Block.load(parent_task_ds, loader=fake_loader)

    parent_block._play = None

    include_role1_ds = {
        'include_args': {
            'name': 'foo_role',
            'tasks_from': 'task1.yml'
        }
    }
    include_role2_ds = {
        'include_args': {
            'name': 'foo_role',
            'tasks_from': 'task2.yml'
        }
    }

    include_role1 = IncludeRole.load(role1_ds,
                                     block=parent_block,
                                     loader=fake_loader)
    include_role2 = IncludeRole.load(role2_ds,
                                     block=parent_block,
                                     loader=fake_loader)

    result1 = task_result.TaskResult(host=hostname,
                                     task=include_role1,
                                     return_data=include_role1_ds)
    result2 = task_result.TaskResult(host=hostname2,
                                     task=include_role2,
                                     return_data=include_role2_ds)
    results = [result1, result2]

    res = IncludedFile.process_include_results(results, mock_iterator,
                                               fake_loader,
                                               mock_variable_manager)
    assert isinstance(res, list)
    # we should get two different includes
    assert len(res) == 2
    assert res[0]._filename == 'foo_role'
    assert res[1]._filename == 'foo_role'
    # with different tasks
    assert res[0]._task != res[1]._task

    assert res[0]._hosts == ['testhost1']
    assert res[1]._hosts == ['testhost2']

    assert res[0]._args == {}
    assert res[1]._args == {}

    assert res[0]._vars == {}
    assert res[1]._vars == {}
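The docstring's point is that equality between included files has to account for the originating task, not just the file name and arguments. A minimal sketch of that idea with a hypothetical stand-in class (the real comparison lives on IncludedFile in ansible.playbook.included_file):

class IncludedFileSketch:
    """Hypothetical stand-in: equality includes the originating task."""

    def __init__(self, filename, args, task):
        self._filename = filename
        self._args = args
        self._task = task

    def __eq__(self, other):
        # comparing _task is what keeps task1.yml and task2.yml includes
        # of the same role from being merged into a single entry
        return (self._filename == other._filename and
                self._args == other._args and
                self._task == other._task)

inc1 = IncludedFileSketch('foo_role', {}, task='task1 include')
inc2 = IncludedFileSketch('foo_role', {}, task='task2 include')
assert inc1 != inc2  # different tasks, so two separate includes survive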
Example #25
0
    def load(data,
             block=None,
             role=None,
             task_include=None,
             variable_manager=None,
             loader=None):

        ir = IncludeRole(block, role, task_include=task_include).load_data(
            data, variable_manager=variable_manager, loader=loader)

        # Validate options
        my_arg_names = frozenset(ir.args.keys())

        # name is needed, or use role as alias
        ir._role_name = ir.args.get('name', ir.args.get('role'))
        if ir._role_name is None:
            raise AnsibleParserError("'name' is a required field for %s." %
                                     ir.action,
                                     obj=data)

        if 'public' in ir.args and ir.action != 'include_role':
            raise AnsibleParserError('Invalid options for %s: public' %
                                     ir.action,
                                     obj=data)

        if 'private' in ir.args:
            display.deprecated(
                msg='Supplying "private" for "%s" is a no op, and is deprecated'
                % ir.action,
                version='2.8')

        # validate bad args, otherwise we silently ignore
        bad_opts = my_arg_names.difference(IncludeRole.VALID_ARGS)
        if bad_opts:
            raise AnsibleParserError('Invalid options for %s: %s' %
                                     (ir.action, ','.join(list(bad_opts))),
                                     obj=data)

        # build options for role includes
        for key in my_arg_names.intersection(IncludeRole.FROM_ARGS):
            from_key = key.replace('_from', '')
            ir._from_files[from_key] = basename(ir.args.get(key))

        apply_attrs = ir.args.pop('apply', {})
        if apply_attrs and ir.action != 'include_role':
            raise AnsibleParserError('Invalid options for %s: apply' %
                                     ir.action,
                                     obj=data)
        elif apply_attrs:
            apply_attrs['block'] = []
            p_block = Block.load(
                apply_attrs,
                play=block._play,
                parent_block=block,
                role=role,
                task_include=task_include,
                use_handlers=block._use_handlers,
                variable_manager=variable_manager,
                loader=loader,
            )
            ir._parent = p_block

        # manual list as otherwise the options would set other task parameters we don't want.
        for option in my_arg_names.intersection(IncludeRole.OTHER_ARGS):
            setattr(ir, option, ir.args.get(option))

        return ir
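To make the _from_files loop above concrete, here is a small runnable sketch of the mapping it performs. The FROM_ARGS contents and the sample args are illustrative assumptions, not the exact frozenset defined on IncludeRole:

from os.path import basename

FROM_ARGS = frozenset(['tasks_from', 'vars_from', 'defaults_from', 'handlers_from'])  # assumed
args = {'name': 'foo_role', 'tasks_from': 'subdir/setup.yml', 'vars_from': 'extra.yml'}

from_files = {}
for key in set(args).intersection(FROM_ARGS):
    # 'tasks_from' becomes the 'tasks' key, keeping only the file name
    from_files[key.replace('_from', '')] = basename(args[key])

print(from_files)  # {'tasks': 'setup.yml', 'vars': 'extra.yml'} (key order may vary)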
Example #26
0
def load_list_of_tasks(ds,
                       play,
                       block=None,
                       role=None,
                       task_include=None,
                       use_handlers=False,
                       variable_manager=None,
                       loader=None):
    '''
    Given a list of task datastructures (parsed from YAML),
    return a list of Task() or TaskInclude() objects.
    '''

    # we import here to prevent a circular dependency with imports
    from ansible.playbook.block import Block
    from ansible.playbook.handler import Handler
    from ansible.playbook.task import Task
    from ansible.playbook.task_include import TaskInclude
    from ansible.playbook.role_include import IncludeRole
    from ansible.playbook.handler_task_include import HandlerTaskInclude
    from ansible.template import Templar

    if not isinstance(ds, list):
        raise AnsibleAssertionError(
            'The ds (%s) should be a list but was a %s' % (ds, type(ds)))

    task_list = []
    for task_ds in ds:
        if not isinstance(task_ds, dict):
            raise AnsibleAssertionError(
                'The ds (%s) should be a dict but was a %s' % (task_ds, type(task_ds)))

        if 'block' in task_ds:
            t = Block.load(
                task_ds,
                play=play,
                parent_block=block,
                role=role,
                task_include=task_include,
                use_handlers=use_handlers,
                variable_manager=variable_manager,
                loader=loader,
            )
            task_list.append(t)
        else:
            collection_list = task_ds.get('collections')
            if collection_list is None and block is not None and block.collections:
                collection_list = block.collections
            args_parser = ModuleArgsParser(task_ds,
                                           collection_list=collection_list)
            try:
                (action, args, delegate_to) = args_parser.parse()
            except AnsibleParserError as e:
                # if the raised exception was created with obj=ds args, then it includes the detail,
                # so we don't need to add it and can just re-raise.
                if e._obj:
                    raise
                # But if it wasn't, we can add the yaml object now to get more detail
                raise AnsibleParserError(to_native(e), obj=task_ds, orig_exc=e)

            if action in ('include', 'import_tasks', 'include_tasks'):

                if use_handlers:
                    include_class = HandlerTaskInclude
                else:
                    include_class = TaskInclude

                t = include_class.load(task_ds,
                                       block=block,
                                       role=role,
                                       task_include=None,
                                       variable_manager=variable_manager,
                                       loader=loader)

                all_vars = variable_manager.get_vars(play=play, task=t)
                templar = Templar(loader=loader, variables=all_vars)

                # check to see if this include is dynamic or static:
                # 1. the user has set the 'static' option to false or true
                # 2. one of the appropriate config options was set
                if action == 'include_tasks':
                    is_static = False
                elif action == 'import_tasks':
                    is_static = True
                elif t.static is not None:
                    display.deprecated(
                        "The use of 'static' has been deprecated. "
                        "Use 'import_tasks' for static inclusion, or 'include_tasks' for dynamic inclusion",
                        version='2.12')
                    is_static = t.static
                else:
                    is_static = C.DEFAULT_TASK_INCLUDES_STATIC or \
                        (use_handlers and C.DEFAULT_HANDLER_INCLUDES_STATIC) or \
                        (not templar._contains_vars(t.args['_raw_params']) and t.all_parents_static() and not t.loop)

                if is_static:
                    if t.loop is not None:
                        if action == 'import_tasks':
                            raise AnsibleParserError(
                                "You cannot use loops on 'import_tasks' statements. You should use 'include_tasks' instead.",
                                obj=task_ds)
                        else:
                            raise AnsibleParserError(
                                "You cannot use 'static' on an include with a loop",
                                obj=task_ds)

                    # we set a flag to indicate this include was static
                    t.statically_loaded = True

                    # handle relative includes by walking up the list of parent include
                    # tasks and checking the relative result to see if it exists
                    parent_include = block
                    cumulative_path = None

                    found = False
                    subdir = 'tasks'
                    if use_handlers:
                        subdir = 'handlers'
                    while parent_include is not None:
                        if not isinstance(parent_include, TaskInclude):
                            parent_include = parent_include._parent
                            continue
                        try:
                            parent_include_dir = os.path.dirname(
                                templar.template(
                                    parent_include.args.get('_raw_params')))
                        except AnsibleUndefinedVariable as e:
                            if not parent_include.statically_loaded:
                                raise AnsibleParserError(
                                    "Error when evaluating variable in dynamic parent include path: %s. "
                                    "When using static imports, the parent dynamic include cannot utilize host facts "
                                    "or variables from inventory" %
                                    parent_include.args.get('_raw_params'),
                                    obj=task_ds,
                                    suppress_extended_error=True,
                                    orig_exc=e)
                            raise
                        if cumulative_path is None:
                            cumulative_path = parent_include_dir
                        elif not os.path.isabs(cumulative_path):
                            cumulative_path = os.path.join(
                                parent_include_dir, cumulative_path)
                        include_target = templar.template(
                            t.args['_raw_params'])
                        if t._role:
                            new_basedir = os.path.join(t._role._role_path,
                                                       subdir, cumulative_path)
                            include_file = loader.path_dwim_relative(
                                new_basedir, subdir, include_target)
                        else:
                            include_file = loader.path_dwim_relative(
                                loader.get_basedir(), cumulative_path,
                                include_target)

                        if os.path.exists(include_file):
                            found = True
                            break
                        else:
                            parent_include = parent_include._parent

                    if not found:
                        try:
                            include_target = templar.template(
                                t.args['_raw_params'])
                        except AnsibleUndefinedVariable as e:
                            raise AnsibleParserError(
                                "Error when evaluating variable in import path: %s.\n\n"
                                "When using static imports, ensure that any variables used in their names are defined in vars/vars_files\n"
                                "or extra-vars passed in from the command line. Static imports cannot use variables from facts or inventory\n"
                                "sources like group or host vars." %
                                t.args['_raw_params'],
                                obj=task_ds,
                                suppress_extended_error=True,
                                orig_exc=e)
                        if t._role:
                            include_file = loader.path_dwim_relative(
                                t._role._role_path, subdir, include_target)
                        else:
                            include_file = loader.path_dwim(include_target)

                    try:
                        data = loader.load_from_file(include_file)
                        if data is None:
                            display.warning(
                                'file %s is empty and had no tasks to include'
                                % include_file)
                            continue
                        elif not isinstance(data, list):
                            raise AnsibleParserError(
                                "included task files must contain a list of tasks",
                                obj=data)

                        # since we can't send callbacks here, we display a message directly in
                        # the same fashion used by the on_include callback. We also do it here,
                        # because the recursive nature of helper methods means we may be loading
                        # nested includes, and we want the include order printed correctly
                        display.vv("statically imported: %s" % include_file)
                    except AnsibleFileNotFound:
                        if action != 'include' or t.static or \
                           C.DEFAULT_TASK_INCLUDES_STATIC or \
                           C.DEFAULT_HANDLER_INCLUDES_STATIC and use_handlers:
                            raise
                        display.deprecated(
                            "Included file '%s' not found, however since this include is not "
                            "explicitly marked as 'static: yes', we will try and include it dynamically "
                            "later. In the future, this will be an error unless 'static: no' is used "
                            "on the include task. If you do not want missing includes to be considered "
                            "dynamic, use 'static: yes' on the include or set the global ansible.cfg "
                            "options to make all includes static for tasks and/or handlers"
                            % include_file,
                            version="2.12")
                        task_list.append(t)
                        continue

                    ti_copy = t.copy(exclude_parent=True)
                    ti_copy._parent = block
                    included_blocks = load_list_of_blocks(
                        data,
                        play=play,
                        parent_block=None,
                        task_include=ti_copy,
                        role=role,
                        use_handlers=use_handlers,
                        loader=loader,
                        variable_manager=variable_manager,
                    )

                    # FIXME: remove once 'include' is removed
                    # pop tags out of the include args, if they were specified there, and assign
                    # them to the include. If the include already had tags specified, we raise an
                    # error so that users know not to specify them both ways
                    tags = ti_copy.vars.pop('tags', [])
                    if isinstance(tags, string_types):
                        tags = tags.split(',')

                    if len(tags) > 0:
                        if action in ('include_tasks', 'import_tasks'):
                            raise AnsibleParserError(
                                'You cannot specify "tags" inline to the task, it is a task keyword'
                            )
                        if len(ti_copy.tags) > 0:
                            raise AnsibleParserError(
                                "Include tasks should not specify tags in more than one way (both via args and directly on the task). "
                                "Mixing styles in which tags are specified is prohibited for whole import hierarchy, not only for single import statement",
                                obj=task_ds,
                                suppress_extended_error=True,
                            )
                        display.deprecated(
                            "You should not specify tags in the include parameters. All tags should be specified using the task-level option",
                            version="2.12")
                    else:
                        tags = ti_copy.tags[:]

                    # now we extend the tags on each of the included blocks
                    for b in included_blocks:
                        b.tags = list(set(b.tags).union(tags))
                    # END FIXME

                    # FIXME: handlers shouldn't need this special handling, but do
                    #        right now because they don't iterate blocks correctly
                    if use_handlers:
                        for b in included_blocks:
                            task_list.extend(b.block)
                    else:
                        task_list.extend(included_blocks)
                else:
                    t.is_static = False
                    task_list.append(t)

            elif action in ('include_role', 'import_role'):
                ir = IncludeRole.load(
                    task_ds,
                    block=block,
                    role=role,
                    task_include=None,
                    variable_manager=variable_manager,
                    loader=loader,
                )

                #   1. the user has set the 'static' option to false or true
                #   2. one of the appropriate config options was set
                is_static = False
                if action == 'import_role':
                    is_static = True

                elif ir.static is not None:
                    display.deprecated(
                        "The use of 'static' for 'include_role' has been deprecated. "
                        "Use 'import_role' for static inclusion, or 'include_role' for dynamic inclusion",
                        version='2.12')
                    is_static = ir.static

                if is_static:
                    if ir.loop is not None:
                        if action == 'import_role':
                            raise AnsibleParserError(
                                "You cannot use loops on 'import_role' statements. You should use 'include_role' instead.",
                                obj=task_ds)
                        else:
                            raise AnsibleParserError(
                                "You cannot use 'static' on an include_role with a loop",
                                obj=task_ds)

                    # we set a flag to indicate this include was static
                    ir.statically_loaded = True

                    # template the role name now, if needed
                    all_vars = variable_manager.get_vars(play=play, task=ir)
                    templar = Templar(loader=loader, variables=all_vars)
                    if templar._contains_vars(ir._role_name):
                        ir._role_name = templar.template(ir._role_name)

                    # uses compiled list from object
                    blocks, _ = ir.get_block_list(
                        variable_manager=variable_manager, loader=loader)
                    task_list.extend(blocks)
                else:
                    # passes task object itself for later generation of list
                    task_list.append(ir)
            else:
                if use_handlers:
                    t = Handler.load(task_ds,
                                     block=block,
                                     role=role,
                                     task_include=task_include,
                                     variable_manager=variable_manager,
                                     loader=loader)
                else:
                    t = Task.load(task_ds,
                                  block=block,
                                  role=role,
                                  task_include=task_include,
                                  variable_manager=variable_manager,
                                  loader=loader)

                task_list.append(t)

    return task_list
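The least obvious part of the static branch above is the relative-include walk: each enclosing include contributes its directory to a cumulative relative path until a candidate file exists on disk. A minimal sketch of just the path accumulation, with hypothetical directories standing in for the parent includes:

import os

# hypothetical directories of parent includes, innermost first;
# '' stands in for the play's basedir at the end of the walk
parent_dirs = ['level2', 'level1', '']

cumulative_path = None
for parent_dir in parent_dirs:
    if cumulative_path is None:
        cumulative_path = parent_dir
    elif not os.path.isabs(cumulative_path):
        # each step prepends the next parent's directory to the accumulated path
        cumulative_path = os.path.join(parent_dir, cumulative_path)
    candidate = os.path.join(cumulative_path, 'included.yml')
    print(candidate)
# level2/included.yml
# level1/level2/included.yml
# level1/level2/included.yml  (the basedir contributes nothing further)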
Example #27
0
        if 'block' in task:
            t = Block.load(
                task,
                parent_block=block,
                role=role,
                task_include=task_include,
                use_handlers=use_handlers,
                variable_manager=variable_manager,
                loader=loader,
            )
        else:
            if use_handlers:
                t = Handler.load(task, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)
            else:
                t = Task.load(task, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)

        task_list.append(t)

    return task_list
Example #28
0
    def compile(self):
        '''
        Compiles and returns the task list for this play, compiled from the
        roles (which are themselves compiled recursively) and/or the list of
        tasks specified in the play.
        '''

        # create a block containing a single flush handlers meta
        # task, so we can be sure to run handlers at certain points
        # of the playbook execution
        flush_block = Block.load(
            data={'meta': 'flush_handlers'},
            play=self,
            variable_manager=self._variable_manager,
            loader=self._loader
        )

        for task in flush_block.block:
            task.implicit = True

        block_list = []
        if self.force_handlers:
            noop_task = Task()
            noop_task.action = 'meta'
            noop_task.args['_raw_params'] = 'noop'
            noop_task.implicit = True
            noop_task.set_loader(self._loader)

            b = Block(play=self)
            b.block = self.pre_tasks or [noop_task]
            b.always = [flush_block]
            block_list.append(b)

            tasks = self._compile_roles() + self.tasks
            b = Block(play=self)
            b.block = tasks or [noop_task]
            b.always = [flush_block]
            block_list.append(b)

            b = Block(play=self)
            b.block = self.post_tasks or [noop_task]
            b.always = [flush_block]
            block_list.append(b)

            return block_list

        block_list.extend(self.pre_tasks)
        block_list.append(flush_block)
        block_list.extend(self._compile_roles())
        block_list.extend(self.tasks)
        block_list.append(flush_block)
        block_list.extend(self.post_tasks)
        block_list.append(flush_block)

        return block_list
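When force_handlers is not set, compile() falls through to the simple ordering at the end of the method. A toy sketch of the resulting sequence, using strings as stand-ins for the pre_tasks/roles/tasks/post_tasks blocks:

pre_tasks, roles, tasks, post_tasks = ['pre'], ['role'], ['task'], ['post']
flush = 'meta: flush_handlers'

block_list = pre_tasks + [flush] + roles + tasks + [flush] + post_tasks + [flush]
print(block_list)
# ['pre', 'meta: flush_handlers', 'role', 'task',
#  'meta: flush_handlers', 'post', 'meta: flush_handlers']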
Example #29
0
 def test_block_compile(self):
     ds = [dict(action='foo')]
     b = Block.load(ds)
     tasks = b.compile()
     self.assertEqual(len(tasks), 1)
     self.assertIsInstance(tasks[0], Task)
Example #30
0
def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
    '''
    Given a list of task datastructures (parsed from YAML),
    return a list of Task() or TaskInclude() objects.
    '''

    # we import here to prevent a circular dependency with imports
    from ansible.playbook.block import Block
    from ansible.playbook.handler import Handler
    from ansible.playbook.task import Task
    from ansible.playbook.task_include import TaskInclude
    from ansible.playbook.role_include import IncludeRole
    from ansible.playbook.handler_task_include import HandlerTaskInclude
    from ansible.template import Templar

    assert isinstance(ds, list)

    task_list = []
    for task_ds in ds:
        assert isinstance(task_ds, dict)

        if 'block' in task_ds:
            t = Block.load(
                task_ds,
                play=play,
                parent_block=block,
                role=role,
                task_include=task_include,
                use_handlers=use_handlers,
                variable_manager=variable_manager,
                loader=loader,
            )
            task_list.append(t)
        else:
            if 'include' in task_ds:
                if use_handlers:
                    include_class = HandlerTaskInclude
                else:
                    include_class = TaskInclude

                t = include_class.load(
                    task_ds,
                    block=block,
                    role=role,
                    task_include=None,
                    variable_manager=variable_manager,
                    loader=loader
                )

                all_vars = variable_manager.get_vars(loader=loader, play=play, task=t)
                templar = Templar(loader=loader, variables=all_vars)

                # check to see if this include is dynamic or static:
                # 1. the user has set the 'static' option to false or true
                # 2. one of the appropriate config options was set
                if t.static is not None:
                    is_static = t.static
                else:
                    is_static = C.DEFAULT_TASK_INCLUDES_STATIC or \
                                (use_handlers and C.DEFAULT_HANDLER_INCLUDES_STATIC) or \
                                (not templar._contains_vars(t.args['_raw_params']) and t.all_parents_static() and not t.loop)

                if is_static:
                    if t.loop is not None:
                        raise AnsibleParserError("You cannot use 'static' on an include with a loop", obj=task_ds)

                    # we set a flag to indicate this include was static
                    t.statically_loaded = True

                    # handle relative includes by walking up the list of parent include
                    # tasks and checking the relative result to see if it exists
                    parent_include = block
                    cumulative_path = None

                    found = False
                    subdir = 'tasks'
                    if use_handlers:
                        subdir = 'handlers'
                    while parent_include is not None:
                        if not isinstance(parent_include, TaskInclude):
                            parent_include = parent_include._parent
                            continue
                        parent_include_dir = templar.template(os.path.dirname(parent_include.args.get('_raw_params')))
                        if cumulative_path is None:
                            cumulative_path = parent_include_dir
                        elif not os.path.isabs(cumulative_path):
                            cumulative_path = os.path.join(parent_include_dir, cumulative_path)
                        include_target = templar.template(t.args['_raw_params'])
                        if t._role:
                            new_basedir = os.path.join(t._role._role_path, subdir, cumulative_path)
                            include_file = loader.path_dwim_relative(new_basedir, subdir, include_target)
                        else:
                            include_file = loader.path_dwim_relative(loader.get_basedir(), cumulative_path, include_target)

                        if os.path.exists(include_file):
                            found = True
                            break
                        else:
                            parent_include = parent_include._parent

                    if not found:
                        try:
                            include_target = templar.template(t.args['_raw_params'])
                        except AnsibleUndefinedVariable:
                            raise AnsibleParserError(
                                      "Error when evaluating variable in include name: %s.\n\n" \
                                      "When using static includes, ensure that any variables used in their names are defined in vars/vars_files\n" \
                                      "or extra-vars passed in from the command line. Static includes cannot use variables from inventory\n" \
                                      "sources like group or host vars." % t.args['_raw_params'],
                                      obj=task_ds,
                                      suppress_extended_error=True,
                                  )
                        if t._role:
                            include_file = loader.path_dwim_relative(t._role._role_path, subdir, include_target)
                        else:
                            include_file = loader.path_dwim(include_target)

                    try:
                        data = loader.load_from_file(include_file)
                        if data is None:
                            return []
                        elif not isinstance(data, list):
                            raise AnsibleParserError("included task files must contain a list of tasks", obj=data)

                        # since we can't send callbacks here, we display a message directly in
                        # the same fashion used by the on_include callback. We also do it here,
                        # because the recursive nature of helper methods means we may be loading
                        # nested includes, and we want the include order printed correctly
                        display.vv("statically included: %s" % include_file)
                    except AnsibleFileNotFound:
                        if t.static or \
                           C.DEFAULT_TASK_INCLUDES_STATIC or \
                           C.DEFAULT_HANDLER_INCLUDES_STATIC and use_handlers:
                            raise
                        display.deprecated(
                            "Included file '%s' not found, however since this include is not " \
                            "explicitly marked as 'static: yes', we will try and include it dynamically " \
                            "later. In the future, this will be an error unless 'static: no' is used " \
                            "on the include task. If you do not want missing includes to be considered " \
                            "dynamic, use 'static: yes' on the include or set the global ansible.cfg " \
                            "options to make all inclues static for tasks and/or handlers" % include_file,
                        )
                        task_list.append(t)
                        continue

                    included_blocks = load_list_of_blocks(
                        data,
                        play=play,
                        parent_block=None,
                        task_include=t.copy(),
                        role=role,
                        use_handlers=use_handlers,
                        loader=loader,
                        variable_manager=variable_manager,
                    )

                    # pop tags out of the include args, if they were specified there, and assign
                    # them to the include. If the include already had tags specified, we raise an
                    # error so that users know not to specify them both ways
                    tags = t.vars.pop('tags', [])
                    if isinstance(tags, string_types):
                        tags = tags.split(',')

                    if len(tags) > 0:
                        if len(t.tags) > 0:
                            raise AnsibleParserError(
                                "Include tasks should not specify tags in more than one way (both via args and directly on the task). " \
                                "Mixing styles in which tags are specified is prohibited for whole import hierarchy, not only for single import statement",
                                obj=task_ds,
                                suppress_extended_error=True,
                            )
                        display.deprecated("You should not specify tags in the include parameters. All tags should be specified using the task-level option")
                    else:
                        tags = t.tags[:]

                    # now we extend the tags on each of the included blocks
                    for b in included_blocks:
                        b.tags = list(set(b.tags).union(tags))
                    # END FIXME

                    # FIXME: handlers shouldn't need this special handling, but do
                    #        right now because they don't iterate blocks correctly
                    if use_handlers:
                        for b in included_blocks:
                            task_list.extend(b.block)
                    else:
                        task_list.extend(included_blocks)
                else:
                    task_list.append(t)

            elif 'include_role' in task_ds:

                ir = IncludeRole.load(
                            task_ds,
                            block=block,
                            role=role,
                            task_include=None,
                            variable_manager=variable_manager,
                            loader=loader
                     )

                #   1. the user has set the 'static' option to false or true
                #   2. one of the appropriate config options was set
                if ir.static is not None:
                    is_static = ir.static
                else:
                    display.debug('Determine if include_role is static')
                    # Check to see if this include is dynamic or static:
                    all_vars = variable_manager.get_vars(loader=loader, play=play, task=ir)
                    templar = Templar(loader=loader, variables=all_vars)
                    needs_templating = False
                    for param in ir.args:
                        if templar._contains_vars(ir.args[param]):
                            if not templar.templatable(ir.args[param]):
                                needs_templating = True
                                break
                    is_static = C.DEFAULT_TASK_INCLUDES_STATIC or \
                                (use_handlers and C.DEFAULT_HANDLER_INCLUDES_STATIC)  or \
                                (not needs_templating and ir.all_parents_static() and not ir.loop)
                    display.debug('Determined that if include_role static is %s' % str(is_static))
                if is_static:
                    # uses compiled list from object
                    task_list.extend(ir.get_block_list(variable_manager=variable_manager, loader=loader))
                else:
                    # passes task object itself for later generation of list
                    task_list.append(ir)
            else:
                if use_handlers:
                    t = Handler.load(task_ds, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)
                else:
                    t = Task.load(task_ds, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)

                task_list.append(t)

    return task_list
Example #31
0
    def run(self, iterator, play_context):
        '''
        The linear strategy is simple - get the next task and queue
        it for all hosts, then wait for the queue to drain before
        moving on to the next task
        '''

        # iterate over each task, while there is one left to run
        result = True
        work_to_do = True
        while work_to_do and not self._tqm._terminated:

            try:
                self._display.debug(
                    "getting the remaining hosts for this loop")
                hosts_left = self._inventory.get_hosts(iterator._play.hosts)
                self._display.debug(
                    "done getting the remaining hosts for this loop")

                # queue up this task for each host in the inventory
                callback_sent = False
                work_to_do = False

                host_results = []
                host_tasks = self._get_next_task_lockstep(hosts_left, iterator)

                # skip control
                skip_rest = False
                choose_step = True

                for (host, task) in host_tasks:
                    if not task:
                        continue

                    run_once = False
                    work_to_do = True

                    # test to see if the task across all hosts points to an action plugin which
                    # sets BYPASS_HOST_LOOP to true, or if it has run_once enabled. If so, we
                    # will only send this task to the first host in the list.

                    try:
                        action = action_loader.get(task.action,
                                                   class_only=True)
                        if task.run_once or getattr(action, 'BYPASS_HOST_LOOP',
                                                    False):
                            run_once = True
                    except KeyError:
                        # we don't care here, because the action may simply not have a
                        # corresponding action plugin
                        pass

                    # check to see if this task should be skipped, due to it being a member of a
                    # role which has already run (and whether that role allows duplicate execution)
                    if task._role and task._role.has_run(host):
                        # If there is no metadata, the default behavior is to not allow duplicates,
                        # if there is metadata, check to see if the allow_duplicates flag was set to true
                        if task._role._metadata is None or task._role._metadata and not task._role._metadata.allow_duplicates:
                            self._display.debug(
                                "'%s' skipped because role has already run" %
                                task)
                            continue

                    if task.action == 'meta':
                        self._execute_meta(task, play_context, iterator)
                    else:
                        # handle step if needed, skip meta actions as they are used internally
                        if self._step and choose_step:
                            if self._take_step(task):
                                choose_step = False
                            else:
                                skip_rest = True
                                break

                        self._display.debug("getting variables")
                        task_vars = self._variable_manager.get_vars(
                            loader=self._loader,
                            play=iterator._play,
                            host=host,
                            task=task)
                        task_vars = self.add_tqm_variables(task_vars,
                                                           play=iterator._play)
                        templar = Templar(loader=self._loader,
                                          variables=task_vars)
                        self._display.debug("done getting variables")

                        if not callback_sent:
                            temp_task = task.copy()
                            try:
                                temp_task.name = unicode(
                                    templar.template(temp_task.name,
                                                     fail_on_undefined=False))
                            except:
                                # just ignore any errors during task name templating,
                                # we don't care if it just shows the raw name
                                pass
                            self._tqm.send_callback(
                                'v2_playbook_on_task_start',
                                temp_task,
                                is_conditional=False)
                            callback_sent = True

                        self._blocked_hosts[host.get_name()] = True
                        self._queue_task(host, task, task_vars, play_context)

                    results = self._process_pending_results(iterator)
                    host_results.extend(results)

                    # if we're bypassing the host loop, break out now
                    if run_once:
                        break

                # go to next host/task group
                if skip_rest:
                    continue

                self._display.debug(
                    "done queuing things up, now waiting for results queue to drain"
                )
                results = self._wait_on_pending_results(iterator)
                host_results.extend(results)

                if not work_to_do and len(iterator.get_failed_hosts()) > 0:
                    self._display.debug("out of hosts to run on")
                    self._tqm.send_callback(
                        'v2_playbook_on_no_hosts_remaining')
                    result = False
                    break

                try:
                    included_files = IncludedFile.process_include_results(
                        host_results,
                        self._tqm,
                        iterator=iterator,
                        loader=self._loader,
                        variable_manager=self._variable_manager)
                except AnsibleError, e:
                    return False

                if len(included_files) > 0:
                    noop_task = Task()
                    noop_task.action = 'meta'
                    noop_task.args['_raw_params'] = 'noop'
                    noop_task.set_loader(iterator._play._loader)

                    all_blocks = dict((host, []) for host in hosts_left)
                    for included_file in included_files:
                        # included hosts get the task list while those excluded get an equal-length
                        # list of noop tasks, to make sure that they continue running in lock-step
                        try:
                            new_blocks = self._load_included_file(
                                included_file, iterator=iterator)
                        except AnsibleError, e:
                            for host in included_file._hosts:
                                iterator.mark_host_failed(host)
                            self._display.warning(str(e))
                            continue

                        for new_block in new_blocks:
                            noop_block = Block(parent_block=task._block)
                            noop_block.block = [
                                noop_task for t in new_block.block
                            ]
                            noop_block.always = [
                                noop_task for t in new_block.always
                            ]
                            noop_block.rescue = [
                                noop_task for t in new_block.rescue
                            ]
                            for host in hosts_left:
                                if host in included_file._hosts:
                                    task_vars = self._variable_manager.get_vars(
                                        loader=self._loader,
                                        play=iterator._play,
                                        host=host,
                                        task=included_file._task)
                                    final_block = new_block.filter_tagged_tasks(
                                        play_context, task_vars)
                                    all_blocks[host].append(final_block)
                                else:
                                    all_blocks[host].append(noop_block)

                    for host in hosts_left:
                        iterator.add_tasks(host, all_blocks[host])

                self._display.debug("results queue empty")
Example #32
0
def load_list_of_tasks(ds,
                       play,
                       block=None,
                       role=None,
                       task_include=None,
                       use_handlers=False,
                       variable_manager=None,
                       loader=None):
    '''
    Given a list of task datastructures (parsed from YAML),
    return a list of Task() or TaskInclude() objects.
    '''

    # we import here to prevent a circular dependency with imports
    from ansible.playbook.block import Block
    from ansible.playbook.handler import Handler
    from ansible.playbook.task import Task
    from ansible.playbook.task_include import TaskInclude
    from ansible.template import Templar

    assert isinstance(ds, list)

    task_list = []
    for task_ds in ds:
        assert isinstance(task_ds, dict)

        if 'block' in task_ds:
            t = Block.load(
                task_ds,
                play=play,
                parent_block=block,
                role=role,
                task_include=task_include,
                use_handlers=use_handlers,
                variable_manager=variable_manager,
                loader=loader,
            )
            task_list.append(t)
        else:
            if 'include' in task_ds:
                t = TaskInclude.load(task_ds,
                                     block=block,
                                     role=role,
                                     task_include=task_include,
                                     variable_manager=variable_manager,
                                     loader=loader)

                all_vars = variable_manager.get_vars(loader=loader,
                                                     play=play,
                                                     task=t)
                templar = Templar(loader=loader, variables=all_vars)

                # check to see if this include is static, which can be true if:
                # 1. the user set the 'static' option to true
                # 2. one of the appropriate config options was set
                # 3. the included file name contains no variables, and has no loop
                is_static = t.static or \
                            C.DEFAULT_TASK_INCLUDES_STATIC or \
                            (use_handlers and C.DEFAULT_HANDLER_INCLUDES_STATIC) or \
                            not templar._contains_vars(t.args.get('_raw_params')) and t.loop is None

                if is_static:
                    if t.loop is not None:
                        raise AnsibleParserError(
                            "You cannot use 'static' on an include with a loop",
                            obj=task_ds)

                    # FIXME: all of this code is very similar (if not identical) to that in
                    #        plugins/strategy/__init__.py, and should be unified to avoid
                    #        patches only being applied to one or the other location
                    if task_include:
                        # handle relative includes by walking up the list of parent include
                        # tasks and checking the relative result to see if it exists
                        parent_include = task_include
                        cumulative_path = None
                        while parent_include is not None:
                            parent_include_dir = templar.template(
                                os.path.dirname(
                                    parent_include.args.get('_raw_params')))
                            if cumulative_path is None:
                                cumulative_path = parent_include_dir
                            elif not os.path.isabs(cumulative_path):
                                cumulative_path = os.path.join(
                                    parent_include_dir, cumulative_path)
                            include_target = templar.template(
                                t.args['_raw_params'])
                            if t._role:
                                new_basedir = os.path.join(
                                    t._role._role_path, 'tasks',
                                    cumulative_path)
                                include_file = loader.path_dwim_relative(
                                    new_basedir, 'tasks', include_target)
                            else:
                                include_file = loader.path_dwim_relative(
                                    loader.get_basedir(), cumulative_path,
                                    include_target)

                            if os.path.exists(include_file):
                                break
                            else:
                                parent_include = parent_include._task_include
                    else:
                        try:
                            include_target = templar.template(
                                t.args['_raw_params'])
                        except AnsibleUndefinedVariable as e:
                            raise AnsibleParserError(
                                      "Error when evaluating variable in include name: %s.\n\n" \
                                      "When using static includes, ensure that any variables used in their names are defined in vars/vars_files\n" \
                                      "or extra-vars passed in from the command line. Static includes cannot use variables from inventory\n" \
                                      "sources like group or host vars." % t.args['_raw_params'],
                                      obj=task_ds,
                                      suppress_extended_error=True,
                                  )
                        if t._role:
                            if use_handlers:
                                include_file = loader.path_dwim_relative(
                                    t._role._role_path, 'handlers',
                                    include_target)
                            else:
                                include_file = loader.path_dwim_relative(
                                    t._role._role_path, 'tasks',
                                    include_target)
                        else:
                            include_file = loader.path_dwim(include_target)

                    data = loader.load_from_file(include_file)
                    if data is None:
                        return []
                    elif not isinstance(data, list):
                        raise AnsibleError(
                            "included task files must contain a list of tasks",
                            obj=data)

                    included_blocks = load_list_of_blocks(
                        data,
                        play=play,
                        parent_block=block,
                        task_include=t,
                        role=role,
                        use_handlers=use_handlers,
                        loader=loader,
                        variable_manager=variable_manager,
                    )

                    # Remove the raw params field from the module args, so it won't show up
                    # later when getting the vars for this task/children
                    t.args.pop('_raw_params', None)

                    # pop tags out of the include args, if they were specified there, and assign
                    # them to the include. If the include already had tags specified, we raise an
                    # error so that users know not to specify them both ways
                    tags = t.vars.pop('tags', [])
                    if isinstance(tags, string_types):
                        tags = tags.split(',')

                    if len(tags) > 0:
                        if len(t.tags) > 0:
                            raise AnsibleParserError(
                                "Include tasks should not specify tags in more than one way (both via args and directly on the task)." \
                                " Mixing tag specify styles is prohibited for whole import hierarchy, not only for single import statement",
                                obj=task_ds,
                                suppress_extended_error=True,
                            )
                        display.deprecated(
                            "You should not specify tags in the include parameters. All tags should be specified using the task-level option"
                        )
                    else:
                        tags = t.tags[:]

                    # now we extend the tags on each of the included blocks
                    for b in included_blocks:
                        b.tags = list(set(b.tags).union(tags))
                    # END FIXME

                    # FIXME: send callback here somehow...
                    # FIXME: handlers shouldn't need this special handling, but do
                    #        right now because they don't iterate blocks correctly
                    if use_handlers:
                        for b in included_blocks:
                            task_list.extend(b.block)
                    else:
                        task_list.extend(included_blocks)
                else:
                    task_list.append(t)
            elif use_handlers:
                t = Handler.load(task_ds,
                                 block=block,
                                 role=role,
                                 task_include=task_include,
                                 variable_manager=variable_manager,
                                 loader=loader)
                task_list.append(t)
            else:
                t = Task.load(task_ds,
                              block=block,
                              role=role,
                              task_include=task_include,
                              variable_manager=variable_manager,
                              loader=loader)
                task_list.append(t)

    return task_list
Example #33
0
 def test_load_implicit_block(self):
     ds = [dict(action='foo')]
     b = Block.load(ds)
     self.assertEqual(len(b.block), 1)
     self.assertIsInstance(b.block[0], Task)
Example #34
0
def load_list_of_tasks(ds,
                       play,
                       block=None,
                       role=None,
                       task_include=None,
                       use_handlers=False,
                       variable_manager=None,
                       loader=None):
    '''
    Given a list of task datastructures (parsed from YAML),
    return a list of Task() or TaskInclude() objects.
    '''

    # we import here to prevent a circular dependency with imports
    from ansible.playbook.block import Block
    from ansible.playbook.handler import Handler
    from ansible.playbook.task import Task

    if not isinstance(ds, list):
        raise AnsibleAssertionError(
            'The ds (%s) should be a list but was a %s' % (ds, type(ds)))

    task_list = []
    for task_ds in ds:
        if not isinstance(task_ds, dict):
            raise AnsibleAssertionError(
                'The ds (%s) should be a dict but was a %s' % (task_ds, type(task_ds)))

        if 'block' in task_ds:
            t = Block.load(
                task_ds,
                play=play,
                parent_block=block,
                role=role,
                task_include=task_include,
                use_handlers=use_handlers,
                variable_manager=variable_manager,
                loader=loader,
            )
            task_list.append(t)
        else:
            args_parser = ModuleArgsParser(task_ds)

            try:
                (action, args,
                 delegate_to) = args_parser.parse(skip_action_validation=True)
            except AnsibleParserError as e:
                # if the raised exception was created with obj=ds args, it already
                # includes the detail, so we can just re-raise it.
                if e._obj:
                    raise
                # But if it wasn't, we can add the yaml object now to get more detail
                raise AnsibleParserError(to_native(e), obj=task_ds, orig_exc=e)

            if action in ('include', 'import_tasks', 'include_tasks'):
                task_list = _list_action_in_task(
                    task_ds,
                    play,
                    action,
                    task_list,
                    block=block,
                    role=role,
                    use_handlers=use_handlers,
                    variable_manager=variable_manager,
                    loader=loader)

            elif action in ('include_role', 'import_role'):
                task_list = _list_action_in_role(
                    task_ds,
                    play,
                    action,
                    task_list,
                    block=block,
                    role=role,
                    variable_manager=variable_manager,
                    loader=loader)

            else:
                if use_handlers:
                    t = Handler.load(task_ds,
                                     block=block,
                                     role=role,
                                     task_include=task_include,
                                     variable_manager=variable_manager,
                                     loader=loader)
                else:
                    t = Task.load(task_ds,
                                  block=block,
                                  role=role,
                                  task_include=task_include,
                                  variable_manager=variable_manager,
                                  loader=loader)

                task_list.append(t)

    return task_list
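
The dispatch in load_list_of_tasks reduces to three cases: a dict carrying a 'block' key loads as a Block, include/import actions expand into further tasks or blocks, and everything else becomes a Handler or a Task depending on use_handlers. A standalone sketch of that branching under stated assumptions: Task, Handler and Block here are illustrative stand-ins rather than the real Ansible classes, and the include expansion is omitted:

class Task(object):
    """Stand-in for ansible.playbook.task.Task."""
    def __init__(self, ds):
        self.ds = ds

class Handler(Task):
    """Stand-in for ansible.playbook.handler.Handler."""

class Block(object):
    """Stand-in for ansible.playbook.block.Block."""
    def __init__(self, ds):
        self.ds = ds

def load_list(ds, use_handlers=False):
    # mirrors the type checks and branching of load_list_of_tasks
    if not isinstance(ds, list):
        raise TypeError('expected a list, got %s' % type(ds))
    out = []
    for task_ds in ds:
        if not isinstance(task_ds, dict):
            raise TypeError('expected a dict, got %s' % type(task_ds))
        if 'block' in task_ds:
            out.append(Block(task_ds))    # block/rescue/always container
        elif use_handlers:
            out.append(Handler(task_ds))  # handlers get their own type
        else:
            out.append(Task(task_ds))
    return out

print([type(t).__name__ for t in load_list(
    [{'action': 'ping'}, {'block': [{'action': 'ping'}]}])])
# prints: ['Task', 'Block']
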
Example #35
0
    def run(self, iterator, play_context):
        '''
        The linear strategy is simple - get the next task and queue
        it for all hosts, then wait for the queue to drain before
        moving on to the next task
        '''

        # iterate over each task while there is one left to run
        result = True
        work_to_do = True
        while work_to_do and not self._tqm._terminated:

            try:
                display.debug("getting the remaining hosts for this loop")
                hosts_left = [
                    host
                    for host in self._inventory.get_hosts(iterator._play.hosts)
                    if host.name not in self._tqm._unreachable_hosts
                ]
                display.debug("done getting the remaining hosts for this loop")

                # queue up this task for each host in the inventory
                callback_sent = False
                work_to_do = False

                host_results = []
                host_tasks = self._get_next_task_lockstep(hosts_left, iterator)

                # skip control
                skip_rest = False
                choose_step = True

                # flag set if task is set to any_errors_fatal
                any_errors_fatal = False

                results = []
                for (host, task) in host_tasks:
                    if not task:
                        continue

                    if self._tqm._terminated:
                        break

                    run_once = False
                    work_to_do = True

                    # test to see if the task across all hosts points to an action plugin which
                    # sets BYPASS_HOST_LOOP to true, or if it has run_once enabled. If so, we
                    # will only send this task to the first host in the list.

                    try:
                        action = action_loader.get(task.action,
                                                   class_only=True)
                    except KeyError:
                        # we don't care here, because the action may simply not have a
                        # corresponding action plugin
                        action = None

                    # check to see if this task should be skipped, due to it being a member of a
                    # role which has already run (and whether that role allows duplicate execution)
                    if task._role and task._role.has_run(host):
                        # If there is no metadata, the default behavior is to not allow duplicates,
                        # if there is metadata, check to see if the allow_duplicates flag was set to true
                        if task._role._metadata is None or task._role._metadata and not task._role._metadata.allow_duplicates:
                            display.debug(
                                "'%s' skipped because role has already run" %
                                task)
                            continue

                    if task.action == 'meta':
                        self._execute_meta(task, play_context, iterator)
                    else:
                        # handle step if needed, skip meta actions as they are used internally
                        if self._step and choose_step:
                            if self._take_step(task):
                                choose_step = False
                            else:
                                skip_rest = True
                                break

                        display.debug("getting variables")
                        task_vars = self._variable_manager.get_vars(
                            loader=self._loader,
                            play=iterator._play,
                            host=host,
                            task=task)
                        self.add_tqm_variables(task_vars, play=iterator._play)
                        templar = Templar(loader=self._loader,
                                          variables=task_vars)
                        display.debug("done getting variables")

                        run_once = templar.template(
                            task.run_once) or action and getattr(
                                action, 'BYPASS_HOST_LOOP', False)

                        if (task.any_errors_fatal
                                or run_once) and not task.ignore_errors:
                            any_errors_fatal = True

                        if not callback_sent:
                            display.debug(
                                "sending task start callback, copying the task so we can template it temporarily"
                            )
                            saved_name = task.name
                            display.debug(
                                "done copying, going to template now")
                            try:
                                task.name = text_type(
                                    templar.template(task.name,
                                                     fail_on_undefined=False))
                                display.debug("done templating")
                            except Exception:
                                # just ignore any errors during task name templating,
                                # we don't care if it just shows the raw name
                                display.debug(
                                    "templating failed for some reason")
                                pass
                            display.debug("here goes the callback...")
                            self._tqm.send_callback(
                                'v2_playbook_on_task_start',
                                task,
                                is_conditional=False)
                            task.name = saved_name
                            callback_sent = True
                            display.debug("sending task start callback")

                        self._blocked_hosts[host.get_name()] = True
                        self._queue_task(host, task, task_vars, play_context)

                    # if we're bypassing the host loop, break out now
                    if run_once:
                        break

                    results += self._process_pending_results(iterator,
                                                             one_pass=True)

                # go to next host/task group
                if skip_rest:
                    continue

                display.debug(
                    "done queuing things up, now waiting for results queue to drain"
                )
                results += self._wait_on_pending_results(iterator)
                host_results.extend(results)

                if not work_to_do and len(iterator.get_failed_hosts()) > 0:
                    display.debug("out of hosts to run on")
                    self._tqm.send_callback(
                        'v2_playbook_on_no_hosts_remaining')
                    result = self._tqm.RUN_ERROR
                    break

                try:
                    included_files = IncludedFile.process_include_results(
                        host_results,
                        self._tqm,
                        iterator=iterator,
                        inventory=self._inventory,
                        loader=self._loader,
                        variable_manager=self._variable_manager)
                except AnsibleError as e:
                    return self._tqm.RUN_ERROR

                include_failure = False
                if len(included_files) > 0:
                    display.debug("we have included files to process")
                    noop_task = Task()
                    noop_task.action = 'meta'
                    noop_task.args['_raw_params'] = 'noop'
                    noop_task.set_loader(iterator._play._loader)

                    display.debug("generating all_blocks data")
                    all_blocks = dict((host, []) for host in hosts_left)
                    display.debug("done generating all_blocks data")
                    for included_file in included_files:
                        display.debug("processing included file: %s" %
                                      included_file._filename)
                        # included hosts get the task list while those excluded get an equal-length
                        # list of noop tasks, to make sure that they continue running in lock-step
                        try:
                            new_blocks = self._load_included_file(
                                included_file, iterator=iterator)

                            display.debug(
                                "iterating over new_blocks loaded from include file"
                            )
                            for new_block in new_blocks:
                                task_vars = self._variable_manager.get_vars(
                                    loader=self._loader,
                                    play=iterator._play,
                                    task=included_file._task,
                                )
                                display.debug("filtering new block on tags")
                                final_block = new_block.filter_tagged_tasks(
                                    play_context, task_vars)
                                display.debug(
                                    "done filtering new block on tags")

                                noop_block = Block(parent_block=task._block)
                                noop_block.block = [
                                    noop_task for t in new_block.block
                                ]
                                noop_block.always = [
                                    noop_task for t in new_block.always
                                ]
                                noop_block.rescue = [
                                    noop_task for t in new_block.rescue
                                ]

                                for host in hosts_left:
                                    if host in included_file._hosts:
                                        all_blocks[host].append(final_block)
                                    else:
                                        all_blocks[host].append(noop_block)
                            display.debug(
                                "done iterating over new_blocks loaded from include file"
                            )

                        except AnsibleError as e:
                            for host in included_file._hosts:
                                self._tqm._failed_hosts[host.name] = True
                                iterator.mark_host_failed(host)
                            display.error(to_unicode(e), wrap_text=False)
                            include_failure = True
                            continue

                    # finally go through all of the hosts and append the
                    # accumulated blocks to their list of tasks
                    display.debug(
                        "extending task lists for all hosts with included blocks"
                    )

                    for host in hosts_left:
                        iterator.add_tasks(host, all_blocks[host])

                    display.debug("done extending task lists")
                    display.debug("done processing included files")

                display.debug("results queue empty")

                display.debug("checking for any_errors_fatal")
                failed_hosts = []
                unreachable_hosts = []
                for res in results:
                    if res.is_failed():
                        failed_hosts.append(res._host.name)
                    elif res.is_unreachable():
                        unreachable_hosts.append(res._host.name)

                # if any_errors_fatal and we had an error, mark all hosts as failed
                if any_errors_fatal and (len(failed_hosts) > 0
                                         or len(unreachable_hosts) > 0):
                    for host in hosts_left:
                        # don't double-mark hosts, or the iterator will potentially
                        # fail them out of the rescue/always states
                        if host.name not in failed_hosts:
                            self._tqm._failed_hosts[host.name] = True
                            iterator.mark_host_failed(host)
                    self._tqm.send_callback(
                        'v2_playbook_on_no_hosts_remaining')
                    return self._tqm.RUN_FAILED_BREAK_PLAY
                display.debug("done checking for any_errors_fatal")

                display.debug("checking for max_fail_percentage")
                if iterator._play.max_fail_percentage is not None and len(
                        results) > 0:
                    percentage = iterator._play.max_fail_percentage / 100.0

                    if (len(self._tqm._failed_hosts) /
                            len(results)) > percentage:
                        for host in hosts_left:
                            # don't double-mark hosts, or the iterator will potentially
                            # fail them out of the rescue/always states
                            if host.name not in failed_hosts:
                                self._tqm._failed_hosts[host.name] = True
                                iterator.mark_host_failed(host)
                        self._tqm.send_callback(
                            'v2_playbook_on_no_hosts_remaining')
                        return self._tqm.RUN_FAILED_BREAK_PLAY
                display.debug("done checking for max_fail_percentage")

            except (IOError, EOFError) as e:
                display.debug("got IOError/EOFError in task loop: %s" % e)
                # most likely an abort, return failed
                return self._tqm.RUN_UNKNOWN_ERROR

        # run the base class run() method, which executes the cleanup function
        # and runs any outstanding handlers which have been triggered

        return super(StrategyModule, self).run(iterator, play_context, result)
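
The noop padding in the strategy above is what preserves lockstep: every host must advance through the same number of task slots, so hosts not targeted by an included file get a block of identical shape whose block/rescue/always lists are filled with a shared noop task. A standalone sketch of the shape-matching, with plain lists and dicts standing in for Block and Task objects:

# shared noop task, mirroring the 'meta: noop' task the strategy builds
NOOP = {'action': 'meta', 'args': {'_raw_params': 'noop'}}

def noop_shadow(block, rescue, always):
    """Return noop lists of the same lengths, so excluded hosts consume
    exactly as many task slots as included hosts do."""
    return ([NOOP for _ in block],
            [NOOP for _ in rescue],
            [NOOP for _ in always])

b, r, a = noop_shadow(['t1', 't2'], ['r1'], [])
assert (len(b), len(r), len(a)) == (2, 1, 0)
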
Example #36
0
    def run(self, iterator, play_context):
        '''
        The linear strategy is simple - get the next task and queue
        it for all hosts, then wait for the queue to drain before
        moving on to the next task
        '''

        # iterate over each task while there is one left to run
        result = self._tqm.RUN_OK
        work_to_do = True
        while work_to_do and not self._tqm._terminated:

            try:
                display.debug("getting the remaining hosts for this loop")
                hosts_left = self.get_hosts_left(iterator)
                display.debug("done getting the remaining hosts for this loop")

                # queue up this task for each host in the inventory
                callback_sent = False
                work_to_do = False

                host_results = []
                host_tasks = self._get_next_task_lockstep(hosts_left, iterator)

                # skip control
                skip_rest = False
                choose_step = True

                # flag set if task is set to any_errors_fatal
                any_errors_fatal = False

                succeeded_hosts = {}
                retry_task = True

                while retry_task:

                    results = []
                    for (host, task) in host_tasks:
                        if not task:
                            continue

                        if self._tqm._terminated:
                            break

                        run_once = False
                        work_to_do = True

                        # test to see if the task across all hosts points to an action plugin which
                        # sets BYPASS_HOST_LOOP to true, or if it has run_once enabled. If so, we
                        # will only send this task to the first host in the list.

                        try:
                            action = action_loader.get(task.action,
                                                       class_only=True)
                        except KeyError:
                            # we don't care here, because the action may simply not have a
                            # corresponding action plugin
                            action = None

                        # check to see if this task should be skipped, due to it being a member of a
                        # role which has already run (and whether that role allows duplicate execution)
                        if task._role and task._role.has_run(host):
                            # If there is no metadata, the default behavior is to not allow duplicates,
                            # if there is metadata, check to see if the allow_duplicates flag was set to true
                            if task._role._metadata is None or task._role._metadata and not task._role._metadata.allow_duplicates:
                                display.debug(
                                    "'%s' skipped because role has already run"
                                    % task)
                                continue

                        if task.action == 'meta':
                            # for the linear strategy, we run meta tasks just once and for
                            # all hosts currently being iterated over rather than one host
                            results.extend(
                                self._execute_meta(task, play_context,
                                                   iterator, host))
                            if task.args.get('_raw_params', None) != 'noop':
                                run_once = True
                        else:
                            # handle step if needed, skip meta actions as they are used internally
                            if self._step and choose_step:
                                if self._take_step(task):
                                    choose_step = False
                                else:
                                    skip_rest = True
                                    break

                            display.debug("getting variables")
                            task_vars = self._variable_manager.get_vars(
                                play=iterator._play, host=host, task=task)
                            self.add_tqm_variables(task_vars,
                                                   play=iterator._play)
                            templar = Templar(loader=self._loader,
                                              variables=task_vars)
                            display.debug("done getting variables")

                            run_once = templar.template(
                                task.run_once) or action and getattr(
                                    action, 'BYPASS_HOST_LOOP', False)

                            if (task.any_errors_fatal
                                    or run_once) and not task.ignore_errors:
                                any_errors_fatal = True

                            if not callback_sent:
                                display.debug(
                                    "sending task start callback, copying the task so we can template it temporarily"
                                )
                                saved_name = task.name
                                display.debug(
                                    "done copying, going to template now")
                                try:
                                    task.name = to_text(templar.template(
                                        task.name, fail_on_undefined=False),
                                                        nonstring='empty')
                                    display.debug("done templating")
                                except Exception:
                                    # just ignore any errors during task name templating,
                                    # we don't care if it just shows the raw name
                                    display.debug(
                                        "templating failed for some reason")
                                    pass
                                display.debug("here goes the callback...")
                                self._tqm.send_callback(
                                    'v2_playbook_on_task_start',
                                    task,
                                    is_conditional=False)
                                task.name = saved_name
                                callback_sent = True
                                display.debug("sending task start callback")

                            if host not in succeeded_hosts:
                                self._blocked_hosts[host.get_name()] = True
                                self._queue_task(host, task, task_vars,
                                                 play_context)
                            del task_vars

                        # if we're bypassing the host loop, break out now
                        if run_once:
                            break

                        results += self._process_pending_results(
                            iterator,
                            max_passes=max(1,
                                           int(len(self._tqm._workers) * 0.1)))

                    # go to next host/task group
                    if skip_rest:
                        continue

                    display.debug(
                        "done queuing things up, now waiting for results queue to drain"
                    )
                    if self._pending_results > 0:
                        results += self._wait_on_pending_results(iterator)

                    host_results.extend(results)

                    try:
                        included_files = IncludedFile.process_include_results(
                            host_results,
                            self._tqm,
                            iterator=iterator,
                            inventory=self._inventory,
                            loader=self._loader,
                            variable_manager=self._variable_manager)
                    except AnsibleError as e:
                        # this is a fatal error, so we abort here regardless of block state
                        return self._tqm.RUN_ERROR

                    include_failure = False
                    if len(included_files) > 0:
                        display.debug("we have included files to process")

                        # A noop task for use in padding dynamic includes
                        noop_task = Task()
                        noop_task.action = 'meta'
                        noop_task.args['_raw_params'] = 'noop'
                        noop_task.set_loader(iterator._play._loader)

                        display.debug("generating all_blocks data")
                        all_blocks = dict((host, []) for host in hosts_left)
                        display.debug("done generating all_blocks data")
                        for included_file in included_files:
                            display.debug("processing included file: %s" %
                                          included_file._filename)
                            # included hosts get the task list while those excluded get an equal-length
                            # list of noop tasks, to make sure that they continue running in lock-step
                            try:
                                if included_file._is_role:
                                    new_ir = included_file._task.copy()
                                    new_ir.vars.update(included_file._args)

                                    new_blocks, handler_blocks = new_ir.get_block_list(
                                        play=iterator._play,
                                        variable_manager=self._variable_manager,
                                        loader=self._loader,
                                    )
                                    self._tqm.update_handler_list([
                                        handler
                                        for handler_block in handler_blocks
                                        for handler in handler_block.block
                                    ])
                                else:
                                    new_blocks = self._load_included_file(
                                        included_file, iterator=iterator)

                                display.debug(
                                    "iterating over new_blocks loaded from include file"
                                )
                                for new_block in new_blocks:
                                    task_vars = self._variable_manager.get_vars(
                                        play=iterator._play,
                                        task=included_file._task,
                                    )
                                    display.debug(
                                        "filtering new block on tags")
                                    final_block = new_block.filter_tagged_tasks(
                                        play_context, task_vars)
                                    display.debug(
                                        "done filtering new block on tags")

                                    noop_block = Block(
                                        parent_block=task._parent)
                                    noop_block.block = [
                                        noop_task for t in new_block.block
                                    ]
                                    noop_block.always = [
                                        noop_task for t in new_block.always
                                    ]
                                    noop_block.rescue = [
                                        noop_task for t in new_block.rescue
                                    ]

                                    for host in hosts_left:
                                        if host in included_file._hosts:
                                            all_blocks[host].append(
                                                final_block)
                                        else:
                                            all_blocks[host].append(noop_block)
                                display.debug(
                                    "done iterating over new_blocks loaded from include file"
                                )

                            except AnsibleError as e:
                                for host in included_file._hosts:
                                    self._tqm._failed_hosts[host.name] = True
                                    iterator.mark_host_failed(host)
                                display.error(to_text(e), wrap_text=False)
                                include_failure = True
                                continue

                        # finally go through all of the hosts and append the
                        # accumulated blocks to their list of tasks
                        display.debug(
                            "extending task lists for all hosts with included blocks"
                        )

                        for host in hosts_left:
                            iterator.add_tasks(host, all_blocks[host])

                        display.debug("done extending task lists")
                        display.debug("done processing included files")

                    display.debug("results queue empty")

                    display.debug("checking for any_errors_fatal")
                    failed_hosts = []
                    unreachable_hosts = []
                    for res in results:
                        if res.is_failed() and iterator.is_failed(res._host):
                            failed_hosts.append(res._host.name)
                        elif res.is_unreachable():
                            unreachable_hosts.append(res._host.name)
                        else:
                            succeeded_hosts[res._host] = res

                    if (len(failed_hosts) == 0):
                        # No more failures means we do not need to retry anymore.
                        retry_task = False
                    else:

                        msg = 'Host failed, ignore/retry/abort/debug: %s on [%s] (i/r/a/d): ' % (
                            str(task), ' '.join(failed_hosts)) + '\n'

                        resp = 'bogus'
                        while resp.strip().lower()[:1] not in [
                                'i', 'r', 'a', 'd'
                        ]:
                            resp = display.prompt(msg)

                        # The only difference between ignore and repeat is that ignore sets retry_task to False
                        if resp.strip().lower() in ['i', 'ignore']:
                            retry_task = False

                        if resp.strip().lower() in [
                                'r', 'repeat', 'i', 'ignore'
                        ]:
                            # We need to revert the internal failed host states if we want to ignore the errors
                            # The internal state should be identical to that of a successful host
                            for failed_host in failed_hosts:
                                iterator._host_states[
                                    failed_host].run_state = PlayIterator.ITERATING_TASKS
                                iterator._host_states[
                                    failed_host].fail_state = PlayIterator.FAILED_NONE

                                # Fix child states as well
                                if iterator._host_states[
                                        failed_host].tasks_child_state:
                                    iterator._host_states[
                                        failed_host].tasks_child_state.run_state = PlayIterator.ITERATING_TASKS
                                    iterator._host_states[
                                        failed_host].tasks_child_state.fail_state = PlayIterator.FAILED_NONE
                            self._tqm.clear_failed_hosts()

                        elif resp.strip().lower() in ['a', 'abort']:
                            retry_task = False

                            for host in hosts_left:
                                (s, _) = iterator.get_next_task_for_host(
                                    host, peek=True)
                                if s.run_state != iterator.ITERATING_RESCUE or \
                                   s.run_state == iterator.ITERATING_RESCUE and s.fail_state & iterator.FAILED_RESCUE != 0:
                                    self._tqm._failed_hosts[host.name] = True
                                    result |= self._tqm.RUN_FAILED_BREAK_PLAY

                                # don't double-mark hosts, or the iterator will potentially
                                # fail them out of the rescue/always states
                                if host.name not in failed_hosts:
                                    self._tqm._failed_hosts[host.name] = True
                                    iterator.mark_host_failed(host)
                            self._tqm.send_callback(
                                'v2_playbook_on_no_hosts_remaining')
                            result |= self._tqm.RUN_FAILED_BREAK_PLAY

                        elif resp.strip().lower() in ['d', 'debug']:

                            msg = "Running PDB.  Refer to https://pymotw.com/2/pdb/ for instruction.\n\n"
                            msg += "Commmon commands:\n\n"
                            msg += "Print the task's error message: print(failure_debug_message)\n"
                            msg += 'Add a variable to a host: host_to_edit = hosts_left[0]; task._variable_manager.set_host_variable(host_to_edit,"new_var_key","new_var_value")\n'
                            msg += 'Print task vars: print(task_vars["var_key_name"])\n'

                            display.display(msg,
                                            color='blue',
                                            stderr=False,
                                            screen_only=False,
                                            log_only=False)
                            pdb.set_trace()

            except (IOError, EOFError) as e:
                display.debug("got IOError/EOFError in task loop: %s" % e)
                # most likely an abort, return failed
                return self._tqm.RUN_UNKNOWN_ERROR

        # run the base class run() method, which executes the cleanup function
        # and runs any outstanding handlers which have been triggered

        return super(StrategyModule, self).run(iterator, play_context, result)
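
This variant's interactive error handler keeps prompting until the answer starts with i, r, a or d. A minimal standalone version of that validation loop; prompt_fn is injected here only so the sketch can run without a terminal:

def ask_failure_action(msg, prompt_fn=input):
    """Prompt until the answer starts with i/r/a/d; return that letter."""
    resp = 'bogus'
    while resp.strip().lower()[:1] not in ('i', 'r', 'a', 'd'):
        resp = prompt_fn(msg)
    return resp.strip().lower()[:1]

answers = iter(['x', '', 'Retry'])
print(ask_failure_action('(i/r/a/d): ', lambda msg: next(answers)))  # prints: r
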
Example #37
0
    def __init__(self,
                 inventory,
                 play,
                 play_context,
                 variable_manager,
                 all_vars,
                 start_at_done=False):
        self._play = play
        self._blocks = []
        self._variable_manager = variable_manager

        setup_block = Block(play=self._play)
        # Gathering facts with run_once would copy the facts from one host to
        # the others.
        setup_block.run_once = False
        setup_task = Task(block=setup_block)
        setup_task.action = 'gather_facts'
        # TODO: hardcoded resolution here, but should use actual resolution code in the end,
        #       in case of 'legacy' mismatch
        setup_task.resolved_action = 'ansible.builtin.gather_facts'
        setup_task.name = 'Gathering Facts'
        setup_task.args = {}

        # Unless play is specifically tagged, gathering should 'always' run
        if not self._play.tags:
            setup_task.tags = ['always']

        # Default options to gather
        for option in ('gather_subset', 'gather_timeout', 'fact_path'):
            value = getattr(self._play, option, None)
            if value is not None:
                setup_task.args[option] = value

        setup_task.set_loader(self._play._loader)
        # short circuit fact gathering if the entire playbook is conditional
        if self._play._included_conditional is not None:
            setup_task.when = self._play._included_conditional[:]
        setup_block.block = [setup_task]

        setup_block = setup_block.filter_tagged_tasks(all_vars)
        self._blocks.append(setup_block)

        # keep flatten (no blocks) list of all tasks from the play
        # used for the lockstep mechanism in the linear strategy
        self.all_tasks = setup_block.get_tasks()

        for block in self._play.compile():
            new_block = block.filter_tagged_tasks(all_vars)
            if new_block.has_tasks():
                self._blocks.append(new_block)
                self.all_tasks.extend(new_block.get_tasks())

        # keep list of all handlers, it is copied into each HostState
        # at the beginning of IteratingStates.HANDLERS
        # the copy happens at each flush in order to restore the original
        # list and remove any included handlers that might not be notified
        # at the particular flush
        self.handlers = [h for b in self._play.handlers for h in b.block]

        self._host_states = {}
        start_at_matched = False
        batch = inventory.get_hosts(self._play.hosts, order=self._play.order)
        self.batch_size = len(batch)
        for host in batch:
            self.set_state_for_host(host.name, HostState(blocks=self._blocks))
            # if we're looking to start at a specific task, iterate through
            # the tasks for this host until we find the specified task
            if play_context.start_at_task is not None and not start_at_done:
                while True:
                    (s, task) = self.get_next_task_for_host(host, peek=True)
                    if s.run_state == IteratingStates.COMPLETE:
                        break
                    if task.name == play_context.start_at_task or (task.name and fnmatch.fnmatch(task.name, play_context.start_at_task)) or \
                       task.get_name() == play_context.start_at_task or fnmatch.fnmatch(task.get_name(), play_context.start_at_task):
                        start_at_matched = True
                        break
                    self.set_state_for_host(host.name, s)

                # finally, reset the host's state to IteratingStates.SETUP
                if start_at_matched:
                    self._host_states[host.name].did_start_at_task = True
                    self._host_states[
                        host.name].run_state = IteratingStates.SETUP

        if start_at_matched:
            # we have our match, so clear the start_at_task field on the
            # play context to flag that we've started at a task (and future
            # plays won't try to advance)
            play_context.start_at_task = None

        self.end_play = False
        self.cur_task = 0
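
The start_at_task comparison above accepts either an exact task name or an fnmatch-style glob, checked against both task.name and task.get_name(). A standalone sketch of just the matching rule:

import fnmatch

def matches_start_at(name, pattern):
    """True when a task name matches --start-at-task, exactly or as a glob."""
    return bool(name) and (name == pattern or fnmatch.fnmatch(name, pattern))

print(matches_start_at('Gathering Facts', 'Gather*'))  # prints: True
print(matches_start_at('Install nginx', 'Gather*'))    # prints: False
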
Example #38
0
    def run(self, iterator, connection_info):
        '''
        The linear strategy is simple - get the next task and queue
        it for all hosts, then wait for the queue to drain before
        moving on to the next task
        '''

        # iterate over each task while there is one left to run
        result     = True
        work_to_do = True
        while work_to_do and not self._tqm._terminated:

            try:
                debug("getting the remaining hosts for this loop")
                hosts_left = self._inventory.get_hosts(iterator._play.hosts)
                debug("done getting the remaining hosts for this loop")

                # queue up this task for each host in the inventory
                callback_sent = False
                work_to_do = False

                host_results = []
                host_tasks = self._get_next_task_lockstep(hosts_left, iterator)

                for (host, task) in host_tasks:
                    if not task:
                        continue

                    run_once = False
                    work_to_do = True

                    # test to see if the task across all hosts points to an action plugin which
                    # sets BYPASS_HOST_LOOP to true, or if it has run_once enabled. If so, we
                    # will only send this task to the first host in the list.

                    try:
                        action = action_loader.get(task.action, class_only=True)
                        if task.run_once or getattr(action, 'BYPASS_HOST_LOOP', False):
                            run_once = True
                    except KeyError:
                        # we don't care here, because the action may simply not have a
                        # corresponding action plugin
                        pass

                    # check to see if this task should be skipped, due to it being a member of a
                    # role which has already run (and whether that role allows duplicate execution)
                    if task._role and task._role.has_run():
                        # If there is no metadata, the default behavior is to not allow duplicates,
                        # if there is metadata, check to see if the allow_duplicates flag was set to true
                        if task._role._metadata is None or task._role._metadata and not task._role._metadata.allow_duplicates:
                            debug("'%s' skipped because role has already run" % task)
                            continue

                    if task.action == 'meta':
                        # meta tasks store their args in the _raw_params field of args,
                        # since they do not use k=v pairs, so get that
                        meta_action = task.args.get('_raw_params')
                        if meta_action == 'noop':
                            # FIXME: issue a callback for the noop here?
                            continue
                        elif meta_action == 'flush_handlers':
                            self.run_handlers(iterator, connection_info)
                        else:
                            raise AnsibleError("invalid meta action requested: %s" % meta_action, obj=task._ds)
                    else:
                        debug("getting variables")
                        task_vars = self._variable_manager.get_vars(loader=self._loader, play=iterator._play, host=host, task=task)
                        task_vars = self.add_tqm_variables(task_vars, play=iterator._play)
                        templar = Templar(loader=self._loader, variables=task_vars)
                        debug("done getting variables")

                        if not callback_sent:
                            temp_task = task.copy()
                            temp_task.name = templar.template(temp_task.get_name(), fail_on_undefined=False)
                            self._tqm.send_callback('v2_playbook_on_task_start', temp_task, is_conditional=False)
                            callback_sent = True

                        self._blocked_hosts[host.get_name()] = True

                        # Code for distributed Ansible.
                        self._pending_results += 1
                        dtask = {}
                        (worker_prc, main_q, rslt_q) = self._workers[self._cur_worker]
                        t_uuid              = task._uuid
                        task._uuid          = None
                        dtask['host']       = jsonpickle.encode(host)
                        dtask['task_vars']  = jsonpickle.encode(task_vars)
                        dtask['task']       = jsonpickle.encode(task)
                        dtask['conn_info']  = jsonpickle.encode(connection_info)
                        dtask['base_dir']   = self._loader.get_basedir()
                        # dtask['uuid']       = t_uuid
                        final_task          = json.dumps(dtask)
                        remote_task         = TaskRpcClient()
                        task_response        = remote_task.put(final_task)
                    
                        # self._queue_task(host, task, task_vars, connection_info)

                        task_result = json.loads(task_response)
                        rslt_q.put(jsonpickle.decode(task_result), block=False)
                    results = self._process_pending_results(iterator)
                    host_results.extend(results)

                    # if we're bypassing the host loop, break out now
                    if run_once:
                        break

                debug("done queuing things up, now waiting for results queue to drain")
                results = self._wait_on_pending_results(iterator)
                host_results.extend(results)

                if not work_to_do and len(iterator.get_failed_hosts()) > 0:
                    debug("out of hosts to run on")
                    self._tqm.send_callback('v2_playbook_on_no_hosts_remaining')
                    result = False
                    break

                try:
                    included_files = IncludedFile.process_include_results(host_results, self._tqm, iterator=iterator, loader=self._loader, variable_manager=self._variable_manager)
                except AnsibleError as e:
                    return False

                if len(included_files) > 0:
                    noop_task = Task()
                    noop_task.action = 'meta'
                    noop_task.args['_raw_params'] = 'noop'
                    noop_task.set_loader(iterator._play._loader)

                    all_blocks = dict((host, []) for host in hosts_left)
                    for included_file in included_files:
                        # included hosts get the task list while those excluded get an equal-length
                        # list of noop tasks, to make sure that they continue running in lock-step
                        try:
                            new_blocks = self._load_included_file(included_file, iterator=iterator)
                        except AnsibleError as e:
                            for host in included_file._hosts:
                                iterator.mark_host_failed(host)
                            # FIXME: callback here?
                            print(e)
                            continue

                        for new_block in new_blocks:
                            noop_block = Block(parent_block=task._block)
                            noop_block.block  = [noop_task for t in new_block.block]
                            noop_block.always = [noop_task for t in new_block.always]
                            noop_block.rescue = [noop_task for t in new_block.rescue]
                            for host in hosts_left:
                                if host in included_file._hosts:
                                    task_vars = self._variable_manager.get_vars(loader=self._loader, play=iterator._play, host=host, task=included_file._task)
                                    final_block = new_block.filter_tagged_tasks(connection_info, task_vars)
                                    all_blocks[host].append(final_block)
                                else:
                                    all_blocks[host].append(noop_block)

                    for host in hosts_left:
                        iterator.add_tasks(host, all_blocks[host])

                debug("results queue empty")
Example #39
0
    def run(self, iterator, play_context):
        '''
        The linear strategy is simple - get the next task and queue
        it for all hosts, then wait for the queue to drain before
        moving on to the next task
        '''

        # iterate over each task while there is one left to run
        result = self._tqm.RUN_OK
        work_to_do = True
        while work_to_do and not self._tqm._terminated:

            try:
                display.debug("getting the remaining hosts for this loop")
                hosts_left = [host for host in self._inventory.get_hosts(iterator._play.hosts) if host.name not in self._tqm._unreachable_hosts]
                display.debug("done getting the remaining hosts for this loop")

                # queue up this task for each host in the inventory
                callback_sent = False
                work_to_do = False

                host_results = []
                host_tasks = self._get_next_task_lockstep(hosts_left, iterator)

                # skip control
                skip_rest   = False
                choose_step = True

                # flag set if task is set to any_errors_fatal
                any_errors_fatal = False

                results = []
                for (host, task) in host_tasks:
                    if not task:
                        continue

                    if self._tqm._terminated:
                        break

                    run_once = False
                    work_to_do = True

                    # test to see if the task across all hosts points to an action plugin which
                    # sets BYPASS_HOST_LOOP to true, or if it has run_once enabled. If so, we
                    # will only send this task to the first host in the list.

                    try:
                        action = action_loader.get(task.action, class_only=True)
                    except KeyError:
                        # we don't care here, because the action may simply not have a
                        # corresponding action plugin
                        action = None

                    # check to see if this task should be skipped, due to it being a member of a
                    # role which has already run (and whether that role allows duplicate execution)
                    if task._role and task._role.has_run(host):
                        # If there is no metadata, the default behavior is to not allow duplicates,
                        # if there is metadata, check to see if the allow_duplicates flag was set to true
                        if task._role._metadata is None or task._role._metadata and not task._role._metadata.allow_duplicates:
                            display.debug("'%s' skipped because role has already run" % task)
                            continue

                    if task.action == 'meta':
                        # for the linear strategy, we run meta tasks just once and for
                        # all hosts currently being iterated over rather than one host
                        results.extend(self._execute_meta(task, play_context, iterator))
                        if task.args.get('_raw_params', None) != 'noop':
                            run_once = True
                    else:
                        # handle step if needed, skip meta actions as they are used internally
                        if self._step and choose_step:
                            if self._take_step(task):
                                choose_step = False
                            else:
                                skip_rest = True
                                break

                        display.debug("getting variables")
                        task_vars = self._variable_manager.get_vars(loader=self._loader, play=iterator._play, host=host, task=task)
                        self.add_tqm_variables(task_vars, play=iterator._play)
                        templar = Templar(loader=self._loader, variables=task_vars)
                        display.debug("done getting variables")

                        run_once = templar.template(task.run_once) or action and getattr(action, 'BYPASS_HOST_LOOP', False)

                        if (task.any_errors_fatal or run_once) and not task.ignore_errors:
                            any_errors_fatal = True

                        if not callback_sent:
                            display.debug("sending task start callback, copying the task so we can template it temporarily")
                            saved_name = task.name
                            display.debug("done copying, going to template now")
                            try:
                                task.name = to_text(templar.template(task.name, fail_on_undefined=False), nonstring='empty')
                                display.debug("done templating")
                            except Exception:
                                # just ignore any errors during task name templating,
                                # we don't care if it just shows the raw name
                                display.debug("templating failed for some reason")
                                pass
                            display.debug("here goes the callback...")
                            self._tqm.send_callback('v2_playbook_on_task_start', task, is_conditional=False)
                            task.name = saved_name
                            callback_sent = True
                            display.debug("sending task start callback")

                        self._blocked_hosts[host.get_name()] = True
                        self._queue_task(host, task, task_vars, play_context)
                        del task_vars

                    # if we're bypassing the host loop, break out now
                    if run_once:
                        break

                    results += self._process_pending_results(iterator, max_passes=max(1, int(len(self._tqm._workers) * 0.1)))

                # go to next host/task group
                if skip_rest:
                    continue

                display.debug("done queuing things up, now waiting for results queue to drain")
                if self._pending_results > 0:
                    results += self._wait_on_pending_results(iterator)
                host_results.extend(results)

                all_role_blocks = []
                for hr in results:
                    # handle include_role
                    if hr._task.action == 'include_role':
                        loop_var = None
                        if hr._task.loop:
                            loop_var = 'item'
                            if hr._task.loop_control:
                                loop_var = hr._task.loop_control.loop_var or 'item'
                            include_results = hr._result.get('results', [])
                        else:
                            include_results = [hr._result]

                        for include_result in include_results:
                            if 'skipped' in include_result and include_result['skipped'] or 'failed' in include_result and include_result['failed']:
                                continue

                            display.debug("generating all_blocks data for role")
                            new_ir = hr._task.copy()
                            new_ir.vars.update(include_result.get('include_variables', dict()))
                            if loop_var and loop_var in include_result:
                                new_ir.vars[loop_var] = include_result[loop_var]

                            all_role_blocks.extend(new_ir.get_block_list(play=iterator._play, variable_manager=self._variable_manager, loader=self._loader))

                if len(all_role_blocks) > 0:
                    for host in hosts_left:
                        iterator.add_tasks(host, all_role_blocks)

                try:
                    included_files = IncludedFile.process_include_results(
                        host_results,
                        self._tqm,
                        iterator=iterator,
                        inventory=self._inventory,
                        loader=self._loader,
                        variable_manager=self._variable_manager
                    )
                except AnsibleError as e:
                    # this is a fatal error, so we abort here regardless of block state
                    return self._tqm.RUN_ERROR

                include_failure = False
                if len(included_files) > 0:
                    display.debug("we have included files to process")
                    noop_task = Task()
                    noop_task.action = 'meta'
                    noop_task.args['_raw_params'] = 'noop'
                    noop_task.set_loader(iterator._play._loader)

                    display.debug("generating all_blocks data")
                    all_blocks = dict((host, []) for host in hosts_left)
                    display.debug("done generating all_blocks data")
                    for included_file in included_files:
                        display.debug("processing included file: %s" % included_file._filename)
                        # included hosts get the task list while those excluded get an equal-length
                        # list of noop tasks, to make sure that they continue running in lock-step
                        try:
                            new_blocks = self._load_included_file(included_file, iterator=iterator)

                            display.debug("iterating over new_blocks loaded from include file")
                            for new_block in new_blocks:
                                task_vars = self._variable_manager.get_vars(
                                    loader=self._loader,
                                    play=iterator._play,
                                    task=included_file._task,
                                )
                                display.debug("filtering new block on tags")
                                final_block = new_block.filter_tagged_tasks(play_context, task_vars)
                                display.debug("done filtering new block on tags")

                                noop_block = Block(parent_block=task._parent)
                                noop_block.block  = [noop_task for t in new_block.block]
                                noop_block.always = [noop_task for t in new_block.always]
                                noop_block.rescue = [noop_task for t in new_block.rescue]

                                for host in hosts_left:
                                    if host in included_file._hosts:
                                        all_blocks[host].append(final_block)
                                    else:
                                        all_blocks[host].append(noop_block)
                            display.debug("done iterating over new_blocks loaded from include file")

                        except AnsibleError as e:
                            for host in included_file._hosts:
                                self._tqm._failed_hosts[host.name] = True
                                iterator.mark_host_failed(host)
                            display.error(to_text(e), wrap_text=False)
                            include_failure = True
                            continue

                    # finally go through all of the hosts and append the
                    # accumulated blocks to their list of tasks
                    display.debug("extending task lists for all hosts with included blocks")

                    for host in hosts_left:
                        iterator.add_tasks(host, all_blocks[host])

                    display.debug("done extending task lists")
                    display.debug("done processing included files")

                display.debug("results queue empty")

                display.debug("checking for any_errors_fatal")
                failed_hosts = []
                unreachable_hosts = []
                for res in results:
                    if res.is_failed():
                        failed_hosts.append(res._host.name)
                    elif res.is_unreachable():
                        unreachable_hosts.append(res._host.name)

                # if any_errors_fatal and we had an error, mark all hosts as failed
                if any_errors_fatal and (len(failed_hosts) > 0 or len(unreachable_hosts) > 0):
                    for host in hosts_left:
                        (s, _) = iterator.get_next_task_for_host(host, peek=True)
                        if s.run_state != iterator.ITERATING_RESCUE or \
                           (s.run_state == iterator.ITERATING_RESCUE and s.fail_state & iterator.FAILED_RESCUE != 0):
                            self._tqm._failed_hosts[host.name] = True
                            result |= self._tqm.RUN_FAILED_BREAK_PLAY
                display.debug("done checking for any_errors_fatal")

                display.debug("checking for max_fail_percentage")
                if iterator._play.max_fail_percentage is not None and len(results) > 0:
                    percentage = iterator._play.max_fail_percentage / 100.0

                    if (len(self._tqm._failed_hosts) / len(results)) > percentage:
                        for host in hosts_left:
                            # don't double-mark hosts, or the iterator will potentially
                            # fail them out of the rescue/always states
                            if host.name not in failed_hosts:
                                self._tqm._failed_hosts[host.name] = True
                                iterator.mark_host_failed(host)
                        self._tqm.send_callback('v2_playbook_on_no_hosts_remaining')
                        result |= self._tqm.RUN_FAILED_BREAK_PLAY
                display.debug("done checking for max_fail_percentage")

                display.debug("checking to see if all hosts have failed and the running result is not ok")
                if result != self._tqm.RUN_OK and len(self._tqm._failed_hosts) >= len(hosts_left):
                    display.debug("^ not ok, so returning result now")
                    self._tqm.send_callback('v2_playbook_on_no_hosts_remaining')
                    return result
                display.debug("done checking to see if all hosts have failed")

            except (IOError, EOFError) as e:
                display.debug("got IOError/EOFError in task loop: %s" % e)
                # most likely an abort, return failed
                return self._tqm.RUN_UNKNOWN_ERROR

        # run the base class run() method, which executes the cleanup function
        # and runs any outstanding handlers which have been triggered

        return super(StrategyModule, self).run(iterator, play_context, result)
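Below is a minimal, self-contained sketch of the noop-padding idea used above: hosts that were not part of the include get a block of the same shape, filled with no-op tasks, so every host consumes the same number of iterator steps and stays in lockstep. SimpleTask and SimpleBlock are illustrative stand-ins, not Ansible's real classes.

class SimpleTask:
    def __init__(self, action):
        self.action = action

class SimpleBlock:
    def __init__(self, block=None, rescue=None, always=None):
        self.block = block or []
        self.rescue = rescue or []
        self.always = always or []

def make_noop_block(real_block):
    noop = SimpleTask('noop')
    # one noop per real task keeps per-host task counts identical
    return SimpleBlock(
        block=[noop for _ in real_block.block],
        rescue=[noop for _ in real_block.rescue],
        always=[noop for _ in real_block.always],
    )

real = SimpleBlock(block=[SimpleTask('copy'), SimpleTask('template')],
                   rescue=[SimpleTask('debug')])
padded = make_noop_block(real)
assert len(padded.block) == 2 and len(padded.rescue) == 1 and len(padded.always) == 0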
Example #40
    def __init__(self, inventory, play, play_context, variable_manager, all_vars, start_at_done=False):
        self._play = play
        self._blocks = []
        self._variable_manager = variable_manager

        self._task_uuid_cache = dict()

        # Default options to gather
        gather_subset = play_context.gather_subset
        gather_timeout = play_context.gather_timeout
        fact_path = play_context.fact_path

        # Retrieve subset to gather
        if self._play.gather_subset is not None:
            gather_subset = self._play.gather_subset
        # Retrieve timeout for gather
        if self._play.gather_timeout is not None:
            gather_timeout = self._play.gather_timeout
        # Retrieve fact_path
        if self._play.fact_path is not None:
            fact_path = self._play.fact_path

        setup_block = Block(play=self._play)
        setup_task = Task(block=setup_block)
        setup_task.action = 'setup'
        setup_task.name = 'Gathering Facts'
        setup_task.tags = ['always']
        setup_task.args = {
            'gather_subset': gather_subset,
        }
        if gather_timeout:
            setup_task.args['gather_timeout'] = gather_timeout
        if fact_path:
            setup_task.args['fact_path'] = fact_path
        setup_task.set_loader(self._play._loader)
        # short circuit fact gathering if the entire playbook is conditional
        if self._play._included_conditional is not None:
            setup_task.when = self._play._included_conditional[:]
        setup_block.block = [setup_task]

        setup_block = setup_block.filter_tagged_tasks(play_context, all_vars)
        self._blocks.append(setup_block)
        self.cache_block_tasks(setup_block)

        for block in self._play.compile():
            new_block = block.filter_tagged_tasks(play_context, all_vars)
            if new_block.has_tasks():
                self.cache_block_tasks(new_block)
                self._blocks.append(new_block)

        for handler_block in self._play.handlers:
            self.cache_block_tasks(handler_block)

        self._host_states = {}
        start_at_matched = False
        for host in inventory.get_hosts(self._play.hosts):
            self._host_states[host.name] = HostState(blocks=self._blocks)
            # if we're looking to start at a specific task, iterate through
            # the tasks for this host until we find the specified task
            if play_context.start_at_task is not None and not start_at_done:
                while True:
                    (s, task) = self.get_next_task_for_host(host, peek=True)
                    if s.run_state == self.ITERATING_COMPLETE:
                        break
                    if task.name == play_context.start_at_task or fnmatch.fnmatch(task.name, play_context.start_at_task) or \
                       task.get_name() == play_context.start_at_task or fnmatch.fnmatch(task.get_name(), play_context.start_at_task):
                        start_at_matched = True
                        break
                    else:
                        self.get_next_task_for_host(host)

                # finally, reset the host's state to ITERATING_SETUP
                if start_at_matched:
                    self._host_states[host.name].did_start_at_task = True
                    self._host_states[host.name].run_state = self.ITERATING_SETUP

        if start_at_matched:
            # we have our match, so clear the start_at_task field on the
            # play context to flag that we've started at a task (and future
            # plays won't try to advance)
            play_context.start_at_task = None
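The --start-at-task matching condition above is easier to read in isolation; a runnable sketch of it (the task names here are made up):

import fnmatch

def matches_start_at(task_name, qualified_name, start_at):
    # a task matches by exact name or fnmatch glob, against both the short
    # name and the role-qualified form returned by get_name()
    return (task_name == start_at
            or fnmatch.fnmatch(task_name, start_at)
            or qualified_name == start_at
            or fnmatch.fnmatch(qualified_name, start_at))

assert matches_start_at('Install nginx', 'web : Install nginx', 'Install *')
assert matches_start_at('Install nginx', 'web : Install nginx', 'web : Install nginx')
assert not matches_start_at('Install nginx', 'web : Install nginx', 'Configure *')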
Example #41
    def __init__(self, inventory, play, play_context, variable_manager, all_vars, start_at_done=False):
        self._play = play
        self._blocks = []

        self._task_uuid_cache = dict()

        # Default options to gather
        gather_subset = C.DEFAULT_GATHER_SUBSET
        gather_timeout = C.DEFAULT_GATHER_TIMEOUT

        # Retrieve subset to gather
        if self._play.gather_subset is not None:
            gather_subset = self._play.gather_subset
        # Retrieve timeout for gather
        if self._play.gather_timeout is not None:
            gather_timeout = self._play.gather_timeout

        setup_block = Block(play=self._play)
        setup_task = Task(block=setup_block)
        setup_task.action = "setup"
        setup_task.name = "Gathering Facts"
        setup_task.tags = ["always"]
        setup_task.args = {"gather_subset": gather_subset}
        if gather_timeout:
            setup_task.args["gather_timeout"] = gather_timeout
        setup_task.set_loader(self._play._loader)
        setup_block.block = [setup_task]

        setup_block = setup_block.filter_tagged_tasks(play_context, all_vars)
        self._blocks.append(setup_block)
        self.cache_block_tasks(setup_block)

        for block in self._play.compile():
            new_block = block.filter_tagged_tasks(play_context, all_vars)
            if new_block.has_tasks():
                self.cache_block_tasks(new_block)
                self._blocks.append(new_block)

        for handler_block in self._play.handlers:
            self.cache_block_tasks(handler_block)

        self._host_states = {}
        start_at_matched = False
        for host in inventory.get_hosts(self._play.hosts):
            self._host_states[host.name] = HostState(blocks=self._blocks)
            # if the host's name is in the variable manager's fact cache, then set
            # its _gathered_facts flag to true for smart gathering tests later
            if host.name in variable_manager._fact_cache and variable_manager._fact_cache[host.name].get("module_setup", False):
                host._gathered_facts = True
            # if we're looking to start at a specific task, iterate through
            # the tasks for this host until we find the specified task
            if play_context.start_at_task is not None and not start_at_done:
                while True:
                    (s, task) = self.get_next_task_for_host(host, peek=True)
                    if s.run_state == self.ITERATING_COMPLETE:
                        break
                    if (
                        task.name == play_context.start_at_task
                        or fnmatch.fnmatch(task.name, play_context.start_at_task)
                        or task.get_name() == play_context.start_at_task
                        or fnmatch.fnmatch(task.get_name(), play_context.start_at_task)
                    ):
                        start_at_matched = True
                        break
                    else:
                        self.get_next_task_for_host(host)

                # finally, reset the host's state to ITERATING_SETUP
                if start_at_matched:
                    self._host_states[host.name].did_start_at_task = True
                    self._host_states[host.name].run_state = self.ITERATING_SETUP

        if start_at_matched:
            # we have our match, so clear the start_at_task field on the
            # play context to flag that we've started at a task (and future
            # plays won't try to advance)
            play_context.start_at_task = None
Example #42
def test_collection_invalid_data_block():
    """Test that collection as a dict at the block level fails with parser error"""
    collection_name = {'name': 'foo'}
    with pytest.raises(AnsibleParserError):
        Block.load(
            dict(block=[dict(name="test task", collections=collection_name)]))
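A standalone sketch of the shape rule the test exercises: collections must be a list of names, so a dict is rejected. This mirrors the check only and is not Ansible's actual parser:

def validate_collections(value):
    # a list of strings is acceptable; anything else (e.g. the dict used
    # in the test above) is treated as a parse error
    if value is None:
        return []
    if not isinstance(value, list) or not all(isinstance(c, str) for c in value):
        raise ValueError('collections must be a list of strings, got %r' % (value,))
    return value

assert validate_collections(['community.general']) == ['community.general']
try:
    validate_collections({'name': 'foo'})  # the invalid shape from the test
except ValueError:
    pass
else:
    raise AssertionError('dict should have been rejected')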
Example #43
    def run(self, iterator, connection_info):
        '''
        The linear strategy is simple - get the next task and queue
        it for all hosts, then wait for the queue to drain before
        moving on to the next task
        '''

        result = True

        # iterate over each task, while there is one left to run
        work_to_do = True
        while work_to_do and not self._tqm._terminated:

            try:
                debug("getting the remaining hosts for this loop")
                self._tqm._failed_hosts = iterator.get_failed_hosts()
                hosts_left = self.get_hosts_remaining(iterator._play)
                debug("done getting the remaining hosts for this loop")
                if len(hosts_left) == 0:
                    debug("out of hosts to run on")
                    self._tqm.send_callback('v2_playbook_on_no_hosts_remaining')
                    result = False
                    break

                # queue up this task for each host in the inventory
                callback_sent = False
                work_to_do = False

                host_results = []
                host_tasks = self._get_next_task_lockstep(hosts_left, iterator)
                for (host, task) in host_tasks:
                    if not task:
                        continue

                    run_once = False
                    work_to_do = True

                    # test to see if the task across all hosts points to an action plugin which
                    # sets BYPASS_HOST_LOOP to true, or if it has run_once enabled. If so, we
                    # will only send this task to the first host in the list.

                    try:
                        action = action_loader.get(task.action, class_only=True)
                        if task.run_once or getattr(action, 'BYPASS_HOST_LOOP', False):
                            run_once = True
                    except KeyError:
                        # we don't care here, because the action may simply not have a
                        # corresponding action plugin
                        pass

                    debug("getting variables")
                    task_vars = self._variable_manager.get_vars(loader=self._loader, play=iterator._play, host=host, task=task)
                    debug("done getting variables")

                    # check to see if this task should be skipped, due to it being a member of a
                    # role which has already run (and whether that role allows duplicate execution)
                    if task._role and task._role.has_run():
                        # If there is no metadata, the default behavior is to not allow duplicates,
                        # if there is metadata, check to see if the allow_duplicates flag was set to true
                        if task._role._metadata is None or (task._role._metadata and not task._role._metadata.allow_duplicates):
                            debug("'%s' skipped because role has already run" % task)
                            continue

                    if task.action == 'meta':
                        # meta tasks store their args in the _raw_params field of args,
                        # since they do not use k=v pairs, so get that
                        meta_action = task.args.get('_raw_params')
                        if meta_action == 'noop':
                            # FIXME: issue a callback for the noop here?
                            continue
                        elif meta_action == 'flush_handlers':
                            self.run_handlers(iterator, connection_info)
                        else:
                            raise AnsibleError("invalid meta action requested: %s" % meta_action, obj=task._ds)
                    else:
                        if not callback_sent:
                            self._tqm.send_callback('v2_playbook_on_task_start', task, is_conditional=False)
                            callback_sent = True

                        self._blocked_hosts[host.get_name()] = True
                        self._queue_task(host, task, task_vars, connection_info)

                    results = self._process_pending_results(iterator)
                    host_results.extend(results)

                    # if we're bypassing the host loop, break out now
                    if run_once:
                        break

                debug("done queuing things up, now waiting for results queue to drain")
                results = self._wait_on_pending_results(iterator)
                host_results.extend(results)

                # FIXME: this needs to be somewhere else
                class IncludedFile:
                    def __init__(self, filename, args, task):
                        self._filename = filename
                        self._args     = args
                        self._task     = task
                        self._hosts    = []
                    def add_host(self, host):
                        if host not in self._hosts:
                            self._hosts.append(host)
                    def __eq__(self, other):
                        return other._filename == self._filename and other._args == self._args
                    def __repr__(self):
                        return "%s (%s): %s" % (self._filename, self._args, self._hosts)

                # FIXME: this should also be moved to the base class in a method
                included_files = []
                for res in host_results:
                    if res._task.action == 'include':
                        if res._task.loop:
                            include_results = res._result['results']
                        else:
                            include_results = [ res._result ]

                        for include_result in include_results:
                            # if the task result was skipped or failed, continue
                            if ('skipped' in include_result and include_result['skipped']) or 'failed' in include_result:
                                continue

                            original_task = iterator.get_original_task(res._host, res._task)
                            if original_task and original_task._role:
                                include_file = self._loader.path_dwim_relative(original_task._role._role_path, 'tasks', include_result['include'])
                            else:
                                include_file = self._loader.path_dwim(res._task.args.get('_raw_params'))

                            include_variables = include_result.get('include_variables', dict())
                            if 'item' in include_result:
                                include_variables['item'] = include_result['item']

                            inc_file = IncludedFile(include_file, include_variables, original_task)

                            try:
                                pos = included_files.index(inc_file)
                                inc_file = included_files[pos]
                            except ValueError:
                                included_files.append(inc_file)

                            inc_file.add_host(res._host)

                # FIXME: should this be moved into the iterator class? Main downside would be
                #        that accessing the TQM's callback member would be more difficult, if
                #        we do want to send callbacks from here
                if len(included_files) > 0:
                    noop_task = Task()
                    noop_task.action = 'meta'
                    noop_task.args['_raw_params'] = 'noop'
                    noop_task.set_loader(iterator._play._loader)

                    all_blocks = dict((host, []) for host in hosts_left)
                    for included_file in included_files:
                        # included hosts get the task list while those excluded get an equal-length
                        # list of noop tasks, to make sure that they continue running in lock-step
                        try:
                            new_blocks = self._load_included_file(included_file)
                        except AnsibleError as e:
                            for host in included_file._hosts:
                                iterator.mark_host_failed(host)
                            # FIXME: callback here?
                            print(e)
                            continue

                        for new_block in new_blocks:
                            noop_block = Block(parent_block=task._block)
                            noop_block.block  = [noop_task for t in new_block.block]
                            noop_block.always = [noop_task for t in new_block.always]
                            noop_block.rescue = [noop_task for t in new_block.rescue]
                            for host in hosts_left:
                                if host in included_file._hosts:
                                    task_vars = self._variable_manager.get_vars(loader=self._loader, play=iterator._play, host=host, task=included_file._task)
                                    final_block = new_block.filter_tagged_tasks(connection_info, task_vars)
                                    all_blocks[host].append(final_block)
                                else:
                                    all_blocks[host].append(noop_block)

                    for host in hosts_left:
                        iterator.add_tasks(host, all_blocks[host])

                debug("results queue empty")
            except (IOError, EOFError) as e:
                debug("got IOError/EOFError in task loop: %s" % e)
                # most likely an abort, return failed
                return 1
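The ad-hoc IncludedFile class above deduplicates include files through __eq__ plus list.index: equal includes (same filename and args) are merged so each file is loaded once, while the hosts that hit it accumulate on the shared instance. The same pattern in a runnable, self-contained form (Inc is a stand-in name):

class Inc:
    def __init__(self, filename, args):
        self.filename, self.args, self.hosts = filename, args, []
    def __eq__(self, other):
        # two includes are "the same" when file and args match
        return (self.filename, self.args) == (other.filename, other.args)
    def add_host(self, host):
        if host not in self.hosts:
            self.hosts.append(host)

included = []
for host, filename in [('a', 'x.yml'), ('b', 'x.yml'), ('a', 'y.yml')]:
    inc = Inc(filename, {})
    try:
        inc = included[included.index(inc)]  # reuse the existing entry
    except ValueError:
        included.append(inc)
    inc.add_host(host)

assert [i.hosts for i in included] == [['a', 'b'], ['a']]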
Example #44
    def __init__(self,
                 inventory,
                 play,
                 play_context,
                 variable_manager,
                 all_vars,
                 start_at_done=False):
        self._play = play
        self._blocks = []
        self._variable_manager = variable_manager

        # Default options to gather
        gather_subset = self._play.gather_subset
        gather_timeout = self._play.gather_timeout
        fact_path = self._play.fact_path

        setup_block = Block(play=self._play)
        # Gathering facts with run_once would copy the facts from one host to
        # the others.
        setup_block.run_once = False
        setup_task = Task(block=setup_block)
        setup_task.action = 'gather_facts'
        setup_task.name = 'Gathering Facts'
        setup_task.args = {
            'gather_subset': gather_subset,
        }

        # Unless play is specifically tagged, gathering should 'always' run
        if not self._play.tags:
            setup_task.tags = ['always']

        if gather_timeout:
            setup_task.args['gather_timeout'] = gather_timeout
        if fact_path:
            setup_task.args['fact_path'] = fact_path
        setup_task.set_loader(self._play._loader)
        # short circuit fact gathering if the entire playbook is conditional
        if self._play._included_conditional is not None:
            setup_task.when = self._play._included_conditional[:]
        setup_block.block = [setup_task]

        setup_block = setup_block.filter_tagged_tasks(all_vars)
        self._blocks.append(setup_block)

        for block in self._play.compile():
            new_block = block.filter_tagged_tasks(all_vars)
            if new_block.has_tasks():
                self._blocks.append(new_block)

        self._host_states = {}
        start_at_matched = False
        batch = inventory.get_hosts(self._play.hosts, order=self._play.order)
        self.batch_size = len(batch)
        for host in batch:
            self.set_state_for_host(host.name, HostState(blocks=self._blocks))
            # if we're looking to start at a specific task, iterate through
            # the tasks for this host until we find the specified task
            if play_context.start_at_task is not None and not start_at_done:
                while True:
                    (s, task) = self.get_next_task_for_host(host, peek=True)
                    if s.run_state == IteratingStates.COMPLETE:
                        break
                    if task.name == play_context.start_at_task or (task.name and fnmatch.fnmatch(task.name, play_context.start_at_task)) or \
                       task.get_name() == play_context.start_at_task or fnmatch.fnmatch(task.get_name(), play_context.start_at_task):
                        start_at_matched = True
                        break
                    self.set_state_for_host(host.name, s)

                # finally, reset the host's state to IteratingStates.SETUP
                if start_at_matched:
                    self._host_states[host.name].did_start_at_task = True
                    self._host_states[host.name].run_state = IteratingStates.SETUP

        if start_at_matched:
            # we have our match, so clear the start_at_task field on the
            # play context to flag that we've started at a task (and future
            # plays won't try to advance)
            play_context.start_at_task = None

        self.end_play = False
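The tagging rule above gives fact gathering the 'always' tag only when the play itself carries no tags; a hedged sketch of that decision with plain data:

def gather_facts_tags(play_tags):
    # fact gathering runs 'always' unless the play is specifically tagged,
    # so `--tags foo` on a tagged play does not drag setup along with it
    return [] if play_tags else ['always']

assert gather_facts_tags([]) == ['always']
assert gather_facts_tags(['deploy']) == []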
Example #45
    def __init__(self,
                 inventory,
                 play,
                 play_context,
                 variable_manager,
                 all_vars,
                 start_at_done=False):
        self._play = play
        self._blocks = []

        # Default options to gather
        gather_subset = C.DEFAULT_GATHER_SUBSET

        # Retrieve subset to gather
        if self._play.gather_subset is not None:
            gather_subset = self._play.gather_subset

        setup_block = Block(play=self._play)
        setup_task = Task(block=setup_block)
        setup_task.action = 'setup'
        setup_task.tags = ['always']
        setup_task.args = {
            'gather_subset': gather_subset,
        }
        setup_task.set_loader(self._play._loader)
        setup_block.block = [setup_task]

        setup_block = setup_block.filter_tagged_tasks(play_context, all_vars)
        self._blocks.append(setup_block)

        for block in self._play.compile():
            new_block = block.filter_tagged_tasks(play_context, all_vars)
            if new_block.has_tasks():
                self._blocks.append(new_block)

        self._host_states = {}
        start_at_matched = False
        for host in inventory.get_hosts(self._play.hosts):
            self._host_states[host.name] = HostState(blocks=self._blocks)
            # if the host's name is in the variable manager's fact cache, then set
            # its _gathered_facts flag to true for smart gathering tests later
            if host.name in variable_manager._fact_cache:
                host._gathered_facts = True
            # if we're looking to start at a specific task, iterate through
            # the tasks for this host until we find the specified task
            if play_context.start_at_task is not None and not start_at_done:
                while True:
                    (s, task) = self.get_next_task_for_host(host, peek=True)
                    if s.run_state == self.ITERATING_COMPLETE:
                        break
                    if task.name == play_context.start_at_task or fnmatch.fnmatch(task.name, play_context.start_at_task) or \
                       task.get_name() == play_context.start_at_task or fnmatch.fnmatch(task.get_name(), play_context.start_at_task):
                        start_at_matched = True
                        break
                    else:
                        self.get_next_task_for_host(host)

                # finally, reset the host's state to ITERATING_SETUP
                if start_at_matched:
                    self._host_states[host.name].did_start_at_task = True
                    self._host_states[host.name].run_state = self.ITERATING_SETUP

        if start_at_matched:
            # we have our match, so clear the start_at_task field on the
            # play context to flag that we've started at a task (and future
            # plays won't try to advance)
            play_context.start_at_task = None

        # Extend the play handlers list to include the handlers defined in roles
        self._play.handlers.extend(play.compile_roles_handlers())
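Iterators like the one above can short-circuit fact gathering when facts are already cached (compare the module_setup check in Example #41). Here is that smart-gathering shortcut sketched with a plain dict standing in for the fact cache:

fact_cache = {'web1': {'module_setup': True}}

class FakeHost:
    def __init__(self, name):
        self.name = name
        self._gathered_facts = False

hosts = [FakeHost('web1'), FakeHost('web2')]
for host in hosts:
    # facts already cached for this host: flag it so `gathering = smart`
    # can skip the setup task later
    if host.name in fact_cache and fact_cache[host.name].get('module_setup', False):
        host._gathered_facts = True

assert [h._gathered_facts for h in hosts] == [True, False]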
Example #46
def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
    '''
    Given a list of task datastructures (parsed from YAML),
    return a list of Task() or TaskInclude() objects.
    '''

    # we import here to prevent a circular dependency with imports
    from ansible.playbook.block import Block
    from ansible.playbook.handler import Handler
    from ansible.playbook.task import Task
    from ansible.playbook.task_include import TaskInclude
    from ansible.template import Templar

    assert isinstance(ds, list)

    task_list = []
    for task_ds in ds:
        assert isinstance(task_ds, dict)

        if 'block' in task_ds:
            t = Block.load(
                task_ds,
                play=play,
                parent_block=block,
                role=role,
                task_include=task_include,
                use_handlers=use_handlers,
                variable_manager=variable_manager,
                loader=loader,
            )
            task_list.append(t)
        else:
            if 'include' in task_ds:
                t = TaskInclude.load(task_ds, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)

                all_vars = variable_manager.get_vars(loader=loader, play=play, task=t)
                templar = Templar(loader=loader, variables=all_vars)

                # check to see if this include is static, which can be true if:
                # 1. the user set the 'static' option to true
                # 2. one of the appropriate config options was set
                # 3. the included file name contains no variables, and has no loop
                is_static = t.static or \
                            C.DEFAULT_TASK_INCLUDES_STATIC or \
                            (use_handlers and C.DEFAULT_HANDLER_INCLUDES_STATIC) or \
                            (not templar._contains_vars(t.args.get('_raw_params')) and t.loop is None)

                if is_static:
                    if t.loop is not None:
                        raise AnsibleParserError("You cannot use 'static' on an include with a loop", obj=task_ds)

                    # FIXME: all of this code is very similar (if not identical) to that in 
                    #        plugins/strategy/__init__.py, and should be unified to avoid
                    #        patches only being applied to one or the other location
                    if task_include:
                        # handle relative includes by walking up the list of parent include
                        # tasks and checking the relative result to see if it exists
                        parent_include = task_include
                        cumulative_path = None
                        while parent_include is not None:
                            parent_include_dir = templar.template(os.path.dirname(parent_include.args.get('_raw_params')))
                            if cumulative_path is None:
                                cumulative_path = parent_include_dir
                            elif not os.path.isabs(cumulative_path):
                                cumulative_path = os.path.join(parent_include_dir, cumulative_path)
                            include_target = templar.template(t.args['_raw_params'])
                            if t._role:
                                new_basedir = os.path.join(t._role._role_path, 'tasks', cumulative_path)
                                include_file = loader.path_dwim_relative(new_basedir, 'tasks', include_target)
                            else:
                                include_file = loader.path_dwim_relative(loader.get_basedir(), cumulative_path, include_target)

                            if os.path.exists(include_file):
                                break
                            else:
                                parent_include = parent_include._task_include
                    else:
                        try:
                            include_target = templar.template(t.args['_raw_params'])
                        except AnsibleUndefinedVariable as e:
                            raise AnsibleParserError(
                                      "Error when evaluating variable in include name: %s.\n\n" \
                                      "When using static includes, ensure that any variables used in their names are defined in vars/vars_files\n" \
                                      "or extra-vars passed in from the command line. Static includes cannot use variables from inventory\n" \
                                      "sources like group or host vars." % t.args['_raw_params'],
                                      obj=task_ds,
                                      suppress_extended_error=True,
                                  )
                        if t._role:
                            if use_handlers:
                                include_file = loader.path_dwim_relative(t._role._role_path, 'handlers', include_target)
                            else:
                                include_file = loader.path_dwim_relative(t._role._role_path, 'tasks', include_target)
                        else:
                            include_file = loader.path_dwim(include_target)

                    try:
                        data = loader.load_from_file(include_file)
                        if data is None:
                            return []
                        elif not isinstance(data, list):
                            raise AnsibleError("included task files must contain a list of tasks", obj=data)
                    except AnsibleFileNotFound as e:
                        if t.static or \
                           C.DEFAULT_TASK_INCLUDES_STATIC or \
                           (C.DEFAULT_HANDLER_INCLUDES_STATIC and use_handlers):
                            raise
                        display.deprecated(
                            "Included file '%s' not found, however since this include is not " \
                            "explicitly marked as 'static: yes', we will try and include it dynamically " \
                            "later. In the future, this will be an error unless 'static: no' is used " \
                            "on the include task. If you do not want missing includes to be considered " \
                            "dynamic, use 'static: yes' on the include or set the global ansible.cfg " \
                            "options to make all inclues static for tasks and/or handlers" % include_file,
                        )
                        task_list.append(t)
                        continue

                    included_blocks = load_list_of_blocks(
                        data,
                        play=play,
                        parent_block=block,
                        task_include=t,
                        role=role,
                        use_handlers=use_handlers,
                        loader=loader,
                        variable_manager=variable_manager,
                    )

                    # pop tags out of the include args, if they were specified there, and assign
                    # them to the include. If the include already had tags specified, we raise an
                    # error so that users know not to specify them both ways
                    tags = t.vars.pop('tags', [])
                    if isinstance(tags, string_types):
                        tags = tags.split(',')

                    if len(tags) > 0:
                        if len(t.tags) > 0:
                            raise AnsibleParserError(
                                "Include tasks should not specify tags in more than one way (both via args and directly on the task)." \
                                " Mixing tag specify styles is prohibited for whole import hierarchy, not only for single import statement",
                                obj=task_ds,
                                suppress_extended_error=True,
                            )
                        display.deprecated("You should not specify tags in the include parameters. All tags should be specified using the task-level option")
                    else:
                        tags = t.tags[:]

                    # now we extend the tags on each of the included blocks
                    for b in included_blocks:
                        b.tags = list(set(b.tags).union(tags))
                    # END FIXME

                    # FIXME: send callback here somehow...
                    # FIXME: handlers shouldn't need this special handling, but do
                    #        right now because they don't iterate blocks correctly
                    if use_handlers:
                        for b in included_blocks:
                            task_list.extend(b.block)
                    else:
                        task_list.extend(included_blocks)
                else:
                    task_list.append(t)
            elif use_handlers:
                t = Handler.load(task_ds, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)
                task_list.append(t)
            else:
                t = Task.load(task_ds, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)
                task_list.append(t)

    return task_list
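The is_static decision buried in the function above reads more clearly in isolation. A condensed, runnable sketch in which the boolean flags stand in for t.static, the two config defaults, and the templar/loop checks:

def include_is_static(explicit_static, task_default, handler_default,
                      use_handlers, params_contain_vars, has_loop):
    # static if explicitly requested, forced by config, or safe to resolve
    # at parse time (no variables in the filename and no loop)
    return (explicit_static
            or task_default
            or (use_handlers and handler_default)
            or (not params_contain_vars and not has_loop))

# a literal filename with no loop defaults to static
assert include_is_static(False, False, False, False,
                         params_contain_vars=False, has_loop=False)
# '{{ item }}.yml' inside a loop stays dynamic
assert not include_is_static(False, False, False, False,
                             params_contain_vars=True, has_loop=True)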
Example #47
    def test_load_implicit_block(self):
        ds = [dict(action='foo')]
        b = Block.load(ds)
        self.assertEqual(len(b.block), 1)
        self.assertIsInstance(b.block[0], Task)
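For contrast with the implicit form the test loads, an explicit block datastructure carries a 'block' key plus optional 'rescue'/'always' sections; shapes only, these dicts are not fed to a real Block.load call here:

ds_implicit = [dict(action='foo')]                 # becomes Block.block as-is
ds_explicit = dict(block=[dict(action='foo')],     # explicit block form
                   rescue=[dict(action='bar')],
                   always=[dict(action='baz')])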
Example #48
    def run(self, iterator, connection_info):
        '''
        The linear strategy is simple - get the next task and queue
        it for all hosts, then wait for the queue to drain before
        moving on to the next task
        '''

        result = True

        # iterate over each task, while there is one left to run
        work_to_do = True
        while work_to_do and not self._tqm._terminated:

            try:
                debug("getting the remaining hosts for this loop")
                self._tqm._failed_hosts = iterator.get_failed_hosts()
                hosts_left = self.get_hosts_remaining(iterator._play)
                debug("done getting the remaining hosts for this loop")
                if len(hosts_left) == 0:
                    debug("out of hosts to run on")
                    self._tqm.send_callback(
                        'v2_playbook_on_no_hosts_remaining')
                    result = False
                    break

                # queue up this task for each host in the inventory
                callback_sent = False
                work_to_do = False

                host_results = []
                host_tasks = self._get_next_task_lockstep(hosts_left, iterator)
                for (host, task) in host_tasks:
                    if not task:
                        continue

                    run_once = False
                    work_to_do = True

                    # test to see if the task across all hosts points to an action plugin which
                    # sets BYPASS_HOST_LOOP to true, or if it has run_once enabled. If so, we
                    # will only send this task to the first host in the list.

                    try:
                        action = action_loader.get(task.action,
                                                   class_only=True)
                        if task.run_once or getattr(action, 'BYPASS_HOST_LOOP',
                                                    False):
                            run_once = True
                    except KeyError:
                        # we don't care here, because the action may simply not have a
                        # corresponding action plugin
                        pass

                    debug("getting variables")
                    task_vars = self._variable_manager.get_vars(
                        loader=self._loader,
                        play=iterator._play,
                        host=host,
                        task=task)
                    debug("done getting variables")

                    # check to see if this task should be skipped, due to it being a member of a
                    # role which has already run (and whether that role allows duplicate execution)
                    if task._role and task._role.has_run():
                        # If there is no metadata, the default behavior is to not allow duplicates,
                        # if there is metadata, check to see if the allow_duplicates flag was set to true
                        if task._role._metadata is None or (task._role._metadata and not task._role._metadata.allow_duplicates):
                            debug("'%s' skipped because role has already run" %
                                  task)
                            continue

                    if task.action == 'meta':
                        # meta tasks store their args in the _raw_params field of args,
                        # since they do not use k=v pairs, so get that
                        meta_action = task.args.get('_raw_params')
                        if meta_action == 'noop':
                            # FIXME: issue a callback for the noop here?
                            continue
                        elif meta_action == 'flush_handlers':
                            self.run_handlers(iterator, connection_info)
                        else:
                            raise AnsibleError(
                                "invalid meta action requested: %s" %
                                meta_action,
                                obj=task._ds)
                    else:
                        if not callback_sent:
                            self._tqm.send_callback(
                                'v2_playbook_on_task_start',
                                task,
                                is_conditional=False)
                            callback_sent = True

                        self._blocked_hosts[host.get_name()] = True
                        self._queue_task(host, task, task_vars,
                                         connection_info)

                    results = self._process_pending_results(iterator)
                    host_results.extend(results)

                    # if we're bypassing the host loop, break out now
                    if run_once:
                        break

                debug(
                    "done queuing things up, now waiting for results queue to drain"
                )
                results = self._wait_on_pending_results(iterator)
                host_results.extend(results)

                # FIXME: this needs to be somewhere else
                class IncludedFile:
                    def __init__(self, filename, args, task):
                        self._filename = filename
                        self._args = args
                        self._task = task
                        self._hosts = []

                    def add_host(self, host):
                        if host not in self._hosts:
                            self._hosts.append(host)

                    def __eq__(self, other):
                        return other._filename == self._filename and other._args == self._args

                    def __repr__(self):
                        return "%s (%s): %s" % (self._filename, self._args,
                                                self._hosts)

                # FIXME: this should also be moved to the base class in a method
                included_files = []
                for res in host_results:
                    if res._task.action == 'include':
                        if res._task.loop:
                            include_results = res._result['results']
                        else:
                            include_results = [res._result]

                        for include_result in include_results:
                            # if the task result was skipped or failed, continue
                            if ('skipped' in include_result and include_result['skipped']) or 'failed' in include_result:
                                continue

                            original_task = iterator.get_original_task(
                                res._host, res._task)
                            if original_task and original_task._role:
                                include_file = self._loader.path_dwim_relative(
                                    original_task._role._role_path, 'tasks',
                                    include_result['include'])
                            else:
                                include_file = self._loader.path_dwim(
                                    res._task.args.get('_raw_params'))

                            include_variables = include_result.get(
                                'include_variables', dict())
                            if 'item' in include_result:
                                include_variables['item'] = include_result[
                                    'item']

                            inc_file = IncludedFile(include_file,
                                                    include_variables,
                                                    original_task)

                            try:
                                pos = included_files.index(inc_file)
                                inc_file = included_files[pos]
                            except ValueError:
                                included_files.append(inc_file)

                            inc_file.add_host(res._host)

                # FIXME: should this be moved into the iterator class? Main downside would be
                #        that accessing the TQM's callback member would be more difficult, if
                #        we do want to send callbacks from here
                if len(included_files) > 0:
                    noop_task = Task()
                    noop_task.action = 'meta'
                    noop_task.args['_raw_params'] = 'noop'
                    noop_task.set_loader(iterator._play._loader)

                    all_blocks = dict((host, []) for host in hosts_left)
                    for included_file in included_files:
                        # included hosts get the task list while those excluded get an equal-length
                        # list of noop tasks, to make sure that they continue running in lock-step
                        try:
                            new_blocks = self._load_included_file(
                                included_file)
                        except AnsibleError as e:
                            for host in included_file._hosts:
                                iterator.mark_host_failed(host)
                            # FIXME: callback here?
                            print(e)
                            continue

                        for new_block in new_blocks:
                            noop_block = Block(parent_block=task._block)
                            noop_block.block = [
                                noop_task for t in new_block.block
                            ]
                            noop_block.always = [
                                noop_task for t in new_block.always
                            ]
                            noop_block.rescue = [
                                noop_task for t in new_block.rescue
                            ]
                            for host in hosts_left:
                                if host in included_file._hosts:
                                    task_vars = self._variable_manager.get_vars(
                                        loader=self._loader,
                                        play=iterator._play,
                                        host=host,
                                        task=included_file._task)
                                    final_block = new_block.filter_tagged_tasks(
                                        connection_info, task_vars)
                                    all_blocks[host].append(final_block)
                                else:
                                    all_blocks[host].append(noop_block)

                    for host in hosts_left:
                        iterator.add_tasks(host, all_blocks[host])

                debug("results queue empty")
            except (IOError, EOFError) as e:
                debug("got IOError/EOFError in task loop: %s" % e)
                # most likely an abort, return failed
                return 1
Example #49
    def run(self, iterator, play_context):
        '''
        The linear strategy is simple - get the next task and queue
        it for all hosts, then wait for the queue to drain before
        moving on to the next task
        '''

        # iterate over each task, while there is one left to run
        result     = True
        work_to_do = True
        while work_to_do and not self._tqm._terminated:

            try:
                self._display.debug("getting the remaining hosts for this loop")
                hosts_left = [host for host in self._inventory.get_hosts(iterator._play.hosts) if host.name not in self._tqm._unreachable_hosts]
                self._display.debug("done getting the remaining hosts for this loop")

                # queue up this task for each host in the inventory
                callback_sent = False
                work_to_do = False

                host_results = []
                host_tasks = self._get_next_task_lockstep(hosts_left, iterator)

                # skip control
                skip_rest   = False
                choose_step = True

                for (host, task) in host_tasks:
                    if not task:
                        continue

                    run_once = False
                    work_to_do = True


                    # test to see if the task across all hosts points to an action plugin which
                    # sets BYPASS_HOST_LOOP to true, or if it has run_once enabled. If so, we
                    # will only send this task to the first host in the list.

                    try:
                        action = action_loader.get(task.action, class_only=True)
                        if task.run_once or getattr(action, 'BYPASS_HOST_LOOP', False):
                            run_once = True
                    except KeyError:
                        # we don't care here, because the action may simply not have a
                        # corresponding action plugin
                        pass

                    # check to see if this task should be skipped, due to it being a member of a
                    # role which has already run (and whether that role allows duplicate execution)
                    if task._role and task._role.has_run(host):
                        # If there is no metadata, the default behavior is to not allow duplicates,
                        # if there is metadata, check to see if the allow_duplicates flag was set to true
                        if task._role._metadata is None or (task._role._metadata and not task._role._metadata.allow_duplicates):
                            self._display.debug("'%s' skipped because role has already run" % task)
                            continue

                    if task.action == 'meta':
                        self._execute_meta(task, play_context, iterator)
                    else:
                        # handle step if needed, skip meta actions as they are used internally
                        if self._step and choose_step:
                            if self._take_step(task):
                                choose_step = False
                            else:
                                skip_rest = True
                                break

                        self._display.debug("getting variables")
                        task_vars = self._variable_manager.get_vars(loader=self._loader, play=iterator._play, host=host, task=task)
                        task_vars = self.add_tqm_variables(task_vars, play=iterator._play)
                        templar = Templar(loader=self._loader, variables=task_vars)
                        self._display.debug("done getting variables")

                        if not callback_sent:
                            display.debug("sending task start callback, copying the task so we can template it temporarily")
                            saved_name = task.name
                            display.debug("done copying, going to template now")
                            try:
                                task.name = text_type(templar.template(task.name, fail_on_undefined=False))
                                display.debug("done templating")
                            except Exception:
                                # just ignore any errors during task name templating,
                                # we don't care if it just shows the raw name
                                display.debug("templating failed for some reason")
                            display.debug("here goes the callback...")
                            self._tqm.send_callback('v2_playbook_on_task_start', task, is_conditional=False)
                            task.name = saved_name
                            callback_sent = True
                            display.debug("sending task start callback")

                        self._blocked_hosts[host.get_name()] = True
                        self._queue_task(host, task, task_vars, play_context)

                    results = self._process_pending_results(iterator)
                    host_results.extend(results)

                    # if we're bypassing the host loop, break out now
                    if run_once:
                        break

                # go to next host/task group
                if skip_rest:
                    continue

                self._display.debug("done queuing things up, now waiting for results queue to drain")
                results = self._wait_on_pending_results(iterator)
                host_results.extend(results)

                if not work_to_do and len(iterator.get_failed_hosts()) > 0:
                    self._display.debug("out of hosts to run on")
                    self._tqm.send_callback('v2_playbook_on_no_hosts_remaining')
                    result = False
                    break

                try:
                    included_files = IncludedFile.process_include_results(host_results, self._tqm, iterator=iterator, loader=self._loader, variable_manager=self._variable_manager)
                except AnsibleError as e:
                    return False

                if len(included_files) > 0:
                    noop_task = Task()
                    noop_task.action = 'meta'
                    noop_task.args['_raw_params'] = 'noop'
                    noop_task.set_loader(iterator._play._loader)

                    all_blocks = dict((host, []) for host in hosts_left)
                    for included_file in included_files:
                        # included hosts get the task list while those excluded get an equal-length
                        # list of noop tasks, to make sure that they continue running in lock-step
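                        # (e.g. an include that yields two tasks for the included hosts
                        # adds two 'meta: noop' entries for every other host, so all
                        # hosts advance through the iterator at the same position)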
                        try:
                            new_blocks = self._load_included_file(included_file, iterator=iterator)
                        except AnsibleError as e:
                            for host in included_file._hosts:
                                iterator.mark_host_failed(host)
                            self._display.warning(str(e))
                            continue

                        for new_block in new_blocks:
                            noop_block = Block(parent_block=task._block)
                            noop_block.block  = [noop_task for t in new_block.block]
                            noop_block.always = [noop_task for t in new_block.always]
                            noop_block.rescue = [noop_task for t in new_block.rescue]
                            for host in hosts_left:
                                if host in included_file._hosts:
                                    task_vars = self._variable_manager.get_vars(loader=self._loader, play=iterator._play, host=host, task=included_file._task)
                                    final_block = new_block.filter_tagged_tasks(play_context, task_vars)
                                    all_blocks[host].append(final_block)
                                else:
                                    all_blocks[host].append(noop_block)

                    for host in hosts_left:
                        iterator.add_tasks(host, all_blocks[host])

                self._display.debug("results queue empty")
            except (IOError, EOFError) as e:
                self._display.debug("got IOError/EOFError in task loop: %s" % e)
                # most likely an abort, return failed
                return False

        # run the base class run() method, which executes the cleanup function
        # and runs any outstanding handlers which have been triggered

        return super(StrategyModule, self).run(iterator, play_context, result)
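The block-mirroring step above is what keeps this strategy in lock-step. As a minimal, self-contained sketch of just that step (build_noop_mirror is a hypothetical helper name; the Block and Task classes and the 'meta: noop' convention come from the code above):

from ansible.playbook.block import Block
from ansible.playbook.task import Task

def build_noop_mirror(real_block, parent):
    # a single do-nothing task, reused for every padded slot
    noop_task = Task()
    noop_task.action = 'meta'
    noop_task.args['_raw_params'] = 'noop'
    # (the strategy above also calls noop_task.set_loader() with the play's loader)

    # mirror the shape of the real block: the same number of entries in each
    # section, but every entry is the shared noop task
    mirror = Block(parent_block=parent)
    mirror.block  = [noop_task for _ in real_block.block]
    mirror.rescue = [noop_task for _ in real_block.rescue]
    mirror.always = [noop_task for _ in real_block.always]
    return mirror

Hosts not named in the include then receive the mirror where included hosts receive the real block, so every per-host block list keeps the same length.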
Example #50
0
def load_list_of_tasks(ds,
                       play,
                       block=None,
                       role=None,
                       task_include=None,
                       use_handlers=False,
                       variable_manager=None,
                       loader=None):
    '''
    Given a list of task datastructures (parsed from YAML),
    return a list of Task(), Handler() or Block() objects.
    '''

    # we import here to prevent a circular import dependency
    from ansible.playbook.block import Block
    from ansible.playbook.handler import Handler
    from ansible.playbook.task import Task

    if not isinstance(ds, list):
        raise AnsibleParserError('task list has bad type: "%s". Expected "list"' %
                                 type(ds).__name__,
                                 obj=ds)

    task_list = []
    for task in ds:
        if not isinstance(task, dict):
            raise AnsibleParserError(
                'task/handler has bad type: "%s". Expected "dict"' %
                type(task).__name__,
                obj=task)

        if 'block' in task:
            t = Block.load(
                task,
                play=play,
                parent_block=block,
                role=role,
                task_include=task_include,
                use_handlers=use_handlers,
                variable_manager=variable_manager,
                loader=loader,
            )
        else:
            if use_handlers:
                t = Handler.load(task,
                                 block=block,
                                 role=role,
                                 task_include=task_include,
                                 variable_manager=variable_manager,
                                 loader=loader)
            else:
                t = Task.load(task,
                              block=block,
                              role=role,
                              task_include=task_include,
                              variable_manager=variable_manager,
                              loader=loader)

        task_list.append(t)

    return task_list
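A hedged usage sketch of the loader above: the play, variable_manager and loader objects are assumed to exist already (their construction is outside this example), and the ds literal stands in for the result of YAML-parsing a tasks file:

# 'ds' mimics a parsed tasks file: one plain task and one block
ds = [
    dict(action='ping'),
    dict(block=[dict(action='setup')]),
]

tasks = load_list_of_tasks(
    ds,
    play=play,                          # assumed: an existing Play object
    variable_manager=variable_manager,  # assumed: already constructed
    loader=loader,                      # assumed: already constructed
)
# entries carrying a 'block' key come back as Block objects, plain entries
# as Task objects (or Handler objects when use_handlers=True)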