Example #1
    def _load_tasks(self, ds, keyname):
        ''' handle task and handler include statements '''

        tasks = ds.get(keyname, [])
        results = []
        for x in tasks:
            if 'include' in x:
                task_vars = self.vars.copy()
                tokens = shlex.split(x['include'])
                if 'with_items' in x:
                    items = utils.varReplaceWithItems(self.basedir, x['with_items'], task_vars)
                else:
                    items = ['']
                for item in items:
                    mv = task_vars.copy()
                    mv['item'] = item
                    for t in tokens[1:]:
                        (k,v) = t.split("=", 1)
                        mv[k] = utils.varReplaceWithItems(self.basedir, v, mv)
                    include_file = utils.template(self.basedir, tokens[0], mv)
                    data = utils.parse_yaml_from_file(utils.path_dwim(self.basedir, include_file))
                    for y in data:
                        results.append(Task(self,y,module_vars=mv.copy()))
            elif type(x) == dict:
                task_vars = self.vars.copy()
                results.append(Task(self,x,module_vars=task_vars))
            else:
                raise Exception("unexpected task type")

        for x in results:
            if self.tags is not None:
                x.tags.extend(self.tags)

        return results
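The _load_tasks variants on this page all consume the parsed YAML of a play. As a hedged illustration (not taken from this page), the ds structure the variant above expects might look like the following; the include string is split by shlex.split and with_items drives the item loop.

# Hedged illustration, not from the original source: example input for the
# _load_tasks variant above, i.e. the parsed YAML of a play.
ds = {
    'tasks': [
        # include with key=value parameters; shlex.split() yields
        # ['tasks/user.yml', 'state=present'] and with_items drives the loop
        {'include': 'tasks/user.yml state=present',
         'with_items': ['alice', 'bob']},
        # a plain task dict is wrapped directly in Task()
        {'name': 'ping the host', 'action': 'ping'},
    ],
}
# _load_tasks(ds, 'tasks') would return one Task per include item plus one
# Task for the plain dict, each carrying its own copy of module_vars.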
Example #2
    def _poll_async_result(self, result, templar):
        '''
        Polls for the specified JID to be complete
        '''

        async_jid = result.get('ansible_job_id')
        if async_jid is None:
            return dict(failed=True, msg="No job id was returned by the async task")

        # Create a new pseudo-task to run the async_status module, and run
        # that (with a sleep for "poll" seconds between each retry) until the
        # async time limit is exceeded.

        async_task = Task().load(dict(action='async_status jid=%s' % async_jid))

        # Because this is an async task, the action handler is async. However,
        # we need the 'normal' action handler for the status check, so get it
        # now via the action_loader
        normal_handler = action_loader.get(
            'normal',
            task=async_task,
            connection=self._connection,
            connection_info=self._connection_info,
            loader=self._loader,
            templar=templar,
            shared_loader_obj=self._shared_loader_obj,
        )

        time_left = self._task.async
Example #3
    def deserialize(self, data):
        '''
        Override of the default deserialize method, to match the above overridden
        serialize method
        '''

        from ansible.playbook.task import Task

        # we don't want the full set of attributes (the task lists), as that
        # would lead to a serialize/deserialize loop
        for attr in self._get_base_attributes():
            if attr in data and attr not in ('block', 'rescue', 'always'):
                setattr(self, attr, data.get(attr))

        self._dep_chain = data.get('dep_chain', [])

        # if there was a serialized role, unpack it too
        role_data = data.get('role')
        if role_data:
            r = Role()
            r.deserialize(role_data)
            self._role = r

        # if there was a serialized task include, unpack it too
        ti_data = data.get('task_include')
        if ti_data:
            ti = Task()
            ti.deserialize(ti_data)
            self._task_include = ti
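As a hedged usage sketch (assuming the matching serialize() the docstring refers to returns a plain dict), a round trip through this deserialize might look like:

# Hedged round-trip sketch, not from the original page; it assumes Block
# exposes the serialize() counterpart mentioned in the docstring above.
from ansible.playbook.block import Block

original = Block()
data = original.serialize()   # plain dict: base attributes, dep_chain, role, task_include

restored = Block()
restored.deserialize(data)    # repopulates the block without the task lists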
Example #4
 def test_conga_basedir_required(self):
     hostvars = {}
     task_vars = dict(TASK_VARS)
     task_vars['hostvars'] = hostvars
     result = MockModule(Task()).run(task_vars)
     self.assertTrue(result.get('failed'))
     self.assertIn('required', result.get('msg'))
Example #5
    def _load_tasks(self, ds, keyname):
        ''' handle task and handler include statements '''

        tasks = ds.get(keyname, [])
        results = []
        for x in tasks:
            task_vars = self.vars.copy()
            if 'include' in x:
                tokens = shlex.split(x['include'])

                for t in tokens[1:]:
                    (k, v) = t.split("=", 1)
                    task_vars[k] = v
                include_file = tokens[0]
                data = utils.parse_yaml_from_file(
                    utils.path_dwim(self.playbook.basedir, include_file))
            elif type(x) == dict:
                data = [x]
            else:
                raise Exception("unexpected task type")
            for y in data:
                items = y.get('with_items', None)
                if items is None:
                    items = ['']
                elif isinstance(items, basestring):
                    items = utils.varLookup(items, task_vars)
                for item in items:
                    mv = task_vars.copy()
                    mv['item'] = item
                    results.append(Task(self, y, module_vars=mv))
        return results
Example #6
    def _prepare_and_create_noop_block_from(self, original_block, parent, iterator):
        self.noop_task = Task()
        self.noop_task.action = 'meta'
        self.noop_task.args['_raw_params'] = 'noop'
        self.noop_task.set_loader(iterator._play._loader)

        return self._create_noop_block_from(original_block, parent)
Example #7
    def setUp(self, mock_socket):
        # TODO: this python version validation won't be needed as long as the _time_ns call is mocked.
        if sys.version_info < OPENTELEMETRY_MINIMUM_PYTHON_VERSION:
            self.skipTest(
                "Python %s+ is needed for OpenTelemetry" %
                ",".join(map(str, OPENTELEMETRY_MINIMUM_PYTHON_VERSION)))

        mock_socket.gethostname.return_value = 'my-host'
        mock_socket.gethostbyname.return_value = '1.2.3.4'
        self.opentelemetry = OpenTelemetrySource(display=None)
        self.task_fields = {'args': {}}
        self.mock_host = Mock('MockHost')
        self.mock_host.name = 'myhost'
        self.mock_host._uuid = 'myhost_uuid'
        self.mock_task = Task()
        self.mock_task.action = 'myaction'
        self.mock_task.no_log = False
        self.mock_task._role = 'myrole'
        self.mock_task._uuid = 'myuuid'
        self.mock_task.args = {}
        self.mock_task.get_name = MagicMock(return_value='mytask')
        self.mock_task.get_path = MagicMock(return_value='/mypath')
        self.my_task = TaskData('myuuid', 'mytask', '/mypath', 'myplay',
                                'myaction', '')
        self.my_task_result = TaskResult(host=self.mock_host,
                                         task=self.mock_task,
                                         return_data={},
                                         task_fields=self.task_fields)
Example #8
    def get_next_task_for_host(self, host, peek=False):

        s = self.get_host_state(host)

        task = None
        if s.run_state == self.ITERATING_COMPLETE:
            return None
        elif s.run_state == self.ITERATING_SETUP:
            s.run_state = self.ITERATING_TASKS
            s.pending_setup = True
            if self._play.gather_facts == 'smart' and not host._gathered_facts or boolean(self._play.gather_facts):
                if not peek:
                    # mark the host as having gathered facts
                    host.set_gathered_facts(True)

                task = Task()
                task.action = 'setup'
                task.args   = {}
                task.set_loader(self._play._loader)
            else:
                s.pending_setup = False

        if not task:
            (s, task) = self._get_next_task_from_state(s, peek=peek)

        if task and task._role:
            # if we had a current role, mark that role as completed
            if s.cur_role and task._role != s.cur_role and s.cur_role._had_task_run and not peek:
                s.cur_role._completed = True
            s.cur_role = task._role

        if not peek:
            self._host_states[host.name] = s

        return (s, task)
Example #9
 def test_variable_interpolation(self):
     task_vars = dict(TASK_VARS)
     task_vars['conga_role_mapping'] = "{{ my_role }}"
     mock_module = MockModule(Task())
     mock_module.templar.set_available_variables({'my_role': 'cms'})
     facts = mock_module.get_facts(task_vars)
     self.assertEqual("cms", facts.get('conga_role'))
Example #10
 def setUp(self, mock_socket):
     if sys.version_info < ELASTIC_MINIMUM_PYTHON_VERSION:
         self.skipTest("Python %s+ is needed for Elastic" %
                       ",".join(map(str, ELASTIC_MINIMUM_PYTHON_VERSION)))
     mock_socket.gethostname.return_value = 'my-host'
     mock_socket.gethostbyname.return_value = '1.2.3.4'
     self.elastic = ElasticSource(display=None)
     self.task_fields = {'args': {}}
     self.mock_host = Mock('MockHost')
     self.mock_host.name = 'myhost'
     self.mock_host._uuid = 'myhost_uuid'
     self.mock_task = Task()
     self.mock_task.action = 'myaction'
     self.mock_task.no_log = False
     self.mock_task._role = 'myrole'
     self.mock_task._uuid = 'myuuid'
     self.mock_task.args = {}
     self.mock_task.get_name = MagicMock(return_value='mytask')
     self.mock_task.get_path = MagicMock(return_value='/mypath')
     self.my_task = TaskData('myuuid', 'mytask', '/mypath', 'myplay',
                             'myaction', '')
     self.my_task_result = TaskResult(host=self.mock_host,
                                      task=self.mock_task,
                                      return_data={},
                                      task_fields=self.task_fields)
Example #11
    def deserialize(self, data):
        '''
        Override of the default deserialize method, to match the above overridden
        serialize method
        '''

        from ansible.playbook.task import Task

        # unpack the when attribute, which is the only one we want
        self.when = data.get('when')
        self._dep_chain = data.get('dep_chain', [])

        # if there was a serialized role, unpack it too
        role_data = data.get('role')
        if role_data:
            r = Role()
            r.deserialize(role_data)
            self._role = r

        # if there was a serialized task include, unpack it too
        ti_data = data.get('task_include')
        if ti_data:
            ti = Task()
            ti.deserialize(ti_data)
            self._task_include = ti
Example #12
    def _load_tasks(self, ds, keyname):
        ''' handle task and handler include statements '''

        tasks = ds.get(keyname, [])
        results = []
        for x in tasks:
            task_vars = self.vars.copy()
            if 'include' in x:
                tokens = shlex.split(x['include'])
                for t in tokens[1:]:
                    (k, v) = t.split("=", 1)
                    task_vars[k] = utils.template(v, task_vars)
                include_file = utils.template(tokens[0], task_vars)
                data = utils.parse_yaml_from_file(
                    utils.path_dwim(self.basedir, include_file))
            elif type(x) == dict:
                data = [x]
            else:
                raise Exception("unexpected task type")

            for y in data:
                mv = task_vars.copy()
                results.append(Task(self, y, module_vars=mv))

        for x in results:
            if self.tags is not None:
                x.tags.extend(self.tags)

        return results
Example #13
    def _poll_async_result(self, result, templar, task_vars=None):
        '''
        Polls for the specified JID to be complete
        '''

        if task_vars is None:
            task_vars = self._job_vars

        async_jid = result.get('ansible_job_id')
        if async_jid is None:
            return dict(failed=True,
                        msg="No job id was returned by the async task")

        # Create a new pseudo-task to run the async_status module, and run
        # that (with a sleep for "poll" seconds between each retry) until the
        # async time limit is exceeded.

        async_task = Task().load(dict(action='async_status jid=%s' %
                                      async_jid))

        # FIXME: this is no longer the case, normal takes care of all, see if this can just be generalized
        # Because this is an async task, the action handler is async. However,
        # we need the 'normal' action handler for the status check, so get it
        # now via the action_loader
        normal_handler = self._shared_loader_obj.action_loader.get(
            'normal',
            task=async_task,
            connection=self._connection,
            play_context=self._play_context,
            loader=self._loader,
            templar=templar,
            shared_loader_obj=self._shared_loader_obj,
        )

        time_left = self._task.async
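The snippet is cut off at the last line. As a hedged, self-contained sketch of the polling loop the comments describe (not the original continuation), the idea is roughly:

# Hedged sketch of the polling idea only -- the code above is truncated and
# this is not its actual continuation. The 'finished' key and the bare
# handler.run() call are assumptions based on the comments above.
import time

def poll_until_finished(handler, total_seconds, poll_interval):
    """Re-run the async_status handler until it reports finished or time runs out."""
    time_left = total_seconds
    while time_left > 0:
        result = handler.run()
        if result.get('finished'):
            return result
        time.sleep(poll_interval)
        time_left -= poll_interval
    return dict(failed=True, msg="async task did not complete within the requested time")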
Example #14
    def __init__(self,
                 inventory,
                 play,
                 play_context,
                 variable_manager,
                 all_vars,
                 start_at_done=False):
        self._play = play
        self._blocks = []

        setup_block = Block(play=self._play)
        setup_task = Task(block=setup_block)
        setup_task.action = 'setup'
        setup_task.tags = ['always']
        setup_task.args = {}
        setup_task.set_loader(self._play._loader)
        setup_block.block = [setup_task]

        setup_block = setup_block.filter_tagged_tasks(play_context, all_vars)
        self._blocks.append(setup_block)

        for block in self._play.compile():
            new_block = block.filter_tagged_tasks(play_context, all_vars)
            if new_block.has_tasks():
                self._blocks.append(new_block)

        self._host_states = {}
        start_at_matched = False
        for host in inventory.get_hosts(self._play.hosts):
            self._host_states[host.name] = HostState(blocks=self._blocks)
            # if the host's name is in the variable manager's fact cache, then set
            # its _gathered_facts flag to true for smart gathering tests later
            if host.name in variable_manager._fact_cache:
                host._gathered_facts = True
            # if we're looking to start at a specific task, iterate through
            # the tasks for this host until we find the specified task
            if play_context.start_at_task is not None and not start_at_done:
                while True:
                    (s, task) = self.get_next_task_for_host(host, peek=True)
                    if s.run_state == self.ITERATING_COMPLETE:
                        break
                    if task.name == play_context.start_at_task or fnmatch.fnmatch(task.name, play_context.start_at_task) or \
                       task.get_name() == play_context.start_at_task or fnmatch.fnmatch(task.get_name(), play_context.start_at_task):
                        start_at_matched = True
                        break
                    else:
                        self.get_next_task_for_host(host)

                # finally, reset the host's state to ITERATING_SETUP
                self._host_states[host.name].run_state = self.ITERATING_SETUP

        if start_at_matched:
            # we have our match, so clear the start_at_task field on the
            # play context to flag that we've started at a task (and future
            # plays won't try to advance)
            play_context.start_at_task = None

        # Extend the play handlers list to include the handlers defined in roles
        self._play.handlers.extend(play.compile_roles_handlers())
Example #15
 def _create_noop_task(self):
     """Create noop task"""
     self._debug('_create_noop_task...')
     noop_task = Task()
     noop_task.action = 'meta'
     noop_task.args['_raw_params'] = 'noop'
     noop_task.set_loader(self._iterator._play._loader)
     return noop_task
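A hedged side note: other examples on this page build comparable implicit tasks from a data dict via load() instead of attribute assignment. A minimal sketch of that style (an assumption, not taken from the snippet above):

# Hedged sketch: the same noop meta task built from a data dict, mirroring the
# Task().load(...) / Block.load(...) style used elsewhere on this page.
from ansible.playbook.task import Task

noop_task = Task.load(dict(meta='noop'))
# comparable end state to the attribute-by-attribute construction above:
# noop_task.action == 'meta', noop_task.args == {'_raw_params': 'noop'}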
Example #16
 def test_conga_role_multiple_matches_use_first_and_warn(self):
     task = Task(None, MockRole("dispatcher"), None)
     with patch(
             'action_plugins.conga_facts.display.warning') as mock_warning:
         facts = MockModule(task).get_facts()
         self.assertIn('author', facts.get('conga_variants'))
         self.assertNotIn('publish', facts.get('conga_variants'))
         mock_warning.assert_called()
Example #17
    def test_v2_runner_on_failed(self):
        """
        Verify that failed results use the logger.
        """
        result = TaskResult('127.0.0.1', Task(), {'exception': 'error'})

        self.logforward.v2_runner_on_failed(result)
        self.assertEqual(1, self.logforward.log.warn.call_count)
Example #18
 def test_conga_config_path_default(self):
     task = Task(None, MockRole('db'), None)
     task_vars = dict(TASK_VARS)
     task_vars.pop("conga_target_path")
     task_vars.pop("conga_node")
     facts = MockModule(task).get_facts(task_vars)
     self.assertEqual("basedir/target/configuration/environment/hostname",
                      facts.get('conga_config_path'))
Example #19
 def test_conga_config_path_custom(self):
     task = Task(None, MockRole('db'), None)
     facts = MockModule(task).get_facts()
     self.assertEqual("basedir/target_path/environment/node",
                      facts.get('conga_config_path'))
     self.assertEqual("basedir", facts.get('conga_basedir'))
     self.assertDictEqual({"path": "/opt/db"}, facts.get('conga_config'))
     self.assertEqual([{'tenant': 'tenant1'}], facts.get('conga_tenants'))
Example #20
    def _load_tasks(self, ds, keyname):
        ''' handle task and handler include statements '''

        tasks = ds.get(keyname, [])
        results = []
        for x in tasks:
            if 'include' in x:
                task_vars = self.vars.copy()
                tokens = shlex.split(x['include'])
                items = ['']
                for k in x:
                    if not k.startswith("with_"):
                        continue
                    plugin_name = k[5:]
                    if plugin_name not in utils.plugins.lookup_loader:
                        raise errors.AnsibleError(
                            "cannot find lookup plugin named %s for usage in with_%s"
                            % (plugin_name, plugin_name))
                    terms = utils.template_ds(self.basedir, x[k], task_vars)
                    items = utils.plugins.lookup_loader.get(
                        plugin_name, basedir=self.basedir,
                        runner=None).run(terms, inject=task_vars)

                for item in items:
                    mv = task_vars.copy()
                    mv['item'] = item
                    for t in tokens[1:]:
                        (k, v) = t.split("=", 1)
                        mv[k] = utils.template_ds(self.basedir, v, mv)
                    include_file = utils.template(self.basedir, tokens[0], mv)
                    data = utils.parse_yaml_from_file(
                        utils.path_dwim(self.basedir, include_file))
                    for y in data:
                        results.append(Task(self, y, module_vars=mv.copy()))
            elif type(x) == dict:
                task_vars = self.vars.copy()
                results.append(Task(self, x, module_vars=task_vars))
            else:
                raise Exception("unexpected task type")

        for x in results:
            if self.tags is not None:
                x.tags.extend(self.tags)

        return results
Example #21
    def test_v2_runner_on_unreachable(self):
        """
        Verify that UNREACHABLE results use the logger.
        """
        result = TaskResult('127.0.0.1', Task(), {})
        result._host = Host('127.0.0.1')

        self.logforward.v2_runner_on_unreachable(result)
        self.assertEqual(1, self.logforward.log.warn.call_count)
Example #22
    def test_v2_runner_on_skipped(self):
        """
        Verify that SKIPPED results use the logger.
        """
        result = TaskResult('127.0.0.1', Task(), {})
        result._host = Host('127.0.0.1')

        self.logforward.v2_runner_on_skipped(result)
        self.assertEqual(1, self.logforward.log.warn.call_count)
Example #23
    def test_v2_runner_on_ok(self):
        """
        Verify that OK results use the logger.
        """
        result = TaskResult('127.0.0.1', Task(), {})
        result._host = Host('127.0.0.1')

        self.logforward.v2_runner_on_ok(result)
        self.assertEqual(1, self.logforward.log.info.call_count)
Example #24
    def test_v2_runner_on_failed(self):
        """
        Verify that failed results use the logger.
        """
        result = TaskResult('127.0.0.1', Task(), {'exception': 'error'})
        result._host = MagicMock()
        result._host.get_name.return_value = '127.0.0.1'

        self.logforward.v2_runner_on_failed(result)
        self.assertEqual(1, self.logforward.log.warn.call_count)
Example #25
 def test_conga_role_multiple_matches_with_variant_mapping(self):
     task = Task(None, MockRole("dispatcher"), None)
     task_vars = dict(TASK_VARS)
     task_vars['conga_variant_mapping'] = "publish"
     with patch(
             'action_plugins.conga_facts.display.warning') as mock_warning:
         facts = MockModule(task).get_facts(task_vars)
         self.assertIn('publish', facts.get('conga_variants'))
         self.assertNotIn('author', facts.get('conga_variants'))
         mock_warning.assert_not_called()
Example #26
    def compile(self):
        '''
        Compiles and returns the task list for this play, compiled from the
        roles (which are themselves compiled recursively) and/or the list of
        tasks specified in the play.
        '''

        # create a block containing a single flush handlers meta
        # task, so we can be sure to run handlers at certain points
        # of the playbook execution
        flush_block = Block.load(
            data={'meta': 'flush_handlers'},
            play=self,
            variable_manager=self._variable_manager,
            loader=self._loader
        )

        for task in flush_block.block:
            task.implicit = True

        block_list = []
        if self.force_handlers:
            noop_task = Task()
            noop_task.action = 'meta'
            noop_task.args['_raw_params'] = 'noop'
            noop_task.implicit = True
            noop_task.set_loader(self._loader)

            b = Block(play=self)
            b.block = self.pre_tasks or [noop_task]
            b.always = [flush_block]
            block_list.append(b)

            tasks = self._compile_roles() + self.tasks
            b = Block(play=self)
            b.block = tasks or [noop_task]
            b.always = [flush_block]
            block_list.append(b)

            b = Block(play=self)
            b.block = self.post_tasks or [noop_task]
            b.always = [flush_block]
            block_list.append(b)

            return block_list

        block_list.extend(self.pre_tasks)
        block_list.append(flush_block)
        block_list.extend(self._compile_roles())
        block_list.extend(self.tasks)
        block_list.append(flush_block)
        block_list.extend(self.post_tasks)
        block_list.append(flush_block)

        return block_list
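As a hedged usage sketch (the tiny play data is illustrative, not from the page), compiling a minimal play and walking the resulting block list might look like:

# Hedged usage sketch, not from the original source: the compiled block list
# interleaves the flush_handlers blocks created above with the play's tasks.
from ansible.parsing.dataloader import DataLoader
from ansible.playbook.play import Play

play = Play.load(dict(name='demo', hosts='all', tasks=[dict(action='ping')]),
                 loader=DataLoader())
for block in play.compile():
    print([task.action for task in block.block])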
Example #27
    def __init__(self):
        initial_dir = os.getcwd()
        ansible_basedir = os.path.join(
            os.environ.get("PROJECT_ENVIRONMENT_FILES_PATH"), "ansible")

        # Move to project directory
        os.chdir(os.environ.get("PROJECT_ENVIRONMENT_FILES_PATH"))

        # Load the list of inventories from config
        config = ConfigManager('/etc/ansible/ansible.cfg')
        sources = config.data.get_setting('DEFAULT_HOST_LIST').value

        loader = CustomLoader()
        loader.set_basedir(ansible_basedir)

        # load the inventory, set the basic playbook directory
        self._inventory = CustomInventoryManager(loader=loader,
                                                 sources=sources)
        var_manager = VariableManager(loader=loader, inventory=self._inventory)
        play = Play.load(dict(hosts=['all']),
                         loader=loader,
                         variable_manager=var_manager)

        # Move back to directory of origin
        os.chdir(initial_dir)

        control_host = None
        if 'control' in self._inventory.groups:
            control_group = self._inventory.groups['control']

            if len(control_group.get_hosts()) > 0:
                control_host = control_group.get_hosts()[0]

        # Hostvars
        hostvars = {}
        for host in self._inventory.get_hosts():
            hostvars[host.name] = host.vars

        # make sure we load all magic variables on top of the global variables
        self._vars = combine_vars(
            var_manager.get_vars(play=play, task=Task(), host=control_host), {
                'hostvars': hostvars,
                'env': os.environ
            })

        # create the template renderer
        self._templar = Templar(loader=loader, variables=self._vars)

        # setup some easy variables that we use a lot
        self._vars['control_ip'] = self.get_var(
            "hostvars[groups['control'][0]]['ansible_host']")
        self._vars['edge_ip'] = self.get_var(
            "hostvars[groups['edge'][0]]['ansible_host']")
        self._vars['monitor_ip'] = self.get_var(
            "hostvars[groups['monitor'][0]]['ansible_host']")
Example #28
    def get_next_task_for_host(self, host, peek=False):

        display.debug("getting the next task for host %s" % host.name)
        s = self.get_host_state(host)

        task = None
        if s.run_state == self.ITERATING_COMPLETE:
            display.debug("host %s is done iterating, returning" % host.name)
            return (None, None)
        elif s.run_state == self.ITERATING_SETUP:
            s.run_state = self.ITERATING_TASKS
            s.pending_setup = True

            # Gather facts if the default is 'smart' and we have not yet
            # done it for this host; or if 'explicit' and the play sets
            # gather_facts to True; or if 'implicit' and the play does
            # NOT explicitly set gather_facts to False.

            gathering = C.DEFAULT_GATHERING
            implied = self._play.gather_facts is None or boolean(
                self._play.gather_facts)

            if (gathering == 'implicit' and implied) or \
               (gathering == 'explicit' and boolean(self._play.gather_facts)) or \
               (gathering == 'smart' and implied and not host._gathered_facts):
                if not peek:
                    # mark the host as having gathered facts
                    host.set_gathered_facts(True)

                task = Task()
                task.action = 'setup'
                task.args = {}
                task.set_loader(self._play._loader)
            else:
                s.pending_setup = False

        if not task:
            (s, task) = self._get_next_task_from_state(s, peek=peek)

        if task and task._role:
            # if we had a current role, mark that role as completed
            if s.cur_role and task._role != s.cur_role and host.name in s.cur_role._had_task_run and not peek:
                s.cur_role._completed[host.name] = True
            s.cur_role = task._role

        if not peek:
            self._host_states[host.name] = s

        display.debug("done getting next task for host %s" % host.name)
        display.debug(" ^ task is: %s" % task)
        display.debug(" ^ state is: %s" % s)
        return (s, task)
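A hedged helper sketch (illustrative, not from the page) showing how the (state, task) pairs returned above are typically consumed until the iterator reports completion:

# Hedged sketch, not from the original source: drain the per-host iterator
# shown above, relying only on its (state, task) return shape.
def drain_tasks(iterator, host):
    tasks = []
    while True:
        (state, task) = iterator.get_next_task_for_host(host)
        if task is None:          # ITERATING_COMPLETE yields (None, None)
            break
        tasks.append(task)
    return tasks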
Example #29
    def _load_tasks(self, tasks, vars={}, default_vars={}, sudo_vars={}, additional_conditions=[], original_file=None):
        ''' handle task and handler include statements '''

        results = []
        if tasks is None:
            # support empty handler files, and the like.
            tasks = []

        for x in tasks:
            if not isinstance(x, dict):
                raise errors.AnsibleError("expecting dict; got: %s" % x)

            # evaluate sudo vars for current and child tasks 
            included_sudo_vars = {}
            for k in ["sudo", "sudo_user"]:
                if k in x:
                    included_sudo_vars[k] = x[k]
                elif k in sudo_vars:
                    included_sudo_vars[k] = sudo_vars[k]
                    x[k] = sudo_vars[k]

            if 'meta' in x:
                if x['meta'] == 'flush_handlers':
                    results.append(Task(self,x))
                    continue

            task_vars = self.vars.copy()
            task_vars.update(vars)
            if original_file:
                task_vars['_original_file'] = original_file

            if 'include' in x:
                tokens = shlex.split(str(x['include']))
                items = ['']
                included_additional_conditions = list(additional_conditions)
                include_vars = {}
                for k in x:
                    if k.startswith("with_"):
                        plugin_name = k[5:]
                        if plugin_name not in utils.plugins.lookup_loader:
                            raise errors.AnsibleError("cannot find lookup plugin named %s for usage in with_%s" % (plugin_name, plugin_name))
                        terms = template(self.basedir, x[k], task_vars)
                        items = utils.plugins.lookup_loader.get(plugin_name, basedir=self.basedir, runner=None).run(terms, inject=task_vars)
                    elif k.startswith("when_"):
                        included_additional_conditions.insert(0, utils.compile_when_to_only_if("%s %s" % (k[5:], x[k])))
                    elif k == 'when':
                        included_additional_conditions.insert(0, utils.compile_when_to_only_if("jinja2_compare %s" % x[k]))
                    elif k in ("include", "vars", "default_vars", "only_if", "sudo", "sudo_user"):
                        continue
                    else:
                        include_vars[k] = x[k]
Example #30
    def compile(self, play, dep_chain=None):
        '''
        Returns the task list for this role, which is created by first
        recursively compiling the tasks for all direct dependencies, and
        then adding on the tasks for this role.

        The role compile() also remembers and saves the dependency chain
        with each task, so tasks know by which route they were found, and
        can correctly take their parent's tags/conditionals into account.
        '''
        from ansible.playbook.block import Block
        from ansible.playbook.task import Task

        block_list = []

        # update the dependency chain here
        if dep_chain is None:
            dep_chain = []
        new_dep_chain = dep_chain + [self]

        deps = self.get_direct_dependencies()
        for dep in deps:
            dep_blocks = dep.compile(play=play, dep_chain=new_dep_chain)
            block_list.extend(dep_blocks)

        for task_block in self._task_blocks:
            new_task_block = task_block.copy()
            new_task_block._dep_chain = new_dep_chain
            new_task_block._play = play
            block_list.append(new_task_block)

        eor_block = Block(play=play)
        eor_block._loader = self._loader
        eor_block._role = self
        eor_block._variable_manager = self._variable_manager
        eor_block.run_once = False

        eor_task = Task(block=eor_block)
        eor_task._role = self
        eor_task.action = 'meta'
        eor_task.args = {'_raw_params': 'role_complete'}
        eor_task.implicit = True
        eor_task.tags = ['always']
        eor_task.when = True

        eor_block.block = [eor_task]
        block_list.append(eor_block)

        return block_list