Example #1
0
class IncludeRole(TaskInclude):

    """
    A Role include is derived from a regular role to handle the special
    circumstances related to the `- include_role: ...`
    """

    BASE = ('name', 'role')  # type: t.Tuple[str, ...]  # directly assigned
    FROM_ARGS = ('tasks_from', 'vars_from', 'defaults_from', 'handlers_from')  # type: t.Tuple[str, ...]  # used to populate from dict in role
    OTHER_ARGS = ('apply', 'public', 'allow_duplicates', 'rolespec_validate')  # type: t.Tuple[str, ...]  # assigned to matching property
    VALID_ARGS = tuple(frozenset(BASE + FROM_ARGS + OTHER_ARGS))  # all valid args

    # =================================================================================
    # ATTRIBUTES

    # private as this is a 'module options' vs a task property
    _allow_duplicates = FieldAttribute(isa='bool', default=True, private=True)
    _public = FieldAttribute(isa='bool', default=False, private=True)
    _rolespec_validate = FieldAttribute(isa='bool', default=True)

    def __init__(self, block=None, role=None, task_include=None):

        super(IncludeRole, self).__init__(block=block, role=role, task_include=task_include)

        # maps 'tasks'/'vars'/'defaults'/'handlers' -> file basename; populated by load()
        self._from_files = {}
        self._parent_role = role
        self._role_name = None
        # filled in by get_block_list() once the role has been located on disk
        self._role_path = None

    def get_name(self):
        ''' return the name of the task '''
        return self.name or "%s : %s" % (self.action, self._role_name)

    def get_block_list(self, play=None, variable_manager=None, loader=None):
        '''
        Load and compile the included role, returning its (blocks, handlers).
        Also appends the role's handlers to the play's handler list as a side
        effect, and appends the role itself to play.roles when the include is
        static or marked public.
        '''

        # only need play passed in when dynamic
        if play is None:
            myplay = self._parent._play
        else:
            myplay = play

        ri = RoleInclude.load(self._role_name, play=myplay, variable_manager=variable_manager, loader=loader, collection_list=self.collections)
        ri.vars.update(self.vars)

        if variable_manager is not None:
            available_variables = variable_manager.get_vars(play=myplay, task=self)
        else:
            available_variables = {}
        templar = Templar(loader=loader, variables=available_variables)
        # the *_from options may themselves contain variables, so template them now
        from_files = templar.template(self._from_files)

        # build role
        actual_role = Role.load(ri, myplay, parent_role=self._parent_role, from_files=from_files,
                                from_include=True, validate=self.rolespec_validate)
        actual_role._metadata.allow_duplicates = self.allow_duplicates

        if self.statically_loaded or self.public:
            myplay.roles.append(actual_role)

        # save this for later use
        self._role_path = actual_role._role_path

        # compile role with parent roles as dependencies to ensure they inherit
        # variables
        if not self._parent_role:
            dep_chain = []
        else:
            dep_chain = list(self._parent_role._parents)
            dep_chain.append(self._parent_role)

        p_block = self.build_parent_block()

        # collections value is not inherited; override with the value we calculated during role setup
        p_block.collections = actual_role.collections

        blocks = actual_role.compile(play=myplay, dep_chain=dep_chain)
        for b in blocks:
            b._parent = p_block
            # HACK: parent inheritance doesn't seem to have a way to handle this intermediate override until squashed/finalized
            b.collections = actual_role.collections

        # updated available handlers in play
        handlers = actual_role.get_handler_blocks(play=myplay)
        for h in handlers:
            h._parent = p_block
        myplay.handlers = myplay.handlers + handlers
        return blocks, handlers

    @staticmethod
    def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None):
        '''
        Parse an include_role task datastructure into an IncludeRole object,
        validating its options against VALID_ARGS.

        :raises AnsibleParserError: on a missing role name, unknown options,
            or malformed *_from / apply values.
        '''

        ir = IncludeRole(block, role, task_include=task_include).load_data(data, variable_manager=variable_manager, loader=loader)

        # Validate options
        my_arg_names = frozenset(ir.args.keys())

        # name is needed, or use role as alias
        ir._role_name = ir.args.get('name', ir.args.get('role'))
        if ir._role_name is None:
            raise AnsibleParserError("'name' is a required field for %s." % ir.action, obj=data)

        # 'public' only makes sense for the dynamic include_role action
        if 'public' in ir.args and ir.action not in C._ACTION_INCLUDE_ROLE:
            raise AnsibleParserError('Invalid options for %s: public' % ir.action, obj=data)

        # validate bad args, otherwise we silently ignore
        bad_opts = my_arg_names.difference(IncludeRole.VALID_ARGS)
        if bad_opts:
            raise AnsibleParserError('Invalid options for %s: %s' % (ir.action, ','.join(list(bad_opts))), obj=data)

        # build options for role includes
        for key in my_arg_names.intersection(IncludeRole.FROM_ARGS):
            # 'tasks_from' -> '_from_files['tasks']', etc.
            from_key = key.replace('_from', '')
            args_value = ir.args.get(key)
            if not isinstance(args_value, string_types):
                raise AnsibleParserError('Expected a string for %s but got %s instead' % (key, type(args_value)))
            ir._from_files[from_key] = basename(args_value)

        apply_attrs = ir.args.get('apply', {})
        if apply_attrs and ir.action not in C._ACTION_INCLUDE_ROLE:
            raise AnsibleParserError('Invalid options for %s: apply' % ir.action, obj=data)
        elif not isinstance(apply_attrs, dict):
            raise AnsibleParserError('Expected a dict for apply but got %s instead' % type(apply_attrs), obj=data)

        # manual list as otherwise the options would set other task parameters we don't want.
        for option in my_arg_names.intersection(IncludeRole.OTHER_ARGS):
            setattr(ir, option, ir.args.get(option))

        return ir

    def copy(self, exclude_parent=False, exclude_tasks=False):
        ''' Return a copy, also duplicating the include-role specific state. '''

        new_me = super(IncludeRole, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
        new_me.statically_loaded = self.statically_loaded
        new_me._from_files = self._from_files.copy()
        new_me._parent_role = self._parent_role
        new_me._role_name = self._role_name
        new_me._role_path = self._role_path

        return new_me

    def get_include_params(self):
        '''
        Extend the base include params with the parent role's params and the
        ansible_parent_role_names/_paths magic variables (most recent first).
        '''
        v = super(IncludeRole, self).get_include_params()
        if self._parent_role:
            v.update(self._parent_role.get_role_params())
            v.setdefault('ansible_parent_role_names', []).insert(0, self._parent_role.get_name())
            v.setdefault('ansible_parent_role_paths', []).insert(0, self._parent_role._role_path)
        return v
Example #2
0
class PlayContext(Base):

    '''
    This class is used to consolidate the connection information for
    hosts in a play and child tasks, where the task may override some
    connection/authentication information.
    '''

    # base
    _module_compression = FieldAttribute(isa='string', default=C.DEFAULT_MODULE_COMPRESSION)
    _shell = FieldAttribute(isa='string')
    _executable = FieldAttribute(isa='string', default=C.DEFAULT_EXECUTABLE)

    # connection fields, some are inherited from Base:
    # (connection, port, remote_user, environment, no_log)
    _remote_addr = FieldAttribute(isa='string')
    _password = FieldAttribute(isa='string')
    _timeout = FieldAttribute(isa='int', default=C.DEFAULT_TIMEOUT)
    _connection_user = FieldAttribute(isa='string')
    _private_key_file = FieldAttribute(isa='string', default=C.DEFAULT_PRIVATE_KEY_FILE)
    _pipelining = FieldAttribute(isa='bool', default=C.ANSIBLE_PIPELINING)

    # networking modules
    _network_os = FieldAttribute(isa='string')

    # docker FIXME: remove these
    _docker_extra_args = FieldAttribute(isa='string')

    # ssh # FIXME: remove these
    _ssh_executable = FieldAttribute(isa='string', default=C.ANSIBLE_SSH_EXECUTABLE)
    _ssh_args = FieldAttribute(isa='string', default=C.ANSIBLE_SSH_ARGS)
    _ssh_common_args = FieldAttribute(isa='string')
    _sftp_extra_args = FieldAttribute(isa='string')
    _scp_extra_args = FieldAttribute(isa='string')
    _ssh_extra_args = FieldAttribute(isa='string')
    _ssh_transfer_method = FieldAttribute(isa='string', default=C.DEFAULT_SSH_TRANSFER_METHOD)

    # ???
    _connection_lockfd = FieldAttribute(isa='int')

    # privilege escalation fields
    _become = FieldAttribute(isa='bool')
    _become_method = FieldAttribute(isa='string')
    _become_user = FieldAttribute(isa='string')
    _become_pass = FieldAttribute(isa='string')
    _become_exe = FieldAttribute(isa='string', default=C.DEFAULT_BECOME_EXE)
    _become_flags = FieldAttribute(isa='string', default=C.DEFAULT_BECOME_FLAGS)
    _prompt = FieldAttribute(isa='string')

    # general flags
    _verbosity = FieldAttribute(isa='int', default=0)
    _only_tags = FieldAttribute(isa='set', default=set)
    _skip_tags = FieldAttribute(isa='set', default=set)

    _start_at_task = FieldAttribute(isa='string')
    _step = FieldAttribute(isa='bool', default=False)

    # "PlayContext.force_handlers should not be used, the calling code should be using play itself instead"
    _force_handlers = FieldAttribute(isa='bool', default=False)

    def __init__(self, play=None, passwords=None, connection_lockfd=None):
        # Note: play is really not optional.  The only time it could be omitted is when we create
        # a PlayContext just so we can invoke its deserialize method to load it from a serialized
        # data source.

        super(PlayContext, self).__init__()

        if passwords is None:
            passwords = {}

        self.password = passwords.get('conn_pass', '')
        self.become_pass = passwords.get('become_pass', '')

        self._become_plugin = None

        self.prompt = ''
        self.success_key = ''

        # a file descriptor to be used during locking operations
        self.connection_lockfd = connection_lockfd

        # set options before play to allow play to override them
        if context.CLIARGS:
            self.set_attributes_from_cli()

        if play:
            self.set_attributes_from_play(play)

    def set_attributes_from_plugin(self, plugin):
        # generic derived from connection plugin, temporary for backwards compat, in the end we should not set play_context properties

        # get options for plugins
        options = C.config.get_configuration_definitions(get_plugin_class(plugin), plugin._load_name)
        for option in options:
            if option:
                # each config definition maps to a play_context attribute via its 'name' field
                flag = options[option].get('name')
                if flag:
                    setattr(self, flag, self.connection.get_option(flag))

    def set_attributes_from_play(self, play):
        ''' Pull the (deprecated here) force_handlers setting from the play. '''
        self.force_handlers = play.force_handlers

    def set_attributes_from_cli(self):
        '''
        Configures this connection information instance with data from
        options specified by the user on the command line. These have a
        lower precedence than those set on the play or host.
        '''
        if context.CLIARGS.get('timeout', False):
            self.timeout = int(context.CLIARGS['timeout'])

        # From the command line.  These should probably be used directly by plugins instead
        # For now, they are likely to be moved to FieldAttribute defaults
        self.private_key_file = context.CLIARGS.get('private_key_file')  # Else default
        self.verbosity = context.CLIARGS.get('verbosity')  # Else default
        self.ssh_common_args = context.CLIARGS.get('ssh_common_args')  # Else default
        self.ssh_extra_args = context.CLIARGS.get('ssh_extra_args')  # Else default
        self.sftp_extra_args = context.CLIARGS.get('sftp_extra_args')  # Else default
        self.scp_extra_args = context.CLIARGS.get('scp_extra_args')  # Else default

        # Not every cli that uses PlayContext has these command line args so have a default
        self.start_at_task = context.CLIARGS.get('start_at_task', None)  # Else default

    def set_task_and_variable_override(self, task, variables, templar):
        '''
        Sets attributes from the task if they are set, which will override
        those from the play.

        :arg task: the task object with the parameters that were set on it
        :arg variables: variables from inventory
        :arg templar: templar instance if templating variables is needed
        :returns: a new PlayContext copy with the overrides applied; self is
            not mutated.
        '''

        new_info = self.copy()

        # loop through a subset of attributes on the task object and set
        # connection fields based on their values
        for attr in TASK_ATTRIBUTE_OVERRIDES:
            if hasattr(task, attr):
                attr_val = getattr(task, attr)
                if attr_val is not None:
                    setattr(new_info, attr, attr_val)

        # next, use the MAGIC_VARIABLE_MAPPING dictionary to update this
        # connection info object with 'magic' variables from the variable list.
        # If the value 'ansible_delegated_vars' is in the variables, it means
        # we have a delegated-to host, so we check there first before looking
        # at the variables in general
        if task.delegate_to is not None:
            # In the case of a loop, the delegated_to host may have been
            # templated based on the loop variable, so we try and locate
            # the host name in the delegated variable dictionary here
            delegated_host_name = templar.template(task.delegate_to)
            delegated_vars = variables.get('ansible_delegated_vars', dict()).get(delegated_host_name, dict())

            delegated_transport = C.DEFAULT_TRANSPORT
            for transport_var in C.MAGIC_VARIABLE_MAPPING.get('connection'):
                if transport_var in delegated_vars:
                    delegated_transport = delegated_vars[transport_var]
                    break

            # make sure this delegated_to host has something set for its remote
            # address, otherwise we default to connecting to it by name. This
            # may happen when users put an IP entry into their inventory, or if
            # they rely on DNS for a non-inventory hostname
            for address_var in ('ansible_%s_host' % delegated_transport,) + C.MAGIC_VARIABLE_MAPPING.get('remote_addr'):
                if address_var in delegated_vars:
                    break
            else:
                display.debug("no remote address found for delegated host %s\nusing its name, so success depends on DNS resolution" % delegated_host_name)
                delegated_vars['ansible_host'] = delegated_host_name

            # reset the port back to the default if none was specified, to prevent
            # the delegated host from inheriting the original host's setting
            for port_var in ('ansible_%s_port' % delegated_transport,) + C.MAGIC_VARIABLE_MAPPING.get('port'):
                if port_var in delegated_vars:
                    break
            else:
                if delegated_transport == 'winrm':
                    delegated_vars['ansible_port'] = 5986
                else:
                    delegated_vars['ansible_port'] = C.DEFAULT_REMOTE_PORT

            # and likewise for the remote user
            for user_var in ('ansible_%s_user' % delegated_transport,) + C.MAGIC_VARIABLE_MAPPING.get('remote_user'):
                if user_var in delegated_vars and delegated_vars[user_var]:
                    break
            else:
                delegated_vars['ansible_user'] = task.remote_user or self.remote_user
        else:
            delegated_vars = dict()

            # setup shell
            for exe_var in C.MAGIC_VARIABLE_MAPPING.get('executable'):
                if exe_var in variables:
                    setattr(new_info, 'executable', variables.get(exe_var))

        attrs_considered = []
        for (attr, variable_names) in iteritems(C.MAGIC_VARIABLE_MAPPING):
            for variable_name in variable_names:
                # first matching variable name wins for each attribute
                if attr in attrs_considered:
                    continue
                # if delegation task ONLY use delegated host vars, avoid delegated FOR host vars
                if task.delegate_to is not None:
                    if isinstance(delegated_vars, dict) and variable_name in delegated_vars:
                        setattr(new_info, attr, delegated_vars[variable_name])
                        attrs_considered.append(attr)
                elif variable_name in variables:
                    setattr(new_info, attr, variables[variable_name])
                    attrs_considered.append(attr)
                # no else, as no other vars should be considered

        # become legacy updates -- from inventory file (inventory overrides
        # commandline)
        # NOTE(review): this loop only breaks on a match and sets nothing;
        # presumably the actual assignment happens elsewhere or was removed — confirm
        for become_pass_name in C.MAGIC_VARIABLE_MAPPING.get('become_pass'):
            if become_pass_name in variables:
                break

        # make sure we get port defaults if needed
        if new_info.port is None and C.DEFAULT_REMOTE_PORT is not None:
            new_info.port = int(C.DEFAULT_REMOTE_PORT)

        # special overrides for the connection setting
        if len(delegated_vars) > 0:
            # in the event that we were using local before make sure to reset the
            # connection type to the default transport for the delegated-to host,
            # if not otherwise specified
            for connection_type in C.MAGIC_VARIABLE_MAPPING.get('connection'):
                if connection_type in delegated_vars:
                    break
            else:
                remote_addr_local = new_info.remote_addr in C.LOCALHOST
                inv_hostname_local = delegated_vars.get('inventory_hostname') in C.LOCALHOST
                if remote_addr_local and inv_hostname_local:
                    setattr(new_info, 'connection', 'local')
                elif getattr(new_info, 'connection', None) == 'local' and (not remote_addr_local or not inv_hostname_local):
                    setattr(new_info, 'connection', C.DEFAULT_TRANSPORT)

        # if the final connection type is local, reset the remote_user value to that of the currently logged in user
        # this ensures any become settings are obeyed correctly
        # we store original in 'connection_user' for use of network/other modules that fallback to it as login user
        # connection_user to be deprecated once connection=local is removed for
        # network modules
        if new_info.connection == 'local':
            if not new_info.connection_user:
                new_info.connection_user = new_info.remote_user
            new_info.remote_user = pwd.getpwuid(os.getuid()).pw_name

        # set no_log to default if it was not previously set
        if new_info.no_log is None:
            new_info.no_log = C.DEFAULT_NO_LOG

        if task.check_mode is not None:
            new_info.check_mode = task.check_mode

        if task.diff is not None:
            new_info.diff = task.diff

        return new_info

    def set_become_plugin(self, plugin):
        ''' Store the become plugin instance for later use by make_become_cmd(). '''
        self._become_plugin = plugin

    def make_become_cmd(self, cmd, executable=None):
        """ helper function to create privilege escalation commands """
        display.deprecated(
            "PlayContext.make_become_cmd should not be used, the calling code should be using become plugins instead",
            version="ansible.builtin:2.12"
        )

        # nothing to wrap if there is no command or become is disabled
        if not cmd or not self.become:
            return cmd

        become_method = self.become_method

        # load/call become plugins here
        plugin = self._become_plugin

        if plugin:
            options = {
                'become_exe': self.become_exe or become_method,
                'become_flags': self.become_flags or '',
                'become_user': self.become_user,
                'become_pass': self.become_pass
            }
            plugin.set_options(direct=options)

            if not executable:
                executable = self.executable

            shell = get_shell_plugin(executable=executable)
            cmd = plugin.build_become_command(cmd, shell)
            # for backwards compat:
            if self.become_pass:
                self.prompt = plugin.prompt
        else:
            raise AnsibleError("Privilege escalation method not found: %s" % become_method)

        return cmd

    def update_vars(self, variables):
        '''
        Adds 'magic' variables relating to connections to the variable dictionary provided.
        In case users need to access from the play, this is a legacy from runner.
        '''

        for prop, var_list in C.MAGIC_VARIABLE_MAPPING.items():
            try:
                # become-related values are intentionally never exported
                if 'become' in prop:
                    continue

                var_val = getattr(self, prop)
                for var_opt in var_list:
                    # only fill in variables the user has not already set
                    if var_opt not in variables and var_val is not None:
                        variables[var_opt] = var_val
            except AttributeError:
                # not all properties in the mapping exist on this object
                continue

    def _get_attr_connection(self):
        ''' connections are special, this takes care of responding correctly '''
        conn_type = None
        if self._attributes['connection'] == 'smart':
            conn_type = 'ssh'
            # see if SSH can support ControlPersist if not use paramiko
            if not check_for_controlpersist(self.ssh_executable) and paramiko is not None:
                conn_type = "paramiko"

        # if someone did `connection: persistent`, default it to using a persistent paramiko connection to avoid problems
        elif self._attributes['connection'] == 'persistent' and paramiko is not None:
            conn_type = 'paramiko'

        if conn_type:
            self.connection = conn_type

        return self._attributes['connection']
Example #3
0
class RoleDefinition(Base, Become, Conditional, Taggable):

    """
    Parses a single role entry from a play (a bare string or a dict) into a
    normalized datastructure, resolving the role name, the on-disk role path,
    and any extra role parameters.
    """

    _role = FieldAttribute(isa='string')

    def __init__(self,
                 play=None,
                 role_basedir=None,
                 variable_manager=None,
                 loader=None):
        """
        :arg play: the play containing this role definition (used when templating)
        :arg role_basedir: optional extra directory searched for dependent roles
        :arg variable_manager: source of variables for templating the role name/path
        :arg loader: data loader used for filesystem existence checks
        """
        self._play = play
        self._variable_manager = variable_manager
        self._loader = loader

        self._role_path = None
        self._role_basedir = role_basedir
        self._role_params = dict()
        super(RoleDefinition, self).__init__()

    #def __repr__(self):
    #    return 'ROLEDEF: ' + self._attributes.get('role', '<no name set>')

    @staticmethod
    def load(data, variable_manager=None, loader=None):
        # RoleDefinition objects are created by subclasses/helpers, never loaded directly
        raise AnsibleError("not implemented")

    def preprocess_data(self, ds):
        """
        Validate and normalize a raw role definition, splitting role params
        from role attributes and resolving the role name and path.

        :arg ds: the raw datastructure (dict, string, or AnsibleBaseYAMLObject)
        :returns: a new AnsibleMapping with the cleaned-up definition
        :raises AnsibleError: if ds is not an accepted type
        """
        # role names that are simply numbers can be parsed by PyYAML
        # as integers even when quoted, so turn it into a string type
        if isinstance(ds, int):
            ds = "%s" % ds

        # validate explicitly rather than with `assert`, which is silently
        # stripped when python is run with -O/-OO and would hide bad input
        if not isinstance(ds, (dict, string_types, AnsibleBaseYAMLObject)):
            raise AnsibleError("role definitions must be a dictionary, string, or YAML object, got %s" % type(ds), obj=ds)

        if isinstance(ds, dict):
            ds = super(RoleDefinition, self).preprocess_data(ds)

        # save the original ds for use later
        self._ds = ds

        # we create a new data structure here, using the same
        # object used internally by the YAML parsing code so we
        # can preserve file:line:column information if it exists
        new_ds = AnsibleMapping()
        if isinstance(ds, AnsibleBaseYAMLObject):
            new_ds.ansible_pos = ds.ansible_pos

        # first we pull the role name out of the data structure,
        # and then use that to determine the role path (which may
        # result in a new role name, if it was a file path)
        role_name = self._load_role_name(ds)
        (role_name, role_path) = self._load_role_path(role_name)

        # next, we split the role params out from the valid role
        # attributes and update the new datastructure with that
        # result and the role name
        if isinstance(ds, dict):
            (new_role_def, role_params) = self._split_role_params(ds)
            new_ds.update(new_role_def)
            self._role_params = role_params

        # set the role name in the new ds
        new_ds['role'] = role_name

        # we store the role path internally
        self._role_path = role_path

        # and return the cleaned-up data structure
        return new_ds

    def _load_role_name(self, ds):
        '''
        Returns the role name (either the role: or name: field) from
        the role definition, or (when the role definition is a simple
        string), just that string
        '''

        if isinstance(ds, string_types):
            return ds

        role_name = ds.get('role', ds.get('name'))
        if not role_name or not isinstance(role_name, string_types):
            raise AnsibleError('role definitions must contain a role name',
                               obj=ds)

        # if we have the required datastructures, and if the role_name
        # contains a variable, try and template it now
        if self._variable_manager:
            all_vars = self._variable_manager.get_vars(loader=self._loader,
                                                       play=self._play)
            templar = Templar(loader=self._loader, variables=all_vars)
            if templar._contains_vars(role_name):
                role_name = templar.template(role_name)

        return role_name

    def _load_role_path(self, role_name):
        '''
        the 'role', as specified in the ds (or as a bare string), can either
        be a simple name or a full path. If it is a full path, we use the
        basename as the role name, otherwise we take the name as-given and
        append it to the default role path
        '''

        role_path = unfrackpath(role_name)

        if self._loader.path_exists(role_path):
            # the name was a path to an existing role directory
            role_name = os.path.basename(role_name)
            return (role_name, role_path)
        else:
            # we always start the search for roles in the base directory of the playbook
            role_search_paths = [
                os.path.join(self._loader.get_basedir(), u'roles'), u'./roles',
                self._loader.get_basedir(), u'./'
            ]

            # also search in the configured roles path
            if C.DEFAULT_ROLES_PATH:
                configured_paths = C.DEFAULT_ROLES_PATH.split(os.pathsep)
                role_search_paths.extend(configured_paths)

            # finally, append the roles basedir, if it was set, so we can
            # search relative to that directory for dependent roles
            if self._role_basedir:
                role_search_paths.append(self._role_basedir)

            # create a templar class to template the dependency names, in
            # case they contain variables
            if self._variable_manager is not None:
                all_vars = self._variable_manager.get_vars(loader=self._loader,
                                                           play=self._play)
            else:
                all_vars = dict()

            templar = Templar(loader=self._loader, variables=all_vars)
            role_name = templar.template(role_name)

            # now iterate through the possible paths and return the first one we find
            for path in role_search_paths:
                path = templar.template(path)
                role_path = unfrackpath(os.path.join(path, role_name))
                if self._loader.path_exists(role_path):
                    return (role_name, role_path)

        raise AnsibleError("the role '%s' was not found in %s" %
                           (role_name, ":".join(role_search_paths)),
                           obj=self._ds)

    def _split_role_params(self, ds):
        '''
        Splits any random role params off from the role spec and store
        them in a dictionary of params for parsing later
        '''

        role_def = dict()
        role_params = dict()
        base_attribute_names = frozenset(self._get_base_attributes().keys())
        for (key, value) in iteritems(ds):
            # use the list of FieldAttribute values to determine what is and is not
            # an extra parameter for this role (or sub-class of this role)
            if key not in base_attribute_names:
                # this key does not match a field attribute, so it must be a role param
                role_params[key] = value
            else:
                # this is a field attribute, so copy it over directly
                role_def[key] = value

        return (role_def, role_params)

    def get_role_params(self):
        ''' Return a copy of the extra parameters split off by preprocess_data(). '''
        return self._role_params.copy()

    def get_role_path(self):
        ''' Return the resolved on-disk path of the role, if resolved. '''
        return self._role_path
Example #4
0
class Block(Base, Conditional, Taggable):

    """
    Represents a block of tasks within a play, with optional rescue/always
    sections, and the role/task-include context it belongs to.
    """

    _block = FieldAttribute(isa='list')
    _rescue = FieldAttribute(isa='list')
    _always = FieldAttribute(isa='list')

    # for future consideration? this would be functionally
    # similar to the 'else' clause for exceptions
    #_otherwise = FieldAttribute(isa='list')

    def __init__(self,
                 parent_block=None,
                 role=None,
                 task_include=None,
                 use_handlers=False):
        """
        :arg parent_block: the enclosing Block, if any
        :arg role: the role this block belongs to, if any
        :arg task_include: the task include that produced this block, if any
        :arg use_handlers: whether contained tasks are loaded as handlers
        """
        self._parent_block = parent_block
        self._role = role
        self._task_include = task_include
        self._use_handlers = use_handlers

        super(Block, self).__init__()

    def get_vars(self):
        '''
        Blocks do not store variables directly, however they may be a member
        of a role or task include which does, so return those if present.
        '''

        all_vars = dict()

        if self._role:
            all_vars.update(self._role.get_vars())
        if self._task_include:
            all_vars.update(self._task_include.get_vars())

        return all_vars

    @staticmethod
    def load(data,
             parent_block=None,
             role=None,
             task_include=None,
             use_handlers=False,
             variable_manager=None,
             loader=None):
        ''' Construct a Block and populate it from the given datastructure. '''
        b = Block(parent_block=parent_block,
                  role=role,
                  task_include=task_include,
                  use_handlers=use_handlers)
        return b.load_data(data,
                           variable_manager=variable_manager,
                           loader=loader)

    def munge(self, ds):
        '''
        If a simple task is given, an implicit block for that single task
        is created, which goes in the main portion of the block
        '''
        is_block = any(attr in ds for attr in ('block', 'rescue', 'always'))
        if not is_block:
            if isinstance(ds, list):
                return dict(block=ds)
            else:
                return dict(block=[ds])
        return ds

    def _load_task_list(self, ds):
        '''
        Shared loader for the block/rescue/always attributes; the three
        sections differ only in which attribute they populate.
        '''
        return load_list_of_tasks(
            ds,
            block=self,
            role=self._role,
            task_include=self._task_include,
            variable_manager=self._variable_manager,
            loader=self._loader,
            use_handlers=self._use_handlers,
        )

    def _load_block(self, attr, ds):
        return self._load_task_list(ds)

    def _load_rescue(self, attr, ds):
        return self._load_task_list(ds)

    def _load_always(self, attr, ds):
        return self._load_task_list(ds)

    # not currently used
    #def _load_otherwise(self, attr, ds):
    #    return self._load_task_list(ds)

    def compile(self):
        '''
        Returns the task list for this object
        '''

        task_list = []
        for task in self.block:
            # FIXME: evaulate task tags/conditionals here
            task_list.extend(task.compile())

        return task_list

    def copy(self):
        ''' Return a copy of this block, deep-copying parent/include context. '''
        new_me = super(Block, self).copy()
        new_me._use_handlers = self._use_handlers

        new_me._parent_block = None
        if self._parent_block:
            new_me._parent_block = self._parent_block.copy()

        # the role is shared, not copied
        new_me._role = None
        if self._role:
            new_me._role = self._role

        new_me._task_include = None
        if self._task_include:
            new_me._task_include = self._task_include.copy()

        return new_me

    def serialize(self):
        '''
        Override of the default serialize method, since when we're serializing
        a task we don't want to include the attribute list of tasks.
        '''

        data = dict(when=self.when)

        if self._role is not None:
            data['role'] = self._role.serialize()
        if self._task_include is not None:
            data['task_include'] = self._task_include.serialize()

        return data

    def deserialize(self, data):
        '''
        Override of the default deserialize method, to match the above overridden
        serialize method
        '''

        #from ansible.playbook.task_include import TaskInclude
        from ansible.playbook.task import Task

        # unpack the when attribute, which is the only one we want
        self.when = data.get('when')

        # if there was a serialized role, unpack it too
        role_data = data.get('role')
        if role_data:
            r = Role()
            r.deserialize(role_data)
            self._role = r

        # if there was a serialized task include, unpack it too
        ti_data = data.get('task_include')
        if ti_data:
            ti = Task()
            ti.deserialize(ti_data)
            self._task_include = ti

    def evaluate_conditional(self, all_vars):
        ''' A block is skipped if its include, parent block, or role is skipped. '''
        if self._task_include is not None:
            if not self._task_include.evaluate_conditional(all_vars):
                return False
        if self._parent_block is not None:
            if not self._parent_block.evaluate_conditional(all_vars):
                return False
        elif self._role is not None:
            if not self._role.evaluate_conditional(all_vars):
                return False
        return super(Block, self).evaluate_conditional(all_vars)

    def evaluate_tags(self, only_tags, skip_tags, all_vars):
        ''' A block matches the tag filters if it or its parent/role matches. '''
        result = False
        if self._parent_block is not None:
            result |= self._parent_block.evaluate_tags(only_tags=only_tags,
                                                       skip_tags=skip_tags,
                                                       all_vars=all_vars)
        elif self._role is not None:
            result |= self._role.evaluate_tags(only_tags=only_tags,
                                               skip_tags=skip_tags,
                                               all_vars=all_vars)
        return result | super(Block, self).evaluate_tags(
            only_tags=only_tags, skip_tags=skip_tags, all_vars=all_vars)

    def set_loader(self, loader):
        ''' Propagate the loader to this block and its parent/role/include. '''
        self._loader = loader
        if self._parent_block:
            self._parent_block.set_loader(loader)
        elif self._role:
            self._role.set_loader(loader)

        if self._task_include:
            self._task_include.set_loader(loader)
Example #5
0
class PlaybookInclude(Base, Conditional, Taggable):

    '''
    Represents an `- import_playbook: ...` entry inside a playbook.
    Loading one does not yield a PlaybookInclude at all: load_data() parses
    and returns a brand-new Playbook object built from the referenced file.
    '''

    # the file to import; templated before being resolved to a path or FQCN
    import_playbook = FieldAttribute(isa='string')
    # extra vars to apply to every entry of the imported playbook
    vars_val = FieldAttribute(isa='dict', default=dict, alias='vars')

    @staticmethod
    def load(data, basedir, variable_manager=None, loader=None):
        # entry point: parse the import datastructure into a Playbook
        return PlaybookInclude().load_data(ds=data,
                                           basedir=basedir,
                                           variable_manager=variable_manager,
                                           loader=loader)

    def load_data(self, ds, basedir, variable_manager=None, loader=None):
        '''
        Overrides the base load_data(), as we're actually going to return a new
        Playbook() object rather than a PlaybookInclude object
        '''

        # import here to avoid a dependency loop
        from ansible.playbook import Playbook
        from ansible.playbook.play import Play

        # first, we use the original parent method to correctly load the object
        # via the load_data/preprocess_data system we normally use for other
        # playbook objects
        new_obj = super(PlaybookInclude,
                        self).load_data(ds, variable_manager, loader)

        # build the variable context used to template the playbook file name
        all_vars = self.vars.copy()
        if variable_manager:
            all_vars.update(variable_manager.get_vars())

        templar = Templar(loader=loader, variables=all_vars)

        # then we use the object to load a Playbook
        pb = Playbook(loader=loader)

        file_name = templar.template(new_obj.import_playbook)

        # check for FQCN
        resource = _get_collection_playbook_path(file_name)
        if resource is not None:
            playbook = resource[1]
            playbook_collection = resource[2]
        else:
            # not FQCN try path
            playbook = file_name
            if not os.path.isabs(playbook):
                playbook = os.path.join(basedir, playbook)

            # might still be collection playbook
            playbook_collection = _get_collection_name_from_path(playbook)

        if playbook_collection:
            # it is a collection playbook, setup default collections
            AnsibleCollectionConfig.default_collection = playbook_collection
        else:
            # it is NOT a collection playbook, setup adjacent paths
            AnsibleCollectionConfig.playbook_paths.append(
                os.path.dirname(
                    os.path.abspath(
                        to_bytes(playbook, errors='surrogate_or_strict'))))

        pb._load_playbook_data(file_name=playbook,
                               variable_manager=variable_manager,
                               vars=self.vars.copy())

        # finally, update each loaded playbook entry with any variables specified
        # on the included playbook and/or any tags which may have been set
        for entry in pb._entries:

            # conditional includes on a playbook need a marker to skip gathering
            if new_obj.when and isinstance(entry, Play):
                entry._included_conditional = new_obj.when[:]

            # import-level vars win over play-level vars of the same name
            temp_vars = entry.vars.copy()
            temp_vars.update(new_obj.vars)
            # 'tags' passed as a var is folded into the entry's tag list
            param_tags = temp_vars.pop('tags', None)
            if param_tags is not None:
                entry.tags.extend(param_tags.split(','))
            entry.vars = temp_vars
            entry.tags = list(set(entry.tags).union(new_obj.tags))
            if entry._included_path is None:
                entry._included_path = os.path.dirname(playbook)

            # Check to see if we need to forward the conditionals on to the included
            # plays. If so, we can take a shortcut here and simply prepend them to
            # those attached to each block (if any)
            if new_obj.when:
                for task_block in (entry.pre_tasks + entry.roles +
                                   entry.tasks + entry.post_tasks):
                    task_block._when = new_obj.when[:] + task_block.when[:]

        return pb

    def preprocess_data(self, ds):
        '''
        Reorganizes the data for a PlaybookInclude datastructure to line
        up with what we expect the proper attributes to be
        '''

        if not isinstance(ds, dict):
            raise AnsibleAssertionError(
                'ds (%s) should be a dict but was a %s' % (ds, type(ds)))

        # the new, cleaned datastructure, which will have legacy
        # items reduced to a standard structure
        new_ds = AnsibleMapping()
        if isinstance(ds, AnsibleBaseYAMLObject):
            new_ds.ansible_pos = ds.ansible_pos

        for (k, v) in ds.items():
            if k in C._ACTION_IMPORT_PLAYBOOK:
                self._preprocess_import(ds, new_ds, k, v)
            else:
                # some basic error checking, to make sure vars are properly
                # formatted and do not conflict with k=v parameters
                if k == 'vars':
                    if 'vars' in new_ds:
                        raise AnsibleParserError(
                            "import_playbook parameters cannot be mixed with 'vars' entries for import statements",
                            obj=ds)
                    elif not isinstance(v, dict):
                        raise AnsibleParserError(
                            "vars for import_playbook statements must be specified as a dictionary",
                            obj=ds)
                new_ds[k] = v

        return super(PlaybookInclude, self).preprocess_data(new_ds)

    def _preprocess_import(self, ds, new_ds, k, v):
        '''
        Splits the playbook import line up into filename and parameters
        '''
        if v is None:
            raise AnsibleParserError("playbook import parameter is missing",
                                     obj=ds)
        elif not isinstance(v, string_types):
            raise AnsibleParserError(
                "playbook import parameter must be a string indicating a file path, got %s instead"
                % type(v),
                obj=ds)

        # The import_playbook line must include at least one item, which is the filename
        # to import. Anything after that should be regarded as a parameter to the import
        items = split_args(v)
        if len(items) == 0:
            raise AnsibleParserError(
                "import_playbook statements must specify the file name to import",
                obj=ds)

        new_ds['import_playbook'] = items[0].strip()
class PlaybookInclude(Base, Conditional, Taggable):

    '''
    Represents an `- include: other_playbook.yml` entry inside a playbook.
    Loading one does not yield a PlaybookInclude at all: load_data() parses
    and returns a brand-new Playbook object built from the referenced file.
    '''

    _name = FieldAttribute(isa='string')
    _include = FieldAttribute(isa='string')
    _vars = FieldAttribute(isa='dict', default=dict())

    @staticmethod
    def load(data, basedir, variable_manager=None, loader=None):
        # entry point: parse the include datastructure into a Playbook
        return PlaybookInclude().load_data(ds=data,
                                           basedir=basedir,
                                           variable_manager=variable_manager,
                                           loader=loader)

    def load_data(self, ds, basedir, variable_manager=None, loader=None):
        '''
        Overrides the base load_data(), as we're actually going to return a new
        Playbook() object rather than a PlaybookInclude object
        '''

        # import here to avoid a dependency loop
        from ansible.playbook import Playbook

        # first, we use the original parent method to correctly load the object
        # via the load_data/preprocess_data system we normally use for other
        # playbook objects
        new_obj = super(PlaybookInclude,
                        self).load_data(ds, variable_manager, loader)

        # build the variable context used to template the included file name
        all_vars = self.vars.copy()
        if variable_manager:
            all_vars.update(variable_manager.get_vars(loader=loader))

        templar = Templar(loader=loader, variables=all_vars)

        # then we use the object to load a Playbook
        pb = Playbook(loader=loader)

        file_name = templar.template(new_obj.include)
        if not os.path.isabs(file_name):
            file_name = os.path.join(basedir, file_name)

        pb._load_playbook_data(file_name=file_name,
                               variable_manager=variable_manager)

        # finally, update each loaded playbook entry with any variables specified
        # on the included playbook and/or any tags which may have been set
        for entry in pb._entries:
            # include-level vars win over play-level vars of the same name
            temp_vars = entry.vars.copy()
            temp_vars.update(new_obj.vars)
            # 'tags' passed as a var is folded into the entry's tag list
            param_tags = temp_vars.pop('tags', None)
            if param_tags is not None:
                entry.tags.extend(param_tags.split(','))
            entry.vars = temp_vars
            entry.tags = list(set(entry.tags).union(new_obj.tags))
            if entry._included_path is None:
                entry._included_path = os.path.dirname(file_name)

            # Check to see if we need to forward the conditionals on to the included
            # plays. If so, we can take a shortcut here and simply prepend them to
            # those attached to each block (if any)
            if new_obj.when:
                for task_block in (entry.pre_tasks + entry.roles +
                                   entry.tasks + entry.post_tasks):
                    task_block._attributes[
                        'when'] = new_obj.when[:] + task_block.when[:]

        return pb

    def preprocess_data(self, ds):
        '''
        Reorganizes the data for a PlaybookInclude datastructure to line
        up with what we expect the proper attributes to be
        '''

        # raise a real parser error instead of using a bare `assert`, which
        # is silently stripped when Python runs with optimizations (-O)
        if not isinstance(ds, dict):
            raise AnsibleParserError(
                'ds (%s) should be a dict but was a %s' % (ds, type(ds)),
                obj=ds)

        # the new, cleaned datastructure, which will have legacy
        # items reduced to a standard structure
        new_ds = AnsibleMapping()
        if isinstance(ds, AnsibleBaseYAMLObject):
            new_ds.ansible_pos = ds.ansible_pos

        for (k, v) in iteritems(ds):
            if k == 'include':
                self._preprocess_include(ds, new_ds, k, v)
            else:
                # some basic error checking, to make sure vars are properly
                # formatted and do not conflict with k=v parameters
                if k == 'vars':
                    if 'vars' in new_ds:
                        raise AnsibleParserError(
                            "include parameters cannot be mixed with 'vars' entries for include statements",
                            obj=ds)
                    elif not isinstance(v, dict):
                        raise AnsibleParserError(
                            "vars for include statements must be specified as a dictionary",
                            obj=ds)
                new_ds[k] = v

        return super(PlaybookInclude, self).preprocess_data(new_ds)

    def _preprocess_include(self, ds, new_ds, k, v):
        '''
        Splits the include line up into filename and parameters
        '''

        if v is None:
            raise AnsibleParserError("include parameter is missing", obj=ds)

        # The include line must include at least one item, which is the filename
        # to include. Anything after that should be regarded as a parameter to the include
        items = split_args(v)
        if len(items) == 0:
            raise AnsibleParserError(
                "include statements must specify the file name to include",
                obj=ds)
        else:
            new_ds['include'] = items[0]
            if len(items) > 1:
                # rejoin the parameter portion of the arguments and
                # then use parse_kv() to get a dict of params back
                params = parse_kv(" ".join(items[1:]))
                if 'tags' in params:
                    new_ds['tags'] = params.pop('tags')
                if 'vars' in new_ds:
                    raise AnsibleParserError(
                        "include parameters cannot be mixed with 'vars' entries for include statements",
                        obj=ds)
                new_ds['vars'] = params
Example #7
0
class Task(Base):
    """
    A task is a language feature that represents a call to a module, with given arguments and other parameters.
    A handler is a subclass of a task.

    Usage:

       Task.load(datastructure) -> Task
       Task.something(...)
    """

    # =================================================================================
    # ATTRIBUTES
    # load_<attribute_name> and
    # validate_<attribute_name>
    # will be used if defined
    # might be possible to define others

    # the module arguments and the action (module name) for this task
    _args = FieldAttribute(isa='dict')
    _action = FieldAttribute(isa='string')

    _always_run = FieldAttribute(isa='bool')
    _any_errors_fatal = FieldAttribute(isa='bool')
    _async = FieldAttribute(isa='int')
    _connection = FieldAttribute(isa='string')
    _delay = FieldAttribute(isa='int')
    _delegate_to = FieldAttribute(isa='string')
    _environment = FieldAttribute(isa='dict')
    _first_available_file = FieldAttribute(isa='list')
    _ignore_errors = FieldAttribute(isa='bool')

    # loop plugin name and its arguments, set by _munge_loop() below
    _loop = FieldAttribute(isa='string', private=True)
    _loop_args = FieldAttribute(isa='list', private=True)
    _local_action = FieldAttribute(isa='string')

    # FIXME: this should not be a Task
    _meta = FieldAttribute(isa='string')

    _name = FieldAttribute(isa='string')

    _no_log = FieldAttribute(isa='bool')
    _notify = FieldAttribute(isa='list')
    _poll = FieldAttribute(isa='integer')
    _register = FieldAttribute(isa='string')
    _remote_user = FieldAttribute(isa='string')
    _retries = FieldAttribute(isa='integer')
    _run_once = FieldAttribute(isa='bool')
    # legacy su/sudo privilege-escalation settings
    _su = FieldAttribute(isa='bool')
    _su_pass = FieldAttribute(isa='string')
    _su_user = FieldAttribute(isa='string')
    _sudo = FieldAttribute(isa='bool')
    _sudo_user = FieldAttribute(isa='string')
    _sudo_pass = FieldAttribute(isa='string')
    _transport = FieldAttribute(isa='string')
    _until = FieldAttribute(isa='list')  # ?

    def __init__(self, block=None, role=None):
        ''' constructors a task, without the Task.load classmethod, it will be pretty blank '''
        # the containers this task belongs to, used when rendering its name
        self._block = block
        self._role = role
        super(Task, self).__init__()

    def get_name(self):
        ''' return the name of the task '''

        # prefix with the role name when available; fall back to a
        # synthesized "<action> <args>" label when no name was given
        if self._role and self.name:
            return "%s : %s" % (self._role.name, self.name)
        elif self.name:
            return self.name
        else:
            flattened_args = self._merge_kv(self.args)
            return "%s %s" % (self.action, flattened_args)

    def _merge_kv(self, ds):
        # flatten a dict of module args into a "k=v k=v" display string;
        # strings pass through unchanged and None renders as ""
        # NOTE(review): any other type falls off the end and returns None —
        # confirm callers only pass None, strings, or dicts
        if ds is None:
            return ""
        elif isinstance(ds, basestring):
            return ds
        elif isinstance(ds, dict):
            buf = ""
            for (k, v) in ds.iteritems():
                # keys with a leading underscore are internal, not user args
                if k.startswith('_'):
                    continue
                buf = buf + "%s=%s " % (k, v)
            buf = buf.strip()
            return buf

    @staticmethod
    def load(data, block=None, role=None):
        ''' create a Task from the given datastructure '''
        t = Task(block=block, role=role)
        return t.load_data(data)

    def __repr__(self):
        ''' returns a human readable representation of the task '''
        return "TASK: %s" % self.get_name()

    def _munge_loop(self, ds, new_ds, k, v):
        ''' take a lookup plugin name and store it correctly '''

        # NOTE(review): _loop here resolves at class level to the
        # FieldAttribute declared above; '.value' assumes FieldAttribute
        # exposes a 'value' attr — confirm against the Attribute class
        if self._loop.value is not None:
            raise AnsibleError("duplicate loop in task: %s" % k)
        new_ds['loop'] = k
        new_ds['loop_args'] = v

    def munge(self, ds):
        '''
        tasks are especially complex arguments so need pre-processing.
        keep it short.
        '''

        assert isinstance(ds, dict)

        # the new, cleaned datastructure, which will have legacy
        # items reduced to a standard structure suitable for the
        # attributes of the task class
        new_ds = dict()

        # use the args parsing class to determine the action, args,
        # and the delegate_to value from the various possible forms
        # supported as legacy
        args_parser = ModuleArgsParser()
        (action, args, delegate_to) = args_parser.parse(ds)

        new_ds['action'] = action
        new_ds['args'] = args
        new_ds['delegate_to'] = delegate_to

        for (k, v) in ds.iteritems():
            if k in ('action', 'local_action', 'args',
                     'delegate_to') or k == action:
                # we don't want to re-assign these values, which were
                # determined by the ModuleArgsParser() above
                continue
            elif "with_%s" % k in lookup_finder:
                # a with_<lookup> key marks this task as a loop
                self._munge_loop(ds, new_ds, k, v)
            else:
                new_ds[k] = v

        return new_ds
Example #8
0
class TaskInclude(Base):

    '''
    A class used to wrap the use of `include: /some/other/file.yml`
    within a task list, which may return a list of Task objects and/or
    more TaskInclude objects.
    '''

    # the description field is used mainly internally to
    # show a nice representation of this class, rather than
    # simply using __class__.__name__

    __desc__ = "task include statement"


    #-----------------------------------------------------------------
    # Attributes

    # NOTE(review): the list/dict defaults below are shared mutable objects;
    # confirm FieldAttribute copies them per-instance
    _include   = FieldAttribute(isa='string')
    _loop      = FieldAttribute(isa='string', private=True)
    _loop_args = FieldAttribute(isa='list', private=True)
    _tags      = FieldAttribute(isa='list', default=[])
    _vars      = FieldAttribute(isa='dict', default=dict())
    _when      = FieldAttribute(isa='list', default=[])

    def __init__(self, block=None, role=None, task_include=None):
        # the containers this include belongs to
        self._block        = block
        self._role         = role
        self._task_include = task_include

        # populated by _load_include() with the blocks parsed from the file
        self._task_blocks  = []

        super(TaskInclude, self).__init__()

    @staticmethod
    def load(data, block=None, role=None, task_include=None, loader=None):
        ''' create a TaskInclude from the given datastructure '''
        ti = TaskInclude(block=block, role=role, task_include=None)
        return ti.load_data(data, loader=loader)

    def munge(self, ds):
        '''
        Reorganizes the data for a TaskInclude datastructure to line
        up with what we expect the proper attributes to be
        '''

        assert isinstance(ds, dict)

        # the new, cleaned datastructure, which will have legacy
        # items reduced to a standard structure
        new_ds = AnsibleMapping()
        if isinstance(ds, AnsibleBaseYAMLObject):
            new_ds.copy_position_info(ds)

        for (k,v) in ds.iteritems():
            if k == 'include':
                self._munge_include(ds, new_ds, k, v)
            elif k.replace("with_", "") in lookup_finder:
                self._munge_loop(ds, new_ds, k, v)
            else:
                # some basic error checking, to make sure vars are properly
                # formatted and do not conflict with k=v parameters
                # FIXME: we could merge these instead, but controlling the order
                #        in which they're encountered could be difficult
                if k == 'vars':
                    if 'vars' in new_ds:
                        raise AnsibleParserError("include parameters cannot be mixed with 'vars' entries for include statements", obj=ds)
                    elif not isinstance(v, dict):
                        raise AnsibleParserError("vars for include statements must be specified as a dictionary", obj=ds)
                new_ds[k] = v

        return new_ds

    def _munge_include(self, ds, new_ds, k, v):
        '''
        Splits the include line up into filename and parameters
        '''

        # The include line must include at least one item, which is the filename
        # to include. Anything after that should be regarded as a parameter to the include
        items = split_args(v)
        if len(items) == 0:
            raise AnsibleParserError("include statements must specify the file name to include", obj=ds)
        else:
            # FIXME/TODO: validate that items[0] is a file, which also
            #             exists and is readable
            new_ds['include'] = items[0]
            if len(items) > 1:
                # rejoin the parameter portion of the arguments and
                # then use parse_kv() to get a dict of params back
                params = parse_kv(" ".join(items[1:]))
                if 'vars' in new_ds:
                    # FIXME: see fixme above regarding merging vars
                    raise AnsibleParserError("include parameters cannot be mixed with 'vars' entries for include statements", obj=ds)
                new_ds['vars'] = params

    def _munge_loop(self, ds, new_ds, k, v):
        ''' take a lookup plugin name and store it correctly '''

        loop_name = k.replace("with_", "")
        if new_ds.get('loop') is not None:
            raise AnsibleError("duplicate loop in task: %s" % loop_name)
        new_ds['loop'] = loop_name
        new_ds['loop_args'] = v


    def _load_include(self, attr, ds):
        ''' loads the file name specified in the ds and returns a list of blocks '''

        data = self._loader.load_from_file(ds)
        if not isinstance(data, list):
            # raise AnsibleParserError for consistency with the other parse
            # errors in this class ("AnsibleParsingError" was an undefined
            # name and would have produced a NameError here)
            raise AnsibleParserError("included task files must contain a list of tasks", obj=ds)

        self._task_blocks = load_list_of_blocks(
                                data,
                                parent_block=self._block,
                                task_include=self,
                                role=self._role,
                                loader=self._loader
                            )
        return ds

    def compile(self):
        '''
        Returns the task list for the included tasks.
        '''

        task_list = []
        task_list.extend(compile_block_list(self._task_blocks))
        return task_list
Example #9
0
class RoleMetadata(Base, CollectionSearch):
    '''
    This class wraps the parsing and validation of the optional metadata
    within each Role (meta/main.yml).
    '''

    # whether the same role may appear more than once in a play's role list
    _allow_duplicates = FieldAttribute(isa='bool', default=False)
    # roles this role depends on; parsed into RoleInclude objects on load
    _dependencies = FieldAttribute(isa='list', default=list)
    # raw galaxy metadata; currently passed through unparsed (see loader below)
    _galaxy_info = FieldAttribute(isa='GalaxyInfo')

    def __init__(self, owner=None):
        # the Role this metadata belongs to; used to resolve dependency
        # paths relative to the owning role's location
        self._owner = owner
        super(RoleMetadata, self).__init__()

    @staticmethod
    def load(data, owner, variable_manager=None, loader=None):
        '''
        Returns a new RoleMetadata object based on the datastructure passed in.
        '''

        if not isinstance(data, dict):
            raise AnsibleParserError(
                "the 'meta/main.yml' for role %s is not a dictionary" %
                owner.get_name())

        m = RoleMetadata(owner=owner).load_data(
            data, variable_manager=variable_manager, loader=loader)
        return m

    def _load_dependencies(self, attr, ds):
        '''
        This is a helper loading function for the dependencies list,
        which returns a list of RoleInclude objects
        '''

        roles = []
        if ds:
            if not isinstance(ds, list):
                raise AnsibleParserError(
                    "Expected role dependencies to be a list.", obj=self._ds)

            for role_def in ds:
                # plain strings and dicts that already carry a role/name key
                # need no further normalization
                if isinstance(role_def, string_types
                              ) or 'role' in role_def or 'name' in role_def:
                    roles.append(role_def)
                    continue
                try:
                    # role_def is new style: { src: 'galaxy.role,version,name', other_vars: "here" }
                    def_parsed = RoleRequirement.role_yaml_parse(role_def)
                    if def_parsed.get('name'):
                        role_def['name'] = def_parsed['name']
                    roles.append(role_def)
                except AnsibleError as exc:
                    raise AnsibleParserError(to_native(exc),
                                             obj=role_def,
                                             orig_exc=exc)

        # dependencies are resolved relative to the directory containing
        # the owning role, when one is known
        current_role_path = None
        if self._owner:
            current_role_path = os.path.dirname(self._owner._role_path)

        try:
            return load_list_of_roles(roles,
                                      play=self._owner._play,
                                      current_role_path=current_role_path,
                                      variable_manager=self._variable_manager,
                                      loader=self._loader)
        except AssertionError as e:
            raise AnsibleParserError(
                "A malformed list of role dependencies was encountered.",
                obj=self._ds,
                orig_exc=e)

    def _load_galaxy_info(self, attr, ds):
        '''
        This is a helper loading function for the galaxy info entry
        in the metadata, which returns a GalaxyInfo object rather than
        a simple dictionary.
        '''

        # currently a pass-through; the GalaxyInfo wrapping is not implemented
        return ds

    def serialize(self):
        # NOTE(review): these underscore names refer to the class-level
        # FieldAttribute declarations above — confirm they resolve to the
        # per-instance values (not the attribute descriptors) at runtime
        return dict(allow_duplicates=self._allow_duplicates,
                    dependencies=self._dependencies)

    def deserialize(self, data):
        # restore the serialized fields, falling back to safe defaults
        setattr(self, 'allow_duplicates', data.get('allow_duplicates', False))
        setattr(self, 'dependencies', data.get('dependencies', []))
Example #10
0
class Task(Base, Conditional, Taggable, Become):
    """
    A task is a language feature that represents a call to a module, with given arguments and other parameters.
    A handler is a subclass of a task.

    Usage:

       Task.load(datastructure) -> Task
       Task.something(...)
    """

    # =================================================================================
    # ATTRIBUTES
    # load_<attribute_name> and
    # validate_<attribute_name>
    # will be used if defined
    # might be possible to define others

    # module arguments and the action (module name) to invoke
    # NOTE(review): default=dict() is a shared mutable object; confirm
    # FieldAttribute copies it per-instance
    _args = FieldAttribute(isa='dict', default=dict())
    _action = FieldAttribute(isa='string')

    _any_errors_fatal = FieldAttribute(isa='bool')
    _async = FieldAttribute(isa='int', default=0)
    _changed_when = FieldAttribute(isa='string')
    _delay = FieldAttribute(isa='int', default=5)
    _delegate_to = FieldAttribute(isa='string')
    _delegate_facts = FieldAttribute(isa='bool', default=False)
    _failed_when = FieldAttribute(isa='string')
    _first_available_file = FieldAttribute(isa='list')
    # loop plugin name and its arguments, set by _preprocess_loop() below
    _loop = FieldAttribute(isa='string', private=True)
    _loop_args = FieldAttribute(isa='list', private=True)
    _name = FieldAttribute(isa='string', default='')
    _notify = FieldAttribute(isa='list')
    _poll = FieldAttribute(isa='int')
    _register = FieldAttribute(isa='string')
    _retries = FieldAttribute(isa='int', default=3)
    _until = FieldAttribute(isa='list')

    def __init__(self, block=None, role=None, task_include=None):
        '''
        Construct a mostly-empty task; use the Task.load classmethod to
        build a fully-populated one from a datastructure.
        '''
        # remember the containers this task belongs to before running the
        # base-class initialization
        self._role = role
        self._task_include = task_include
        self._block = block

        super(Task, self).__init__()

    def get_path(self):
        ''' return the absolute path of the task with its line number '''
        # tasks that were never loaded from a file carry no _ds
        if not hasattr(self, '_ds'):
            return None
        return "%s:%s" % (self._ds._data_source, self._ds._line_number)

    def get_name(self):
        ''' return the name of the task '''

        # named tasks: prefix with the role name when one is attached
        if self.name:
            if self._role:
                return "%s : %s" % (self._role.get_name(), self.name)
            return self.name

        # unnamed tasks: synthesize an "<action> <args>" label
        flattened_args = self._merge_kv(self.args)
        if self._role:
            return "%s : %s %s" % (self._role.get_name(), self.action,
                                   flattened_args)
        return "%s %s" % (self.action, flattened_args)

    def _merge_kv(self, ds):
        '''
        Flatten a dict of module args into a "k=v k=v" display string;
        strings pass through unchanged and None renders as "".
        NOTE(review): any other type falls through and returns None —
        confirm callers only pass None, strings, or dicts.
        '''
        if ds is None:
            return ""
        if isinstance(ds, string_types):
            return ds
        if isinstance(ds, dict):
            # keys with a leading underscore are internal, not user args
            joined = " ".join("%s=%s" % (k, v)
                              for (k, v) in iteritems(ds)
                              if not k.startswith('_'))
            return joined.strip()

    @staticmethod
    def load(data,
             block=None,
             role=None,
             task_include=None,
             variable_manager=None,
             loader=None):
        ''' create and populate a Task from the given datastructure '''
        new_task = Task(block=block, role=role, task_include=task_include)
        return new_task.load_data(data,
                                  variable_manager=variable_manager,
                                  loader=loader)

    def __repr__(self):
        ''' returns a human readable representation of the task '''
        name = self.get_name()
        # bare meta tasks render their raw params instead of the empty name
        if name == 'meta ':
            return "TASK: meta (%s)" % self.args['_raw_params']
        return "TASK: %s" % name

    def _preprocess_loop(self, ds, new_ds, k, v):
        ''' take a lookup plugin name and store it correctly '''

        loop_name = k.replace("with_", "")
        # a task may only carry a single loop directive
        if new_ds.get('loop') is not None:
            raise AnsibleError("duplicate loop in task: %s" % loop_name,
                               obj=ds)
        # an empty with_* entry is always an error
        if v is None:
            raise AnsibleError("you must specify a value when using %s" % k,
                               obj=ds)
        new_ds['loop'] = loop_name
        new_ds['loop_args'] = v

    def preprocess_data(self, ds):
        '''
        tasks are especially complex arguments so need pre-processing.
        keep it short.
        '''

        # raise a real error instead of using a bare `assert`, which is
        # silently stripped when Python runs with optimizations (-O)
        if not isinstance(ds, dict):
            raise AnsibleError(
                "task ds (%s) should be a dict but was a %s" % (ds, type(ds)))

        # the new, cleaned datastructure, which will have legacy
        # items reduced to a standard structure suitable for the
        # attributes of the task class
        new_ds = AnsibleMapping()
        if isinstance(ds, AnsibleBaseYAMLObject):
            new_ds.ansible_pos = ds.ansible_pos

        # use the args parsing class to determine the action, args,
        # and the delegate_to value from the various possible forms
        # supported as legacy
        args_parser = ModuleArgsParser(task_ds=ds)
        (action, args, delegate_to) = args_parser.parse()

        # the command/shell/script modules used to support the `cmd` arg,
        # which corresponds to what we now call _raw_params, so move that
        # value over to _raw_params (assuming it is empty)
        if action in ('command', 'shell', 'script'):
            if 'cmd' in args:
                if args.get('_raw_params', '') != '':
                    raise AnsibleError(
                        "The 'cmd' argument cannot be used when other raw parameters are specified."
                        " Please put everything in one or the other place.",
                        obj=ds)
                args['_raw_params'] = args.pop('cmd')

        new_ds['action'] = action
        new_ds['args'] = args
        new_ds['delegate_to'] = delegate_to

        # we handle any 'vars' specified in the ds here, as we may
        # be adding things to them below (special handling for includes).
        # When that deprecated feature is removed, this can be too.
        if 'vars' in ds:
            # _load_vars is defined in Base, and is used to load a dictionary
            # or list of dictionaries in a standard way
            new_ds['vars'] = self._load_vars(None, ds.pop('vars'))
        else:
            new_ds['vars'] = dict()

        for (k, v) in iteritems(ds):
            if k in ('action', 'local_action', 'args',
                     'delegate_to') or k == action or k == 'shell':
                # we don't want to re-assign these values, which were
                # determined by the ModuleArgsParser() above
                continue
            elif k.replace("with_", "") in lookup_loader:
                # a with_<lookup> key marks this task as a loop
                self._preprocess_loop(ds, new_ds, k, v)
            else:
                # pre-2.0 syntax allowed variables for include statements at the
                # top level of the task, so we move those into the 'vars' dictionary
                # here, and show a deprecation message as we will remove this at
                # some point in the future.
                if action == 'include' and k not in self._get_base_attributes(
                ) and k not in self.DEPRECATED_ATTRIBUTES:
                    display.deprecated(
                        "Specifying include variables at the top-level of the task is deprecated."
                        " Please see:\nhttp://docs.ansible.com/ansible/playbooks_roles.html#task-include-files-and-encouraging-reuse\n\n"
                        " for currently supported syntax regarding included files and variables"
                    )
                    new_ds['vars'][k] = v
                else:
                    new_ds[k] = v

        return super(Task, self).preprocess_data(new_ds)

    def _load_any_errors_fatal(self, attr, value):
        '''
        Loader hook that rejects this attribute at the task level.

        It only emits a deprecation warning and discards the supplied
        value; any_errors_fatal is honored at the play level instead.
        '''
        msg = "Setting any_errors_fatal on a task is no longer supported. This should be set at the play level only"
        display.deprecated(msg)
        return None

    def post_validate(self, templar):
        '''
        Override of base class post_validate, to also do final validation on
        the block and task include (if any) to which this task belongs.
        '''
        # validate the parents first: the enclosing block, then the
        # including task (each only when present)
        for parent in (self._block, self._task_include):
            if parent:
                parent.post_validate(templar)

        super(Task, self).post_validate(templar)

    def _post_validate_loop_args(self, attr, value, templar):
        '''
        Override post validation for the loop args field, which is templated
        specially in the TaskExecutor class when evaluating loops.
        '''
        return value

    def _post_validate_environment(self, attr, value, templar):
        '''
        Override post validation of vars on the play, as we don't want to
        template these too early.
        '''
        if value is None:
            return dict()

        for env_item in value:
            if isinstance(
                    env_item,
                (string_types, AnsibleUnicode
                 )) and env_item in templar._available_variables.keys():
                display.deprecated(
                    "Using bare variables for environment is deprecated."
                    " Update your playbooks so that the environment value uses the full variable syntax ('{{foo}}')"
                )
                break
        return templar.template(value, convert_bare=True)

    def get_vars(self):
        '''
        Return the variables applying to this task, merging in those of the
        parent block and the including task first, then the task's own vars
        (task-local values win). 'tags' and 'when' are stripped since they
        are directives, not real variables.
        '''
        all_vars = dict()
        for parent in (self._block, self._task_include):
            if parent:
                all_vars.update(parent.get_vars())

        all_vars.update(self.vars)

        for reserved in ('tags', 'when'):
            all_vars.pop(reserved, None)

        return all_vars

    def get_include_params(self):
        '''
        Return the parameters to pass along to an included file: those of
        any including task first, layered with this task's own vars when
        this task is itself an 'include' action.
        '''
        params = dict()
        if self._task_include:
            params.update(self._task_include.get_include_params())
        if self.action == 'include':
            params.update(self.vars)
        return params

    def copy(self, exclude_block=False):
        '''
        Return a copy of this task. The parent block is duplicated unless
        exclude_block is set; the role reference is shared (never copied);
        the including task, if any, is copied recursively with the same
        exclude_block setting.
        '''
        new_me = super(Task, self).copy()

        if self._block and not exclude_block:
            new_me._block = self._block.copy()
        else:
            new_me._block = None

        # roles are shared by reference, not duplicated
        new_me._role = self._role if self._role else None

        if self._task_include:
            new_me._task_include = self._task_include.copy(exclude_block=exclude_block)
        else:
            new_me._task_include = None

        return new_me

    def serialize(self):
        '''
        Serialize this task to a dict, adding the serialized forms of the
        parent block, role, and including task when each is present.
        '''
        data = super(Task, self).serialize()

        for key, obj in (('block', self._block),
                         ('role', self._role),
                         ('task_include', self._task_include)):
            if obj:
                data[key] = obj.serialize()

        return data

    def deserialize(self, data):
        '''
        Restore this task from serialized data, rebuilding the parent
        block, role, and including task objects when they were stored.
        Each handled key is removed from `data` before delegating the rest
        to the base class.
        '''

        block_data = data.get('block')
        if block_data:
            new_block = Block()
            new_block.deserialize(block_data)
            self._block = new_block
            del data['block']

        role_data = data.get('role')
        if role_data:
            new_role = Role()
            new_role.deserialize(role_data)
            self._role = new_role
            del data['role']

        ti_data = data.get('task_include')
        if ti_data:
            # the including task is rebuilt as a plain Task (rather than a
            # TaskInclude) to avoid an import loop with task_include
            new_ti = Task()
            new_ti.deserialize(ti_data)
            self._task_include = new_ti
            del data['task_include']

        super(Task, self).deserialize(data)

    def evaluate_conditional(self, templar, all_vars):
        '''
        Evaluate this task's conditionals, additionally requiring that the
        parent block and any including task also evaluate to True.
        '''
        for parent in (self._block, self._task_include):
            if parent is not None and not parent.evaluate_conditional(templar, all_vars):
                return False
        return super(Task, self).evaluate_conditional(templar, all_vars)

    def set_loader(self, loader):
        '''
        Set the loader on this task and propagate it to the parent block
        and any including task. Primarily needed after serialization and
        deserialization, which do not preserve the loader.
        '''
        self._loader = loader

        for parent in (self._block, self._task_include):
            if parent:
                parent.set_loader(loader)

    def _get_parent_attribute(self, attr, extend=False):
        '''
        Generic logic to get the attribute or parent attribute for a task value.
        '''
        value = None
        try:
            value = self._attributes[attr]

            if self._block and (value is None or extend):
                parent_value = getattr(self._block, attr)
                if extend:
                    value = self._extend_value(value, parent_value)
                else:
                    value = parent_value
            if self._task_include and (value is None or extend):
                parent_value = getattr(self._task_include, attr)
                if extend:
                    value = self._extend_value(value, parent_value)
                else:
                    value = parent_value
        except KeyError:
            pass

        return value

    def _get_attr_environment(self):
        '''
        Override for the 'tags' getattr fetcher, used from Base.
        '''
        environment = self._attributes['environment']
        parent_environment = self._get_parent_attribute('environment',
                                                        extend=True)
        if parent_environment is not None:
            environment = self._extend_value(environment, parent_environment)
        return environment
Example #11
0
class Block(Base, Become, Conditional, Taggable):
    """A grouping of tasks with optional rescue/always sections.

    A block may be attached to a play, a role, a parent block, or a task
    include; attribute lookups fall back through that parent chain (see
    _get_parent_attribute).
    """

    # the three task-list sections of a block
    _block  = FieldAttribute(isa='list', default=[])
    _rescue = FieldAttribute(isa='list', default=[])
    _always = FieldAttribute(isa='list', default=[])
    _delegate_to = FieldAttribute(isa='list')
    _delegate_facts = FieldAttribute(isa='bool', default=False)
    _any_errors_fatal = FieldAttribute(isa='bool')

    # for future consideration? this would be functionally
    # similar to the 'else' clause for exceptions
    #_otherwise = FieldAttribute(isa='list')

    def __init__(self, play=None, parent_block=None, role=None, task_include=None, use_handlers=False, implicit=False):
        # _dep_chain is None until set (e.g. during deserialization);
        # _implicit marks blocks auto-created around bare tasks
        self._play         = play
        self._role         = role
        self._task_include = None
        self._parent_block = None
        self._dep_chain    = None
        self._use_handlers = use_handlers
        self._implicit     = implicit

        # a task include takes precedence over a parent block as the parent
        if task_include:
            self._task_include = task_include
        elif parent_block:
            self._parent_block = parent_block

        super(Block, self).__init__()

    def get_vars(self):
        '''
        Return this block's own vars merged with those of the parent block
        or task include, when present. Note the parents are applied on top
        of the block's copy, so on key collisions the parent values win.
        '''

        all_vars = self.vars.copy()

        if self._parent_block:
            all_vars.update(self._parent_block.get_vars())
        if self._task_include:
            all_vars.update(self._task_include.get_vars())

        return all_vars

    @staticmethod
    def load(data, play=None, parent_block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
        '''
        Create a Block from a datastructure; data that is not an explicit
        block (a bare task or task list) produces an implicit block.
        '''
        implicit = not Block.is_block(data)
        b = Block(play=play, parent_block=parent_block, role=role, task_include=task_include, use_handlers=use_handlers, implicit=implicit)
        return b.load_data(data, variable_manager=variable_manager, loader=loader)

    @staticmethod
    def is_block(ds):
        '''
        Return True when the datastructure is an explicit block, i.e. a
        dict containing any of the 'block', 'rescue' or 'always' keys.
        '''
        is_block = False
        if isinstance(ds, dict):
            for attr in ('block', 'rescue', 'always'):
                if attr in ds:
                    is_block = True
                    break
        return is_block

    def preprocess_data(self, ds):
        '''
        If a simple task is given, an implicit block for that single task
        is created, which goes in the main portion of the block
        '''

        if not Block.is_block(ds):
            # wrap a bare task (or list of tasks) in a block datastructure
            if isinstance(ds, list):
                return super(Block, self).preprocess_data(dict(block=ds))
            else:
                return super(Block, self).preprocess_data(dict(block=[ds]))

        return super(Block, self).preprocess_data(ds)

    def _load_block(self, attr, ds):
        # loader for the 'block' task list; load failures surface as
        # AssertionError and are re-raised as a parser error
        try:
            return load_list_of_tasks(
                ds,
                play=self._play,
                block=self,
                role=self._role,
                task_include=self._task_include,
                variable_manager=self._variable_manager,
                loader=self._loader,
                use_handlers=self._use_handlers,
            )
        except AssertionError:
            raise AnsibleParserError("A malformed block was encountered.", obj=self._ds)

    def _load_rescue(self, attr, ds):
        # loader for the 'rescue' task list (same handling as _load_block)
        try:
            return load_list_of_tasks(
                ds,
                play=self._play,
                block=self,
                role=self._role,
                task_include=self._task_include,
                variable_manager=self._variable_manager,
                loader=self._loader,
                use_handlers=self._use_handlers,
            )
        except AssertionError:
            raise AnsibleParserError("A malformed block was encountered.", obj=self._ds)

    def _load_always(self, attr, ds):
        # loader for the 'always' task list (same handling as _load_block)
        try:
            return load_list_of_tasks(
                ds,
                play=self._play,
                block=self,
                role=self._role,
                task_include=self._task_include,
                variable_manager=self._variable_manager,
                loader=self._loader,
                use_handlers=self._use_handlers,
            )
        except AssertionError:
            raise AnsibleParserError("A malformed block was encountered.", obj=self._ds)

    def get_dep_chain(self):
        '''
        Return a copy of this block's role dependency chain, walking up to
        the parent block or including task's block when this block has
        none of its own; None when no chain exists anywhere up the chain.
        '''
        if self._dep_chain is None:
            if self._parent_block:
                return self._parent_block.get_dep_chain()
            elif self._task_include:
                return self._task_include._block.get_dep_chain()
            else:
                return None
        else:
            return self._dep_chain[:]

    def copy(self, exclude_parent=False, exclude_tasks=False):
        '''
        Return a copy of this block. Task lists are duplicated (and
        re-parented onto the copy) unless exclude_tasks is set; the parent
        block is copied unless exclude_parent is set; the role is shared
        by reference.
        '''
        def _dupe_task_list(task_list, new_block):
            # copy each task (or nested block), pointing it at the new parent
            new_task_list = []
            for task in task_list:
                if isinstance(task, Block):
                    new_task = task.copy(exclude_parent=True)
                    new_task._parent_block = new_block
                else:
                    new_task = task.copy(exclude_block=True)
                    new_task._block = new_block
                new_task_list.append(new_task)
            return new_task_list

        new_me = super(Block, self).copy()
        new_me._play         = self._play
        new_me._use_handlers = self._use_handlers

        if self._dep_chain:
            new_me._dep_chain = self._dep_chain[:]

        if not exclude_tasks:
            new_me.block  = _dupe_task_list(self.block or [], new_me)
            new_me.rescue = _dupe_task_list(self.rescue or [], new_me)
            new_me.always = _dupe_task_list(self.always or [], new_me)

        new_me._parent_block = None
        if self._parent_block and not exclude_parent:
            new_me._parent_block = self._parent_block.copy(exclude_tasks=exclude_tasks)

        new_me._role = None
        if self._role:
            new_me._role = self._role

        new_me._task_include = None
        if self._task_include:
            # the including task is copied without its block, then a
            # task-less copy of that block is re-attached to preserve the
            # parent chain without duplicating the task lists
            new_me._task_include = self._task_include.copy(exclude_block=True)
            new_me._task_include._block = self._task_include._block.copy(exclude_tasks=True)

        return new_me

    def serialize(self):
        '''
        Override of the default serialize method, since when we're serializing
        a task we don't want to include the attribute list of tasks.
        '''

        data = dict()
        for attr in self._get_base_attributes():
            if attr not in ('block', 'rescue', 'always'):
                data[attr] = getattr(self, attr)

        data['dep_chain'] = self.get_dep_chain()

        if self._role is not None:
            data['role'] = self._role.serialize()
        if self._task_include is not None:
            data['task_include'] = self._task_include.serialize()
        if self._parent_block is not None:
            # serialize the parent without its task lists to avoid cycles
            data['parent_block'] = self._parent_block.copy(exclude_tasks=True).serialize()

        return data

    def deserialize(self, data):
        '''
        Override of the default deserialize method, to match the above overridden
        serialize method
        '''

        from ansible.playbook.task import Task

        # we don't want the full set of attributes (the task lists), as that
        # would lead to a serialize/deserialize loop
        for attr in self._get_base_attributes():
            if attr in data and attr not in ('block', 'rescue', 'always'):
                setattr(self, attr, data.get(attr))

        self._dep_chain = data.get('dep_chain', None)

        # if there was a serialized role, unpack it too
        role_data = data.get('role')
        if role_data:
            r = Role()
            r.deserialize(role_data)
            self._role = r

        # if there was a serialized task include, unpack it too
        ti_data = data.get('task_include')
        if ti_data:
            ti = Task()
            ti.deserialize(ti_data)
            self._task_include = ti

        # rebuild the parent block (and take over its dep chain) if present
        pb_data = data.get('parent_block')
        if pb_data:
            pb = Block()
            pb.deserialize(pb_data)
            self._parent_block = pb
            self._dep_chain = self._parent_block.get_dep_chain()

    def evaluate_conditional(self, templar, all_vars):
        '''
        Evaluate this block's conditionals, additionally requiring every
        role in the dependency chain, the including task, and the parent
        block (when present) to evaluate to True as well.
        '''
        dep_chain = self.get_dep_chain()
        if dep_chain:
            for dep in dep_chain:
                if not dep.evaluate_conditional(templar, all_vars):
                    return False
        if self._task_include is not None:
            if not self._task_include.evaluate_conditional(templar, all_vars):
                return False
        if self._parent_block is not None:
            if not self._parent_block.evaluate_conditional(templar, all_vars):
                return False
        return super(Block, self).evaluate_conditional(templar, all_vars)

    def set_loader(self, loader):
        '''
        Set the loader on this block and propagate it to the parent block
        (or role), the including task, and the role dependency chain.
        '''
        self._loader = loader
        if self._parent_block:
            self._parent_block.set_loader(loader)
        elif self._role:
            self._role.set_loader(loader)

        if self._task_include:
            self._task_include.set_loader(loader)

        dep_chain = self.get_dep_chain()
        if dep_chain:
            for dep in dep_chain:
                dep.set_loader(loader)

    def _get_parent_attribute(self, attr, extend=False):
        '''
        Generic logic to get the attribute or parent attribute for a block value.
        Lookup order: this block, then parent block, including task, role
        (plus its reversed dependency chain), and finally the play. With
        extend=True the values from each level are merged instead of the
        first non-None value winning.
        '''

        value = None
        try:
            value = self._attributes[attr]

            if self._parent_block and (value is None or extend):
                parent_value = getattr(self._parent_block, attr)
                if extend:
                    value = self._extend_value(value, parent_value)
                else:
                    value = parent_value
            if self._task_include and (value is None or extend):
                parent_value = getattr(self._task_include, attr)
                if extend:
                    value = self._extend_value(value, parent_value)
                else:
                    value = parent_value
            if self._role and (value is None or extend) and hasattr(self._role, attr):
                parent_value = getattr(self._role, attr, None)
                if extend:
                    value = self._extend_value(value, parent_value)
                else:
                    value = parent_value

                # walk the role dependency chain closest-first
                dep_chain = self.get_dep_chain()
                if dep_chain and (value is None or extend):
                    dep_chain.reverse()
                    for dep in dep_chain:
                        dep_value = getattr(dep, attr, None)
                        if extend:
                            value = self._extend_value(value, dep_value)
                        else:
                            value = dep_value

                        if value is not None and not extend:
                            break
            if self._play and (value is None or extend) and hasattr(self._play, attr):
                parent_value = getattr(self._play, attr, None)
                if extend:
                    value = self._extend_value(value, parent_value)
                else:
                    value = parent_value
        except KeyError as e:
            # attribute not found at this level; return whatever was resolved
            pass

        return value

    def _get_attr_environment(self):
        '''
        Getter for the 'environment' attribute, used from Base; always
        merges in the parent environment values rather than overriding.
        '''
        environment = self._attributes['environment']
        parent_environment = self._get_parent_attribute('environment', extend=True)
        if parent_environment is not None:
            environment = self._extend_value(environment, parent_environment)

        return environment

    def _get_attr_any_errors_fatal(self):
        '''
        Getter for the 'any_errors_fatal' attribute, used from Base;
        delegates entirely to the parent attribute lookup chain.
        '''
        return self._get_parent_attribute('any_errors_fatal')

    def filter_tagged_tasks(self, play_context, all_vars):
        '''
        Creates a new block, with task lists filtered based on the tags contained
        within the play_context object.
        '''

        def evaluate_and_append_task(target):
            # keep 'meta' tasks unconditionally, includes unless explicitly
            # skipped, and other tasks matching the only/skip tag filters
            tmp_list = []
            for task in target:
                if isinstance(task, Block):
                    tmp_list.append(evaluate_block(task))
                elif task.action == 'meta' \
                or (task.action == 'include' and task.evaluate_tags([], play_context.skip_tags, all_vars=all_vars)) \
                or task.evaluate_tags(play_context.only_tags, play_context.skip_tags, all_vars=all_vars):
                    tmp_list.append(task)
            return tmp_list

        def evaluate_block(block):
            # NOTE(review): this copies `self` rather than `block`, even when
            # called recursively on a nested block — confirm this is intended
            new_block = self.copy(exclude_tasks=True)
            new_block.block  = evaluate_and_append_task(block.block)
            new_block.rescue = evaluate_and_append_task(block.rescue)
            new_block.always = evaluate_and_append_task(block.always)
            return new_block

        return evaluate_block(self)

    def has_tasks(self):
        # True when any of the three task-list sections is non-empty
        return len(self.block) > 0 or len(self.rescue) > 0 or len(self.always) > 0
Example #12
0
class Conditional:

    '''
    This is a mix-in class, to be used with Base to allow the object
    to be run conditionally when a condition is met or skipped.
    '''

    # the list of Jinja2 conditional expressions which must all evaluate
    # truthy for the object to run; extended/prepended from parents
    when = FieldAttribute(isa='list', default=list, extend=True, prepend=True)

    def __init__(self, loader=None):
        # when used directly, this class needs a loader, but we want to
        # make sure we don't trample on the existing one if this class
        # is used as a mix-in with a playbook base class
        if not hasattr(self, '_loader'):
            if loader is None:
                raise AnsibleError("a loader must be specified when using Conditional() directly")
            else:
                self._loader = loader
        super(Conditional, self).__init__()

    def _validate_when(self, attr, name, value):
        # coerce a single bare conditional into a one-element list
        if not isinstance(value, list):
            setattr(self, name, [value])

    def extract_defined_undefined(self, conditional):
        '''
        Return the match groups for every DEFINED_REGEX hit in the
        conditional string. DEFINED_REGEX is defined at module level;
        judging by the (var, logic, state) unpacking in _check_conditional,
        each group tuple describes an "is [not] defined" style test.
        '''
        results = []

        cond = conditional
        # scan repeatedly, advancing past each match
        m = DEFINED_REGEX.search(cond)
        while m:
            results.append(m.groups())
            cond = cond[m.end():]
            m = DEFINED_REGEX.search(cond)

        return results

    def evaluate_conditional(self, templar, all_vars):
        '''
        Loops through the conditionals set on this object, returning
        False if any of them evaluate as such.
        '''

        # since this is a mix-in, it may not have an underlying datastructure
        # associated with it, so we pull it out now in case we need it for
        # error reporting below
        ds = None
        if hasattr(self, '_ds'):
            ds = getattr(self, '_ds')

        result = True
        try:
            for conditional in self.when:

                # do evaluation; empty/None conditionals count as True,
                # booleans pass through, anything else is templated
                if conditional is None or conditional == '':
                    res = True
                elif isinstance(conditional, bool):
                    res = conditional
                else:
                    res = self._check_conditional(conditional, templar, all_vars)

                # only update if still true, preserve false
                if result:
                    result = res

                display.debug("Evaluated conditional (%s): %s" % (conditional, res))
                if not result:
                    # short-circuit on the first False conditional
                    break

        except Exception as e:
            raise AnsibleError("The conditional check '%s' failed. The error was: %s" % (to_native(conditional), to_native(e)), obj=ds)

        return result

    def _check_conditional(self, conditional, templar, all_vars):
        '''
        This method does the low-level evaluation of each conditional
        set on this object, using jinja2 to wrap the conditionals for
        evaluation.
        '''

        original = conditional

        if templar.is_template(conditional):
            display.warning('conditional statements should not include jinja2 '
                            'templating delimiters such as {{ }} or {%% %%}. '
                            'Found: %s' % conditional)

        # make sure the templar is using the variables specified with this method
        templar.available_variables = all_vars

        try:
            # if the conditional is "unsafe", disable lookups
            disable_lookups = hasattr(conditional, '__UNSAFE__')
            conditional = templar.template(conditional, disable_lookups=disable_lookups)

            # a non-string (or empty) templating result is returned as-is
            if not isinstance(conditional, text_type) or conditional == "":
                return conditional

            # update the lookups flag, as the string returned above may now be unsafe
            # and we don't want future templating calls to do unsafe things
            disable_lookups |= hasattr(conditional, '__UNSAFE__')

            # First, we do some low-level jinja2 parsing involving the AST format of the
            # statement to ensure we don't do anything unsafe (using the disable_lookup flag above)
            class CleansingNodeVisitor(ast.NodeVisitor):
                # walks the AST, tracking whether we are inside a call or a
                # yield, and rejects dunder access inside calls when lookups
                # are disabled
                def generic_visit(self, node, inside_call=False, inside_yield=False):
                    if isinstance(node, ast.Call):
                        inside_call = True
                    elif isinstance(node, ast.Yield):
                        inside_yield = True
                    elif isinstance(node, ast.Str):
                        if disable_lookups:
                            if inside_call and node.s.startswith("__"):
                                # calling things with a dunder is generally bad at this point...
                                raise AnsibleError(
                                    "Invalid access found in the conditional: '%s'" % conditional
                                )
                            elif inside_yield:
                                # we're inside a yield, so recursively parse and traverse the AST
                                # of the result to catch forbidden syntax from executing
                                parsed = ast.parse(node.s, mode='exec')
                                cnv = CleansingNodeVisitor()
                                cnv.visit(parsed)
                    # iterate over all child nodes
                    for child_node in ast.iter_child_nodes(node):
                        self.generic_visit(
                            child_node,
                            inside_call=inside_call,
                            inside_yield=inside_yield
                        )
            try:
                # compile the conditional through jinja2 into python source,
                # then sanitize its AST with the visitor above
                res = templar.environment.parse(conditional, None, None)
                res = generate(res, templar.environment, None, None)
                parsed = ast.parse(res, mode='exec')

                cnv = CleansingNodeVisitor()
                cnv.visit(parsed)
            except Exception as e:
                raise AnsibleError("Invalid conditional detected: %s" % to_native(e))

            # and finally we generate and template the presented string and look at the resulting string
            # NOTE The spaces around True and False are intentional to short-circuit literal_eval for
            #      jinja2_native=False and avoid its expensive calls.
            presented = "{%% if %s %%} True {%% else %%} False {%% endif %%}" % conditional
            val = templar.template(presented, disable_lookups=disable_lookups).strip()
            if val == "True":
                return True
            elif val == "False":
                return False
            else:
                raise AnsibleError("unable to evaluate conditional: %s" % original)
        except (AnsibleUndefinedVariable, UndefinedError) as e:
            # the templating failed, meaning most likely a variable was undefined. If we happened
            # to be looking for an undefined variable, return True, otherwise fail
            try:
                # first we extract the variable name from the error message
                var_name = re.compile(r"'(hostvars\[.+\]|[\w_]+)' is undefined").search(str(e)).groups()[0]
                # next we extract all defined/undefined tests from the conditional string
                def_undef = self.extract_defined_undefined(conditional)
                # then we loop through these, comparing the error variable name against
                # each def/undef test we found above. If there is a match, we determine
                # whether the logic/state mean the variable should exist or not and return
                # the corresponding True/False
                for (du_var, logic, state) in def_undef:
                    # when we compare the var names, normalize quotes because something
                    # like hostvars['foo'] may be tested against hostvars["foo"]
                    if var_name.replace("'", '"') == du_var.replace("'", '"'):
                        # the should exist is a xor test between a negation in the logic portion
                        # against the state (defined or undefined)
                        should_exist = ('not' in logic) != (state == 'defined')
                        if should_exist:
                            return False
                        else:
                            return True
                # as nothing above matched the failed var name, re-raise here to
                # trigger the AnsibleUndefinedVariable exception again below
                raise
            except Exception:
                raise AnsibleUndefinedVariable("error while evaluating conditional (%s): %s" % (original, e))
Example #13
0
class Play(Base, Taggable, Become):
    """
    A play is a language feature that represents a list of roles and/or
    task/handler blocks to execute on a given set of hosts.

    Usage:

       Play.load(datastructure) -> Play
       Play.something(...)
    """

    # =================================================================================
    # Connection-Related Attributes

    # TODO: generalize connection
    _accelerate = FieldAttribute(isa='bool',
                                 default=False,
                                 always_post_validate=True)
    _accelerate_ipv6 = FieldAttribute(isa='bool',
                                      default=False,
                                      always_post_validate=True)
    _accelerate_port = FieldAttribute(isa='int',
                                      default=5099,
                                      always_post_validate=True)

    # Connection
    _gather_facts = FieldAttribute(isa='bool',
                                   default=None,
                                   always_post_validate=True)
    _hosts = FieldAttribute(isa='list',
                            default=[],
                            required=True,
                            listof=string_types,
                            always_post_validate=True)
    _name = FieldAttribute(isa='string', default='', always_post_validate=True)

    # Variable Attributes
    _vars_files = FieldAttribute(isa='list', default=[], priority=99)
    _vars_prompt = FieldAttribute(isa='list',
                                  default=[],
                                  always_post_validate=True)
    _vault_password = FieldAttribute(isa='string', always_post_validate=True)

    # Role Attributes
    _roles = FieldAttribute(isa='list', default=[], priority=90)

    # Block (Task) Lists Attributes
    _handlers = FieldAttribute(isa='list', default=[])
    _pre_tasks = FieldAttribute(isa='list', default=[])
    _post_tasks = FieldAttribute(isa='list', default=[])
    _tasks = FieldAttribute(isa='list', default=[])

    # Flag/Setting Attributes
    _any_errors_fatal = FieldAttribute(isa='bool',
                                       default=False,
                                       always_post_validate=True)
    _force_handlers = FieldAttribute(isa='bool', always_post_validate=True)
    _max_fail_percentage = FieldAttribute(isa='percent',
                                          always_post_validate=True)
    _serial = FieldAttribute(isa='int', default=0, always_post_validate=True)
    _strategy = FieldAttribute(isa='string',
                               default='linear',
                               always_post_validate=True)

    # =================================================================================

    def __init__(self):
        super(Play, self).__init__()

        # cache of compiled Role objects for this play, keyed by role name
        # and then by a hash of the role's parameters (see Role.load)
        self.ROLE_CACHE = {}

    def __repr__(self):
        return self.get_name()

    def get_name(self):
        ''' return the name of the Play '''
        return self._attributes.get('name')

    @staticmethod
    def load(data, variable_manager=None, loader=None):
        ''' create a new Play object and populate it from the given datastructure '''
        p = Play()
        return p.load_data(data,
                           variable_manager=variable_manager,
                           loader=loader)

    def preprocess_data(self, ds):
        '''
        Adjusts play datastructure to cleanup old/legacy items

        :arg ds: the raw play datastructure (must be a dict)
        :returns: the adjusted datastructure, after the base class has
            also had a chance to preprocess it
        :raises AnsibleParserError: if ``ds`` is not a dict, or if both
            'user' and 'remote_user' are present
        '''

        # use a real exception here rather than `assert`, since asserts are
        # silently stripped when Python runs with -O/-OO optimizations
        if not isinstance(ds, dict):
            raise AnsibleParserError(
                "a play must be specified as a dictionary, got a %s instead"
                % type(ds).__name__,
                obj=ds)

        # The use of 'user' in the Play datastructure was deprecated to
        # line up with the same change for Tasks, due to the fact that
        # 'user' conflicted with the user module.
        if 'user' in ds:
            # this should never happen, but error out with a helpful message
            # to the user if it does...
            if 'remote_user' in ds:
                raise AnsibleParserError(
                    "both 'user' and 'remote_user' are set for %s. The use of 'user' is deprecated, and should be removed"
                    % self.get_name(),
                    obj=ds)

            ds['remote_user'] = ds['user']
            del ds['user']

        # a single vars_prompt dict is allowed as shorthand for a one-entry list
        if 'vars_prompt' in ds and not isinstance(ds['vars_prompt'], list):
            ds['vars_prompt'] = [ds['vars_prompt']]

        return super(Play, self).preprocess_data(ds)

    def _load_hosts(self, attr, ds):
        '''
        Loads the hosts from the given datastructure, which might be a list
        or a simple string. We also switch integers in this list back to strings,
        as the YAML parser will turn things that look like numbers into numbers.
        '''

        if isinstance(ds, (string_types, int)):
            ds = [ds]

        if not isinstance(ds, list):
            raise AnsibleParserError(
                "'hosts' must be specified as a list or a single pattern",
                obj=ds)

        # YAML parsing of things that look like numbers may have
        # resulted in integers showing up in the list, so convert
        # them back to strings to prevent problems
        for idx, item in enumerate(ds):
            if isinstance(item, int):
                ds[idx] = "%s" % item

        return ds

    def _load_block_list(self, ds, use_handlers=False):
        '''
        Shared helper for the task/handler list loaders below: loads a list
        of blocks from a list which may be mixed tasks/blocks (or
        handlers/blocks when use_handlers=True). Bare entries outside of a
        block are given an implicit block.
        '''
        return load_list_of_blocks(ds=ds,
                                   play=self,
                                   use_handlers=use_handlers,
                                   variable_manager=self._variable_manager,
                                   loader=self._loader)

    def _load_tasks(self, attr, ds):
        '''
        Loads a list of blocks from a list which may be mixed tasks/blocks.
        Bare tasks outside of a block are given an implicit block.
        '''
        return self._load_block_list(ds)

    def _load_pre_tasks(self, attr, ds):
        '''
        Loads a list of blocks from a list which may be mixed tasks/blocks.
        Bare tasks outside of a block are given an implicit block.
        '''
        return self._load_block_list(ds)

    def _load_post_tasks(self, attr, ds):
        '''
        Loads a list of blocks from a list which may be mixed tasks/blocks.
        Bare tasks outside of a block are given an implicit block.
        '''
        return self._load_block_list(ds)

    def _load_handlers(self, attr, ds):
        '''
        Loads a list of blocks from a list which may be mixed handlers/blocks.
        Bare handlers outside of a block are given an implicit block.
        '''
        return self._load_block_list(ds, use_handlers=True)

    def _load_roles(self, attr, ds):
        '''
        Loads and returns a list of RoleInclude objects from the datastructure
        list of role definitions and creates the Role from those objects
        '''

        if ds is None:
            ds = []

        role_includes = load_list_of_roles(
            ds,
            play=self,
            variable_manager=self._variable_manager,
            loader=self._loader)

        roles = []
        for ri in role_includes:
            roles.append(Role.load(ri, play=self))
        return roles

    # FIXME: post_validation needs to ensure that become/su/sudo have only 1 set

    def _compile_roles(self):
        '''
        Handles the role compilation step, returning a flat list of tasks
        with the lowest level dependencies first. For example, if a role R
        has a dependency D1, which also has a dependency D2, the tasks from
        D2 are merged first, followed by D1, and lastly by the tasks from
        the parent role R last. This is done for all roles in the Play.
        '''

        block_list = []

        if len(self.roles) > 0:
            for r in self.roles:
                block_list.extend(r.compile(play=self))

        return block_list

    def compile_roles_handlers(self):
        '''
        Handles the role handler compilation step, returning a flat list of Handlers
        This is done for all roles in the Play.
        '''

        block_list = []

        if len(self.roles) > 0:
            for r in self.roles:
                block_list.extend(r.get_handler_blocks())

        return block_list

    def compile(self):
        '''
        Compiles and returns the task list for this play, compiled from the
        roles (which are themselves compiled recursively) and/or the list of
        tasks specified in the play.
        '''

        # create a block containing a single flush handlers meta
        # task, so we can be sure to run handlers at certain points
        # of the playbook execution
        flush_block = Block.load(data={'meta': 'flush_handlers'},
                                 play=self,
                                 variable_manager=self._variable_manager,
                                 loader=self._loader)

        block_list = []

        # ordering: pre_tasks, flush, roles, tasks, flush, post_tasks, flush.
        # NOTE: the same flush_block object is appended at each point.
        block_list.extend(self.pre_tasks)
        block_list.append(flush_block)
        block_list.extend(self._compile_roles())
        block_list.extend(self.tasks)
        block_list.append(flush_block)
        block_list.extend(self.post_tasks)
        block_list.append(flush_block)

        return block_list

    def get_vars(self):
        ''' return a shallow copy of this play's variables '''
        return self.vars.copy()

    def get_vars_files(self):
        return self.vars_files

    def get_handlers(self):
        ''' return a shallow copy of the play's handler list '''
        return self.handlers[:]

    def get_roles(self):
        ''' return a shallow copy of the play's role list '''
        return self.roles[:]

    def get_tasks(self):
        '''
        Return a list of the play's tasks; each Block contributes one
        (nested) list of its block/rescue/always tasks, while bare tasks
        are appended directly.
        '''
        tasklist = []
        for task in self.pre_tasks + self.tasks + self.post_tasks:
            if isinstance(task, Block):
                tasklist.append(task.block + task.rescue + task.always)
            else:
                tasklist.append(task)
        return tasklist

    def serialize(self):
        ''' serialize the play, including its roles, to a plain dict '''
        data = super(Play, self).serialize()

        roles = []
        for role in self.get_roles():
            roles.append(role.serialize())
        data['roles'] = roles

        return data

    def deserialize(self, data):
        ''' restore the play (and its roles) from serialized data '''
        super(Play, self).deserialize(data)

        if 'roles' in data:
            role_data = data.get('roles', [])
            roles = []
            for role in role_data:
                r = Role()
                r.deserialize(role)
                roles.append(r)

            setattr(self, 'roles', roles)
            del data['roles']

    def copy(self):
        ''' copy the play, sharing a copy of the compiled-role cache '''
        new_me = super(Play, self).copy()
        new_me.ROLE_CACHE = self.ROLE_CACHE.copy()
        return new_me
Example #14
0
class IncludeRole(Task):
    """
    A Role include is derived from a regular role to handle the special
    circumstances related to the `- include_role: ...`
    """

    # =================================================================================
    # ATTRIBUTES

    # private as this is a 'module options' vs a task property
    _allow_duplicates = FieldAttribute(isa='bool', default=True, private=True)
    _private = FieldAttribute(isa='bool', default=None, private=True)

    def __init__(self, block=None, role=None, task_include=None):

        super(IncludeRole, self).__init__(block=block,
                                          role=role,
                                          task_include=task_include)

        # True once this include has been processed statically (at parse time)
        self.statically_loaded = False
        # maps 'tasks'/'vars'/'defaults' to alternate entry-point file names
        self._from_files = {}
        self._parent_role = role
        self._role_name = None

    def get_block_list(self, play=None, variable_manager=None, loader=None):
        '''
        Load and compile the referenced role, returning its task blocks
        (re-parented to this task) and appending the role's handlers to
        the play's handler list.
        '''

        # only need play passed in when dynamic
        if play is None:
            myplay = self._parent._play
        else:
            myplay = play

        ri = RoleInclude.load(self._role_name,
                              play=myplay,
                              variable_manager=variable_manager,
                              loader=loader)
        ri.vars.update(self.vars)

        # build role
        actual_role = Role.load(ri,
                                myplay,
                                parent_role=self._parent_role,
                                from_files=self._from_files)
        actual_role._metadata.allow_duplicates = self.allow_duplicates

        # compile role with parent roles as dependencies to ensure they inherit
        # variables
        if not self._parent_role:
            dep_chain = []
        else:
            dep_chain = list(self._parent_role._parents)
            dep_chain.extend(self._parent_role.get_all_dependencies())
            dep_chain.append(self._parent_role)

        blocks = actual_role.compile(play=myplay, dep_chain=dep_chain)
        for b in blocks:
            b._parent = self

        # updated available handlers in play
        myplay.handlers = myplay.handlers + actual_role.get_handler_blocks(
            play=myplay)

        return blocks

    @staticmethod
    def load(data,
             block=None,
             role=None,
             task_include=None,
             variable_manager=None,
             loader=None):
        '''
        Create an IncludeRole from the given task datastructure, validating
        the role name and extracting the include_role module options.

        :raises AnsibleParserError: if neither 'name' nor 'role' is given
        '''

        ir = IncludeRole(block, role, task_include=task_include).load_data(
            data, variable_manager=variable_manager, loader=loader)

        # Process options
        # name is needed, or use role as alias
        ir._role_name = ir.args.get('name', ir.args.get('role'))
        if ir._role_name is None:
            raise AnsibleParserError(
                "'name' is a required field for include_role.")

        # build options for role includes
        for key in ['tasks', 'vars', 'defaults']:
            from_key = '%s_from' % key
            if ir.args.get(from_key):
                ir._from_files[key] = basename(ir.args.get(from_key))

        # FIXME: find a way to make this list come from object ( attributes does not work as per below)
        # manual list as otherwise the options would set other task parameters we don't want.
        for option in ['private', 'allow_duplicates']:
            if option in ir.args:
                setattr(ir, option, ir.args.get(option))

        # the datastructure was already parsed by load_data() above; calling
        # load_data() a second time here would redundantly re-process `data`,
        # so just return the fully-populated object
        return ir

    def copy(self, exclude_parent=False, exclude_tasks=False):
        ''' copy this include, carrying over the include-role bookkeeping state '''

        new_me = super(IncludeRole, self).copy(exclude_parent=exclude_parent,
                                               exclude_tasks=exclude_tasks)
        new_me.statically_loaded = self.statically_loaded
        new_me._from_files = self._from_files.copy()
        new_me._parent_role = self._parent_role
        new_me._role_name = self._role_name

        return new_me

    def get_include_params(self):
        ''' include the parent role's params, if any, in the include params '''
        v = super(IncludeRole, self).get_include_params()
        if self._parent_role:
            v.update(self._parent_role.get_role_params())
        return v
Example #15
0
class Base(FieldAttributeBase):
    """
    Common field attributes shared by all playbook objects (plays, blocks,
    tasks, roles): connection settings, variables, and misc. flags.
    """

    # name is not inherited from parent objects
    _name = FieldAttribute(isa='string',
                           default='',
                           always_post_validate=True,
                           inherit=False)

    # connection/transport
    _connection = FieldAttribute(isa='string')
    _port = FieldAttribute(isa='int')
    _remote_user = FieldAttribute(isa='string')

    # variables
    # highest priority; like name, vars are not inherited from parents
    _vars = FieldAttribute(isa='dict', priority=100, inherit=False)

    # module default params
    _module_defaults = FieldAttribute(isa='list', extend=True, prepend=True)

    # flags and misc. settings
    _environment = FieldAttribute(isa='list', extend=True, prepend=True)
    _no_log = FieldAttribute(isa='bool')
    _run_once = FieldAttribute(isa='bool')
    _ignore_errors = FieldAttribute(isa='bool')
    _ignore_unreachable = FieldAttribute(isa='bool')
    _check_mode = FieldAttribute(isa='bool')
    _diff = FieldAttribute(isa='bool')
    _any_errors_fatal = FieldAttribute(isa='bool')

    # explicitly invoke a debugger on tasks
    _debugger = FieldAttribute(isa='string')

    # param names which have been deprecated/removed
    DEPRECATED_ATTRIBUTES = [
        'sudo',
        'sudo_user',
        'sudo_pass',
        'sudo_exe',
        'sudo_flags',
        'su',
        'su_user',
        'su_pass',
        'su_exe',
        'su_flags',
    ]
Example #16
0
class Role(Base, Become, Conditional, Taggable):
    """
    A role: a reusable bundle of tasks, handlers, vars, defaults, and
    metadata loaded from a role directory. Instances are cached per play,
    keyed on the role name and a hash of the role's parameters.
    """

    _delegate_to = FieldAttribute(isa='string')
    _delegate_facts = FieldAttribute(isa='bool', default=False)

    def __init__(self, play=None, from_files=None, from_include=False):
        self._role_name = None
        self._role_path = None
        self._role_params = dict()
        self._loader = None

        self._metadata = None
        self._play = play
        self._parents = []
        self._dependencies = []
        self._task_blocks = []
        self._handler_blocks = []
        # memoized result of get_handler_blocks() (see that method)
        self._compiled_handler_blocks = None
        self._default_vars = dict()
        self._role_vars = dict()
        self._had_task_run = dict()
        self._completed = dict()

        # alternate entry-point files (e.g. tasks_from) for tasks/vars/defaults
        if from_files is None:
            from_files = {}
        self._from_files = from_files

        # Indicates whether this role was included via include/import_role
        self.from_include = from_include

        super(Role, self).__init__()

    def __repr__(self):
        return self.get_name()

    def get_name(self):
        ''' return the role's name '''
        return self._role_name

    @staticmethod
    def load(role_include, play, parent_role=None, from_files=None, from_include=False):
        '''
        Load (or fetch from the play's cache) a Role for the given
        RoleInclude. Roles with identical hashed parameters are reused;
        otherwise a new Role is created, loaded, and cached on the play.

        :raises AnsibleError: if a dependency recursion loop is detected
        '''

        if from_files is None:
            from_files = {}
        try:
            # The ROLE_CACHE is a dictionary of role names, with each entry
            # containing another dictionary corresponding to a set of parameters
            # specified for a role as the key and the Role() object itself.
            # We use frozenset to make the dictionary hashable.

            params = role_include.get_role_params()
            if role_include.when is not None:
                params['when'] = role_include.when
            if role_include.tags is not None:
                params['tags'] = role_include.tags
            if from_files is not None:
                params['from_files'] = from_files
            if role_include.vars:
                params['vars'] = role_include.vars
            hashed_params = hash_params(params)
            if role_include.role in play.ROLE_CACHE:
                for (entry, role_obj) in iteritems(play.ROLE_CACHE[role_include.role]):
                    if hashed_params == entry:
                        # cache hit: reuse the role, only recording the new parent
                        if parent_role:
                            role_obj.add_parent(parent_role)
                        return role_obj

            r = Role(play=play, from_files=from_files, from_include=from_include)
            r._load_role_data(role_include, parent_role=parent_role)

            if role_include.role not in play.ROLE_CACHE:
                play.ROLE_CACHE[role_include.role] = dict()

            play.ROLE_CACHE[role_include.role][hashed_params] = r
            return r

        except RuntimeError:
            # a RuntimeError here most likely means Python's recursion limit
            # was hit while loading circular role dependencies
            raise AnsibleError("A recursion loop was detected with the roles specified. Make sure child roles do not have dependencies on parent roles",
                               obj=role_include._ds)

    def _load_role_data(self, role_include, parent_role=None):
        '''
        Populate this role from the given RoleInclude: copy field attributes,
        register any plugins shipped in the role directory, and load the
        role's vars/defaults/meta/tasks/handlers files.

        :raises AnsibleParserError: if a loaded role file has the wrong type
        '''
        self._role_name = role_include.role
        self._role_path = role_include.get_role_path()
        self._role_params = role_include.get_role_params()
        self._variable_manager = role_include.get_variable_manager()
        self._loader = role_include.get_loader()

        if parent_role:
            self.add_parent(parent_role)

        # copy over all field attributes, except for when and tags, which
        # are special cases and need to preserve pre-existing values
        for (attr_name, _) in iteritems(self._valid_attrs):
            if attr_name not in ('when', 'tags'):
                setattr(self, attr_name, getattr(role_include, attr_name))

        current_when = getattr(self, 'when')[:]
        current_when.extend(role_include.when)
        setattr(self, 'when', current_when)

        current_tags = getattr(self, 'tags')[:]
        current_tags.extend(role_include.tags)
        setattr(self, 'tags', current_tags)

        # dynamically load any plugins from the role directory
        for name, obj in get_all_plugin_loaders():
            if obj.subdir:
                plugin_path = os.path.join(self._role_path, obj.subdir)
                if os.path.isdir(plugin_path):
                    obj.add_directory(plugin_path)

        # vars and default vars are regular dictionaries
        self._role_vars = self._load_role_yaml('vars', main=self._from_files.get('vars'), allow_dir=True)
        if self._role_vars is None:
            self._role_vars = dict()
        elif not isinstance(self._role_vars, dict):
            raise AnsibleParserError("The vars/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name)

        self._default_vars = self._load_role_yaml('defaults', main=self._from_files.get('defaults'), allow_dir=True)
        if self._default_vars is None:
            self._default_vars = dict()
        elif not isinstance(self._default_vars, dict):
            raise AnsibleParserError("The defaults/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name)

        # load the role's other files, if they exist
        metadata = self._load_role_yaml('meta')
        if metadata:
            self._metadata = RoleMetadata.load(metadata, owner=self, variable_manager=self._variable_manager, loader=self._loader)
            self._dependencies = self._load_dependencies()
        else:
            self._metadata = RoleMetadata()

        task_data = self._load_role_yaml('tasks', main=self._from_files.get('tasks'))
        if task_data:
            try:
                self._task_blocks = load_list_of_blocks(task_data, play=self._play, role=self, loader=self._loader, variable_manager=self._variable_manager)
            except AssertionError as e:
                raise AnsibleParserError("The tasks/main.yml file for role '%s' must contain a list of tasks" % self._role_name,
                                         obj=task_data, orig_exc=e)

        handler_data = self._load_role_yaml('handlers')
        if handler_data:
            try:
                self._handler_blocks = load_list_of_blocks(handler_data, play=self._play, role=self, use_handlers=True, loader=self._loader,
                                                           variable_manager=self._variable_manager)
            except AssertionError as e:
                raise AnsibleParserError("The handlers/main.yml file for role '%s' must contain a list of tasks" % self._role_name,
                                         obj=handler_data, orig_exc=e)

    def _load_role_yaml(self, subdir, main=None, allow_dir=False):
        '''
        Load the YAML/JSON data from the given role subdirectory (e.g.
        'tasks', 'vars'). `main` names an alternate entry-point file; when
        allow_dir is True, data from multiple found files is merged.

        :returns: the loaded (possibly merged) data, or None if the
            subdirectory or default entry point does not exist
        :raises AnsibleParserError: if an explicitly-requested `main`
            file cannot be found
        '''
        file_path = os.path.join(self._role_path, subdir)
        if self._loader.path_exists(file_path) and self._loader.is_directory(file_path):
            # Valid extensions and ordering for roles is hard-coded to maintain
            # role portability
            extensions = ['.yml', '.yaml', '.json']
            # If no <main> is specified by the user, look for files with
            # extensions before bare name. Otherwise, look for bare name first.
            if main is None:
                _main = 'main'
                extensions.append('')
            else:
                _main = main
                extensions.insert(0, '')
            found_files = self._loader.find_vars_files(file_path, _main, extensions, allow_dir)
            if found_files:
                data = {}
                for found in found_files:
                    new_data = self._loader.load_from_file(found)
                    if new_data and allow_dir:
                        data = combine_vars(data, new_data)
                    else:
                        data = new_data
                return data
            elif main is not None:
                raise AnsibleParserError("Could not find specified file in role: %s/%s" % (subdir, main))
        return None

    def _load_dependencies(self):
        '''
        Recursively loads role dependencies from the metadata list of
        dependencies, if it exists
        '''

        deps = []
        if self._metadata:
            for role_include in self._metadata.dependencies:
                r = Role.load(role_include, play=self._play, parent_role=self)
                deps.append(r)

        return deps

    # other functions

    def add_parent(self, parent_role):
        ''' adds a role to the list of this roles parents '''
        if not isinstance(parent_role, Role):
            raise AnsibleAssertionError()

        if parent_role not in self._parents:
            self._parents.append(parent_role)

    def get_parents(self):
        ''' return the list of this role's parent roles '''
        return self._parents

    def get_default_vars(self, dep_chain=None):
        '''
        Return the combined default vars: dependencies first, then any
        parents in the dep chain, with this role's own defaults winning.
        '''
        dep_chain = [] if dep_chain is None else dep_chain

        default_vars = dict()
        for dep in self.get_all_dependencies():
            default_vars = combine_vars(default_vars, dep.get_default_vars())
        if dep_chain:
            for parent in dep_chain:
                default_vars = combine_vars(default_vars, parent._default_vars)
        default_vars = combine_vars(default_vars, self._default_vars)
        return default_vars

    def get_inherited_vars(self, dep_chain=None):
        ''' return the role vars inherited from the given dependency chain '''
        dep_chain = [] if dep_chain is None else dep_chain

        inherited_vars = dict()

        if dep_chain:
            for parent in dep_chain:
                inherited_vars = combine_vars(inherited_vars, parent._role_vars)
        return inherited_vars

    def get_role_params(self, dep_chain=None):
        ''' return role params, with this role's own params taking precedence over the dep chain's '''
        dep_chain = [] if dep_chain is None else dep_chain

        params = {}
        if dep_chain:
            for parent in dep_chain:
                params = combine_vars(params, parent._role_params)
        params = combine_vars(params, self._role_params)
        return params

    def get_vars(self, dep_chain=None, include_params=True):
        '''
        Return all variables for this role: inherited vars, then dependency
        vars, then this role's vars attribute and role_vars, and optionally
        role params (highest precedence).
        '''
        dep_chain = [] if dep_chain is None else dep_chain

        all_vars = self.get_inherited_vars(dep_chain)

        for dep in self.get_all_dependencies():
            all_vars = combine_vars(all_vars, dep.get_vars(include_params=include_params))

        all_vars = combine_vars(all_vars, self.vars)
        all_vars = combine_vars(all_vars, self._role_vars)
        if include_params:
            all_vars = combine_vars(all_vars, self.get_role_params(dep_chain=dep_chain))

        return all_vars

    def get_direct_dependencies(self):
        ''' return a shallow copy of this role's direct dependencies '''
        return self._dependencies[:]

    def get_all_dependencies(self):
        '''
        Returns a list of all deps, built recursively from all child dependencies,
        in the proper order in which they should be executed or evaluated.
        '''

        child_deps = []

        for dep in self.get_direct_dependencies():
            for child_dep in dep.get_all_dependencies():
                child_deps.append(child_dep)
            child_deps.append(dep)

        return child_deps

    def get_task_blocks(self):
        ''' return a shallow copy of this role's task blocks '''
        return self._task_blocks[:]

    def get_handler_blocks(self, play, dep_chain=None):
        '''
        Return the handler blocks for this role and its dependencies,
        each copied and annotated with its dependency chain and play.
        '''
        # Do not recreate this list each time ``get_handler_blocks`` is called.
        # Cache the results so that we don't potentially overwrite with copied duplicates
        #
        # ``get_handler_blocks`` may be called when handling ``import_role`` during parsing
        # as well as with ``Play.compile_roles_handlers`` from ``TaskExecutor``
        if self._compiled_handler_blocks:
            return self._compiled_handler_blocks

        self._compiled_handler_blocks = block_list = []

        # update the dependency chain here
        if dep_chain is None:
            dep_chain = []
        new_dep_chain = dep_chain + [self]

        for dep in self.get_direct_dependencies():
            dep_blocks = dep.get_handler_blocks(play=play, dep_chain=new_dep_chain)
            block_list.extend(dep_blocks)

        for task_block in self._handler_blocks:
            new_task_block = task_block.copy()
            new_task_block._dep_chain = new_dep_chain
            new_task_block._play = play
            block_list.append(new_task_block)

        return block_list

    def has_run(self, host):
        '''
        Returns true if this role has been iterated over completely and
        at least one task was run
        '''

        return host.name in self._completed and not self._metadata.allow_duplicates

    def compile(self, play, dep_chain=None):
        '''
        Returns the task list for this role, which is created by first
        recursively compiling the tasks for all direct dependencies, and
        then adding on the tasks for this role.

        The role compile() also remembers and saves the dependency chain
        with each task, so tasks know by which route they were found, and
        can correctly take their parent's tags/conditionals into account.
        '''

        block_list = []

        # update the dependency chain here
        if dep_chain is None:
            dep_chain = []
        new_dep_chain = dep_chain + [self]

        deps = self.get_direct_dependencies()
        for dep in deps:
            dep_blocks = dep.compile(play=play, dep_chain=new_dep_chain)
            block_list.extend(dep_blocks)

        for idx, task_block in enumerate(self._task_blocks):
            new_task_block = task_block.copy()
            new_task_block._dep_chain = new_dep_chain
            new_task_block._play = play
            # mark the last block as "end of role" so the executor can tell
            # when the role's own tasks are finished
            if idx == len(self._task_blocks) - 1:
                new_task_block._eor = True
            block_list.append(new_task_block)

        return block_list

    def serialize(self, include_deps=True):
        ''' serialize the role, optionally including its dependency tree '''
        res = super(Role, self).serialize()

        res['_role_name'] = self._role_name
        res['_role_path'] = self._role_path
        res['_role_vars'] = self._role_vars
        res['_role_params'] = self._role_params
        res['_default_vars'] = self._default_vars
        res['_had_task_run'] = self._had_task_run.copy()
        res['_completed'] = self._completed.copy()

        if self._metadata:
            res['_metadata'] = self._metadata.serialize()

        if include_deps:
            deps = []
            for role in self.get_direct_dependencies():
                deps.append(role.serialize())
            res['_dependencies'] = deps

        # parents are serialized without their own deps to avoid unbounded nesting
        parents = []
        for parent in self._parents:
            parents.append(parent.serialize(include_deps=False))
        res['_parents'] = parents

        return res

    def deserialize(self, data, include_deps=True):
        ''' restore the role (and optionally its dependency tree) from serialized data '''
        self._role_name = data.get('_role_name', '')
        self._role_path = data.get('_role_path', '')
        self._role_vars = data.get('_role_vars', dict())
        self._role_params = data.get('_role_params', dict())
        self._default_vars = data.get('_default_vars', dict())
        self._had_task_run = data.get('_had_task_run', dict())
        self._completed = data.get('_completed', dict())

        if include_deps:
            deps = []
            for dep in data.get('_dependencies', []):
                r = Role()
                r.deserialize(dep)
                deps.append(r)
            setattr(self, '_dependencies', deps)

        parent_data = data.get('_parents', [])
        parents = []
        for parent in parent_data:
            r = Role()
            r.deserialize(parent, include_deps=False)
            parents.append(r)
        setattr(self, '_parents', parents)

        metadata_data = data.get('_metadata')
        if metadata_data:
            m = RoleMetadata()
            m.deserialize(metadata_data)
            self._metadata = m

        super(Role, self).deserialize(data)

    def set_loader(self, loader):
        ''' set the loader on this role and, recursively, its parents and dependencies '''
        self._loader = loader
        for parent in self._parents:
            parent.set_loader(loader)
        for dep in self.get_direct_dependencies():
            dep.set_loader(loader)
Example #17
0
class PlayContext(Base):
    '''
    This class is used to consolidate the connection information for
    hosts in a play and child tasks, where the task may override some
    connection/authentication information.
    '''

    # connection fields, some are inherited from Base:
    # (connection, port, remote_user, environment, no_log)
    _remote_addr = FieldAttribute(isa='string')
    _password = FieldAttribute(isa='string')
    _private_key_file = FieldAttribute(isa='string',
                                       default=C.DEFAULT_PRIVATE_KEY_FILE)
    _timeout = FieldAttribute(isa='int', default=C.DEFAULT_TIMEOUT)
    _shell = FieldAttribute(isa='string')

    # privilege escalation fields
    _become = FieldAttribute(isa='bool')
    _become_method = FieldAttribute(isa='string')
    _become_user = FieldAttribute(isa='string')
    _become_pass = FieldAttribute(isa='string')
    _become_exe = FieldAttribute(isa='string')
    _become_flags = FieldAttribute(isa='string')
    _prompt = FieldAttribute(isa='string')

    # backwards compatibility fields for sudo/su
    _sudo_exe = FieldAttribute(isa='string')
    _sudo_flags = FieldAttribute(isa='string')
    _sudo_pass = FieldAttribute(isa='string')
    _su_exe = FieldAttribute(isa='string')
    _su_flags = FieldAttribute(isa='string')
    _su_pass = FieldAttribute(isa='string')

    # general flags
    _verbosity = FieldAttribute(isa='int', default=0)
    _only_tags = FieldAttribute(isa='set', default=set())
    _skip_tags = FieldAttribute(isa='set', default=set())
    _check_mode = FieldAttribute(isa='bool', default=False)
    _force_handlers = FieldAttribute(isa='bool', default=False)
    _start_at_task = FieldAttribute(isa='string')
    _step = FieldAttribute(isa='bool', default=False)

    def __init__(self, play=None, options=None, passwords=None):
        '''
        :arg play: optional Play object whose connection settings are applied
        :arg options: optional parsed CLI options object
        :arg passwords: optional dict with 'conn_pass'/'become_pass' entries
        '''

        super(PlayContext, self).__init__()

        if passwords is None:
            passwords = {}

        self.password = passwords.get('conn_pass', '')
        self.become_pass = passwords.get('become_pass', '')

        #TODO: just pull options setup to above?
        # set options before play to allow play to override them
        if options:
            self.set_options(options)

        if play:
            self.set_play(play)

    def set_play(self, play):
        '''
        Configures this connection information instance with data from
        the play class.
        '''

        # each setting is only copied when the play actually sets it, so
        # values already applied from options/defaults are not clobbered
        if play.connection:
            self.connection = play.connection

        if play.remote_user:
            self.remote_user = play.remote_user

        if play.port:
            self.port = int(play.port)

        # become is a tri-state bool, so test against None explicitly
        if play.become is not None:
            self.become = play.become
        if play.become_method:
            self.become_method = play.become_method
        if play.become_user:
            self.become_user = play.become_user

        # non connection related
        self.no_log = play.no_log

        if play.force_handlers is not None:
            self.force_handlers = play.force_handlers

    def set_options(self, options):
        '''
        Configures this connection information instance with data from
        options specified by the user on the command line. These have a
        lower precedence than those set on the play or host, since
        options are applied first and the play may override them.
        '''

        if options.connection:
            self.connection = options.connection

        self.remote_user = options.remote_user
        self.private_key_file = options.private_key_file

        # privilege escalation
        self.become = options.become
        self.become_method = options.become_method
        self.become_user = options.become_user

        # general flags (should we move out?)
        if options.verbosity:
            self.verbosity = options.verbosity
        #if options.no_log:
        #    self.no_log     = boolean(options.no_log)
        if options.check:
            self.check_mode = boolean(options.check)
        # the following options are not always present (depends on the CLI
        # entry point), so guard each access with hasattr()
        if hasattr(options, 'force_handlers') and options.force_handlers:
            self.force_handlers = boolean(options.force_handlers)
        if hasattr(options, 'step') and options.step:
            self.step = boolean(options.step)
        if hasattr(options, 'start_at_task') and options.start_at_task:
            self.start_at_task = to_unicode(options.start_at_task)

        # get the tag info from options, converting a comma-separated list
        # of values into a proper list if need be. We check to see if the
        # options have the attribute, as it is not always added via the CLI
        if hasattr(options, 'tags'):
            if isinstance(options.tags, list):
                self.only_tags.update(options.tags)
            elif isinstance(options.tags, basestring):
                self.only_tags.update(options.tags.split(','))

        # no explicit tags means run everything
        if len(self.only_tags) == 0:
            self.only_tags = set(['all'])

        if hasattr(options, 'skip_tags'):
            if isinstance(options.skip_tags, list):
                self.skip_tags.update(options.skip_tags)
            elif isinstance(options.skip_tags, basestring):
                self.skip_tags.update(options.skip_tags.split(','))

    #def copy(self, ci):
    #    '''
    #    Copies the connection info from another connection info object, used
    #    when merging in data from task overrides.
    #    '''
    #
    #    for field in self._get_fields():
    #        value = getattr(ci, field, None)
    #        if isinstance(value, dict):
    #            setattr(self, field, value.copy())
    #        elif isinstance(value, set):
    #            setattr(self, field, value.copy())
    #        elif isinstance(value, list):
    #            setattr(self, field, value[:])
    #        else:
    #            setattr(self, field, value)

    def set_task_and_host_override(self, task, host):
        '''
        Sets attributes from the task if they are set, which will override
        those from the play.

        :arg task: task object whose connection-related attributes may override
        :arg host: inventory host whose 'magic' variables may override
        :returns: a new PlayContext copy with the overrides applied; this
                  instance is left unmodified
        '''

        new_info = self.copy()

        # loop through a subset of attributes on the task object and set
        # connection fields based on their values
        for attr in ('connection', 'remote_user', 'become', 'become_user',
                     'become_pass', 'become_method', 'no_log'):
            if hasattr(task, attr):
                attr_val = getattr(task, attr)
                if attr_val is not None:
                    setattr(new_info, attr, attr_val)

        # finally, use the MAGIC_VARIABLE_MAPPING dictionary to update this
        # connection info object with 'magic' variables from inventory
        variables = host.get_vars()
        for (attr, variable_names) in MAGIC_VARIABLE_MAPPING.iteritems():
            for variable_name in variable_names:
                if variable_name in variables:
                    setattr(new_info, attr, variables[variable_name])

        # become legacy updates: fall back to the old sudo/su passwords when
        # no become password was given explicitly
        if not new_info.become_pass:
            if new_info.become_method == 'sudo' and new_info.sudo_pass:
                setattr(new_info, 'become_pass', new_info.sudo_pass)
            elif new_info.become_method == 'su' and new_info.su_pass:
                setattr(new_info, 'become_pass', new_info.su_pass)

        return new_info

    def make_become_cmd(self, cmd, executable=None):
        """ helper function to create privilege escalation commands

        :arg cmd: the command to wrap with the escalation method
        :arg executable: shell to run the command under (defaults to
            C.DEFAULT_EXECUTABLE)
        :returns: the wrapped command string, or cmd unchanged when
            become is not enabled
        :raises AnsibleError: if the become method is not recognized

        Side effects: sets self.prompt and self.success_key when become
        is enabled, so callers can match the escalation prompt and the
        success marker in the command output.
        """

        prompt = None
        success_key = None

        if executable is None:
            executable = C.DEFAULT_EXECUTABLE

        if self.become:

            becomecmd = None
            # random suffix makes the success marker (and sudo prompt)
            # unguessable and unique per invocation
            randbits = ''.join(
                chr(random.randint(ord('a'), ord('z'))) for x in xrange(32))
            success_key = 'BECOME-SUCCESS-%s' % randbits
            #executable = executable or '$SHELL'
            success_cmd = pipes.quote('echo %s; %s' % (success_key, cmd))

            if self.become_method == 'sudo':
                # Rather than detect if sudo wants a password this time, -k makes sudo always ask for
                # a password if one is required. Passing a quoted compound command to sudo (or sudo -s)
                # directly doesn't work, so we shellquote it with pipes.quote() and pass the quoted
                # string to the user's shell.  We loop reading output until we see the randomly-generated
                # sudo prompt set with the -p option.
                prompt = '[sudo via ansible, key=%s] password: ' % randbits
                exe = self.become_exe or self.sudo_exe or 'sudo'
                flags = self.become_flags or self.sudo_flags or ''
                becomecmd = '%s -k && %s %s -S -p "%s" -u %s %s -c %s' % \
                    (exe, exe, flags or C.DEFAULT_SUDO_FLAGS, prompt, self.become_user, executable, success_cmd)

            elif self.become_method == 'su':

                # su prompts are localized, so match any of the known
                # translations rather than a fixed string
                def detect_su_prompt(data):
                    SU_PROMPT_LOCALIZATIONS_RE = re.compile(
                        "|".join([
                            '(\w+\'s )?' + x + ' ?: ?'
                            for x in SU_PROMPT_LOCALIZATIONS
                        ]),
                        flags=re.IGNORECASE)
                    return bool(SU_PROMPT_LOCALIZATIONS_RE.match(data))

                prompt = detect_su_prompt
                exe = self.become_exe or self.su_exe or 'su'
                flags = self.become_flags or self.su_flags or ''
                becomecmd = '%s %s %s -c "%s -c %s"' % (
                    exe, flags, self.become_user, executable, success_cmd)

            elif self.become_method == 'pbrun':

                # 'assword' matches both 'Password' and 'password' prompts
                prompt = 'assword:'
                exe = self.become_exe or 'pbrun'
                flags = self.become_flags or ''
                becomecmd = '%s -b %s -u %s %s' % (
                    exe, flags, self.become_user, success_cmd)

            elif self.become_method == 'pfexec':

                exe = self.become_exe or 'pfexec'
                flags = self.become_flags or ''
                # No user as it uses it's own exec_attr to figure it out
                becomecmd = '%s %s "%s"' % (exe, flags, success_cmd)

            else:
                raise AnsibleError(
                    "Privilege escalation method not found: %s" %
                    self.become_method)

            self.prompt = prompt
            self.success_key = success_key
            return ('%s -c ' % executable) + pipes.quote(becomecmd)

        return cmd

    #def _get_fields(self):
    #    return [i for i in self.__dict__.keys() if i[:1] != '_']

    #def post_validate(self, templar):
    #    '''
    #    Finalizes templated values which may be set on this objects fields.
    #    '''
    #
    #    for field in self._get_fields():
    #        value = templar.template(getattr(self, field))
    #        setattr(self, field, value)

    def update_vars(self, variables):
        '''
        Adds 'magic' variables relating to connections to the variable dictionary provided.
        In case users need to access from the play, this is a legacy from runner.
        '''

        #FIXME: remove password? possibly add become/sudo settings
        for special_var in [
                'ansible_connection', 'ansible_ssh_host', 'ansible_ssh_pass',
                'ansible_ssh_port', 'ansible_ssh_user',
                'ansible_ssh_private_key_file'
        ]:
            # only fill in values the user did not already set themselves
            if special_var not in variables:
                for prop, varnames in MAGIC_VARIABLE_MAPPING.items():
                    if special_var in varnames:
                        variables[special_var] = getattr(self, prop)
Example #18
0
class PlayContext(Base):

    '''
    This class is used to consolidate the connection information for
    hosts in a play and child tasks, where the task may override some
    connection/authentication information.
    '''

    # connection fields, some are inherited from Base:
    # (connection, port, remote_user, environment, no_log)
    _remote_addr      = FieldAttribute(isa='string')
    _password         = FieldAttribute(isa='string')
    _private_key_file = FieldAttribute(isa='string', default=C.DEFAULT_PRIVATE_KEY_FILE)
    _timeout          = FieldAttribute(isa='int', default=C.DEFAULT_TIMEOUT)
    _shell            = FieldAttribute(isa='string')
    _ssh_args         = FieldAttribute(isa='string', default=C.ANSIBLE_SSH_ARGS)
    _ssh_common_args  = FieldAttribute(isa='string')
    _sftp_extra_args  = FieldAttribute(isa='string')
    _scp_extra_args   = FieldAttribute(isa='string')
    _ssh_extra_args   = FieldAttribute(isa='string')
    _connection_lockfd = FieldAttribute(isa='int')
    _pipelining       = FieldAttribute(isa='bool', default=C.ANSIBLE_SSH_PIPELINING)
    _accelerate       = FieldAttribute(isa='bool', default=False)
    _accelerate_ipv6  = FieldAttribute(isa='bool', default=False, always_post_validate=True)
    _accelerate_port  = FieldAttribute(isa='int', default=C.ACCELERATE_PORT, always_post_validate=True)

    # privilege escalation fields
    _become           = FieldAttribute(isa='bool')
    _become_method    = FieldAttribute(isa='string')
    _become_user      = FieldAttribute(isa='string')
    _become_pass      = FieldAttribute(isa='string')
    _become_exe       = FieldAttribute(isa='string')
    _become_flags     = FieldAttribute(isa='string')
    _prompt           = FieldAttribute(isa='string')

    # backwards compatibility fields for sudo/su
    _sudo_exe         = FieldAttribute(isa='string')
    _sudo_flags       = FieldAttribute(isa='string')
    _sudo_pass        = FieldAttribute(isa='string')
    _su_exe           = FieldAttribute(isa='string')
    _su_flags         = FieldAttribute(isa='string')
    _su_pass          = FieldAttribute(isa='string')

    # general flags
    _verbosity        = FieldAttribute(isa='int', default=0)
    _only_tags        = FieldAttribute(isa='set', default=set())
    _skip_tags        = FieldAttribute(isa='set', default=set())
    _check_mode       = FieldAttribute(isa='bool', default=False)
    _force_handlers   = FieldAttribute(isa='bool', default=False)
    _start_at_task    = FieldAttribute(isa='string')
    _step             = FieldAttribute(isa='bool', default=False)
    _diff             = FieldAttribute(isa='bool', default=False)

    def __init__(self, play=None, options=None, passwords=None, connection_lockfd=None):
        '''
        :arg play: optional Play object whose connection settings are applied
        :arg options: optional parsed CLI options object
        :arg passwords: optional dict with 'conn_pass'/'become_pass' entries
        :arg connection_lockfd: optional file descriptor used for locking
            operations by connection plugins
        '''

        super(PlayContext, self).__init__()

        if passwords is None:
            passwords = {}

        self.password    = passwords.get('conn_pass','')
        self.become_pass = passwords.get('become_pass','')

        self.prompt      = ''
        self.success_key = ''

        # a file descriptor to be used during locking operations
        self.connection_lockfd = connection_lockfd

        # set options before play to allow play to override them
        if options:
            self.set_options(options)

        if play:
            self.set_play(play)

    def set_play(self, play):
        '''
        Configures this connection information instance with data from
        the play class.
        '''

        # special handling for accelerated mode, as it is set in a separate
        # play option from the connection parameter
        self.accelerate = play.accelerate
        self.accelerate_ipv6 = play.accelerate_ipv6
        self.accelerate_port = play.accelerate_port

        # each setting is only copied when the play actually sets it, so
        # values already applied from options/defaults are not clobbered
        if play.connection:
            self.connection = play.connection

        if play.remote_user:
            self.remote_user = play.remote_user

        if play.port:
            self.port = int(play.port)

        # become is a tri-state bool, so test against None explicitly
        if play.become is not None:
            self.become = play.become
        if play.become_method:
            self.become_method = play.become_method
        if play.become_user:
            self.become_user = play.become_user

        if play.force_handlers is not None:
            self.force_handlers = play.force_handlers

    def set_options(self, options):
        '''
        Configures this connection information instance with data from
        options specified by the user on the command line. These have a
        lower precedence than those set on the play or host.
        '''

        if options.connection:
            self.connection = options.connection

        self.remote_user = options.remote_user
        self.private_key_file = options.private_key_file
        self.ssh_common_args = options.ssh_common_args
        self.sftp_extra_args = options.sftp_extra_args
        self.scp_extra_args = options.scp_extra_args
        self.ssh_extra_args = options.ssh_extra_args

        # privilege escalation
        self.become        = options.become
        self.become_method = options.become_method
        self.become_user   = options.become_user

        # general flags (should we move out?)
        if options.verbosity:
            self.verbosity  = options.verbosity
        if options.check:
            self.check_mode = boolean(options.check)
        # the following options are not always present (depends on the CLI
        # entry point), so guard each access with hasattr()
        if hasattr(options, 'force_handlers') and options.force_handlers:
            self.force_handlers = boolean(options.force_handlers)
        if hasattr(options, 'step') and options.step:
            self.step = boolean(options.step)
        if hasattr(options, 'start_at_task') and options.start_at_task:
            self.start_at_task = to_unicode(options.start_at_task)
        if hasattr(options, 'diff') and options.diff:
            self.diff = boolean(options.diff)
        if hasattr(options, 'timeout') and options.timeout:
            self.timeout = int(options.timeout)

        # get the tag info from options, converting a comma-separated list
        # of values into a proper list if need be. We check to see if the
        # options have the attribute, as it is not always added via the CLI
        if hasattr(options, 'tags'):
            if isinstance(options.tags, list):
                self.only_tags.update(options.tags)
            elif isinstance(options.tags, string_types):
                self.only_tags.update(options.tags.split(','))

        # no explicit tags means run everything
        if len(self.only_tags) == 0:
            self.only_tags = set(['all'])

        if hasattr(options, 'skip_tags'):
            if isinstance(options.skip_tags, list):
                self.skip_tags.update(options.skip_tags)
            elif isinstance(options.skip_tags, string_types):
                self.skip_tags.update(options.skip_tags.split(','))

    def set_task_and_variable_override(self, task, variables, templar):
        '''
        Sets attributes from the task if they are set, which will override
        those from the play.

        :arg task: task object whose connection-related attributes may override
        :arg variables: the variable dictionary in effect for this task
        :arg templar: Templar used to resolve a templated delegate_to value
        :returns: a new PlayContext copy with the overrides applied; this
                  instance is left unmodified
        '''

        new_info = self.copy()

        # loop through a subset of attributes on the task object and set
        # connection fields based on their values
        for attr in TASK_ATTRIBUTE_OVERRIDES:
            if hasattr(task, attr):
                attr_val = getattr(task, attr)
                if attr_val is not None:
                    setattr(new_info, attr, attr_val)

        # next, use the MAGIC_VARIABLE_MAPPING dictionary to update this
        # connection info object with 'magic' variables from the variable list.
        # If the value 'ansible_delegated_vars' is in the variables, it means
        # we have a delegated-to host, so we check there first before looking
        # at the variables in general
        if task.delegate_to is not None:
            # In the case of a loop, the delegated_to host may have been
            # templated based on the loop variable, so we try and locate
            # the host name in the delegated variable dictionary here
            delegated_host_name = templar.template(task.delegate_to)
            delegated_vars = variables.get('ansible_delegated_vars', dict()).get(delegated_host_name, dict())
            # make sure this delegated_to host has something set for its remote
            # address, otherwise we default to connecting to it by name. This
            # may happen when users put an IP entry into their inventory, or if
            # they rely on DNS for a non-inventory hostname
            for address_var in MAGIC_VARIABLE_MAPPING.get('remote_addr'):
                if address_var in delegated_vars:
                    break
            else:
                display.warning("no remote address found for delegated host %s, using its name by default" % delegated_host_name)
                delegated_vars['ansible_host'] = delegated_host_name
        else:
            delegated_vars = dict()

        # delegated vars take precedence over the general variables
        for (attr, variable_names) in iteritems(MAGIC_VARIABLE_MAPPING):
            for variable_name in variable_names:
                if isinstance(delegated_vars, dict) and variable_name in delegated_vars:
                    setattr(new_info, attr, delegated_vars[variable_name])
                elif variable_name in variables:
                    setattr(new_info, attr, variables[variable_name])

        # make sure we get port defaults if needed
        if new_info.port is None and C.DEFAULT_REMOTE_PORT is not None:
            new_info.port = int(C.DEFAULT_REMOTE_PORT)

        # become legacy updates: fall back to the old sudo/su passwords when
        # no become password was given explicitly
        if not new_info.become_pass:
            if new_info.become_method == 'sudo' and new_info.sudo_pass:
                setattr(new_info, 'become_pass', new_info.sudo_pass)
            elif new_info.become_method == 'su' and new_info.su_pass:
                setattr(new_info, 'become_pass', new_info.su_pass)

        # special overrides for the connection setting
        if len(delegated_vars) > 0:
            # in the event that we were using local before make sure to reset the
            # connection type to the default transport for the delegated-to host,
            # if not otherwise specified
            for connection_type in MAGIC_VARIABLE_MAPPING.get('connection'):
                if connection_type in delegated_vars:
                    break
            else:
                if getattr(new_info, 'connection', None) == 'local' and new_info.remote_addr not in C.LOCALHOST:
                    setattr(new_info, 'connection', C.DEFAULT_TRANSPORT)
        elif task._local_action:
            # otherwise, in the special instance that the task was specified
            # as a local action, override the connection in case it was changed
            # during some other step in the process
            setattr(new_info, 'connection', 'local')

        # set no_log to default if it was not previouslly set
        if new_info.no_log is None:
            new_info.no_log = C.DEFAULT_NO_LOG

        return new_info

    def make_become_cmd(self, cmd, executable=None):
        """ helper function to create privilege escalation commands

        :arg cmd: the command to wrap with the escalation method
        :arg executable: shell to run the command under (defaults to
            C.DEFAULT_EXECUTABLE)
        :returns: the wrapped command string, or cmd unchanged when
            become is not enabled
        :raises AnsibleError: if the become method is not recognized

        Side effects: sets self.success_key, and self.prompt when a become
        password was supplied, so callers can match the escalation prompt
        and the success marker in the command output.
        """

        prompt      = None
        success_key = None
        self.prompt = None

        if executable is None:
            executable = C.DEFAULT_EXECUTABLE

        if self.become:

            becomecmd   = None
            # random suffix makes the success marker (and sudo prompt)
            # unguessable and unique per invocation
            randbits    = ''.join(random.choice(string.ascii_lowercase) for x in range(32))
            success_key = 'BECOME-SUCCESS-%s' % randbits
            success_cmd = pipes.quote('echo %s; %s' % (success_key, cmd))

            # set executable to use for the privilege escalation method, with various overrides
            exe = self.become_exe or \
                  getattr(self, '%s_exe' % self.become_method, None) or \
                  C.DEFAULT_BECOME_EXE or \
                  getattr(C, 'DEFAULT_%s_EXE' % self.become_method.upper(), None) or \
                  self.become_method

            # set flags to use for the privilege escalation method, with various overrides
            flags = self.become_flags or \
                    getattr(self, '%s_flags' % self.become_method, None) or \
                    C.DEFAULT_BECOME_FLAGS or \
                    getattr(C, 'DEFAULT_%s_FLAGS' % self.become_method.upper(), None) or \
                    ''

            if self.become_method == 'sudo':
                # If we have a password, we run sudo with a randomly-generated
                # prompt set using -p. Otherwise we run it with -n, which makes
                # it fail if it would have prompted for a password.
                #
                # Passing a quoted compound command to sudo (or sudo -s)
                # directly doesn't work, so we shellquote it with pipes.quote()
                # and pass the quoted string to the user's shell.

                # force quick error if password is required but not supplied, should prevent sudo hangs.
                if self.become_pass:
                    prompt = '[sudo via ansible, key=%s] password: ' % randbits
                    becomecmd = '%s %s -p "%s" -S -u %s %s -c %s' % (exe, flags, prompt, self.become_user, executable, success_cmd)
                else:
                    becomecmd = '%s %s -n -S -u %s %s -c %s' % (exe, flags, self.become_user, executable, success_cmd)


            elif self.become_method == 'su':

                # su prompts are localized, so match any of the known
                # translations rather than a fixed string
                def detect_su_prompt(data):
                    SU_PROMPT_LOCALIZATIONS_RE = re.compile("|".join(['(\w+\'s )?' + x + ' ?: ?' for x in SU_PROMPT_LOCALIZATIONS]), flags=re.IGNORECASE)
                    return bool(SU_PROMPT_LOCALIZATIONS_RE.match(data))

                prompt = detect_su_prompt
                becomecmd = '%s %s %s -c "%s -c %s"' % (exe, flags, self.become_user, executable, success_cmd)

            elif self.become_method == 'pbrun':

                # 'assword' matches both 'Password' and 'password' prompts
                prompt = 'assword:'
                becomecmd = '%s -b %s -u %s %s' % (exe, flags, self.become_user, success_cmd)

            elif self.become_method == 'pfexec':

                # No user as it uses it's own exec_attr to figure it out
                becomecmd = '%s %s "%s"' % (exe, flags, success_cmd)

            elif self.become_method == 'runas':
                raise AnsibleError("'runas' is not yet implemented")
                #TODO: figure out prompt
                # this is not for use with winrm plugin but if they ever get ssh native on windoez
                becomecmd = '%s %s /user:%s "%s"' % (exe, flags, self.become_user, success_cmd)

            elif self.become_method == 'doas':

                prompt = 'Password:'
                exe = self.become_exe or 'doas'

                # -n makes doas fail instead of hanging on a password prompt
                # when no password was supplied
                if not self.become_pass:
                    flags += ' -n '

                if self.become_user:
                    flags += ' -u %s ' % self.become_user

                becomecmd = '%s %s echo %s && %s %s env ANSIBLE=true %s' % (exe, flags, success_key, exe, flags, cmd)

            else:
                raise AnsibleError("Privilege escalation method not found: %s" % self.become_method)

            # only expose the prompt when a password will actually be sent
            if self.become_pass:
                self.prompt = prompt
            self.success_key = success_key
            return ('%s -c %s' % (executable, pipes.quote(becomecmd)))

        return cmd

    def update_vars(self, variables):
        '''
        Adds 'magic' variables relating to connections to the variable dictionary provided.
        In case users need to access from the play, this is a legacy from runner.
        '''

        #FIXME: remove password? possibly add become/sudo settings
        for special_var in  ['ansible_connection', 'ansible_ssh_host', 'ansible_ssh_pass', 'ansible_ssh_port', 'ansible_ssh_user', 'ansible_ssh_private_key_file', 'ansible_ssh_pipelining']:
            # only fill in values the user did not already set themselves
            if special_var not in variables:
                for prop, varnames in MAGIC_VARIABLE_MAPPING.items():
                    if special_var in varnames:
                        variables[special_var] = getattr(self, prop)
Example #19
0
class Task(Base, Conditional, Taggable, Become):

    """
    A task is a language feature that represents a call to a module, with given arguments and other parameters.
    A handler is a subclass of a task.

    Usage:

       Task.load(datastructure) -> Task
       Task.something(...)
    """

    # =================================================================================
    # ATTRIBUTES
    # load_<attribute_name> and
    # validate_<attribute_name>
    # will be used if defined
    # might be possible to define others

    # NOTE: ONLY set defaults on task attributes that are not inheritable,
    # inheritance is only triggered if the 'current value' is None,
    # default can be set at play/top level object and inheritance will take it's course.

    # NOTE: list/dict defaults are callables (default=dict, default=list) so each
    # instance gets a fresh object instead of sharing one mutable default.
    _args = FieldAttribute(isa='dict', default=dict)
    _action = FieldAttribute(isa='string')

    _async_val = FieldAttribute(isa='int', default=0, alias='async')
    _changed_when = FieldAttribute(isa='list', default=list)
    _delay = FieldAttribute(isa='int', default=5)
    _delegate_to = FieldAttribute(isa='string')
    _delegate_facts = FieldAttribute(isa='bool')
    _failed_when = FieldAttribute(isa='list', default=list)
    _loop = FieldAttribute()
    _loop_control = FieldAttribute(isa='class', class_type=LoopControl, inherit=False)
    _notify = FieldAttribute(isa='list')
    _poll = FieldAttribute(isa='int', default=10)
    _register = FieldAttribute(isa='string')
    _retries = FieldAttribute(isa='int', default=3)
    _until = FieldAttribute(isa='list', default=list)

    # deprecated, used to be loop and loop_args but loop has been repurposed
    _loop_with = FieldAttribute(isa='string', private=True, inherit=False)

    def __init__(self, block=None, role=None, task_include=None):
        ''' constructs a task; without the Task.load classmethod, it will be pretty blank '''

        self._role = role
        self._parent = None

        # a task include takes precedence over the containing block as parent
        if task_include:
            self._parent = task_include
        else:
            self._parent = block

        super(Task, self).__init__()

    def get_path(self):
        ''' return the absolute path of the task with its line number '''

        path = ""
        # prefer position info attached to this task's own parsed datastructure,
        # falling back to the parent play's position info.
        # NOTE(review): the elif dereferences self._parent._play unguarded --
        # presumably a parent is always set by the time this is called; confirm.
        if hasattr(self, '_ds') and hasattr(self._ds, '_data_source') and hasattr(self._ds, '_line_number'):
            path = "%s:%s" % (self._ds._data_source, self._ds._line_number)
        elif hasattr(self._parent._play, '_ds') and hasattr(self._parent._play._ds, '_data_source') and hasattr(self._parent._play._ds, '_line_number'):
            path = "%s:%s" % (self._parent._play._ds._data_source, self._parent._play._ds._line_number)
        return path

    def get_name(self):
        ''' return the name of the task '''

        # prefix the role name, but avoid doubling it if the task name
        # already carries the "<role> : " prefix
        if self._role and self.name and ("%s : " % self._role._role_name) not in self.name:
            return "%s : %s" % (self._role.get_name(), self.name)
        elif self.name:
            return self.name
        else:
            # unnamed task: fall back to the action (module) name
            if self._role:
                return "%s : %s" % (self._role.get_name(), self.action)
            else:
                return "%s" % (self.action,)

    def _merge_kv(self, ds):
        '''
        Flatten a dict of module args into a single "k=v k=v" string.
        Returns '' for None and the string unchanged for string input;
        any other type falls through and implicitly returns None.
        Keys starting with '_' (internal/private args) are skipped.
        '''
        if ds is None:
            return ""
        elif isinstance(ds, string_types):
            return ds
        elif isinstance(ds, dict):
            buf = ""
            for (k, v) in iteritems(ds):
                if k.startswith('_'):
                    continue
                buf = buf + "%s=%s " % (k, v)
            buf = buf.strip()
            return buf

    @staticmethod
    def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None):
        ''' create and return a Task populated from the given datastructure '''
        t = Task(block=block, role=role, task_include=task_include)
        return t.load_data(data, variable_manager=variable_manager, loader=loader)

    def __repr__(self):
        ''' returns a human readable representation of the task '''
        if self.get_name() == 'meta':
            return "TASK: meta (%s)" % self.args['_raw_params']
        else:
            return "TASK: %s" % self.get_name()

    def _preprocess_with_loop(self, ds, new_ds, k, v):
        ''' take a lookup plugin name and store it correctly '''

        # 'with_items' -> lookup plugin 'items', stored as loop_with
        loop_name = k.replace("with_", "")
        if new_ds.get('loop') is not None or new_ds.get('loop_with') is not None:
            raise AnsibleError("duplicate loop in task: %s" % loop_name, obj=ds)
        if v is None:
            raise AnsibleError("you must specify a value when using %s" % k, obj=ds)
        new_ds['loop_with'] = loop_name
        new_ds['loop'] = v
        # display.deprecated("with_ type loops are being phased out, use the 'loop' keyword instead", version="2.10")

    def preprocess_data(self, ds):
        '''
        tasks are especially complex arguments so need pre-processing.
        keep it short.
        '''

        if not isinstance(ds, dict):
            raise AnsibleAssertionError('ds (%s) should be a dict but was a %s' % (ds, type(ds)))

        # the new, cleaned datastructure, which will have legacy
        # items reduced to a standard structure suitable for the
        # attributes of the task class
        new_ds = AnsibleMapping()
        if isinstance(ds, AnsibleBaseYAMLObject):
            # preserve file/line info for error reporting
            new_ds.ansible_pos = ds.ansible_pos

        # use the args parsing class to determine the action, args,
        # and the delegate_to value from the various possible forms
        # supported as legacy
        args_parser = ModuleArgsParser(task_ds=ds)
        try:
            (action, args, delegate_to) = args_parser.parse()
        except AnsibleParserError as e:
            # if the raises exception was created with obj=ds args, then it includes the detail
            # so we dont need to add it so we can just re raise.
            if e._obj:
                raise
            # But if it wasn't, we can add the yaml object now to get more detail
            raise AnsibleParserError(to_native(e), obj=ds, orig_exc=e)

        # the command/shell/script modules used to support the `cmd` arg,
        # which corresponds to what we now call _raw_params, so move that
        # value over to _raw_params (assuming it is empty)
        if action in ('command', 'shell', 'script'):
            if 'cmd' in args:
                if args.get('_raw_params', '') != '':
                    raise AnsibleError("The 'cmd' argument cannot be used when other raw parameters are specified."
                                       " Please put everything in one or the other place.", obj=ds)
                args['_raw_params'] = args.pop('cmd')

        new_ds['action'] = action
        new_ds['args'] = args
        new_ds['delegate_to'] = delegate_to

        # we handle any 'vars' specified in the ds here, as we may
        # be adding things to them below (special handling for includes).
        # When that deprecated feature is removed, this can be too.
        if 'vars' in ds:
            # _load_vars is defined in Base, and is used to load a dictionary
            # or list of dictionaries in a standard way
            new_ds['vars'] = self._load_vars(None, ds.get('vars'))
        else:
            new_ds['vars'] = dict()

        for (k, v) in iteritems(ds):
            if k in ('action', 'local_action', 'args', 'delegate_to') or k == action or k == 'shell':
                # we don't want to re-assign these values, which were determined by the ModuleArgsParser() above
                continue
            elif k.startswith('with_') and k.replace("with_", "") in lookup_loader:
                # transform into loop property
                self._preprocess_with_loop(ds, new_ds, k, v)
            else:
                # pre-2.0 syntax allowed variables for include statements at the top level of the task,
                # so we move those into the 'vars' dictionary here, and show a deprecation message
                # as we will remove this at some point in the future.
                if action in ('include',) and k not in self._valid_attrs and k not in self.DEPRECATED_ATTRIBUTES:
                    display.deprecated("Specifying include variables at the top-level of the task is deprecated."
                                       " Please see:\nhttps://docs.ansible.com/ansible/playbooks_roles.html#task-include-files-and-encouraging-reuse\n\n"
                                       " for currently supported syntax regarding included files and variables", version="2.12")
                    new_ds['vars'][k] = v
                elif C.INVALID_TASK_ATTRIBUTE_FAILED or k in self._valid_attrs:
                    # either we fail on invalid attributes later (so keep it for
                    # validation to flag) or it is a known valid attribute
                    new_ds[k] = v
                else:
                    display.warning("Ignoring invalid attribute: %s" % k)

        return super(Task, self).preprocess_data(new_ds)

    def _load_loop_control(self, attr, ds):
        ''' load the loop_control attribute; it must be a literal dict (it may
            contain variables, but cannot itself be a variable) '''
        if not isinstance(ds, dict):
            raise AnsibleParserError(
                "the `loop_control` value must be specified as a dictionary and cannot "
                "be a variable itself (though it can contain variables)",
                obj=ds,
            )

        return LoopControl.load(data=ds, variable_manager=self._variable_manager, loader=self._loader)

    def _validate_attributes(self, ds):
        ''' wrap base validation so the error mentions the config option that
            can downgrade this failure to a warning '''
        try:
            super(Task, self)._validate_attributes(ds)
        except AnsibleParserError as e:
            e.message += '\nThis error can be suppressed as a warning using the "invalid_task_attribute_failed" configuration'
            raise e

    def post_validate(self, templar):
        '''
        Override of base class post_validate, to also do final validation on
        the block and task include (if any) to which this task belongs.
        '''

        if self._parent:
            self._parent.post_validate(templar)

        super(Task, self).post_validate(templar)

    def _post_validate_loop(self, attr, value, templar):
        '''
        Override post validation for the loop field, which is templated
        specially in the TaskExecutor class when evaluating loops.
        '''
        return value

    def _post_validate_environment(self, attr, value, templar):
        '''
        Override post validation of vars on the play, as we don't want to
        template these too early.
        '''
        env = {}
        if value is not None:

            def _parse_env_kv(k, v):
                try:
                    env[k] = templar.template(v, convert_bare=False)
                except AnsibleUndefinedVariable as e:
                    error = to_native(e)
                    # NOTE(review): due to operator precedence this reads as
                    # (action-check and 'ansible_facts.env' in error) or 'ansible_env' in error;
                    # parentheses around the 'or' were presumably intended -- confirm.
                    if self.action in ('setup', 'gather_facts') and 'ansible_facts.env' in error or 'ansible_env' in error:
                        # ignore as fact gathering is required for 'env' facts
                        return
                    raise

            if isinstance(value, list):
                for env_item in value:
                    if isinstance(env_item, dict):
                        for k in env_item:
                            _parse_env_kv(k, env_item[k])
                    else:
                        # a templatable expression that may expand to a dict
                        isdict = templar.template(env_item, convert_bare=False)
                        if isinstance(isdict, dict):
                            env.update(isdict)
                        else:
                            display.warning("could not parse environment value, skipping: %s" % value)

            elif isinstance(value, dict):
                # should not really happen
                env = dict()
                for env_item in value:
                    _parse_env_kv(env_item, value[env_item])
            else:
                # at this point it should be a simple string, also should not happen
                env = templar.template(value, convert_bare=False)

        return env

    def _post_validate_changed_when(self, attr, value, templar):
        '''
        changed_when is evaluated after the execution of the task is complete,
        and should not be templated during the regular post_validate step.
        '''
        return value

    def _post_validate_failed_when(self, attr, value, templar):
        '''
        failed_when is evaluated after the execution of the task is complete,
        and should not be templated during the regular post_validate step.
        '''
        return value

    def _post_validate_until(self, attr, value, templar):
        '''
        until is evaluated after the execution of the task is complete,
        and should not be templated during the regular post_validate step.
        '''
        return value

    def get_vars(self):
        ''' return the variables for this task, merged over the parent's vars;
            'tags' and 'when' are keywords, not variables, so they are removed '''
        all_vars = dict()
        if self._parent:
            all_vars.update(self._parent.get_vars())

        all_vars.update(self.vars)

        if 'tags' in all_vars:
            del all_vars['tags']
        if 'when' in all_vars:
            del all_vars['when']

        return all_vars

    def get_include_params(self):
        ''' return the merged parameters to pass to an included file/role,
            collected up the parent chain; only include-type actions
            contribute their own vars '''
        all_vars = dict()
        if self._parent:
            all_vars.update(self._parent.get_include_params())
        if self.action in ('include', 'include_tasks', 'include_role'):
            all_vars.update(self.vars)
        return all_vars

    def copy(self, exclude_parent=False, exclude_tasks=False):
        ''' return a copy of this task; the parent chain is copied too unless
            exclude_parent is set, while the role is shared (not copied) '''
        new_me = super(Task, self).copy()

        new_me._parent = None
        if self._parent and not exclude_parent:
            new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)

        new_me._role = None
        if self._role:
            new_me._role = self._role

        return new_me

    def serialize(self):
        ''' return a serialized (dict) form of this task, including its
            parent and role when not squashed/finalized '''
        data = super(Task, self).serialize()

        if not self._squashed and not self._finalized:
            if self._parent:
                data['parent'] = self._parent.serialize()
                # parent_type is used by deserialize() to rebuild the right class
                data['parent_type'] = self._parent.__class__.__name__

            if self._role:
                data['role'] = self._role.serialize()

        return data

    def deserialize(self, data):
        ''' restore this task's state (including parent and role) from the
            dict produced by serialize() '''

        # import is here to avoid import loops
        from ansible.playbook.task_include import TaskInclude
        from ansible.playbook.handler_task_include import HandlerTaskInclude

        parent_data = data.get('parent', None)
        if parent_data:
            parent_type = data.get('parent_type')
            # NOTE(review): an unrecognized parent_type leaves 'p' unbound and
            # would raise NameError below -- presumably only these three types
            # are ever serialized; confirm.
            if parent_type == 'Block':
                p = Block()
            elif parent_type == 'TaskInclude':
                p = TaskInclude()
            elif parent_type == 'HandlerTaskInclude':
                p = HandlerTaskInclude()
            p.deserialize(parent_data)
            self._parent = p
            del data['parent']

        role_data = data.get('role')
        if role_data:
            r = Role()
            r.deserialize(role_data)
            self._role = r
            del data['role']

        super(Task, self).deserialize(data)

    def set_loader(self, loader):
        '''
        Sets the loader on this object and recursively on parent, child objects.
        This is used primarily after the Task has been serialized/deserialized, which
        does not preserve the loader.
        '''

        self._loader = loader

        if self._parent:
            self._parent.set_loader(loader)

    def _get_parent_attribute(self, attr, extend=False, prepend=False):
        '''
        Generic logic to get the attribute or parent attribute for a task value.
        '''

        # NOTE: the extend/prepend parameters are immediately overwritten from
        # the attribute's own metadata; they remain in the signature only for
        # call compatibility.
        extend = self._valid_attrs[attr].extend
        prepend = self._valid_attrs[attr].prepend
        try:
            value = self._attributes[attr]
            # If parent is static, we can grab attrs from the parent
            # otherwise, defer to the grandparent
            if getattr(self._parent, 'statically_loaded', True):
                _parent = self._parent
            else:
                _parent = self._parent._parent

            # only consult the parent when no local value is set (Sentinel)
            # or when the attribute is merged with the parent's (extend)
            if _parent and (value is Sentinel or extend):
                if getattr(_parent, 'statically_loaded', True):
                    # vars are always inheritable, other attributes might not be for the parent but still should be for other ancestors
                    if attr != 'vars' and hasattr(_parent, '_get_parent_attribute'):
                        parent_value = _parent._get_parent_attribute(attr)
                    else:
                        parent_value = _parent._attributes.get(attr, Sentinel)

                    if extend:
                        value = self._extend_value(value, parent_value, prepend)
                    else:
                        value = parent_value
        except KeyError:
            pass

        return value

    def get_dep_chain(self):
        ''' return the role dependency chain from the parent, or None when
            this task has no parent '''
        if self._parent:
            return self._parent.get_dep_chain()
        else:
            return None

    def get_search_path(self):
        '''
        Return the list of paths you should search for files, in order.
        This follows role/playbook dependency chain.
        '''
        path_stack = []

        dep_chain = self.get_dep_chain()
        # inside role: add the dependency chain from current to dependent
        if dep_chain:
            path_stack.extend(reversed([x._role_path for x in dep_chain]))

        # add path of task itself, unless it is already in the list
        task_dir = os.path.dirname(self.get_path())
        if task_dir not in path_stack:
            path_stack.append(task_dir)

        return path_stack

    def all_parents_static(self):
        ''' return True when every ancestor up the parent chain was loaded
            statically (a task with no parent is considered static) '''
        if self._parent:
            return self._parent.all_parents_static()
        return True

    def get_first_parent_include(self):
        ''' walk up the parent chain and return the nearest TaskInclude
            ancestor, or None if there is none '''
        from ansible.playbook.task_include import TaskInclude
        if self._parent:
            if isinstance(self._parent, TaskInclude):
                return self._parent
            return self._parent.get_first_parent_include()
        return None
Example #20
0
class TaskInclude(Task):
    """
    A task include is derived from a regular task to handle the special
    circumstances related to the `- include: ...` task.
    """

    # argument names accepted by the include action itself
    BASE = frozenset(('file', '_raw_params'))  # directly assigned
    OTHER_ARGS = frozenset(('apply', ))  # assigned to matching property
    VALID_ARGS = BASE.union(OTHER_ARGS)  # all valid args
    # task keywords that are honoured on an include statement
    VALID_INCLUDE_KEYWORDS = frozenset(
        ('action', 'args', 'collections', 'debugger', 'ignore_errors', 'loop',
         'loop_control', 'loop_with', 'name', 'no_log', 'register', 'run_once',
         'tags', 'vars', 'when'))

    # =================================================================================
    # ATTRIBUTES

    _static = FieldAttribute(isa='bool', default=None)

    def __init__(self, block=None, role=None, task_include=None):
        ''' construct a task include; statically_loaded marks whether the
            include was resolved at parse time (import) vs runtime (include) '''
        super(TaskInclude, self).__init__(block=block,
                                          role=role,
                                          task_include=task_include)
        self.statically_loaded = False

    @staticmethod
    def load(data,
             block=None,
             role=None,
             task_include=None,
             variable_manager=None,
             loader=None):
        ''' create a TaskInclude from the given datastructure and validate
            its options '''
        ti = TaskInclude(block=block, role=role, task_include=task_include)
        task = ti.load_data(data,
                            variable_manager=variable_manager,
                            loader=loader)

        # Validate options
        my_arg_names = frozenset(task.args.keys())

        # validate bad args, otherwise we silently ignore
        bad_opts = my_arg_names.difference(TaskInclude.VALID_ARGS)
        if bad_opts and task.action in ('include_tasks', 'import_tasks'):
            raise AnsibleParserError('Invalid options for %s: %s' %
                                     (task.action, ','.join(list(bad_opts))),
                                     obj=data)

        # normalize 'file' into '_raw_params'
        # NOTE(review): when neither 'file' nor raw params were given this sets
        # args['_raw_params'] to None rather than leaving the key absent --
        # presumably downstream handles the None; confirm.
        if not task.args.get('_raw_params'):
            task.args['_raw_params'] = task.args.pop('file', None)

        # 'apply' is only meaningful for dynamic include_tasks and must be a dict
        apply_attrs = task.args.get('apply', {})
        if apply_attrs and task.action != 'include_tasks':
            raise AnsibleParserError('Invalid options for %s: apply' %
                                     task.action,
                                     obj=data)
        elif not isinstance(apply_attrs, dict):
            raise AnsibleParserError(
                'Expected a dict for apply but got %s instead' %
                type(apply_attrs),
                obj=data)

        return task

    def preprocess_data(self, ds):
        ''' run base preprocessing, then reject task keywords that are not
            valid on dynamic includes '''
        ds = super(TaskInclude, self).preprocess_data(ds)

        diff = set(ds.keys()).difference(self.VALID_INCLUDE_KEYWORDS)
        for k in diff:
            # This check doesn't handle ``include`` as we have no idea at this point if it is static or not
            if ds[k] is not Sentinel and ds['action'] in ('include_tasks',
                                                          'include_role'):
                if C.INVALID_TASK_ATTRIBUTE_FAILED:
                    raise AnsibleParserError(
                        "'%s' is not a valid attribute for a %s" %
                        (k, self.__class__.__name__),
                        obj=ds)
                else:
                    display.warning("Ignoring invalid attribute: %s" % k)

        return ds

    def copy(self, exclude_parent=False, exclude_tasks=False):
        ''' copy the task include, preserving the statically_loaded flag
            which the base copy does not carry over '''
        new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent,
                                               exclude_tasks=exclude_tasks)
        new_me.statically_loaded = self.statically_loaded
        return new_me

    def get_vars(self):
        '''
        We override the parent Task() classes get_vars here because
        we need to include the args of the include into the vars as
        they are params to the included tasks. But ONLY for 'include'
        '''
        if self.action != 'include':
            all_vars = super(TaskInclude, self).get_vars()
        else:
            all_vars = dict()
            if self._parent:
                all_vars.update(self._parent.get_vars())

            all_vars.update(self.vars)
            all_vars.update(self.args)

            # 'tags' and 'when' are keywords, not variables
            if 'tags' in all_vars:
                del all_vars['tags']
            if 'when' in all_vars:
                del all_vars['when']

        return all_vars

    def build_parent_block(self):
        '''
        This method is used to create the parent block for the included tasks
        when ``apply`` is specified
        '''
        apply_attrs = self.args.pop('apply', {})
        if apply_attrs:
            apply_attrs['block'] = []
            p_block = Block.load(
                apply_attrs,
                play=self._parent._play,
                task_include=self,
                role=self._role,
                variable_manager=self._variable_manager,
                loader=self._loader,
            )
        else:
            # no apply: the include itself serves as the parent
            p_block = self

        return p_block
Example #21
0
class Play(Base, Taggable, Become):
    """
    A play is a language feature that represents a list of roles and/or
    task/handler blocks to execute on a given set of hosts.

    Usage:

       Play.load(datastructure) -> Play
       Play.something(...)
    """

    # =================================================================================
    _hosts = FieldAttribute(isa='list',
                            required=True,
                            listof=string_types,
                            always_post_validate=True)

    # Facts
    _fact_path = FieldAttribute(isa='string', default=None)
    _gather_facts = FieldAttribute(isa='bool',
                                   default=None,
                                   always_post_validate=True)
    _gather_subset = FieldAttribute(isa='barelist',
                                    default=None,
                                    always_post_validate=True)
    _gather_timeout = FieldAttribute(isa='int',
                                     default=None,
                                     always_post_validate=True)

    # Variable Attributes
    # NOTE: list defaults use the `list` callable (matching the Task class in
    # this file) so every Play instance gets a fresh list; a literal [] default
    # would be shared across all instances and mutations would leak between plays.
    _vars_files = FieldAttribute(isa='list', default=list, priority=99)
    _vars_prompt = FieldAttribute(isa='list',
                                  default=list,
                                  always_post_validate=False)

    # Role Attributes
    _roles = FieldAttribute(isa='list', default=list, priority=90)

    # Block (Task) Lists Attributes
    _handlers = FieldAttribute(isa='list', default=list)
    _pre_tasks = FieldAttribute(isa='list', default=list)
    _post_tasks = FieldAttribute(isa='list', default=list)
    _tasks = FieldAttribute(isa='list', default=list)

    # Flag/Setting Attributes
    _force_handlers = FieldAttribute(isa='bool', always_post_validate=True)
    _max_fail_percentage = FieldAttribute(isa='percent',
                                          always_post_validate=True)
    _serial = FieldAttribute(isa='list', default=list, always_post_validate=True)
    _strategy = FieldAttribute(isa='string',
                               default=C.DEFAULT_STRATEGY,
                               always_post_validate=True)
    _order = FieldAttribute(isa='string', always_post_validate=True)

    # =================================================================================

    def __init__(self):
        super(Play, self).__init__()

        # state populated when this play comes from an included file
        self._included_conditional = None
        self._included_path = None
        # hosts removed from the play at runtime (e.g. via meta tasks)
        self._removed_hosts = []
        # cache of compiled Role objects, keyed to avoid re-loading
        self.ROLE_CACHE = {}

    def __repr__(self):
        ''' a play is represented by its name '''
        return self.get_name()

    def get_name(self):
        ''' return the name of the Play '''
        return self._attributes.get('name')

    @staticmethod
    def load(data, variable_manager=None, loader=None, vars=None):
        ''' create and return a Play from the given datastructure; an unnamed
            play is named after its hosts pattern '''
        if ('name' not in data or data['name'] is None) and 'hosts' in data:
            if isinstance(data['hosts'], list):
                data['name'] = ','.join(data['hosts'])
            else:
                data['name'] = data['hosts']
        p = Play()
        if vars:
            p.vars = vars.copy()
        return p.load_data(data,
                           variable_manager=variable_manager,
                           loader=loader)

    def preprocess_data(self, ds):
        '''
        Adjusts play datastructure to cleanup old/legacy items
        '''

        if not isinstance(ds, dict):
            raise AnsibleAssertionError(
                'while preprocessing data (%s), ds should be a dict but was a %s'
                % (ds, type(ds)))

        # The use of 'user' in the Play datastructure was deprecated to
        # line up with the same change for Tasks, due to the fact that
        # 'user' conflicted with the user module.
        if 'user' in ds:
            # this should never happen, but error out with a helpful message
            # to the user if it does...
            if 'remote_user' in ds:
                raise AnsibleParserError(
                    "both 'user' and 'remote_user' are set for %s. "
                    "The use of 'user' is deprecated, and should be removed" %
                    self.get_name(),
                    obj=ds)

            ds['remote_user'] = ds['user']
            del ds['user']

        return super(Play, self).preprocess_data(ds)

    def _load_tasks(self, attr, ds):
        '''
        Loads a list of blocks from a list which may be mixed tasks/blocks.
        Bare tasks outside of a block are given an implicit block.
        '''
        try:
            return load_list_of_blocks(ds=ds,
                                       play=self,
                                       variable_manager=self._variable_manager,
                                       loader=self._loader)
        except AssertionError as e:
            raise AnsibleParserError(
                "A malformed block was encountered while loading tasks",
                obj=self._ds,
                orig_exc=e)

    def _load_pre_tasks(self, attr, ds):
        '''
        Loads a list of blocks from a list which may be mixed tasks/blocks.
        Bare tasks outside of a block are given an implicit block.
        '''
        try:
            return load_list_of_blocks(ds=ds,
                                       play=self,
                                       variable_manager=self._variable_manager,
                                       loader=self._loader)
        except AssertionError as e:
            raise AnsibleParserError(
                "A malformed block was encountered while loading pre_tasks",
                obj=self._ds,
                orig_exc=e)

    def _load_post_tasks(self, attr, ds):
        '''
        Loads a list of blocks from a list which may be mixed tasks/blocks.
        Bare tasks outside of a block are given an implicit block.
        '''
        try:
            return load_list_of_blocks(ds=ds,
                                       play=self,
                                       variable_manager=self._variable_manager,
                                       loader=self._loader)
        except AssertionError as e:
            raise AnsibleParserError(
                "A malformed block was encountered while loading post_tasks",
                obj=self._ds,
                orig_exc=e)

    def _load_handlers(self, attr, ds):
        '''
        Loads a list of blocks from a list which may be mixed handlers/blocks.
        Bare handlers outside of a block are given an implicit block.
        '''
        try:
            # prepend newly-loaded handlers so they take precedence over
            # any already present on the play
            return self._extend_value(
                self.handlers,
                load_list_of_blocks(ds=ds,
                                    play=self,
                                    use_handlers=True,
                                    variable_manager=self._variable_manager,
                                    loader=self._loader),
                prepend=True)
        except AssertionError as e:
            raise AnsibleParserError(
                "A malformed block was encountered while loading handlers",
                obj=self._ds,
                orig_exc=e)

    def _load_roles(self, attr, ds):
        '''
        Loads and returns a list of RoleInclude objects from the datastructure
        list of role definitions and creates the Role from those objects
        '''

        if ds is None:
            ds = []

        try:
            role_includes = load_list_of_roles(
                ds,
                play=self,
                variable_manager=self._variable_manager,
                loader=self._loader)
        except AssertionError as e:
            raise AnsibleParserError(
                "A malformed role declaration was encountered.",
                obj=self._ds,
                orig_exc=e)

        roles = []
        for ri in role_includes:
            roles.append(Role.load(ri, play=self))
        return roles

    def _load_vars_prompt(self, attr, ds):
        ''' load and normalize the vars_prompt list; the deprecated short
            form ({var: prompt}) is expanded into the full dict form '''
        new_ds = preprocess_vars(ds)
        vars_prompts = []
        if new_ds is not None:
            for prompt_data in new_ds:
                if 'name' not in prompt_data:
                    display.deprecated(
                        "Using the 'short form' for vars_prompt has been deprecated",
                        version="2.7")
                    for vname, prompt in prompt_data.items():
                        vars_prompts.append(
                            dict(
                                name=vname,
                                prompt=prompt,
                                default=None,
                                private=None,
                                confirm=None,
                                encrypt=None,
                                salt_size=None,
                                salt=None,
                            ))
                else:
                    vars_prompts.append(prompt_data)
        return vars_prompts

    def _compile_roles(self):
        '''
        Handles the role compilation step, returning a flat list of tasks
        with the lowest level dependencies first. For example, if a role R
        has a dependency D1, which also has a dependency D2, the tasks from
        D2 are merged first, followed by D1, and lastly by the tasks from
        the parent role R last. This is done for all roles in the Play.
        '''

        block_list = []

        if len(self.roles) > 0:
            for r in self.roles:
                # Don't insert tasks from ``import/include_role``, preventing
                # duplicate execution at the wrong time
                if r.from_include:
                    continue
                block_list.extend(r.compile(play=self))

        return block_list

    def compile_roles_handlers(self):
        '''
        Handles the role handler compilation step, returning a flat list of Handlers
        This is done for all roles in the Play.
        '''

        block_list = []

        if len(self.roles) > 0:
            for r in self.roles:
                block_list.extend(r.get_handler_blocks(play=self))

        return block_list

    def compile(self):
        '''
        Compiles and returns the task list for this play, compiled from the
        roles (which are themselves compiled recursively) and/or the list of
        tasks specified in the play.
        '''

        # create a block containing a single flush handlers meta
        # task, so we can be sure to run handlers at certain points
        # of the playbook execution
        flush_block = Block.load(data={'meta': 'flush_handlers'},
                                 play=self,
                                 variable_manager=self._variable_manager,
                                 loader=self._loader)

        block_list = []

        # ordering: pre_tasks, roles, tasks, post_tasks, with a handler
        # flush after each phase
        block_list.extend(self.pre_tasks)
        block_list.append(flush_block)
        block_list.extend(self._compile_roles())
        block_list.extend(self.tasks)
        block_list.append(flush_block)
        block_list.extend(self.post_tasks)
        block_list.append(flush_block)

        return block_list

    def get_vars(self):
        ''' return a shallow copy of the play's variables '''
        return self.vars.copy()

    def get_vars_files(self):
        ''' return vars_files as a list, normalizing None and scalars '''
        if self.vars_files is None:
            return []
        elif not isinstance(self.vars_files, list):
            return [self.vars_files]
        return self.vars_files

    def get_handlers(self):
        ''' return a shallow copy of the play's handler blocks '''
        return self.handlers[:]

    def get_roles(self):
        ''' return a shallow copy of the play's roles '''
        return self.roles[:]

    def get_tasks(self):
        ''' return the flattened list of tasks from pre_tasks, tasks and
            post_tasks; Block entries contribute their block/rescue/always
            task lists '''
        tasklist = []
        for task in self.pre_tasks + self.tasks + self.post_tasks:
            if isinstance(task, Block):
                tasklist.append(task.block + task.rescue + task.always)
            else:
                tasklist.append(task)
        return tasklist

    def serialize(self):
        ''' return a serialized (dict) form of this play, including its
            roles and included path '''
        data = super(Play, self).serialize()

        roles = []
        for role in self.get_roles():
            roles.append(role.serialize())
        data['roles'] = roles
        data['included_path'] = self._included_path

        return data

    def deserialize(self, data):
        ''' restore this play's state (including roles) from the dict
            produced by serialize() '''
        super(Play, self).deserialize(data)

        self._included_path = data.get('included_path', None)
        if 'roles' in data:
            role_data = data.get('roles', [])
            roles = []
            for role in role_data:
                r = Role()
                r.deserialize(role)
                roles.append(r)

            setattr(self, 'roles', roles)
            del data['roles']

    def copy(self):
        ''' return a copy of this play, preserving the role cache and
            include-related state the base copy does not carry over '''
        new_me = super(Play, self).copy()
        new_me.ROLE_CACHE = self.ROLE_CACHE.copy()
        new_me._included_conditional = self._included_conditional
        new_me._included_path = self._included_path
        return new_me
Example #22
0
class Role(Base):

    """
    A playbook role: holds the role's name/path/source information and the
    task blocks, handler blocks and variables loaded from the role directory.
    """

    _role_name = FieldAttribute(isa='string')
    _role_path = FieldAttribute(isa='string')
    _src = FieldAttribute(isa='string')
    _scm = FieldAttribute(isa='string')
    _version = FieldAttribute(isa='string')
    _params = FieldAttribute(isa='dict', default=dict())
    _metadata = FieldAttribute(isa='dict', default=dict())
    _task_blocks = FieldAttribute(isa='list', default=[])
    _handler_blocks = FieldAttribute(isa='list', default=[])
    _default_vars = FieldAttribute(isa='dict', default=dict())
    _role_vars = FieldAttribute(isa='dict', default=dict())

    def __init__(self, vault_password=None):
        self._role_path = None
        self._vault_password = vault_password
        super(Role, self).__init__()

    def __repr__(self):
        return self.get_name()

    def get_name(self):
        ''' return the role name stored in the field attributes '''
        return self._attributes['role_name']

    @staticmethod
    def load(data, vault_password=None):
        '''
        Alternate constructor: build and populate a Role from either a bare
        role-name string or a role definition dict.
        '''
        # NOTE: assert is kept (rather than raising) for backwards
        # compatibility with callers expecting an AssertionError; it is
        # stripped when running under python -O
        assert isinstance(data, string_types) or isinstance(data, dict)
        r = Role(vault_password=vault_password)
        r.load_data(data)
        return r

    #------------------------------------------------------------------------------
    # munge, and other functions used for loading the ds

    def munge(self, ds):
        '''
        Normalize the raw role datastructure (string or dict) into an
        AnsibleMapping: resolve role_name/role_path, split unknown keys off
        into a 'params' sub-dict, and load the role's yaml files.
        '''
        # create the new ds as an AnsibleMapping, so we can preserve any line/column
        # data from the parser, and copy that info from the old ds (if applicable)
        new_ds = AnsibleMapping()
        if isinstance(ds, AnsibleBaseYAMLObject):
            new_ds.copy_position_info(ds)

        # Role definitions can be strings or dicts, so we fix things up here.
        # Anything that is not a role name, tag, or conditional will also be
        # added to the params sub-dictionary for loading later
        if isinstance(ds, string_types):
            new_ds['role_name'] = ds
        else:
            # munge the role ds here to correctly fill in the various fields which
            # may be used to define the role, like: role, src, scm, etc.
            ds = self._munge_role(ds)

            # now we split any random role params off from the role spec and store
            # them in a dictionary of params for parsing later
            params = dict()
            # FIX: use the iteritems() compat helper (as used throughout this
            # class) instead of the py2-only dict.iteritems() method
            attr_names = [attr_name for (attr_name, attr_value) in iteritems(self._get_base_attributes())]
            for (key, value) in iteritems(ds):
                if key not in attr_names and key != 'role':
                    # this key does not match a field attribute, so it must be a role param
                    params[key] = value
                else:
                    # this is a field attribute, so copy it over directly
                    new_ds[key] = value
            new_ds['params'] = params

        # Set the role name and path, based on the role definition
        (role_name, role_path) = self._get_role_path(new_ds.get('role_name'))
        new_ds['role_name'] = role_name
        new_ds['role_path'] = role_path

        # load the role's files, if they exist
        new_ds['metadata'] = self._load_role_yaml(role_path, 'meta')
        new_ds['task_blocks'] = self._load_role_yaml(role_path, 'tasks')
        new_ds['handler_blocks'] = self._load_role_yaml(role_path, 'handlers')
        new_ds['default_vars'] = self._load_role_yaml(role_path, 'defaults')
        new_ds['role_vars'] = self._load_role_yaml(role_path, 'vars')

        # and return the newly munged ds
        return new_ds

    def _load_role_yaml(self, role_path, subdir):
        ''' load the "main" file from the given role subdir, or None if absent '''
        file_path = os.path.join(role_path, subdir)
        if os.path.exists(file_path) and os.path.isdir(file_path):
            main_file = self._resolve_main(file_path)
            if os.path.exists(main_file):
                return load_data_from_file(main_file, self._vault_password)
        return None

    def _resolve_main(self, basepath):
        ''' flexibly handle variations in main filenames '''
        possible_mains = (
            os.path.join(basepath, 'main'),
            os.path.join(basepath, 'main.yml'),
            os.path.join(basepath, 'main.yaml'),
            os.path.join(basepath, 'main.json'),
        )

        if sum([os.path.isfile(x) for x in possible_mains]) > 1:
            raise AnsibleError("found multiple main files at %s, only one allowed" % (basepath))
        else:
            for m in possible_mains:
                if os.path.isfile(m):
                    return m  # exactly one main file
            return possible_mains[0]  # zero mains (we still need to return something)

    def _get_role_path(self, role):
        '''
        the 'role', as specified in the ds (or as a bare string), can either
        be a simple name or a full path. If it is a full path, we use the
        basename as the role name, otherwise we take the name as-given and
        append it to the default role path
        '''

        # FIXME: this should use unfrackpath once the utils code has been sorted out
        # FIX: removed leftover debugging print() calls that polluted stdout
        role_path = os.path.normpath(role)
        if os.path.exists(role_path):
            role_name = os.path.basename(role)
            return (role_name, role_path)
        else:
            for path in ('./roles', '/etc/ansible/roles'):
                role_path = os.path.join(path, role)
                if os.path.exists(role_path):
                    return (role, role_path)

        # FIXME: make the parser smart about list/string entries
        #        in the yaml so the error line/file can be reported
        #        here
        raise AnsibleError("the role '%s' was not found" % role, obj=role)

    def _repo_url_to_role_name(self, repo_url):
        ''' extract a role name from a repo URL, e.g.
            http://git.example.com/repos/repo.git -> "repo" '''

        # plain names (not URLs) pass through unchanged
        if '://' not in repo_url and '@' not in repo_url:
            return repo_url
        trailing_path = repo_url.split('/')[-1]
        if trailing_path.endswith('.git'):
            trailing_path = trailing_path[:-4]
        if trailing_path.endswith('.tar.gz'):
            trailing_path = trailing_path[:-7]
        if ',' in trailing_path:
            trailing_path = trailing_path.split(',')[0]
        return trailing_path

    def _role_spec_parse(self, role_spec):
        '''
        Parse a combined repo/version spec like
        git+http://git.example.com/repos/repo.git,v1.0 into a dict:
        {'scm': 'git', 'src': 'http://...', 'version': 'v1.0', 'role_name': 'repo'}

        NOTE(review): an empty or comment spec returns a 4-tuple of Nones,
        not a dict — callers distinguish the two with isinstance(); kept
        for compatibility.
        '''

        default_role_versions = dict(git='master', hg='tip')

        role_spec = role_spec.strip()
        role_version = ''
        if role_spec == "" or role_spec.startswith("#"):
            return (None, None, None, None)

        tokens = [s.strip() for s in role_spec.split(',')]

        # assume https://github.com URLs are git+https:// URLs and not
        # tarballs unless they end in '.zip'
        if 'github.com/' in tokens[0] and not tokens[0].startswith("git+") and not tokens[0].endswith('.tar.gz'):
            tokens[0] = 'git+' + tokens[0]

        if '+' in tokens[0]:
            (scm, role_url) = tokens[0].split('+')
        else:
            scm = None
            role_url = tokens[0]

        if len(tokens) >= 2:
            role_version = tokens[1]

        if len(tokens) == 3:
            role_name = tokens[2]
        else:
            role_name = self._repo_url_to_role_name(tokens[0])

        # fall back to the scm's default branch/revision name
        if scm and not role_version:
            role_version = default_role_versions.get(scm, '')

        return dict(scm=scm, src=role_url, version=role_version, role_name=role_name)

    def _munge_role(self, ds):
        ''' normalize old-style and new-style role specs into a common dict form '''
        if 'role' in ds:
            # Old style: {role: "galaxy.role,version,name", other_vars: "here" }
            role_info = self._role_spec_parse(ds['role'])
            if isinstance(role_info, dict):
                # Warning: Slight change in behaviour here.  name may be being
                # overloaded.  Previously, name was only a parameter to the role.
                # Now it is both a parameter to the role and the name that
                # ansible-galaxy will install under on the local system.
                if 'name' in ds and 'name' in role_info:
                    del role_info['name']
                ds.update(role_info)
        else:
            # New style: { src: 'galaxy.role,version,name', other_vars: "here" }
            if 'github.com' in ds["src"] and 'http' in ds["src"] and '+' not in ds["src"] and not ds["src"].endswith('.tar.gz'):
                ds["src"] = "git+" + ds["src"]

            if '+' in ds["src"]:
                (scm, src) = ds["src"].split('+')
                ds["scm"] = scm
                ds["src"] = src

            # FIX: this tested "'name' in role" but 'role' is undefined in
            # this scope (NameError at runtime); the dict being munged is 'ds'
            if 'name' in ds:
                ds["role"] = ds["name"]
                del ds["name"]
            else:
                ds["role"] = self._repo_url_to_role_name(ds["src"])

            # set some values to a default value, if none were specified
            ds.setdefault('version', '')
            ds.setdefault('scm', None)

        return ds

    #------------------------------------------------------------------------------
    # attribute loading defs

    def _load_list_of_blocks(self, ds):
        ''' wrap each entry of the list ds in a Block object '''
        # FIX: isinstance() also accepts list subclasses (e.g. parser-produced
        # list types) which the previous "type(ds) == list" rejected
        assert isinstance(ds, list)
        block_list = []
        for block in ds:
            b = Block(block)
            block_list.append(b)
        return block_list

    def _load_task_blocks(self, attr, ds):
        ''' attribute loader for task_blocks; None becomes an empty list '''
        if ds is None:
            return []
        return self._load_list_of_blocks(ds)

    def _load_handler_blocks(self, attr, ds):
        ''' attribute loader for handler_blocks; None becomes an empty list '''
        if ds is None:
            return []
        return self._load_list_of_blocks(ds)

    #------------------------------------------------------------------------------
    # other functions

    def get_variables(self):
        # returns the merged variables for this role, including
        # recursively merging those of all child roles
        # NOTE(review): currently a stub that returns an empty dict
        return dict()

    def get_immediate_dependencies(self):
        ''' return only this role's direct dependencies '''
        return self._dependencies

    def get_all_dependencies(self):
        # returns a list built recursively, of all deps from
        # all child dependencies
        # NOTE(review): the loop discards list_union's return value — this is
        # only correct if list_union mutates all_deps in place; verify against
        # the list_union helper. Also note the _dependencies/dependencies mix.
        all_deps = []
        for dep in self._dependencies:
            list_union(all_deps, dep.get_all_dependencies())
        all_deps = list_union(all_deps, self.dependencies)
        return all_deps
Example #23
0
class PlayContext(Base):
    '''
    This class is used to consolidate the connection information for
    hosts in a play and child tasks, where the task may override some
    connection/authentication information.
    '''

    # connection fields, some are inherited from Base:
    # (connection, port, remote_user, environment, no_log)
    _docker_extra_args = FieldAttribute(isa='string')
    _remote_addr = FieldAttribute(isa='string')
    _password = FieldAttribute(isa='string')
    _private_key_file = FieldAttribute(isa='string', default=C.DEFAULT_PRIVATE_KEY_FILE)
    _timeout = FieldAttribute(isa='int', default=C.DEFAULT_TIMEOUT)
    _shell = FieldAttribute(isa='string')
    _network_os = FieldAttribute(isa='string')
    _connection_user = FieldAttribute(isa='string')
    _ssh_args = FieldAttribute(isa='string', default=C.ANSIBLE_SSH_ARGS)
    _ssh_common_args = FieldAttribute(isa='string')
    _sftp_extra_args = FieldAttribute(isa='string')
    _scp_extra_args = FieldAttribute(isa='string')
    _ssh_extra_args = FieldAttribute(isa='string')
    _ssh_executable = FieldAttribute(isa='string', default=C.ANSIBLE_SSH_EXECUTABLE)
    _ssh_transfer_method = FieldAttribute(isa='string', default=C.DEFAULT_SSH_TRANSFER_METHOD)
    _connection_lockfd = FieldAttribute(isa='int')
    _pipelining = FieldAttribute(isa='bool', default=C.ANSIBLE_SSH_PIPELINING)
    _accelerate = FieldAttribute(isa='bool', default=False)
    _accelerate_ipv6 = FieldAttribute(isa='bool', default=False, always_post_validate=True)
    _accelerate_port = FieldAttribute(isa='int', default=C.ACCELERATE_PORT, always_post_validate=True)
    _executable = FieldAttribute(isa='string', default=C.DEFAULT_EXECUTABLE)
    _module_compression = FieldAttribute(isa='string', default=C.DEFAULT_MODULE_COMPRESSION)

    # privilege escalation fields
    _become = FieldAttribute(isa='bool')
    _become_method = FieldAttribute(isa='string')
    _become_user = FieldAttribute(isa='string')
    _become_pass = FieldAttribute(isa='string')
    _become_exe = FieldAttribute(isa='string')
    _become_flags = FieldAttribute(isa='string')
    _prompt = FieldAttribute(isa='string')

    # backwards compatibility fields for sudo/su
    _sudo_exe = FieldAttribute(isa='string')
    _sudo_flags = FieldAttribute(isa='string')
    _sudo_pass = FieldAttribute(isa='string')
    _su_exe = FieldAttribute(isa='string')
    _su_flags = FieldAttribute(isa='string')
    _su_pass = FieldAttribute(isa='string')

    # general flags
    _verbosity = FieldAttribute(isa='int', default=0)
    _only_tags = FieldAttribute(isa='set', default=set())
    _skip_tags = FieldAttribute(isa='set', default=set())
    _check_mode = FieldAttribute(isa='bool', default=False)
    _force_handlers = FieldAttribute(isa='bool', default=False)
    _start_at_task = FieldAttribute(isa='string')
    _step = FieldAttribute(isa='bool', default=False)
    _diff = FieldAttribute(isa='bool', default=C.DIFF_ALWAYS)

    # Fact gathering settings
    _gather_subset = FieldAttribute(isa='string', default=C.DEFAULT_GATHER_SUBSET)
    _gather_timeout = FieldAttribute(isa='string', default=C.DEFAULT_GATHER_TIMEOUT)
    _fact_path = FieldAttribute(isa='string', default=C.DEFAULT_FACT_PATH)

    def __init__(self, play=None, options=None, passwords=None, connection_lockfd=None):
        '''
        :arg play: optional play object whose connection settings are applied
        :arg options: optional parsed command-line options object
        :arg passwords: optional dict with 'conn_pass' / 'become_pass' keys
        :arg connection_lockfd: file descriptor used during locking operations
        '''

        super(PlayContext, self).__init__()

        if passwords is None:
            passwords = {}

        self.password = passwords.get('conn_pass', '')
        self.become_pass = passwords.get('become_pass', '')

        self.prompt = ''
        self.success_key = ''

        # a file descriptor to be used during locking operations
        self.connection_lockfd = connection_lockfd

        # set options before play to allow play to override them
        if options:
            self.set_options(options)

        if play:
            self.set_play(play)

    def set_play(self, play):
        '''
        Configures this connection information instance with data from
        the play class.
        '''

        # special handling for accelerated mode, as it is set in a separate
        # play option from the connection parameter
        self.accelerate = play.accelerate
        self.accelerate_ipv6 = play.accelerate_ipv6
        self.accelerate_port = play.accelerate_port

        if play.connection:
            self.connection = play.connection

        if play.remote_user:
            self.remote_user = play.remote_user

        if play.port:
            self.port = int(play.port)

        if play.become is not None:
            self.become = play.become
        if play.become_method:
            self.become_method = play.become_method
        if play.become_user:
            self.become_user = play.become_user

        if play.force_handlers is not None:
            self.force_handlers = play.force_handlers

    def set_options(self, options):
        '''
        Configures this connection information instance with data from
        options specified by the user on the command line. These have a
        lower precedence than those set on the play or host.
        '''

        # privilege escalation
        self.become = options.become
        self.become_method = options.become_method
        self.become_user = options.become_user

        self.check_mode = boolean(options.check)

        # get ssh options FIXME: make these common to all connections
        for flag in ['ssh_common_args', 'docker_extra_args', 'sftp_extra_args',
                     'scp_extra_args', 'ssh_extra_args']:
            setattr(self, flag, getattr(options, flag, ''))

        # general flags (should we move out?)
        for flag in ['connection', 'remote_user', 'private_key_file', 'verbosity',
                     'force_handlers', 'step', 'start_at_task', 'diff']:
            attribute = getattr(options, flag, False)
            if attribute:
                setattr(self, flag, attribute)

        if hasattr(options, 'timeout') and options.timeout:
            self.timeout = int(options.timeout)

        # get the tag info from options. We check to see if the options have
        # the attribute, as it is not always added via the CLI
        if hasattr(options, 'tags'):
            self.only_tags.update(options.tags)

        # no explicit tags means run everything
        if len(self.only_tags) == 0:
            self.only_tags = set(['all'])

        if hasattr(options, 'skip_tags'):
            self.skip_tags.update(options.skip_tags)

    def set_task_and_variable_override(self, task, variables, templar):
        '''
        Sets attributes from the task if they are set, which will override
        those from the play.

        Returns a new PlayContext copy; self is not modified.
        '''

        new_info = self.copy()

        # loop through a subset of attributes on the task object and set
        # connection fields based on their values
        for attr in TASK_ATTRIBUTE_OVERRIDES:
            if hasattr(task, attr):
                attr_val = getattr(task, attr)
                if attr_val is not None:
                    setattr(new_info, attr, attr_val)

        # next, use the MAGIC_VARIABLE_MAPPING dictionary to update this
        # connection info object with 'magic' variables from the variable list.
        # If the value 'ansible_delegated_vars' is in the variables, it means
        # we have a delegated-to host, so we check there first before looking
        # at the variables in general
        if task.delegate_to is not None:
            # In the case of a loop, the delegated_to host may have been
            # templated based on the loop variable, so we try and locate
            # the host name in the delegated variable dictionary here
            delegated_host_name = templar.template(task.delegate_to)
            delegated_vars = variables.get('ansible_delegated_vars', dict()).get(delegated_host_name, dict())

            delegated_transport = C.DEFAULT_TRANSPORT
            for transport_var in MAGIC_VARIABLE_MAPPING.get('connection'):
                if transport_var in delegated_vars:
                    delegated_transport = delegated_vars[transport_var]
                    break

            # make sure this delegated_to host has something set for its remote
            # address, otherwise we default to connecting to it by name. This
            # may happen when users put an IP entry into their inventory, or if
            # they rely on DNS for a non-inventory hostname
            for address_var in MAGIC_VARIABLE_MAPPING.get('remote_addr'):
                if address_var in delegated_vars:
                    break
            else:
                display.debug("no remote address found for delegated host %s\nusing its name, so success depends on DNS resolution" % delegated_host_name)
                delegated_vars['ansible_host'] = delegated_host_name

            # reset the port back to the default if none was specified, to prevent
            # the delegated host from inheriting the original host's setting
            for port_var in MAGIC_VARIABLE_MAPPING.get('port'):
                if port_var in delegated_vars:
                    break
            else:
                if delegated_transport == 'winrm':
                    delegated_vars['ansible_port'] = 5986
                else:
                    delegated_vars['ansible_port'] = C.DEFAULT_REMOTE_PORT

            # and likewise for the remote user
            for user_var in MAGIC_VARIABLE_MAPPING.get('remote_user'):
                if user_var in delegated_vars and delegated_vars[user_var]:
                    break
            else:
                delegated_vars['ansible_user'] = task.remote_user or self.remote_user
        else:
            delegated_vars = dict()

            # setup shell
            for exe_var in MAGIC_VARIABLE_MAPPING.get('executable'):
                if exe_var in variables:
                    setattr(new_info, 'executable', variables.get(exe_var))

        attrs_considered = []
        for (attr, variable_names) in iteritems(MAGIC_VARIABLE_MAPPING):
            for variable_name in variable_names:
                if attr in attrs_considered:
                    continue
                # if delegation task ONLY use delegated host vars, avoid delegated FOR host vars
                if task.delegate_to is not None:
                    if isinstance(delegated_vars, dict) and variable_name in delegated_vars:
                        setattr(new_info, attr, delegated_vars[variable_name])
                        attrs_considered.append(attr)
                elif variable_name in variables:
                    setattr(new_info, attr, variables[variable_name])
                    attrs_considered.append(attr)
                # no else, as no other vars should be considered

        # become legacy updates -- from commandline
        if not new_info.become_pass:
            if new_info.become_method == 'sudo' and new_info.sudo_pass:
                setattr(new_info, 'become_pass', new_info.sudo_pass)
            elif new_info.become_method == 'su' and new_info.su_pass:
                setattr(new_info, 'become_pass', new_info.su_pass)

        # become legacy updates -- from inventory file (inventory overrides
        # commandline)
        for become_pass_name in MAGIC_VARIABLE_MAPPING.get('become_pass'):
            if become_pass_name in variables:
                break
        else:  # This is a for-else
            if new_info.become_method == 'sudo':
                for sudo_pass_name in MAGIC_VARIABLE_MAPPING.get('sudo_pass'):
                    if sudo_pass_name in variables:
                        setattr(new_info, 'become_pass', variables[sudo_pass_name])
                        break
            # FIX: this branch iterates the su_pass magic variables, so it must
            # apply when the become method is 'su' (it was a copy/paste of the
            # 'sudo' check above, making the su legacy vars dead code)
            if new_info.become_method == 'su':
                for su_pass_name in MAGIC_VARIABLE_MAPPING.get('su_pass'):
                    if su_pass_name in variables:
                        setattr(new_info, 'become_pass', variables[su_pass_name])
                        break

        # make sure we get port defaults if needed
        if new_info.port is None and C.DEFAULT_REMOTE_PORT is not None:
            new_info.port = int(C.DEFAULT_REMOTE_PORT)

        # special overrides for the connection setting
        if len(delegated_vars) > 0:
            # in the event that we were using local before make sure to reset the
            # connection type to the default transport for the delegated-to host,
            # if not otherwise specified
            for connection_type in MAGIC_VARIABLE_MAPPING.get('connection'):
                if connection_type in delegated_vars:
                    break
            else:
                remote_addr_local = new_info.remote_addr in C.LOCALHOST
                inv_hostname_local = delegated_vars.get('inventory_hostname') in C.LOCALHOST
                if remote_addr_local and inv_hostname_local:
                    setattr(new_info, 'connection', 'local')
                elif getattr(new_info, 'connection', None) == 'local' and (not remote_addr_local or not inv_hostname_local):
                    setattr(new_info, 'connection', C.DEFAULT_TRANSPORT)

        # if the final connection type is local, reset the remote_user value
        # to that of the currently logged in user, to ensure any become settings
        # are obeyed correctly
        # additionally, we need to do this check after final connection has been
        # correctly set above ...
        if new_info.connection == 'local':
            new_info.connection_user = new_info.remote_user
            new_info.remote_user = pwd.getpwuid(os.getuid()).pw_name

        # set no_log to default if it was not previously set
        if new_info.no_log is None:
            new_info.no_log = C.DEFAULT_NO_LOG

        # set become defaults if not previously set
        task.set_become_defaults(new_info.become, new_info.become_method, new_info.become_user)

        if task.always_run:
            display.deprecated("always_run is deprecated. Use check_mode = no instead.", version="2.4", removed=False)
            new_info.check_mode = False

        # check_mode replaces always_run, overwrite always_run if both are given
        if task.check_mode is not None:
            new_info.check_mode = task.check_mode

        return new_info

    def make_become_cmd(self, cmd, executable=None):
        """ helper function to create privilege escalation commands """

        prompt = None
        success_key = None
        self.prompt = None

        if self.become:

            if not executable:
                executable = self.executable

            becomecmd = None
            randbits = ''.join(random.choice(string.ascii_lowercase) for x in range(32))
            success_key = 'BECOME-SUCCESS-%s' % randbits
            success_cmd = shlex_quote('echo %s; %s' % (success_key, cmd))

            if executable:
                command = '%s -c %s' % (executable, success_cmd)
            else:
                command = success_cmd

            # set executable to use for the privilege escalation method, with various overrides
            exe = self.become_exe or \
                  getattr(self, '%s_exe' % self.become_method, None) or \
                  C.DEFAULT_BECOME_EXE or \
                  getattr(C, 'DEFAULT_%s_EXE' % self.become_method.upper(), None) or \
                  self.become_method

            # set flags to use for the privilege escalation method, with various overrides
            flags = self.become_flags or \
                    getattr(self, '%s_flags' % self.become_method, None) or \
                    C.DEFAULT_BECOME_FLAGS or \
                    getattr(C, 'DEFAULT_%s_FLAGS' % self.become_method.upper(), None) or \
                    ''

            if self.become_method == 'sudo':
                # If we have a password, we run sudo with a randomly-generated
                # prompt set using -p. Otherwise we run it with default -n, which makes
                # it fail if it would have prompted for a password.
                # Cannot rely on -n as it can be removed from defaults, which should be
                # done for older versions of sudo that do not support the option.
                #
                # Passing a quoted compound command to sudo (or sudo -s)
                # directly doesn't work, so we shellquote it with shlex_quote()
                # and pass the quoted string to the user's shell.

                # force quick error if password is required but not supplied, should prevent sudo hangs.
                if self.become_pass:
                    # FIX: restored the corrupted prompt/becomecmd assignments
                    # (the source contained an unparseable '...'******'...' run)
                    prompt = '[sudo via ansible, key=%s] password: ' % randbits
                    becomecmd = '%s %s -p "%s" -u %s %s' % (exe, flags.replace('-n', ''), prompt, self.become_user, command)
                else:
                    becomecmd = '%s %s -u %s %s' % (exe, flags, self.become_user, command)

            elif self.become_method == 'su':

                # passing code ref to examine prompt as simple string comparisson isn't good enough with su
                def detect_su_prompt(b_data):
                    # FIX: raw bytes literal avoids the invalid '\w' escape warning
                    b_password_string = b"|".join([br'(\w+\'s )?' + x for x in b_SU_PROMPT_LOCALIZATIONS])
                    # Colon or unicode fullwidth colon
                    b_password_string = b_password_string + to_bytes(u' ?(:|：) ?')
                    b_SU_PROMPT_LOCALIZATIONS_RE = re.compile(b_password_string, flags=re.IGNORECASE)
                    return bool(b_SU_PROMPT_LOCALIZATIONS_RE.match(b_data))

                prompt = detect_su_prompt

                becomecmd = '%s %s %s -c %s' % (exe, flags, self.become_user, shlex_quote(command))

            elif self.become_method == 'pbrun':

                # FIX: restored the corrupted prompt/becomecmd assignments
                prompt = 'Password:'
                becomecmd = '%s %s -u %s %s' % (exe, flags, self.become_user, success_cmd)

            elif self.become_method == 'ksu':

                def detect_ksu_prompt(b_data):
                    return re.match(b"Kerberos password for .*@.*:", b_data)

                prompt = detect_ksu_prompt
                becomecmd = '%s %s %s -e %s' % (exe, self.become_user, flags, command)

            elif self.become_method == 'pfexec':

                # No user as it uses it's own exec_attr to figure it out
                becomecmd = '%s %s "%s"' % (exe, flags, success_cmd)

            elif self.become_method == 'runas':
                # become is handled inside the WinRM connection plugin
                becomecmd = cmd

            elif self.become_method == 'doas':

                prompt = 'doas (%s@' % self.remote_user
                exe = self.become_exe or 'doas'

                if not self.become_pass:
                    flags += ' -n '

                if self.become_user:
                    flags += ' -u %s ' % self.become_user

                #FIXME: make shell independent
                becomecmd = '%s %s echo %s && %s %s env ANSIBLE=true %s' % (exe, flags, success_key, exe, flags, cmd)

            elif self.become_method == 'dzdo':

                exe = self.become_exe or 'dzdo'
                if self.become_pass:
                    # FIX: restored the corrupted prompt/becomecmd assignments
                    prompt = '[dzdo via ansible, key=%s] password: ' % randbits
                    becomecmd = '%s -p %s -u %s %s' % (exe, shlex_quote(prompt), self.become_user, command)
                else:
                    becomecmd = '%s -u %s %s' % (exe, self.become_user, command)

            else:
                raise AnsibleError("Privilege escalation method not found: %s" % self.become_method)

            if self.become_pass:
                self.prompt = prompt
            self.success_key = success_key
            return becomecmd

        return cmd

    def update_vars(self, variables):
        '''
        Adds 'magic' variables relating to connections to the variable dictionary provided.
        In case users need to access from the play, this is a legacy from runner.
        '''

        for prop, var_list in MAGIC_VARIABLE_MAPPING.items():
            try:
                if 'become' in prop:
                    continue

                # preserves the user var for local connections
                if self.connection == 'local' and 'remote_user' in prop:
                    continue

                var_val = getattr(self, prop)
                for var_opt in var_list:
                    if var_opt not in variables and var_val is not None:
                        variables[var_opt] = var_val
            except AttributeError:
                continue
Example #24
0
class Base(FieldAttributeBase):
    """Field attributes shared by all playbook objects (plays, blocks, tasks)."""

    # 'name' is never inherited from a parent and is always templated at
    # post-validate time
    _name = FieldAttribute(isa='string', default='', always_post_validate=True, inherit=False)

    # connection/transport
    # defaults are deferred lookups into the parsed CLI arguments, resolved
    # lazily when the attribute is first read
    _connection = FieldAttribute(isa='string', default=context.cliargs_deferred_get('connection'))
    _port = FieldAttribute(isa='int')
    _remote_user = FieldAttribute(isa='string', default=context.cliargs_deferred_get('remote_user'))

    # variables
    # high priority so vars are loaded before attributes that may depend on them;
    # static and non-inherited: each object owns its own vars dict
    _vars = FieldAttribute(isa='dict', priority=100, inherit=False, static=True)

    # module default params
    # extend/prepend: parent values are merged in front of this object's own list
    _module_defaults = FieldAttribute(isa='list', extend=True, prepend=True)

    # flags and misc. settings
    _environment = FieldAttribute(isa='list', extend=True, prepend=True)
    _no_log = FieldAttribute(isa='bool')
    _run_once = FieldAttribute(isa='bool')
    _ignore_errors = FieldAttribute(isa='bool')
    _ignore_unreachable = FieldAttribute(isa='bool')
    _check_mode = FieldAttribute(isa='bool', default=context.cliargs_deferred_get('check'))
    _diff = FieldAttribute(isa='bool', default=context.cliargs_deferred_get('diff'))
    _any_errors_fatal = FieldAttribute(isa='bool', default=C.ANY_ERRORS_FATAL)
    _throttle = FieldAttribute(isa='int', default=0)

    # explicitly invoke a debugger on tasks
    _debugger = FieldAttribute(isa='string')

    # Privilege escalation
    _become = FieldAttribute(isa='bool', default=context.cliargs_deferred_get('become'))
    _become_method = FieldAttribute(isa='string', default=context.cliargs_deferred_get('become_method'))
    _become_user = FieldAttribute(isa='string', default=context.cliargs_deferred_get('become_user'))
    _become_flags = FieldAttribute(isa='string', default=context.cliargs_deferred_get('become_flags'))
    _become_exe = FieldAttribute(isa='string', default=context.cliargs_deferred_get('become_exe'))

    # used to hold sudo/su stuff
    DEPRECATED_ATTRIBUTES = []
Example #25
0
class Block(Base, Become, Conditional, Taggable):
    """
    A Block groups tasks into block/rescue/always lists and links them to
    the play, role, and parent block/include they belong to, so attribute
    inheritance can walk up the chain.
    """

    # main block fields containing the task lists
    _block = FieldAttribute(isa='list', default=[], inherit=False)
    _rescue = FieldAttribute(isa='list', default=[], inherit=False)
    _always = FieldAttribute(isa='list', default=[], inherit=False)

    # other fields
    _delegate_to = FieldAttribute(isa='string')
    _delegate_facts = FieldAttribute(isa='bool', default=False)

    # for future consideration? this would be functionally
    # similar to the 'else' clause for exceptions
    # _otherwise = FieldAttribute(isa='list')

    def __init__(self,
                 play=None,
                 parent_block=None,
                 role=None,
                 task_include=None,
                 use_handlers=False,
                 implicit=False):
        '''
        Create a block linked to its containing play/role/parent.
        `implicit` marks blocks synthesized around bare task lists.
        '''
        self._play = play
        self._role = role
        self._parent = None
        self._dep_chain = None
        self._use_handlers = use_handlers
        self._implicit = implicit

        # end of role flag
        self._eor = False

        # a task include takes precedence over a parent block as the parent
        if task_include:
            self._parent = task_include
        elif parent_block:
            self._parent = parent_block

        super(Block, self).__init__()

    def __repr__(self):
        return "BLOCK(uuid=%s)(id=%s)(parent=%s)" % (self._uuid, id(self),
                                                     self._parent)

    def __eq__(self, other):
        '''object comparison based on _uuid'''
        return self._uuid == other._uuid

    def __ne__(self, other):
        '''object comparison based on _uuid'''
        return self._uuid != other._uuid

    def get_vars(self):
        '''
        Blocks do not store variables directly, however they may be a member
        of a role or task include which does, so return those if present.
        '''

        all_vars = self.vars.copy()

        # parent vars win over this block's own vars
        if self._parent:
            all_vars.update(self._parent.get_vars())

        return all_vars

    @staticmethod
    def load(data,
             play=None,
             parent_block=None,
             role=None,
             task_include=None,
             use_handlers=False,
             variable_manager=None,
             loader=None):
        '''Construct a Block from a datastructure and load its fields.'''
        # data without block/rescue/always keys is a bare task (list) that
        # gets wrapped in an implicit block by preprocess_data()
        implicit = not Block.is_block(data)
        b = Block(play=play,
                  parent_block=parent_block,
                  role=role,
                  task_include=task_include,
                  use_handlers=use_handlers,
                  implicit=implicit)
        return b.load_data(data,
                           variable_manager=variable_manager,
                           loader=loader)

    @staticmethod
    def is_block(ds):
        '''Return True if the datastructure looks like an explicit block.'''
        is_block = False
        if isinstance(ds, dict):
            for attr in ('block', 'rescue', 'always'):
                if attr in ds:
                    is_block = True
                    break
        return is_block

    def preprocess_data(self, ds):
        '''
        If a simple task is given, an implicit block for that single task
        is created, which goes in the main portion of the block
        '''

        if not Block.is_block(ds):
            if isinstance(ds, list):
                return super(Block, self).preprocess_data(dict(block=ds))
            else:
                return super(Block, self).preprocess_data(dict(block=[ds]))

        return super(Block, self).preprocess_data(ds)

    def _load_block(self, attr, ds):
        '''Field loader for the `block` task list.'''
        try:
            return load_list_of_tasks(
                ds,
                play=self._play,
                block=self,
                role=self._role,
                task_include=None,
                variable_manager=self._variable_manager,
                loader=self._loader,
                use_handlers=self._use_handlers,
            )
        except AssertionError as e:
            raise AnsibleParserError(
                "A malformed block was encountered while loading a block",
                obj=self._ds,
                orig_exc=e)

    def _load_rescue(self, attr, ds):
        '''Field loader for the `rescue` task list.'''
        try:
            return load_list_of_tasks(
                ds,
                play=self._play,
                block=self,
                role=self._role,
                task_include=None,
                variable_manager=self._variable_manager,
                loader=self._loader,
                use_handlers=self._use_handlers,
            )
        except AssertionError as e:
            raise AnsibleParserError(
                "A malformed block was encountered while loading rescue.",
                obj=self._ds,
                orig_exc=e)

    def _load_always(self, attr, ds):
        '''Field loader for the `always` task list.'''
        try:
            return load_list_of_tasks(
                ds,
                play=self._play,
                block=self,
                role=self._role,
                task_include=None,
                variable_manager=self._variable_manager,
                loader=self._loader,
                use_handlers=self._use_handlers,
            )
        except AssertionError as e:
            raise AnsibleParserError(
                "A malformed block was encountered while loading always",
                obj=self._ds,
                orig_exc=e)

    def get_dep_chain(self):
        '''Return a copy of the role dependency chain, walking parents if unset.'''
        if self._dep_chain is None:
            if self._parent:
                return self._parent.get_dep_chain()
            else:
                return None
        else:
            return self._dep_chain[:]

    def copy(self, exclude_parent=False, exclude_tasks=False):
        '''Deep-ish copy of the block; tasks are re-parented to the new block.'''
        def _dupe_task_list(task_list, new_block):
            new_task_list = []
            for task in task_list:
                new_task = task.copy(exclude_parent=True)
                if task._parent:
                    new_task._parent = task._parent.copy(exclude_tasks=True)
                    if task._parent == new_block:
                        # If task._parent is the same as new_block, just replace it
                        new_task._parent = new_block
                    else:
                        # task may not be a direct child of new_block, search for the correct place to insert new_block
                        cur_obj = new_task._parent
                        while cur_obj._parent and cur_obj._parent != new_block:
                            cur_obj = cur_obj._parent

                        cur_obj._parent = new_block
                else:
                    new_task._parent = new_block
                new_task_list.append(new_task)
            return new_task_list

        new_me = super(Block, self).copy()
        new_me._play = self._play
        new_me._use_handlers = self._use_handlers
        new_me._eor = self._eor

        if self._dep_chain is not None:
            new_me._dep_chain = self._dep_chain[:]

        new_me._parent = None
        if self._parent and not exclude_parent:
            new_me._parent = self._parent.copy(exclude_tasks=True)

        if not exclude_tasks:
            new_me.block = _dupe_task_list(self.block or [], new_me)
            new_me.rescue = _dupe_task_list(self.rescue or [], new_me)
            new_me.always = _dupe_task_list(self.always or [], new_me)

        new_me._role = None
        if self._role:
            new_me._role = self._role

        new_me.validate()
        return new_me

    def serialize(self):
        '''
        Override of the default serialize method, since when we're serializing
        a task we don't want to include the attribute list of tasks.
        '''

        data = dict()
        for attr in self._valid_attrs:
            if attr not in ('block', 'rescue', 'always'):
                data[attr] = getattr(self, attr)

        data['dep_chain'] = self.get_dep_chain()
        data['eor'] = self._eor

        if self._role is not None:
            data['role'] = self._role.serialize()
        if self._parent is not None:
            data['parent'] = self._parent.copy(exclude_tasks=True).serialize()
            data['parent_type'] = self._parent.__class__.__name__

        return data

    def deserialize(self, data):
        '''
        Override of the default deserialize method, to match the above overridden
        serialize method
        '''

        # import is here to avoid import loops
        from ansible.playbook.task_include import TaskInclude
        from ansible.playbook.handler_task_include import HandlerTaskInclude

        # we don't want the full set of attributes (the task lists), as that
        # would lead to a serialize/deserialize loop
        for attr in self._valid_attrs:
            if attr in data and attr not in ('block', 'rescue', 'always'):
                setattr(self, attr, data.get(attr))

        self._dep_chain = data.get('dep_chain', None)
        self._eor = data.get('eor', False)

        # if there was a serialized role, unpack it too
        role_data = data.get('role')
        if role_data:
            r = Role()
            r.deserialize(role_data)
            self._role = r

        # NOTE(review): an unrecognized parent_type would leave `p` unbound
        # and raise NameError below -- confirm serialized data can only
        # contain these three types
        parent_data = data.get('parent')
        if parent_data:
            parent_type = data.get('parent_type')
            if parent_type == 'Block':
                p = Block()
            elif parent_type == 'TaskInclude':
                p = TaskInclude()
            elif parent_type == 'HandlerTaskInclude':
                p = HandlerTaskInclude()
            p.deserialize(parent_data)
            self._parent = p
            self._dep_chain = self._parent.get_dep_chain()

    def set_loader(self, loader):
        '''Propagate the data loader to this block, its parent chain and deps.'''
        self._loader = loader
        if self._parent:
            self._parent.set_loader(loader)
        elif self._role:
            self._role.set_loader(loader)

        dep_chain = self.get_dep_chain()
        if dep_chain:
            for dep in dep_chain:
                dep.set_loader(loader)

    def _get_parent_attribute(self, attr, extend=False, prepend=False):
        '''
        Generic logic to get the attribute or parent attribute for a block value.

        The `extend`/`prepend` parameters are retained for signature
        compatibility but are immediately overridden by the attribute's
        own definition below.
        '''

        extend = self._valid_attrs[attr].extend
        prepend = self._valid_attrs[attr].prepend
        try:
            value = self._attributes[attr]
            # If parent is static, we can grab attrs from the parent
            # otherwise, defer to the grandparent
            if getattr(self._parent, 'statically_loaded', True):
                _parent = self._parent
            else:
                _parent = self._parent._parent

            if _parent and (value is None or extend):
                try:
                    if getattr(_parent, 'statically_loaded', True):
                        if hasattr(_parent, '_get_parent_attribute'):
                            parent_value = _parent._get_parent_attribute(attr)
                        else:
                            parent_value = _parent._attributes.get(attr, None)
                        if extend:
                            value = self._extend_value(value, parent_value,
                                                       prepend)
                        else:
                            value = parent_value
                except AttributeError:
                    pass
            if self._role and (value is None or extend):
                try:
                    if hasattr(self._role, '_get_parent_attribute'):
                        # FIX: previously called self._role.get_parent_attribute(),
                        # which is not the method the hasattr() above checked for
                        parent_value = self._role._get_parent_attribute(attr)
                    else:
                        parent_value = self._role._attributes.get(attr, None)
                    if extend:
                        value = self._extend_value(value, parent_value,
                                                   prepend)
                    else:
                        value = parent_value

                    dep_chain = self.get_dep_chain()
                    if dep_chain and (value is None or extend):
                        # walk dependencies from the most distant to the closest
                        dep_chain.reverse()
                        for dep in dep_chain:
                            if hasattr(dep, '_get_parent_attribute'):
                                dep_value = dep._get_parent_attribute(attr)
                            else:
                                dep_value = dep._attributes.get(attr, None)
                            if extend:
                                value = self._extend_value(
                                    value, dep_value, prepend)
                            else:
                                value = dep_value

                            if value is not None and not extend:
                                break
                except AttributeError:
                    pass
            if self._play and (value is None or extend):
                try:
                    play_value = self._play._attributes.get(attr, None)
                    if play_value is not None:
                        if extend:
                            value = self._extend_value(value, play_value,
                                                       prepend)
                        else:
                            value = play_value
                except AttributeError:
                    pass
        except KeyError:
            pass

        return value

    def filter_tagged_tasks(self, play_context, all_vars):
        '''
        Creates a new block, with task lists filtered based on the tags contained
        within the play_context object.
        '''
        def evaluate_and_append_task(target):
            tmp_list = []
            for task in target:
                if isinstance(task, Block):
                    tmp_list.append(evaluate_block(task))
                elif (task.action == 'meta'
                      or (task.action == 'include' and task.evaluate_tags(
                          [], play_context.skip_tags, all_vars=all_vars))
                      or task.evaluate_tags(play_context.only_tags,
                                            play_context.skip_tags,
                                            all_vars=all_vars)):
                    tmp_list.append(task)
            return tmp_list

        def evaluate_block(block):
            # NOTE(review): copies from self (the outermost block) even when
            # evaluating nested blocks -- confirm this is intended
            new_block = self.copy(exclude_tasks=True)
            new_block.block = evaluate_and_append_task(block.block)
            new_block.rescue = evaluate_and_append_task(block.rescue)
            new_block.always = evaluate_and_append_task(block.always)
            return new_block

        return evaluate_block(self)

    def has_tasks(self):
        '''Return True if any of the three task lists is non-empty.'''
        return len(self.block) > 0 or len(self.rescue) > 0 or len(
            self.always) > 0

    def get_include_params(self):
        '''Return the include parameters from the parent chain, if any.'''
        if self._parent:
            return self._parent.get_include_params()
        else:
            return dict()

    def all_parents_static(self):
        '''
        Determine if all of the parents of this block were statically loaded
        or not. Since Task/TaskInclude objects may be in the chain, they simply
        call their parents all_parents_static() method. Only Block objects in
        the chain check the statically_loaded value of the parent.
        '''
        from ansible.playbook.task_include import TaskInclude
        if self._parent:
            if isinstance(self._parent,
                          TaskInclude) and not self._parent.statically_loaded:
                return False
            return self._parent.all_parents_static()

        return True

    def get_first_parent_include(self):
        '''Return the nearest TaskInclude ancestor, or None.'''
        from ansible.playbook.task_include import TaskInclude
        if self._parent:
            if isinstance(self._parent, TaskInclude):
                return self._parent
            return self._parent.get_first_parent_include()
        return None
Example #26
0
class Base:
    """
    Base class for playbook objects: declares field attributes, turns them
    into properties at construction time, and provides the generic
    load/validate/serialize machinery shared by plays, blocks and tasks.
    """

    # connection/transport
    _connection = FieldAttribute(isa='string')
    _port = FieldAttribute(isa='int')
    _remote_user = FieldAttribute(isa='string')

    # variables
    _vars = FieldAttribute(isa='dict', default=dict())

    # flags and misc. settings
    _environment = FieldAttribute(isa='list')
    _no_log = FieldAttribute(isa='bool')

    def __init__(self):

        # initialize the data loader and variable manager, which will be provided
        # later when the object is actually loaded
        self._loader = None
        self._variable_manager = None

        # every object gets a random uuid:
        self._uuid = uuid.uuid4()

        # and initialize the base attributes
        self._initialize_base_attributes()

        try:
            from __main__ import display
            self._display = display
        except ImportError:
            from ansible.utils.display import Display
            self._display = Display()

    # The following three functions are used to programatically define data
    # descriptors (aka properties) for the Attributes of all of the playbook
    # objects (tasks, blocks, plays, etc).
    #
    # The function signature is a little strange because of how we define
    # them.  We use partial to give each method the name of the Attribute that
    # it is for.  Since partial prefills the positional arguments at the
    # beginning of the function we end up with the first positional argument
    # being allocated to the name instead of to the class instance (self) as
    # normal.  To deal with that we make the property name field the first
    # positional argument and self the second arg.
    #
    # Because these methods are defined inside of the class, they get bound to
    # the instance when the object is created.  After we run partial on them
    # and put the result back into the class as a property, they get bound
    # a second time.  This leads to self being  placed in the arguments twice.
    # To work around that, we mark the functions as @staticmethod so that the
    # first binding to the instance doesn't happen.

    @staticmethod
    def _generic_g(prop_name, self):
        # a _get_attr_<name> hook, if defined, overrides the plain lookup
        method = "_get_attr_%s" % prop_name
        if hasattr(self, method):
            return getattr(self, method)()

        return self._attributes[prop_name]

    @staticmethod
    def _generic_s(prop_name, self, value):
        self._attributes[prop_name] = value

    @staticmethod
    def _generic_d(prop_name, self):
        del self._attributes[prop_name]

    def _get_base_attributes(self):
        '''
        Returns the list of attributes for this class (or any subclass thereof).
        If the attribute name starts with an underscore, it is removed
        '''
        base_attributes = dict()
        for (name, value) in getmembers(self.__class__):
            if isinstance(value, Attribute):
                if name.startswith('_'):
                    name = name[1:]
                base_attributes[name] = value
        return base_attributes

    def _initialize_base_attributes(self):
        # each class knows attributes set upon it, see Task.py for example
        self._attributes = dict()

        for (name, value) in self._get_base_attributes().items():
            getter = partial(self._generic_g, name)
            setter = partial(self._generic_s, name)
            deleter = partial(self._generic_d, name)

            # Place the property into the class so that cls.name is the
            # property functions.
            setattr(Base, name, property(getter, setter, deleter))

            # Place the value into the instance so that the property can
            # process and hold that value.
            setattr(self, name, value.default)

    def preprocess_data(self, ds):
        ''' infrequently used method to do some pre-processing of legacy terms '''

        # first matching _preprocess_data_<classname> hook in the MRO wins
        for base_class in self.__class__.mro():
            method = getattr(
                self, "_preprocess_data_%s" % base_class.__name__.lower(),
                None)
            if method:
                return method(ds)
        return ds

    def load_data(self, ds, variable_manager=None, loader=None):
        ''' walk the input datastructure and assign any values '''

        assert ds is not None

        # the variable manager class is used to manage and merge variables
        # down to a single dictionary for reference in templating, etc.
        self._variable_manager = variable_manager

        # the data loader class is used to parse data from strings and files
        if loader is not None:
            self._loader = loader
        else:
            self._loader = DataLoader()

        # call the preprocess_data() function to massage the data into
        # something we can more easily parse, and then call the validation
        # function on it to ensure there are no incorrect key values
        ds = self.preprocess_data(ds)
        self._validate_attributes(ds)

        # Walk all attributes in the class. We sort them based on their priority
        # so that certain fields can be loaded before others, if they are dependent.
        # FIXME: we currently don't do anything with private attributes but
        #        may later decide to filter them out of 'ds' here.
        base_attributes = self._get_base_attributes()
        for name, attr in sorted(base_attributes.items(),
                                 key=operator.itemgetter(1)):
            # copy the value over unless a _load_field method is defined
            if name in ds:
                method = getattr(self, '_load_%s' % name, None)
                if method:
                    self._attributes[name] = method(name, ds[name])
                else:
                    self._attributes[name] = ds[name]

        # run early, non-critical validation
        self.validate()

        # cache the datastructure internally
        setattr(self, '_ds', ds)

        # return the constructed object
        return self

    def get_ds(self):
        try:
            return getattr(self, '_ds')
        except AttributeError:
            return None

    def get_loader(self):
        return self._loader

    def get_variable_manager(self):
        return self._variable_manager

    def _validate_attributes(self, ds):
        '''
        Ensures that there are no keys in the datastructure which do
        not map to attributes for this object.
        '''

        valid_attrs = frozenset(name for name in self._get_base_attributes())
        for key in ds:
            if key not in valid_attrs:
                raise AnsibleParserError(
                    "'%s' is not a valid attribute for a %s" %
                    (key, self.__class__.__name__),
                    obj=ds)

    def validate(self, all_vars=None):
        ''' validation that is done at parse time, not load time '''
        # all_vars is unused here but kept for signature compatibility;
        # default changed from a shared mutable dict() to None

        # walk all fields in the object
        for (name, attribute) in iteritems(self._get_base_attributes()):

            # run validator only if present
            method = getattr(self, '_validate_%s' % name, None)
            if method:
                method(attribute, name, getattr(self, name))

    def copy(self):
        '''
        Create a copy of this object and return it.
        '''

        new_me = self.__class__()

        for name in self._get_base_attributes():
            setattr(new_me, name, getattr(self, name))

        new_me._loader = self._loader
        new_me._variable_manager = self._variable_manager

        # if the ds value was set on the object, copy it to the new copy too
        if hasattr(self, '_ds'):
            new_me._ds = self._ds

        return new_me

    def post_validate(self, templar):
        '''
        we can't tell that everything is of the right type until we have
        all the variables.  Run basic types (from isa) as well as
        any _post_validate_<foo> functions.
        '''

        basedir = None
        if self._loader is not None:
            basedir = self._loader.get_basedir()

        # save the omit value for later checking
        omit_value = templar._available_variables.get('omit')

        for (name, attribute) in iteritems(self._get_base_attributes()):

            if getattr(self, name) is None:
                if not attribute.required:
                    continue
                else:
                    raise AnsibleParserError(
                        "the field '%s' is required but was not set" % name)

            try:
                # Run the post-validator if present. These methods are responsible for
                # using the given templar to template the values, if required.
                method = getattr(self, '_post_validate_%s' % name, None)
                if method:
                    value = method(attribute, getattr(self, name), templar)
                else:
                    # if the attribute contains a variable, template it now
                    value = templar.template(getattr(self, name))

                # if this evaluated to the omit value, set the value back to
                # the default specified in the FieldAttribute and move on
                # FIX: previously only assigned the local `value` before
                # continuing, which never actually reset the attribute
                if omit_value is not None and value == omit_value:
                    setattr(self, name, attribute.default)
                    continue

                # and make sure the attribute is of the type it should be
                if value is not None:
                    if attribute.isa == 'string':
                        value = unicode(value)
                    elif attribute.isa == 'int':
                        value = int(value)
                    elif attribute.isa == 'float':
                        value = float(value)
                    elif attribute.isa == 'bool':
                        value = boolean(value)
                    elif attribute.isa == 'percent':
                        # special value, which may be an integer or float
                        # with an optional '%' at the end
                        if isinstance(value, string_types) and '%' in value:
                            value = value.replace('%', '')
                        value = float(value)
                    elif attribute.isa == 'list':
                        if value is None:
                            value = []
                        elif not isinstance(value, list):
                            value = [value]
                        if attribute.listof is not None:
                            for item in value:
                                if not isinstance(item, attribute.listof):
                                    raise AnsibleParserError(
                                        "the field '%s' should be a list of %s, but the item '%s' is a %s"
                                        % (name, attribute.listof, item,
                                           type(item)),
                                        obj=self.get_ds())
                                elif attribute.required and attribute.listof == string_types:
                                    if item is None or item.strip() == "":
                                        raise AnsibleParserError(
                                            "the field '%s' is required, and cannot have empty values"
                                            % (name, ),
                                            obj=self.get_ds())
                    elif attribute.isa == 'set':
                        if value is None:
                            value = set()
                        else:
                            if not isinstance(value, (list, set)):
                                value = [value]
                            if not isinstance(value, set):
                                value = set(value)
                    elif attribute.isa == 'dict':
                        if value is None:
                            value = dict()
                        elif not isinstance(value, dict):
                            raise TypeError("%s is not a dictionary" % value)

                # and assign the massaged value back to the attribute field
                setattr(self, name, value)

            except (TypeError, ValueError) as e:
                raise AnsibleParserError(
                    "the field '%s' has an invalid value (%s), and could not be converted to an %s. Error was: %s"
                    % (name, value, attribute.isa, e),
                    obj=self.get_ds())
            except UndefinedError as e:
                if templar._fail_on_undefined_errors and name != 'name':
                    raise AnsibleParserError(
                        "the field '%s' has an invalid value, which appears to include a variable that is undefined. The error was: %s"
                        % (name, e),
                        obj=self.get_ds())

    def serialize(self):
        '''
        Serializes the object derived from the base object into
        a dictionary of values. This only serializes the field
        attributes for the object, so this may need to be overridden
        for any classes which wish to add additional items not stored
        as field attributes.
        '''

        # renamed from `repr` to avoid shadowing the builtin
        result = dict()

        for name in self._get_base_attributes():
            result[name] = getattr(self, name)

        # serialize the uuid field
        result['uuid'] = getattr(self, '_uuid')

        return result

    def deserialize(self, data):
        '''
        Given a dictionary of values, load up the field attributes for
        this object. As with serialize(), if there are any non-field
        attribute data members, this method will need to be overridden
        and extended.
        '''

        assert isinstance(data, dict)

        for (name, attribute) in iteritems(self._get_base_attributes()):
            if name in data:
                setattr(self, name, data[name])
            else:
                setattr(self, name, attribute.default)

        # restore the UUID field
        setattr(self, '_uuid', data.get('uuid'))

    def _load_vars(self, attr, ds):
        '''
        Vars in a play can be specified either as a dictionary directly, or
        as a list of dictionaries. If the later, this method will turn the
        list into a single dictionary.
        '''
        def _validate_variable_keys(ds):
            for key in ds:
                if not isidentifier(key):
                    raise TypeError("%s is not a valid variable name" % key)

        try:
            if isinstance(ds, dict):
                _validate_variable_keys(ds)
                return ds
            elif isinstance(ds, list):
                all_vars = dict()
                for item in ds:
                    if not isinstance(item, dict):
                        raise ValueError
                    _validate_variable_keys(item)
                    all_vars = combine_vars(all_vars, item)
                return all_vars
            elif ds is None:
                return {}
            else:
                raise ValueError
        except ValueError:
            raise AnsibleParserError(
                "Vars in a %s must be specified as a dictionary, or a list of dictionaries"
                % self.__class__.__name__,
                obj=ds)
        except TypeError as e:
            raise AnsibleParserError(
                "Invalid variable name in vars specified for %s: %s" %
                (self.__class__.__name__, e),
                obj=ds)

    def _extend_value(self, value, new_value):
        '''
        Will extend the value given with new_value (and will turn both
        into lists if they are not so already). Adjacent duplicate values
        are collapsed (via itertools.groupby), preserving order.
        '''

        if not isinstance(value, list):
            value = [value]
        if not isinstance(new_value, list):
            new_value = [new_value]

        # groupby only removes *adjacent* duplicates, unlike the set()
        # approach it replaced, but it preserves ordering
        return [i for i, _ in itertools.groupby(value + new_value)]

    def __getstate__(self):
        return self.serialize()

    def __setstate__(self, data):
        self.__init__()
        self.deserialize(data)
Example #27
0
class Task(Base, Conditional, Taggable, Become):
    """
    A task is a language feature that represents a call to a module, with given arguments and other parameters.
    A handler is a subclass of a task.

    Usage:

       Task.load(datastructure) -> Task
       Task.something(...)

    NOTE: this class relies on Python 2-only constructs (``basestring``,
    ``dict.iteritems``) in _merge_kv() and preprocess_data().
    """

    # =================================================================================
    # ATTRIBUTES
    # load_<attribute_name> and
    # validate_<attribute_name>
    # will be used if defined
    # might be possible to define others

    _args = FieldAttribute(isa='dict', default=dict())
    _action = FieldAttribute(isa='string')

    _always_run = FieldAttribute(isa='bool')
    _any_errors_fatal = FieldAttribute(isa='bool')
    _async = FieldAttribute(isa='int', default=0)
    _changed_when = FieldAttribute(isa='string')
    _delay = FieldAttribute(isa='int', default=5)
    _delegate_to = FieldAttribute(isa='string')
    _failed_when = FieldAttribute(isa='string')
    _first_available_file = FieldAttribute(isa='list')
    _ignore_errors = FieldAttribute(isa='bool')

    # loop name + raw loop args; private because they are populated by
    # preprocess_data() from the legacy `with_<lookup>` keys, not set directly
    _loop = FieldAttribute(isa='string', private=True)
    _loop_args = FieldAttribute(isa='list', private=True)
    _local_action = FieldAttribute(isa='string')

    # FIXME: this should not be a Task
    _meta = FieldAttribute(isa='string')

    _name = FieldAttribute(isa='string', default='')

    _notify = FieldAttribute(isa='list')
    _poll = FieldAttribute(isa='int')
    _register = FieldAttribute(isa='string')
    _retries = FieldAttribute(isa='int', default=1)
    _run_once = FieldAttribute(isa='bool')
    _until = FieldAttribute(isa='list')  # ?

    def __init__(self, block=None, role=None, task_include=None):
        ''' Construct a task; without the Task.load classmethod it will be pretty blank. '''

        # parent containers are stored before Base.__init__ so attribute
        # inheritance (see _get_parent_attribute) can walk up to them
        self._block = block
        self._role = role
        self._task_include = task_include

        super(Task, self).__init__()

    def get_name(self):
        ''' return the name of the task '''

        if self._role and self.name:
            return "%s : %s" % (self._role.get_name(), self.name)
        elif self.name:
            return self.name
        else:
            # unnamed task: fall back to "<action> <k=v args>", prefixed with
            # the role name when the task belongs to a role
            flattened_args = self._merge_kv(self.args)
            if self._role:
                return "%s : %s %s" % (self._role.get_name(), self.action,
                                       flattened_args)
            else:
                return "%s %s" % (self.action, flattened_args)

    def _merge_kv(self, ds):
        '''
        Flatten a dict of module args into a single "k=v k=v" string,
        skipping keys that start with an underscore (internal params).
        Strings pass through unchanged; None becomes "". Any other type
        falls off the end and implicitly returns None.
        '''
        if ds is None:
            return ""
        elif isinstance(ds, basestring):
            return ds
        elif isinstance(ds, dict):
            buf = ""
            for (k, v) in ds.iteritems():
                if k.startswith('_'):
                    continue
                buf = buf + "%s=%s " % (k, v)
            buf = buf.strip()
            return buf

    @staticmethod
    def load(data,
             block=None,
             role=None,
             task_include=None,
             variable_manager=None,
             loader=None):
        ''' Factory: build a Task tied to its containers and load the raw datastructure into it. '''
        t = Task(block=block, role=role, task_include=task_include)
        return t.load_data(data,
                           variable_manager=variable_manager,
                           loader=loader)

    def __repr__(self):
        ''' returns a human readable representation of the task '''
        return "TASK: %s" % self.get_name()

    def _preprocess_loop(self, ds, new_ds, k, v):
        ''' take a lookup plugin name and store it correctly '''

        # "with_items" -> loop name "items", etc.
        loop_name = k.replace("with_", "")
        if new_ds.get('loop') is not None:
            # only one with_<lookup> key may appear on a task
            raise AnsibleError("duplicate loop in task: %s" % loop_name,
                               obj=ds)
        if v is None:
            raise AnsibleError("you must specify a value when using %s" % k,
                               obj=ds)
        new_ds['loop'] = loop_name
        new_ds['loop_args'] = v

    def preprocess_data(self, ds):
        '''
        tasks are especially complex arguments so need pre-processing.
        keep it short.
        '''

        # sanity check only; note assert statements are stripped under -O
        assert isinstance(ds, dict)

        # the new, cleaned datastructure, which will have legacy
        # items reduced to a standard structure suitable for the
        # attributes of the task class
        new_ds = AnsibleMapping()
        if isinstance(ds, AnsibleBaseYAMLObject):
            # carry over file/line position info for error reporting
            new_ds.ansible_pos = ds.ansible_pos

        # use the args parsing class to determine the action, args,
        # and the delegate_to value from the various possible forms
        # supported as legacy
        args_parser = ModuleArgsParser(task_ds=ds)
        (action, args, delegate_to) = args_parser.parse()

        new_ds['action'] = action
        new_ds['args'] = args
        new_ds['delegate_to'] = delegate_to

        for (k, v) in ds.iteritems():
            if k in ('action', 'local_action', 'args',
                     'delegate_to') or k == action or k == 'shell':
                # we don't want to re-assign these values, which were
                # determined by the ModuleArgsParser() above
                continue
            elif k.replace("with_", "") in lookup_loader:
                # a legacy with_<lookup> key: translate to loop/loop_args
                self._preprocess_loop(ds, new_ds, k, v)
            else:
                new_ds[k] = v

        return super(Task, self).preprocess_data(new_ds)

    def post_validate(self, templar):
        '''
        Override of base class post_validate, to also do final validation on
        the block and task include (if any) to which this task belongs.
        '''

        if self._block:
            self._block.post_validate(templar)
        if self._task_include:
            self._task_include.post_validate(templar)

        super(Task, self).post_validate(templar)

    def get_vars(self):
        '''
        Return the variables visible to this task: the task's own vars,
        overlaid in turn by block vars, task-include vars and finally the
        task args (later updates win), with 'tags' and 'when' stripped out
        so they are not exposed as plain variables.
        '''
        all_vars = self.vars.copy()
        if self._block:
            all_vars.update(self._block.get_vars())
        if self._task_include:
            all_vars.update(self._task_include.get_vars())

        if isinstance(self.args, dict):
            all_vars.update(self.args)

        if 'tags' in all_vars:
            del all_vars['tags']
        if 'when' in all_vars:
            del all_vars['when']
        return all_vars

    def copy(self, exclude_block=False):
        '''
        Return a copy of this task. The parent block and task include are
        deep-copied (block optionally excluded); the role is shared by
        reference, not copied.
        '''
        new_me = super(Task, self).copy()

        new_me._block = None
        if self._block and not exclude_block:
            new_me._block = self._block.copy()

        new_me._role = None
        if self._role:
            # note: role is intentionally shared, not copied
            new_me._role = self._role

        new_me._task_include = None
        if self._task_include:
            new_me._task_include = self._task_include.copy()

        return new_me

    def serialize(self):
        ''' Serialize the task plus any parent block/role/task-include containers. '''
        data = super(Task, self).serialize()

        if self._block:
            data['block'] = self._block.serialize()

        if self._role:
            data['role'] = self._role.serialize()

        if self._task_include:
            data['task_include'] = self._task_include.serialize()

        return data

    def deserialize(self, data):
        ''' Restore the task and rebuild any serialized parent containers. '''

        # import is here to avoid import loops
        #from ansible.playbook.task_include import TaskInclude

        block_data = data.get('block')

        if block_data:
            b = Block()
            b.deserialize(block_data)
            self._block = b
            # remove so the base deserialize doesn't see container keys
            del data['block']

        role_data = data.get('role')
        if role_data:
            r = Role()
            r.deserialize(role_data)
            self._role = r
            del data['role']

        ti_data = data.get('task_include')
        if ti_data:
            #ti = TaskInclude()
            # NOTE: rebuilt as a plain Task (see commented TaskInclude above)
            ti = Task()
            ti.deserialize(ti_data)
            self._task_include = ti
            del data['task_include']

        super(Task, self).deserialize(data)

    def evaluate_conditional(self, templar, all_vars):
        '''
        A task's conditional only passes if the parent block's and the
        parent task include's conditionals (when present) also pass.
        '''
        if self._block is not None:
            if not self._block.evaluate_conditional(templar, all_vars):
                return False
        if self._task_include is not None:
            if not self._task_include.evaluate_conditional(templar, all_vars):
                return False
        return super(Task, self).evaluate_conditional(templar, all_vars)

    def set_loader(self, loader):
        '''
        Sets the loader on this object and recursively on parent, child objects.
        This is used primarily after the Task has been serialized/deserialized, which
        does not preserve the loader.
        '''

        self._loader = loader

        if self._block:
            self._block.set_loader(loader)
        if self._task_include:
            self._task_include.set_loader(loader)

    def _get_parent_attribute(self, attr, extend=False):
        '''
        Generic logic to get the attribute or parent attribute for a task value.

        Looks at the block first, then the task include. With extend=False the
        parent value is only consulted while the current value is None; with
        extend=True all levels are merged via _extend_value().
        '''
        value = self._attributes[attr]
        if self._block and (value is None or extend):
            parent_value = getattr(self._block, attr)
            if extend:
                value = self._extend_value(value, parent_value)
            else:
                value = parent_value
        if self._task_include and (value is None or extend):
            parent_value = getattr(self._task_include, attr)
            if extend:
                value = self._extend_value(value, parent_value)
            else:
                value = parent_value
        return value
Example #28
0
class Play(Base, Taggable, Become):
    """
    A play is a language feature that represents a list of roles and/or
    task/handler blocks to execute on a given set of hosts.

    Usage:

       Play.load(datastructure) -> Play
       Play.something(...)
    """

    # =================================================================================
    # Connection-Related Attributes

    # TODO: generalize connection
    _accelerate = FieldAttribute(isa='bool', default=False)
    _accelerate_ipv6 = FieldAttribute(isa='bool', default=False)
    _accelerate_port = FieldAttribute(isa='int',
                                      default=5099)  # should be alias of port

    # Connection
    _gather_facts = FieldAttribute(isa='string', default='smart')
    _hosts = FieldAttribute(isa='list', default=[], required=True)
    _name = FieldAttribute(isa='string', default='<no name specified>')

    # Variable Attributes
    _vars_files = FieldAttribute(isa='list', default=[])
    _vars_prompt = FieldAttribute(isa='dict', default=dict())
    _vault_password = FieldAttribute(isa='string')

    # Block (Task) Lists Attributes
    # each of these is loaded via a matching _load_<name>() method below
    _handlers = FieldAttribute(isa='list', default=[])
    _pre_tasks = FieldAttribute(isa='list', default=[])
    _post_tasks = FieldAttribute(isa='list', default=[])
    _tasks = FieldAttribute(isa='list', default=[])

    # Role Attributes
    _roles = FieldAttribute(isa='list', default=[])

    # Flag/Setting Attributes
    _any_errors_fatal = FieldAttribute(isa='bool', default=False)
    _max_fail_percentage = FieldAttribute(isa='string', default='0')
    _serial = FieldAttribute(isa='int', default=0)
    _strategy = FieldAttribute(isa='string', default='linear')

    # =================================================================================

    def __init__(self):
        super(Play, self).__init__()

    def __repr__(self):
        # a play's repr is simply its display name
        return self.get_name()

    def get_name(self):
        ''' return the name of the Play '''
        return "PLAY: %s" % self._attributes.get('name')

    @staticmethod
    def load(data, variable_manager=None, loader=None):
        ''' Factory: build a Play and load the raw datastructure into it. '''
        p = Play()
        return p.load_data(data,
                           variable_manager=variable_manager,
                           loader=loader)

    def preprocess_data(self, ds):
        '''
        Adjusts play datastructure to cleanup old/legacy items
        '''

        # sanity check only; note assert statements are stripped under -O
        assert isinstance(ds, dict)

        # The use of 'user' in the Play datastructure was deprecated to
        # line up with the same change for Tasks, due to the fact that
        # 'user' conflicted with the user module.
        if 'user' in ds:
            # this should never happen, but error out with a helpful message
            # to the user if it does...
            if 'remote_user' in ds:
                raise AnsibleParserError(
                    "both 'user' and 'remote_user' are set for %s. The use of 'user' is deprecated, and should be removed"
                    % self.get_name(),
                    obj=ds)

            # silently migrate the legacy key to the new one
            ds['remote_user'] = ds['user']
            del ds['user']

        return super(Play, self).preprocess_data(ds)

    def _load_vars(self, attr, ds):
        '''
        Vars in a play can be specified either as a dictionary directly, or
        as a list of dictionaries. If the latter, this method will turn the
        list into a single dictionary (later entries win via combine_vars).
        '''

        try:
            if isinstance(ds, dict):
                return ds
            elif isinstance(ds, list):
                all_vars = dict()
                for item in ds:
                    if not isinstance(item, dict):
                        raise ValueError
                    all_vars = combine_vars(all_vars, item)
                return all_vars
            else:
                raise ValueError
        except ValueError:
            # any structural problem above is reported as one parser error
            raise AnsibleParserError(
                "Vars in a playbook must be specified as a dictionary, or a list of dictionaries",
                obj=ds)

    def _load_tasks(self, attr, ds):
        '''
        Loads a list of blocks from a list which may be mixed tasks/blocks.
        Bare tasks outside of a block are given an implicit block.
        '''
        return load_list_of_blocks(ds=ds,
                                   play=self,
                                   variable_manager=self._variable_manager,
                                   loader=self._loader)

    def _load_pre_tasks(self, attr, ds):
        '''
        Loads a list of blocks from a list which may be mixed tasks/blocks.
        Bare tasks outside of a block are given an implicit block.
        '''
        return load_list_of_blocks(ds=ds,
                                   play=self,
                                   variable_manager=self._variable_manager,
                                   loader=self._loader)

    def _load_post_tasks(self, attr, ds):
        '''
        Loads a list of blocks from a list which may be mixed tasks/blocks.
        Bare tasks outside of a block are given an implicit block.
        '''
        return load_list_of_blocks(ds=ds,
                                   play=self,
                                   variable_manager=self._variable_manager,
                                   loader=self._loader)

    def _load_handlers(self, attr, ds):
        '''
        Loads a list of blocks from a list which may be mixed handlers/blocks.
        Bare handlers outside of a block are given an implicit block.
        '''
        return load_list_of_blocks(ds=ds,
                                   play=self,
                                   use_handlers=True,
                                   variable_manager=self._variable_manager,
                                   loader=self._loader)

    def _load_roles(self, attr, ds):
        '''
        Loads and returns a list of RoleInclude objects from the datastructure
        list of role definitions and creates the Role from those objects
        '''

        role_includes = load_list_of_roles(
            ds, variable_manager=self._variable_manager, loader=self._loader)

        roles = []
        for ri in role_includes:
            roles.append(Role.load(ri))
        return roles

    # FIXME: post_validation needs to ensure that become/su/sudo have only 1 set

    def _compile_roles(self):
        '''
        Handles the role compilation step, returning a flat list of tasks
        with the lowest level dependencies first. For example, if a role R
        has a dependency D1, which also has a dependency D2, the tasks from
        D2 are merged first, followed by D1, and lastly by the tasks from
        the parent role R last. This is done for all roles in the Play.
        '''

        block_list = []

        if len(self.roles) > 0:
            for r in self.roles:
                # each role compiles itself recursively (deps first)
                block_list.extend(r.compile(play=self))

        return block_list

    def compile(self):
        '''
        Compiles and returns the task list for this play, compiled from the
        roles (which are themselves compiled recursively) and/or the list of
        tasks specified in the play.
        '''

        # order matters: pre_tasks, then roles, then tasks, then post_tasks
        block_list = []

        block_list.extend(self.pre_tasks)
        block_list.extend(self._compile_roles())
        block_list.extend(self.tasks)
        block_list.extend(self.post_tasks)

        return block_list

    def get_vars(self):
        ''' Return a shallow copy of the play's own vars. '''
        return self.vars.copy()

    def get_vars_files(self):
        ''' Return the list of vars_files for this play. '''
        return self.vars_files

    def get_handlers(self):
        ''' Return a shallow copy of the play's handler list. '''
        return self.handlers[:]

    def get_roles(self):
        ''' Return a shallow copy of the play's role list. '''
        return self.roles[:]

    def get_tasks(self):
        '''
        Return the play's pre/main/post tasks. Note: for a Block entry the
        combined block+rescue+always child list is appended as a single
        nested element, so the result can contain lists as items.
        '''
        tasklist = []
        for task in self.pre_tasks + self.tasks + self.post_tasks:
            if isinstance(task, Block):
                tasklist.append(task.block + task.rescue + task.always)
            else:
                tasklist.append(task)
        return tasklist

    def serialize(self):
        ''' Serialize the play, including each role's serialized form. '''
        data = super(Play, self).serialize()

        roles = []
        for role in self.get_roles():
            roles.append(role.serialize())
        data['roles'] = roles

        return data

    def deserialize(self, data):
        ''' Restore the play, rebuilding Role objects from their serialized form. '''
        super(Play, self).deserialize(data)

        if 'roles' in data:
            role_data = data.get('roles', [])
            roles = []
            for role in role_data:
                r = Role()
                r.deserialize(role)
                roles.append(r)

            setattr(self, 'roles', roles)
            # remove so the raw serialized roles don't linger in data
            del data['roles']
Example #29
0
class Task(Base, Conditional, Taggable, CollectionSearch):
    """
    A task is a language feature that represents a call to a module, with given arguments and other parameters.
    A handler is a subclass of a task.

    Usage:

       Task.load(datastructure) -> Task
       Task.something(...)
    """

    # =================================================================================
    # ATTRIBUTES
    # load_<attribute_name> and
    # validate_<attribute_name>
    # will be used if defined
    # might be possible to define others

    # NOTE: ONLY set defaults on task attributes that are not inheritable,
    # inheritance is only triggered if the 'current value' is Sentinel,
    # default can be set at play/top level object and inheritance will take it's course.

    args = FieldAttribute(isa='dict', default=dict)
    action = FieldAttribute(isa='string')

    async_val = FieldAttribute(isa='int', default=0, alias='async')
    changed_when = FieldAttribute(isa='list', default=list)
    delay = FieldAttribute(isa='int', default=5)
    delegate_to = FieldAttribute(isa='string')
    delegate_facts = FieldAttribute(isa='bool')
    failed_when = FieldAttribute(isa='list', default=list)
    loop = FieldAttribute()
    loop_control = NonInheritableFieldAttribute(isa='class',
                                                class_type=LoopControl)
    notify = FieldAttribute(isa='list')
    poll = FieldAttribute(isa='int', default=C.DEFAULT_POLL_INTERVAL)
    register = FieldAttribute(isa='string', static=True)
    retries = FieldAttribute(isa='int', default=3)
    until = FieldAttribute(isa='list', default=list)

    # deprecated, used to be loop and loop_args but loop has been repurposed
    loop_with = NonInheritableFieldAttribute(isa='string', private=True)

    def __init__(self, block=None, role=None, task_include=None):
        ''' constructors a task, without the Task.load classmethod, it will be pretty blank '''

        self._role = role
        self._parent = None
        self.implicit = False
        self.resolved_action = None

        if task_include:
            self._parent = task_include
        else:
            self._parent = block

        super(Task, self).__init__()

    def get_name(self, include_role_fqcn=True):
        ''' return the name of the task '''

        if self._role:
            role_name = self._role.get_name(
                include_role_fqcn=include_role_fqcn)

        if self._role and self.name:
            return "%s : %s" % (role_name, self.name)
        elif self.name:
            return self.name
        else:
            if self._role:
                return "%s : %s" % (role_name, self.action)
            else:
                return "%s" % (self.action, )

    def _merge_kv(self, ds):
        if ds is None:
            return ""
        elif isinstance(ds, string_types):
            return ds
        elif isinstance(ds, dict):
            buf = ""
            for (k, v) in ds.items():
                if k.startswith('_'):
                    continue
                buf = buf + "%s=%s " % (k, v)
            buf = buf.strip()
            return buf

    @staticmethod
    def load(data,
             block=None,
             role=None,
             task_include=None,
             variable_manager=None,
             loader=None):
        t = Task(block=block, role=role, task_include=task_include)
        return t.load_data(data,
                           variable_manager=variable_manager,
                           loader=loader)

    def __repr__(self):
        ''' returns a human readable representation of the task '''
        if self.get_name() in C._ACTION_META:
            return "TASK: meta (%s)" % self.args['_raw_params']
        else:
            return "TASK: %s" % self.get_name()

    def _preprocess_with_loop(self, ds, new_ds, k, v):
        ''' take a lookup plugin name and store it correctly '''

        loop_name = k.removeprefix("with_")
        if new_ds.get('loop') is not None or new_ds.get(
                'loop_with') is not None:
            raise AnsibleError("duplicate loop in task: %s" % loop_name,
                               obj=ds)
        if v is None:
            raise AnsibleError("you must specify a value when using %s" % k,
                               obj=ds)
        new_ds['loop_with'] = loop_name
        new_ds['loop'] = v
        # display.deprecated("with_ type loops are being phased out, use the 'loop' keyword instead",
        #                    version="2.10", collection_name='ansible.builtin')

    def preprocess_data(self, ds):
        '''
        tasks are especially complex arguments so need pre-processing.
        keep it short.
        '''

        if not isinstance(ds, dict):
            raise AnsibleAssertionError(
                'ds (%s) should be a dict but was a %s' % (ds, type(ds)))

        # the new, cleaned datastructure, which will have legacy
        # items reduced to a standard structure suitable for the
        # attributes of the task class
        new_ds = AnsibleMapping()
        if isinstance(ds, AnsibleBaseYAMLObject):
            new_ds.ansible_pos = ds.ansible_pos

        # since this affects the task action parsing, we have to resolve in preprocess instead of in typical validator
        default_collection = AnsibleCollectionConfig.default_collection

        collections_list = ds.get('collections')
        if collections_list is None:
            # use the parent value if our ds doesn't define it
            collections_list = self.collections
        else:
            # Validate this untemplated field early on to guarantee we are dealing with a list.
            # This is also done in CollectionSearch._load_collections() but this runs before that call.
            collections_list = self.get_validated_value(
                'collections', self.fattributes.get('collections'),
                collections_list, None)

        if default_collection and not self._role:  # FIXME: and not a collections role
            if collections_list:
                if default_collection not in collections_list:
                    collections_list.insert(0, default_collection)
            else:
                collections_list = [default_collection]

        if collections_list and 'ansible.builtin' not in collections_list and 'ansible.legacy' not in collections_list:
            collections_list.append('ansible.legacy')

        if collections_list:
            ds['collections'] = collections_list

        # use the args parsing class to determine the action, args,
        # and the delegate_to value from the various possible forms
        # supported as legacy
        args_parser = ModuleArgsParser(task_ds=ds,
                                       collection_list=collections_list)
        try:
            (action, args, delegate_to) = args_parser.parse()
        except AnsibleParserError as e:
            # if the raises exception was created with obj=ds args, then it includes the detail
            # so we dont need to add it so we can just re raise.
            if e.obj:
                raise
            # But if it wasn't, we can add the yaml object now to get more detail
            raise AnsibleParserError(to_native(e), obj=ds, orig_exc=e)
        else:
            self.resolved_action = args_parser.resolved_action

        # the command/shell/script modules used to support the `cmd` arg,
        # which corresponds to what we now call _raw_params, so move that
        # value over to _raw_params (assuming it is empty)
        if action in C._ACTION_HAS_CMD:
            if 'cmd' in args:
                if args.get('_raw_params', '') != '':
                    raise AnsibleError(
                        "The 'cmd' argument cannot be used when other raw parameters are specified."
                        " Please put everything in one or the other place.",
                        obj=ds)
                args['_raw_params'] = args.pop('cmd')

        new_ds['action'] = action
        new_ds['args'] = args
        new_ds['delegate_to'] = delegate_to

        # we handle any 'vars' specified in the ds here, as we may
        # be adding things to them below (special handling for includes).
        # When that deprecated feature is removed, this can be too.
        if 'vars' in ds:
            # _load_vars is defined in Base, and is used to load a dictionary
            # or list of dictionaries in a standard way
            new_ds['vars'] = self._load_vars(None, ds.get('vars'))
        else:
            new_ds['vars'] = dict()

        for (k, v) in ds.items():
            if k in ('action', 'local_action', 'args',
                     'delegate_to') or k == action or k == 'shell':
                # we don't want to re-assign these values, which were determined by the ModuleArgsParser() above
                continue
            elif k.startswith('with_') and k.removeprefix(
                    "with_") in lookup_loader:
                # transform into loop property
                self._preprocess_with_loop(ds, new_ds, k, v)
            elif C.INVALID_TASK_ATTRIBUTE_FAILED or k in self._valid_attrs:
                new_ds[k] = v
            else:
                display.warning("Ignoring invalid attribute: %s" % k)

        return super(Task, self).preprocess_data(new_ds)

    def _load_loop_control(self, attr, ds):
        if not isinstance(ds, dict):
            raise AnsibleParserError(
                "the `loop_control` value must be specified as a dictionary and cannot "
                "be a variable itself (though it can contain variables)",
                obj=ds,
            )

        return LoopControl.load(data=ds,
                                variable_manager=self._variable_manager,
                                loader=self._loader)

    def _validate_attributes(self, ds):
        try:
            super(Task, self)._validate_attributes(ds)
        except AnsibleParserError as e:
            e.message += '\nThis error can be suppressed as a warning using the "invalid_task_attribute_failed" configuration'
            raise e

    def _validate_changed_when(self, attr, name, value):
        if not isinstance(value, list):
            setattr(self, name, [value])

    def _validate_failed_when(self, attr, name, value):
        if not isinstance(value, list):
            setattr(self, name, [value])

    def post_validate(self, templar):
        '''
        Override of base class post_validate, to also do final validation on
        the block and task include (if any) to which this task belongs.
        '''

        if self._parent:
            self._parent.post_validate(templar)

        if AnsibleCollectionConfig.default_collection:
            pass

        super(Task, self).post_validate(templar)

    def _post_validate_loop(self, attr, value, templar):
        '''
        Override post validation for the loop field, which is templated
        specially in the TaskExecutor class when evaluating loops.
        '''
        return value

    def _post_validate_environment(self, attr, value, templar):
        '''
        Override post validation of vars on the play, as we don't want to
        template these too early.
        '''
        env = {}
        if value is not None:

            def _parse_env_kv(k, v):
                try:
                    env[k] = templar.template(v, convert_bare=False)
                except AnsibleUndefinedVariable as e:
                    error = to_native(e)
                    if self.action in C._ACTION_FACT_GATHERING and 'ansible_facts.env' in error or 'ansible_env' in error:
                        # ignore as fact gathering is required for 'env' facts
                        return
                    raise

            if isinstance(value, list):
                for env_item in value:
                    if isinstance(env_item, dict):
                        for k in env_item:
                            _parse_env_kv(k, env_item[k])
                    else:
                        isdict = templar.template(env_item, convert_bare=False)
                        if isinstance(isdict, dict):
                            env |= isdict
                        else:
                            display.warning(
                                "could not parse environment value, skipping: %s"
                                % value)

            elif isinstance(value, dict):
                # should not really happen
                env = dict()
                for env_item in value:
                    _parse_env_kv(env_item, value[env_item])
            else:
                # at this point it should be a simple string, also should not happen
                env = templar.template(value, convert_bare=False)

        return env

    def _post_validate_changed_when(self, attr, value, templar):
        '''
        changed_when is evaluated after the execution of the task is complete,
        and should not be templated during the regular post_validate step.
        '''
        return value

    def _post_validate_failed_when(self, attr, value, templar):
        '''
        failed_when is evaluated after the execution of the task is complete,
        and should not be templated during the regular post_validate step.
        '''
        return value

    def _post_validate_until(self, attr, value, templar):
        '''
        until is evaluated after the execution of the task is complete,
        and should not be templated during the regular post_validate step.
        '''
        return value

    def get_vars(self):
        all_vars = dict()
        if self._parent:
            all_vars |= self._parent.get_vars()

        all_vars |= self.vars

        if 'tags' in all_vars:
            del all_vars['tags']
        if 'when' in all_vars:
            del all_vars['when']

        return all_vars

    def get_include_params(self):
        """Collect include parameters from the parent chain; when this task
        is itself an include action, its own vars are layered on top."""
        params = dict()
        if self._parent:
            params.update(self._parent.get_include_params())
        if self.action in C._ACTION_ALL_INCLUDES:
            params.update(self.vars)
        return params

    def copy(self, exclude_parent=False, exclude_tasks=False):
        """Return a copy of this task.

        :param exclude_parent: when True, the copy has no parent reference.
        :param exclude_tasks: forwarded to the parent's copy().
        """
        new_me = super(Task, self).copy()

        if self._parent and not exclude_parent:
            new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
        else:
            new_me._parent = None

        # roles are shared between the original and the copy, not duplicated
        new_me._role = self._role if self._role else None

        # carry over bookkeeping that the base copy() does not handle
        new_me.implicit = self.implicit
        new_me.resolved_action = self.resolved_action
        new_me._uuid = self._uuid

        return new_me

    def serialize(self):
        """Serialize this task to a plain dict, including parent/role
        references unless the task has been squashed or finalized."""
        data = super(Task, self).serialize()

        if self._squashed or self._finalized:
            return data

        if self._parent:
            data['parent'] = self._parent.serialize()
            # the concrete class name is needed to rebuild the parent on deserialize
            data['parent_type'] = self._parent.__class__.__name__

        if self._role:
            data['role'] = self._role.serialize()

        data['implicit'] = self.implicit
        data['resolved_action'] = self.resolved_action

        return data

    def deserialize(self, data):
        """Restore this task's state from a dict produced by serialize().

        Rebuilds the parent object (Block/TaskInclude/HandlerTaskInclude)
        and the role, then delegates the remaining fields to the base class.
        """

        # import is here to avoid import loops
        from ansible.playbook.task_include import TaskInclude
        from ansible.playbook.handler_task_include import HandlerTaskInclude

        parent_data = data.get('parent', None)
        if parent_data:
            # 'parent_type' was recorded by serialize() from the parent's class name
            parent_type = data.get('parent_type')
            if parent_type == 'Block':
                p = Block()
            elif parent_type == 'TaskInclude':
                p = TaskInclude()
            elif parent_type == 'HandlerTaskInclude':
                p = HandlerTaskInclude()
            # NOTE(review): an unrecognized parent_type leaves 'p' unbound and the
            # next line raises NameError — presumably serialize() can only emit the
            # three types above; confirm before relying on this.
            p.deserialize(parent_data)
            self._parent = p
            # remove so the base-class deserialize below does not see it again
            del data['parent']

        role_data = data.get('role')
        if role_data:
            r = Role()
            r.deserialize(role_data)
            self._role = r
            del data['role']

        self.implicit = data.get('implicit', False)
        self.resolved_action = data.get('resolved_action')

        # base class restores all remaining field attributes
        super(Task, self).deserialize(data)

    def set_loader(self, loader):
        """Attach *loader* to this task and propagate it up the parent chain.

        Required after a serialize/deserialize round trip, which does not
        preserve the loader reference.
        """
        self._loader = loader

        parent = self._parent
        if parent:
            parent.set_loader(loader)

    def _get_parent_attribute(self, attr, extend=False, prepend=False):
        '''
        Generic logic to get the attribute or parent attribute for a task value.

        Resolution: the task's own value wins unless it is Sentinel (unset) or
        the attribute is declared extendable, in which case the value is looked
        up (and possibly merged) along the parent chain.

        Note: the ``extend`` and ``prepend`` parameters are ignored — both are
        immediately overwritten from the attribute's field definition below.
        '''
        extend = self.fattributes.get(attr).extend
        prepend = self.fattributes.get(attr).prepend
        try:
            # Sentinel (not None) marks "unset", since None can be a real value
            value = getattr(self, f'_{attr}', Sentinel)
            # If parent is static, we can grab attrs from the parent
            # otherwise, defer to the grandparent
            if getattr(self._parent, 'statically_loaded', True):
                _parent = self._parent
            else:
                _parent = self._parent._parent

            if _parent and (value is Sentinel or extend):
                if getattr(_parent, 'statically_loaded', True):
                    # vars are always inheritable, other attributes might not be for the parent but still should be for other ancestors
                    if attr != 'vars' and hasattr(_parent,
                                                  '_get_parent_attribute'):
                        parent_value = _parent._get_parent_attribute(attr)
                    else:
                        parent_value = getattr(_parent, f'_{attr}', Sentinel)

                    if extend:
                        # merge own and parent values, order controlled by prepend
                        value = self._extend_value(value, parent_value,
                                                   prepend)
                    else:
                        value = parent_value
        except KeyError:
            pass

        return value

    def all_parents_static(self):
        """Delegate the static-loading check up the parent chain; a task
        without a parent is considered static."""
        if not self._parent:
            return True
        return self._parent.all_parents_static()

    def get_first_parent_include(self):
        """Walk up the parent chain and return the nearest TaskInclude
        ancestor, or None when there is none."""
        from ansible.playbook.task_include import TaskInclude

        parent = self._parent
        if not parent:
            return None
        if isinstance(parent, TaskInclude):
            return parent
        return parent.get_first_parent_include()
Example #30
0
class Become:
    """Mixin providing the privilege-escalation ('become') field attributes
    and backwards-compatible handling of the legacy sudo/su keywords."""

    # Privilege escalation
    _become              = FieldAttribute(isa='bool')
    _become_method       = FieldAttribute(isa='string')
    _become_user         = FieldAttribute(isa='string')

    def __init__(self):
        return super(Become, self).__init__()

    def _detect_privilege_escalation_conflict(self, ds):
        """Raise AnsibleParserError if *ds* mixes mutually exclusive
        privilege-escalation keyword families (become vs sudo vs su)."""

        # Fail out if user specifies conflicting privilege escalations
        has_become = 'become' in ds or 'become_user' in ds
        has_sudo = 'sudo' in ds or 'sudo_user' in ds
        has_su = 'su' in ds or 'su_user' in ds

        if has_become:
            msg = 'The become params ("become", "become_user") and'
            if has_sudo:
                raise AnsibleParserError('%s sudo params ("sudo", "sudo_user") cannot be used together' % msg)
            elif has_su:
                raise AnsibleParserError('%s su params ("su", "su_user") cannot be used together' % msg)
        elif has_sudo and has_su:
            raise AnsibleParserError('sudo params ("sudo", "sudo_user") and su params ("su", "su_user") cannot be used together')

    def _preprocess_data_become(self, ds):
        """Preprocess the playbook data for become attributes

        This is called from the Base object's preprocess_data() method which
        in turn is called pretty much anytime any sort of playbook object
        (plays, tasks, blocks, etc) are created.

        Rewrites legacy sudo/su keywords in *ds* to their become/become_user
        equivalents (with a deprecation warning) and returns the mutated dict.
        """

        self._detect_privilege_escalation_conflict(ds)

        # Privilege escalation, backwards compatibility for sudo/su
        if 'sudo' in ds or 'sudo_user' in ds:
            ds['become_method'] = 'sudo'
            if 'sudo' in ds:
                ds['become'] = ds['sudo']
                del ds['sudo']

            if 'sudo_user' in ds:
                ds['become_user'] = ds['sudo_user']
                del ds['sudo_user']

            display.deprecated("Instead of sudo/sudo_user, use become/become_user and make sure become_method is 'sudo' (default)")

        elif 'su' in ds or 'su_user' in ds:
            ds['become_method'] = 'su'
            if 'su' in ds:
                ds['become'] = ds['su']
                del ds['su']

            if 'su_user' in ds:
                ds['become_user'] = ds['su_user']
                del ds['su_user']

            display.deprecated("Instead of su/su_user, use become/become_user and set become_method to 'su' (default is sudo)")

        return ds

    def set_become_defaults(self, become, become_method, become_user):
        ''' if we are becoming someone else, but some fields are unset,
            make sure they're initialized to the default config values

            Fix: the original only rebound its local parameters, which had no
            effect for callers. The resolved (become_method, become_user) pair
            is now returned; callers that ignore the return value are
            unaffected, so this is backward compatible.
        '''
        if become:
            if become_method is None:
                become_method = C.DEFAULT_BECOME_METHOD
            if become_user is None:
                become_user = C.DEFAULT_BECOME_USER
        return become_method, become_user

    def _get_attr_become(self):
        '''
        Override for the 'become' getattr fetcher, used from Base.
        '''
        # prefer parent-chain resolution when the mixin target supports it
        if hasattr(self, '_get_parent_attribute'):
            return self._get_parent_attribute('become')
        else:
            return self._attributes['become']

    def _get_attr_become_method(self):
        '''
        Override for the 'become_method' getattr fetcher, used from Base.
        '''
        if hasattr(self, '_get_parent_attribute'):
            return self._get_parent_attribute('become_method')
        else:
            return self._attributes['become_method']

    def _get_attr_become_user(self):
        '''
        Override for the 'become_user' getattr fetcher, used from Base.
        '''
        if hasattr(self, '_get_parent_attribute'):
            return self._get_parent_attribute('become_user')
        else:
            return self._attributes['become_user']