Example #1
class ActionBase(with_metaclass(ABCMeta, object)):
    '''
    This class is the base class for all action plugins, and defines
    code common to all actions. The base class handles the connection
    by putting/getting files and executing commands based on the current
    action in use.
    '''
    def __init__(self, task, connection, play_context, loader, templar,
                 shared_loader_obj):
        self._task = task
        self._connection = connection
        self._play_context = play_context
        self._loader = loader
        self._templar = templar
        self._shared_loader_obj = shared_loader_obj
        # Backwards compat: self._display isn't really needed, just import the global display and use that.
        self._display = display
        self._cleanup_remote_tmp = False

        self._supports_check_mode = True
        self._supports_async = False

    @abstractmethod
    def run(self, tmp=None, task_vars=None):
        """ Action Plugins should implement this method to perform their
        tasks.  Everything else in this base class is a helper method for the
        action plugin to do that.

        :kwarg tmp: Temporary directory.  Sometimes an action plugin sets up
            a temporary directory and then calls another module.  This parameter
            allows us to reuse the same directory for both.
        :kwarg task_vars: The variables (host vars, group vars, config vars,
            etc) associated with this task.
        :returns: dictionary of results from the module

        Implementors of action modules may find the following variables especially useful:

        * Module parameters.  These are stored in self._task.args
        """

        result = {}

        if self._task.async_val and not self._supports_async:
            raise AnsibleActionFail('async is not supported for this task.')
        elif self._play_context.check_mode and not self._supports_check_mode:
            raise AnsibleActionSkip(
                'check mode is not supported for this task.')
        elif self._task.async_val and self._play_context.check_mode:
            raise AnsibleActionFail(
                'check mode and async cannot be used on same task.')

        return result

    def _remote_file_exists(self, path):
        cmd = self._connection._shell.exists(path)
        result = self._low_level_execute_command(cmd=cmd, sudoable=True)
        return result['rc'] == 0

    def _configure_module(self, module_name, module_args, task_vars=None):
        '''
        Handles the loading and templating of the module code through the
        modify_module() function.
        '''
        if task_vars is None:
            task_vars = dict()

        # Search module path(s) for named module.
        for mod_type in self._connection.module_implementation_preferences:
            # Check to determine if PowerShell modules are supported, and apply
            # some fixes (hacks) to module name + args.
            if mod_type == '.ps1':
                # win_stat, win_file, and win_copy are not just like their
                # python counterparts but they are compatible enough for our
                # internal usage
                if module_name in ('stat', 'file', 'copy') and self._task.action != module_name:
                    module_name = 'win_%s' % module_name

                # Remove extra quotes surrounding path parameters before sending to module.
                if module_name in ('win_stat', 'win_file', 'win_copy', 'slurp') and module_args and \
                        hasattr(self._connection._shell, '_unquote'):
                    for key in ('src', 'dest', 'path'):
                        if key in module_args:
                            module_args[key] = self._connection._shell._unquote(module_args[key])

            module_path = self._shared_loader_obj.module_loader.find_plugin(
                module_name, mod_type)
            if module_path:
                break
        else:  # This is a for-else: http://bit.ly/1ElPkyg
            # Use Windows version of ping module to check module paths when
            # using a connection that supports .ps1 suffixes. We check specifically
            # for win_ping here, otherwise the code would look for ping.ps1
            if '.ps1' in self._connection.module_implementation_preferences:
                ping_module = 'win_ping'
            else:
                ping_module = 'ping'
            module_path2 = self._shared_loader_obj.module_loader.find_plugin(
                ping_module,
                self._connection.module_implementation_preferences)
            if module_path2 is not None:
                raise AnsibleError(
                    "The module %s was not found in configured module paths" %
                    (module_name))
            else:
                raise AnsibleError(
                    "The module %s was not found in configured module paths. "
                    "Additionally, core modules are missing. If this is a checkout, "
                    "run 'git pull --rebase' to correct this problem." %
                    (module_name))

        # insert shared code and arguments into the module
        final_environment = dict()
        self._compute_environment_string(final_environment)

        (module_data, module_style, module_shebang) = modify_module(
            module_name,
            module_path,
            module_args,
            task_vars=task_vars,
            templar=self._templar,
            module_compression=self._play_context.module_compression,
            async_timeout=self._task.async_val,
            become=self._play_context.become,
            become_method=self._play_context.become_method,
            become_user=self._play_context.become_user,
            become_password=self._play_context.become_pass,
            environment=final_environment)

        return (module_style, module_shebang, module_data, module_path)

    def _compute_environment_string(self, raw_environment_out=None):
        '''
        Builds the environment string to be used when executing the remote task.
        '''

        final_environment = dict()
        if self._task.environment is not None:
            environments = self._task.environment
            if not isinstance(environments, list):
                environments = [environments]

            # The order of environments matters: the parent's values are
            # merged first, so values set on the block and then on the task
            # 'win' in precedence
            for environment in environments:
                if environment is None or len(environment) == 0:
                    continue
                temp_environment = self._templar.template(environment)
                if not isinstance(temp_environment, dict):
                    raise AnsibleError(
                        "environment must be a dictionary, received %s (%s)" %
                        (temp_environment, type(temp_environment)))
                # very deliberately using update here instead of combine_vars, as
                # these environment settings should not need to merge sub-dicts
                final_environment.update(temp_environment)

        if len(final_environment) > 0:
            final_environment = self._templar.template(final_environment)

        if isinstance(raw_environment_out, dict):
            raw_environment_out.clear()
            raw_environment_out.update(final_environment)

        return self._connection._shell.env_prefix(**final_environment)

    def _early_needs_tmp_path(self):
        '''
        Determines if a temp path should be created before the action is executed.
        '''

        return getattr(self, 'TRANSFERS_FILES', False)

    def _is_pipelining_enabled(self, module_style, wrap_async=False):
        '''
        Determines whether pipelining is required and possible for this task.
        '''

        # all of these conditions must be true for pipelining to be used
        for condition in [
                self._connection.has_pipelining,
                self._play_context.pipelining or self._connection.always_pipeline_modules,  # pipelining enabled for the play, or the connection requires it (eg winrm)
                module_style == "new",  # old style modules do not support pipelining
                not C.DEFAULT_KEEP_REMOTE_FILES,  # keeping remote files requires a remote tmp dir, which pipelining avoids
                not wrap_async or self._connection.always_pipeline_modules,  # async does not normally support pipelining unless the connection requires it (eg winrm)
                self._play_context.become_method != 'su',  # su does not work with pipelining
                # FIXME: we might need to make become_method exclusion a configurable list
        ]:
            if not condition:
                return False

        return True

    def _make_tmp_path(self, remote_user=None):
        '''
        Create and return a temporary path on a remote box.
        '''

        if remote_user is None:
            remote_user = self._play_context.remote_user

        basefile = 'ansible-tmp-%s-%s' % (time.time(), random.randint(0, 2**48))
        use_system_tmp = False

        if self._play_context.become and self._play_context.become_user not in ('root', remote_user):
            use_system_tmp = True

        tmp_mode = 0o700
        tmpdir = self._remote_expand_user(self._play_context.remote_tmp_dir,
                                          sudoable=False)

        cmd = self._connection._shell.mkdtemp(basefile, use_system_tmp,
                                              tmp_mode, tmpdir)
        result = self._low_level_execute_command(cmd, sudoable=False)

        # error handling on this seems a little aggressive?
        if result['rc'] != 0:
            if result['rc'] == 5:
                output = 'Authentication failure.'
            elif result['rc'] == 255 and self._connection.transport in ('ssh',):

                if self._play_context.verbosity > 3:
                    output = u'SSH encountered an unknown error. The output was:\n%s%s' % (result['stdout'], result['stderr'])
                else:
                    output = (
                        u'SSH encountered an unknown error during the connection. '
                        'We recommend you re-run the command using -vvvv, which will enable SSH debugging output to help diagnose the issue'
                    )

            elif u'No space left on device' in result['stderr']:
                output = result['stderr']
            else:
                output = (
                    'Authentication or permission failure. '
                    'In some cases, you may have been able to authenticate and did not have permissions on the target directory. '
                    'Consider changing the remote temp path in ansible.cfg to a path rooted in "/tmp". '
                    'Failed command was: %s, exited with result %d' %
                    (cmd, result['rc']))
            if 'stdout' in result and result['stdout'] != u'':
                output = output + u", stdout output: %s" % result['stdout']
            if self._play_context.verbosity > 3 and 'stderr' in result and result['stderr'] != u'':
                output += u", stderr output: %s" % result['stderr']
            raise AnsibleConnectionFailure(output)
        else:
            self._cleanup_remote_tmp = True

        try:
            stdout_parts = result['stdout'].strip().split('%s=' % basefile, 1)
            tmpdir = self._connection._shell.join_path(stdout_parts[-1], u'').splitlines()[-1]
        except IndexError:
            # stdout was empty or just space, set to / to trigger error in next if
            tmpdir = '/'

        # Catch failure conditions, files should never be
        # written to locations in /.
        if tmpdir == '/':
            raise AnsibleError(
                'failed to resolve remote temporary directory from %s: `%s` returned empty string'
                % (basefile, cmd))

        return tmpdir

    def _should_remove_tmp_path(self, tmp_path):
        '''Determine if temporary path should be deleted or kept by user request/config'''

        return tmp_path and self._cleanup_remote_tmp and not C.DEFAULT_KEEP_REMOTE_FILES and "-tmp-" in tmp_path

    def _remove_tmp_path(self, tmp_path):
        '''Remove a temporary path we created. '''

        if self._should_remove_tmp_path(tmp_path):
            cmd = self._connection._shell.remove(tmp_path, recurse=True)
            # If we have gotten here we have a working ssh configuration.
            # If ssh breaks we could leave tmp directories out on the remote system.
            tmp_rm_res = self._low_level_execute_command(cmd, sudoable=False)

            tmp_rm_data = self._parse_returned_data(tmp_rm_res)
            if tmp_rm_data.get('rc', 0) != 0:
                display.warning(
                    'Error deleting remote temporary files (rc: %s, stderr: %s)'
                    % (tmp_rm_res.get('rc'),
                       tmp_rm_res.get('stderr', 'No error string available.')))

    def _transfer_file(self, local_path, remote_path):
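        ''' Copies a local file to the given remote path using the connection plugin. '''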
        self._connection.put_file(local_path, remote_path)
        return remote_path

    def _transfer_data(self, remote_path, data):
        '''
        Copies the module data out to the temporary module path.
        '''

        if isinstance(data, dict):
            data = jsonify(data)

        afd, afile = tempfile.mkstemp()
        afo = os.fdopen(afd, 'wb')
        try:
            data = to_bytes(data, errors='surrogate_or_strict')
            afo.write(data)
        except Exception as e:
            raise AnsibleError(
                "failure writing module data to temporary file for transfer: %s"
                % to_native(e))

        afo.flush()
        afo.close()

        try:
            self._transfer_file(afile, remote_path)
        finally:
            os.unlink(afile)

        return remote_path

    def _fixup_perms(self,
                     remote_path,
                     remote_user=None,
                     execute=True,
                     recursive=True):
        """
        We need the files we upload to be readable (and sometimes executable)
        by the user being sudo'd to but we want to limit other people's access
        (because the files could contain passwords or other private
        information.

        Deprecated in favor of _fixup_perms2. Ansible code has been updated to
        use _fixup_perms2. This code is maintained to provide partial support
        for custom actions (non-recursive mode only).

        """
        if remote_user is None:
            remote_user = self._play_context.remote_user

        display.deprecated(
            '_fixup_perms is deprecated. Use _fixup_perms2 instead.',
            version='2.4',
            removed=False)

        if recursive:
            raise AnsibleError(
                '_fixup_perms with recursive=True (the default) is no longer supported. '
                'Use _fixup_perms2 if support for previous releases is not required. '
                'Otherwise use _fixup_perms with recursive=False.')

        return self._fixup_perms2([remote_path], remote_user, execute)

    def _fixup_perms2(self, remote_paths, remote_user=None, execute=True):
        """
        We need the files we upload to be readable (and sometimes executable)
        by the user being sudo'd to but we want to limit other people's access
        (because the files could contain passwords or other private
        information.  We achieve this in one of these ways:

        * If no sudo is performed or the remote_user is sudo'ing to
          themselves, we don't have to change permissions.
        * If the remote_user sudo's to a privileged user (for instance, root),
          we don't have to change permissions
        * If the remote_user sudo's to an unprivileged user then we attempt to
          grant the unprivileged user access via file system acls.
        * If granting file system acls fails we try to change the owner of the
          file with chown which only works in case the remote_user is
          privileged or the remote systems allows chown calls by unprivileged
          users (e.g. HP-UX)
        * If the chown fails we can set the file to be world readable so that
          the second unprivileged user can read the file.
          Since this could allow other users to get access to private
          information we only do this ansible is configured with
          "allow_world_readable_tmpfiles" in the ansible.cfg
        """
        if remote_user is None:
            remote_user = self._play_context.remote_user

        if self._connection._shell.SHELL_FAMILY == 'powershell':
            # This won't work on Powershell as-is, so we'll just completely skip until
            # we have a need for it, at which point we'll have to do something different.
            return remote_paths

        if self._play_context.become and self._play_context.become_user and self._play_context.become_user not in (
                'root', remote_user):
            # Unprivileged user that's different than the ssh user.  Let's get
            # to work!

            # Try to use file system acls to make the files readable for sudo'd
            # user
            if execute:
                chmod_mode = 'rx'
                setfacl_mode = 'r-x'
            else:
                chmod_mode = 'rX'
                # NOTE: this form fails silently on freebsd.  We currently
                # never call _fixup_perms2() with execute=False but if we
                # start to we'll have to fix this.
                setfacl_mode = 'r-X'

            res = self._remote_set_user_facl(remote_paths,
                                             self._play_context.become_user,
                                             setfacl_mode)
            if res['rc'] != 0:
                # File system acls failed; let's try to use chown next
                # Set executable bit first as on some systems an
                # unprivileged user can use chown
                if execute:
                    res = self._remote_chmod(remote_paths, 'u+x')
                    if res['rc'] != 0:
                        raise AnsibleError(
                            'Failed to set file mode on remote temporary files (rc: {0}, err: {1})'
                            .format(res['rc'], to_native(res['stderr'])))

                res = self._remote_chown(remote_paths,
                                         self._play_context.become_user)
                if res['rc'] != 0 and remote_user == 'root':
                    # chown failed even though remote_user is root
                    raise AnsibleError(
                        'Failed to change ownership of the temporary files Ansible needs to create despite connecting as root. '
                        'Unprivileged become user would be unable to read the file.'
                    )
                elif res['rc'] != 0:
                    if C.ALLOW_WORLD_READABLE_TMPFILES:
                        # chown and fs acls failed -- do things this insecure
                        # way only if the user opted in in the config file
                        display.warning(
                            'Using world-readable permissions for temporary files Ansible needs to create when becoming an unprivileged user. '
                            'This may be insecure. For information on securing this, see '
                            'https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user'
                        )
                        res = self._remote_chmod(remote_paths,
                                                 'a+%s' % chmod_mode)
                        if res['rc'] != 0:
                            raise AnsibleError(
                                'Failed to set file mode on remote files (rc: {0}, err: {1})'
                                .format(res['rc'], to_native(res['stderr'])))
                    else:
                        raise AnsibleError(
                            'Failed to set permissions on the temporary files Ansible needs to create when becoming an unprivileged user '
                            '(rc: %s, err: %s). For information on working around this, see '
                            'https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user'
                            % (res['rc'], to_native(res['stderr'])))
        elif execute:
            # Can't depend on the file being transferred with execute permissions.
            # Only need user perms because no become was used here
            res = self._remote_chmod(remote_paths, 'u+x')
            if res['rc'] != 0:
                raise AnsibleError(
                    'Failed to set execute bit on remote files (rc: {0}, err: {1})'
                    .format(res['rc'], to_native(res['stderr'])))

        return remote_paths

    def _remote_chmod(self, paths, mode, sudoable=False):
        '''
        Issue a remote chmod command
        '''
        cmd = self._connection._shell.chmod(paths, mode)
        res = self._low_level_execute_command(cmd, sudoable=sudoable)
        return res

    def _remote_chown(self, paths, user, sudoable=False):
        '''
        Issue a remote chown command
        '''
        cmd = self._connection._shell.chown(paths, user)
        res = self._low_level_execute_command(cmd, sudoable=sudoable)
        return res

    def _remote_set_user_facl(self, paths, user, mode, sudoable=False):
        '''
        Issue a remote call to setfacl
        '''
        cmd = self._connection._shell.set_user_facl(paths, user, mode)
        res = self._low_level_execute_command(cmd, sudoable=sudoable)
        return res

    def _execute_remote_stat(self,
                             path,
                             all_vars,
                             follow,
                             tmp=None,
                             checksum=True):
        '''
        Get information from remote file.
        '''
        module_args = dict(
            path=path,
            follow=follow,
            get_checksum=checksum,
            checksum_algo='sha1',
        )
        mystat = self._execute_module(module_name='stat',
                                      module_args=module_args,
                                      task_vars=all_vars,
                                      tmp=tmp,
                                      delete_remote_tmp=(tmp is None),
                                      wrap_async=False)

        if mystat.get('failed'):
            msg = mystat.get('module_stderr')
            if not msg:
                msg = mystat.get('module_stdout')
            if not msg:
                msg = mystat.get('msg')
            raise AnsibleError(
                'Failed to get information on remote file (%s): %s' %
                (path, msg))

        if not mystat['stat']['exists']:
            # empty might be matched, 1 should never match, also backwards compatible
            mystat['stat']['checksum'] = '1'

        # happens sometimes when it is a dir and not on bsd
        if 'checksum' not in mystat['stat']:
            mystat['stat']['checksum'] = ''
        elif not isinstance(mystat['stat']['checksum'], string_types):
            raise AnsibleError(
                "Invalid checksum returned by stat: expected a string type but got %s"
                % type(mystat['stat']['checksum']))

        return mystat['stat']

    def _remote_checksum(self, path, all_vars, follow=False):
        '''
        Produces a remote checksum given a path.
        Returns a one-character code "0"-"5" instead of a checksum for
        specific errors; the codes are chosen so they can never collide
        with a real checksum:
        0 = unknown error
        1 = file does not exist, this might not be an error
        2 = permissions issue
        3 = it's a directory, not a file
        4 = stat module failed, likely due to not finding python
        5 = appropriate json module not found
        '''
        x = "0"  # unknown error has occurred
        try:
            remote_stat = self._execute_remote_stat(path,
                                                    all_vars,
                                                    follow=follow)
            if remote_stat['exists'] and remote_stat['isdir']:
                x = "3"  # its a directory not a file
            else:
                x = remote_stat['checksum']  # if 1, file is missing
        except AnsibleError as e:
            errormsg = to_text(e)
            if errormsg.endswith(u'Permission denied'):
                x = "2"  # cannot read file
            elif errormsg.endswith(u'MODULE FAILURE'):
                x = "4"  # python not found or module uncaught exception
            elif 'json' in errormsg or 'simplejson' in errormsg:
                x = "5"  # json or simplejson modules needed
        finally:
            return x  # pylint: disable=lost-exception

    def _remote_expand_user(self, path, sudoable=True):
        ''' takes a remote path and performs tilde expansion on the remote host '''
        if not path.startswith('~'):
            # FIXME: Windows paths may start with "~ instead of just ~
            return path

        # FIXME: Can't use os.path.sep for Windows paths.
        split_path = path.split(os.path.sep, 1)
        expand_path = split_path[0]
        if sudoable and expand_path == '~' and self._play_context.become and self._play_context.become_user:
            expand_path = '~%s' % self._play_context.become_user

        cmd = self._connection._shell.expand_user(expand_path)
        data = self._low_level_execute_command(cmd, sudoable=False)
        try:
            initial_fragment = data['stdout'].strip().splitlines()[-1]
        except IndexError:
            initial_fragment = None

        if not initial_fragment:
            # Something went wrong trying to expand the path remotely.  Return
            # the original string
            return path

        if len(split_path) > 1:
            return self._connection._shell.join_path(initial_fragment,
                                                     *split_path[1:])
        else:
            return initial_fragment

    def _strip_success_message(self, data):
        '''
        Removes the BECOME-SUCCESS message from the data.
        '''
        if data.strip().startswith('BECOME-SUCCESS-'):
            data = re.sub(r'^((\r)?\n)?BECOME-SUCCESS.*(\r)?\n', '', data)
        return data

    def _update_module_args(self, module_name, module_args, task_vars):
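        ''' Injects the internal _ansible_* control parameters into the module arguments. '''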

        # set check mode in the module arguments, if required
        if self._play_context.check_mode:
            if not self._supports_check_mode:
                raise AnsibleError(
                    "check mode is not supported for this operation")
            module_args['_ansible_check_mode'] = True
        else:
            module_args['_ansible_check_mode'] = False

        # set no log in the module arguments, if required
        module_args['_ansible_no_log'] = self._play_context.no_log or C.DEFAULT_NO_TARGET_SYSLOG

        # set debug in the module arguments, if required
        module_args['_ansible_debug'] = C.DEFAULT_DEBUG

        # let module know we are in diff mode
        module_args['_ansible_diff'] = self._play_context.diff

        # let module know our verbosity
        module_args['_ansible_verbosity'] = display.verbosity

        # give the module information about the ansible version
        module_args['_ansible_version'] = __version__

        # give the module information about its name
        module_args['_ansible_module_name'] = module_name

        # set the syslog facility to be used in the module
        module_args['_ansible_syslog_facility'] = task_vars.get(
            'ansible_syslog_facility', C.DEFAULT_SYSLOG_FACILITY)

        # let module know about filesystems that selinux treats specially
        module_args['_ansible_selinux_special_fs'] = C.DEFAULT_SELINUX_SPECIAL_FS

        # give the module the socket for persistent connections
        module_args['_ansible_socket'] = getattr(self._connection, 'socket_path')
        if not module_args['_ansible_socket']:
            module_args['_ansible_socket'] = task_vars.get('ansible_socket')

        # make sure all commands use the designated shell executable
        module_args['_ansible_shell_executable'] = self._play_context.executable

    def _update_connection_options(self, options, variables=None):
        ''' ensures connections have the appropriate information '''
        update = {}

        if getattr(self._connection, 'glob_option_vars', False):
            # if the connection allows for it, pass along any variables whose
            # names match the connection plugin's prefix
            if variables is not None:
                for varname in variables:
                    if varname.startswith('ansible_%s_' % self._connection._load_name):
                        update[varname] = variables[varname]

        # always override existing values with the explicitly passed options
        update.update(options)
        self._connection.set_options(update)

    def _execute_module(self,
                        module_name=None,
                        module_args=None,
                        tmp=None,
                        task_vars=None,
                        persist_files=False,
                        delete_remote_tmp=True,
                        wrap_async=False):
        '''
        Transfer and run a module along with its arguments.
        '''
        if task_vars is None:
            task_vars = dict()

        remote_module_path = None
        args_file_path = None
        remote_files = []

        # if a module name was not specified for this execution, use the action from the task
        if module_name is None:
            module_name = self._task.action
        if module_args is None:
            module_args = self._task.args

        self._update_module_args(module_name, module_args, task_vars)

        # FUTURE: refactor this along with module build process to better encapsulate "smart wrapper" functionality
        (module_style, shebang, module_data,
         module_path) = self._configure_module(module_name=module_name,
                                               module_args=module_args,
                                               task_vars=task_vars)
        display.vvv("Using module file %s" % module_path)
        if not shebang and module_style != 'binary':
            raise AnsibleError("module (%s) is missing interpreter line" %
                               module_name)

        if not self._is_pipelining_enabled(module_style, wrap_async):

            # we might need remote tmp dir
            if not tmp or 'tmp' not in tmp:
                tmp = self._make_tmp_path()

            remote_module_filename = self._connection._shell.get_remote_filename(module_path)
            remote_module_path = self._connection._shell.join_path(tmp, remote_module_filename)

        if module_style in ('old', 'non_native_want_json', 'binary'):
            # we'll also need a temp file to hold our module arguments
            args_file_path = self._connection._shell.join_path(tmp, 'args')

        if remote_module_path or module_style != 'new':
            display.debug("transferring module to remote %s" %
                          remote_module_path)
            if module_style == 'binary':
                self._transfer_file(module_path, remote_module_path)
            else:
                self._transfer_data(remote_module_path, module_data)
            if module_style == 'old':
                # we need to dump the module args to a k=v string in a file on
                # the remote system, which can be read and parsed by the module
                args_data = ""
                for k, v in iteritems(module_args):
                    args_data += '%s=%s ' % (k, shlex_quote(text_type(v)))
                self._transfer_data(args_file_path, args_data)
            elif module_style in ('non_native_want_json', 'binary'):
                self._transfer_data(args_file_path, json.dumps(module_args))
            display.debug("done transferring module to remote")

        environment_string = self._compute_environment_string()

        if tmp and remote_module_path:
            remote_files = [tmp, remote_module_path]

        if args_file_path:
            remote_files.append(args_file_path)

        sudoable = True
        in_data = None
        cmd = ""

        if wrap_async and not self._connection.always_pipeline_modules:
            # configure, upload, and chmod the async_wrapper module
            (async_module_style, shebang, async_module_data,
             async_module_path) = self._configure_module(
                 module_name='async_wrapper',
                 module_args=dict(),
                 task_vars=task_vars)
            async_module_remote_filename = self._connection._shell.get_remote_filename(async_module_path)
            remote_async_module_path = self._connection._shell.join_path(tmp, async_module_remote_filename)
            self._transfer_data(remote_async_module_path, async_module_data)
            remote_files.append(remote_async_module_path)

            async_limit = self._task.async_val
            async_jid = str(random.randint(0, 999999999999))

            # call the interpreter for async_wrapper directly
            # this permits use of a script for an interpreter on non-Linux platforms
            # TODO: re-implement async_wrapper as a regular module to avoid this special case
            interpreter = shebang.replace('#!', '').strip()
            async_cmd = [
                interpreter, remote_async_module_path, async_jid, async_limit,
                remote_module_path
            ]

            if environment_string:
                async_cmd.insert(0, environment_string)

            if args_file_path:
                async_cmd.append(args_file_path)
            else:
                # maintain a fixed number of positional parameters for async_wrapper
                async_cmd.append('_')

            if not self._should_remove_tmp_path(tmp):
                async_cmd.append("-preserve_tmp")

            cmd = " ".join(to_text(x) for x in async_cmd)

        else:

            if self._is_pipelining_enabled(module_style):
                in_data = module_data
            else:
                cmd = remote_module_path

            rm_tmp = None

            if self._should_remove_tmp_path(tmp) and not persist_files and delete_remote_tmp:
                if not self._play_context.become or self._play_context.become_user == 'root':
                    # not sudoing or sudoing to root, so can cleanup files in the same step
                    rm_tmp = tmp

            cmd = self._connection._shell.build_module_command(
                environment_string,
                shebang,
                cmd,
                arg_path=args_file_path,
                rm_tmp=rm_tmp).strip()

        # Fix permissions of the tmp path and tmp files. This should be called after all files have been transferred.
        if remote_files:
            # remove none/empty
            remote_files = [x for x in remote_files if x]
            self._fixup_perms2(remote_files, self._play_context.remote_user)

        # actually execute
        res = self._low_level_execute_command(cmd,
                                              sudoable=sudoable,
                                              in_data=in_data)

        # parse the main result
        data = self._parse_returned_data(res)

        # NOTE: INTERNAL KEYS ONLY ACCESSIBLE HERE
        # get internal info before cleaning
        tmpdir_delete = (not data.pop("_ansible_suppress_tmpdir_delete", False)
                         and wrap_async)

        # remove internal keys
        remove_internal_keys(data)

        # cleanup tmp?
        if ((self._play_context.become and self._play_context.become_user != 'root')
                and not persist_files and delete_remote_tmp) or tmpdir_delete:
            self._remove_tmp_path(tmp)

        # FIXME: for backwards compat, figure out if still makes sense
        if wrap_async:
            data['changed'] = True

        # pre-split stdout/stderr into lines if needed
        if 'stdout' in data and 'stdout_lines' not in data:
            # if the value is 'False', a default won't catch it.
            txt = data.get('stdout', None) or u''
            data['stdout_lines'] = txt.splitlines()
        if 'stderr' in data and 'stderr_lines' not in data:
            # if the value is 'False', a default won't catch it.
            txt = data.get('stderr', None) or u''
            data['stderr_lines'] = txt.splitlines()

        display.debug("done with _execute_module (%s, %s)" %
                      (module_name, module_args))
        return data

    def _parse_returned_data(self, res):
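        '''
        Parses module output from stdout: filters out any non-JSON lines
        (warning on each), loads the JSON result and wraps any returned
        ansible_facts as unsafe.
        '''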
        try:
            filtered_output, warnings = _filter_non_json_lines(
                res.get('stdout', u''))
            for w in warnings:
                display.warning(w)

            data = json.loads(filtered_output)

            if 'ansible_facts' in data and isinstance(data['ansible_facts'],
                                                      dict):
                data['ansible_facts'] = wrap_var(data['ansible_facts'])
            data['_ansible_parsed'] = True
        except ValueError:
            # not valid json, lets try to capture error
            data = dict(failed=True, _ansible_parsed=False)
            data['msg'] = "MODULE FAILURE"
            data['module_stdout'] = res.get('stdout', u'')
            if 'stderr' in res:
                data['module_stderr'] = res['stderr']
                if res['stderr'].startswith(u'Traceback'):
                    data['exception'] = res['stderr']
            if 'rc' in res:
                data['rc'] = res['rc']
        return data

    def _low_level_execute_command(self,
                                   cmd,
                                   sudoable=True,
                                   in_data=None,
                                   executable=None,
                                   encoding_errors='surrogate_then_replace',
                                   chdir=None):
        '''
        This is the function which executes the low level shell command, which
        may be commands to create/remove directories for temporary files, or to
        run the module code or python directly when pipelining.

        :kwarg encoding_errors: If the value returned by the command isn't
            utf-8 then we have to figure out how to transform it to unicode.
            If the value is just going to be displayed to the user (or
            discarded) then the default of 'surrogate_then_replace' is fine.
            If the data is used as a key or is going to be written back out to
            a file verbatim, then this won't work.  May have to use some sort
            of replacement strategy (python3 could use surrogateescape)
        :kwarg chdir: cd into this directory before executing the command.
        '''

        display.debug("_low_level_execute_command(): starting")
        #        if not cmd:
        #            # this can happen with powershell modules when there is no analog to a Windows command (like chmod)
        #            display.debug("_low_level_execute_command(): no command, exiting")
        #            return dict(stdout='', stderr='', rc=254)

        if chdir:
            display.debug(
                "_low_level_execute_command(): changing cwd to %s for this command"
                % chdir)
            cmd = self._connection._shell.append_command('cd %s' % chdir, cmd)

        allow_same_user = C.BECOME_ALLOW_SAME_USER
        same_user = self._play_context.become_user == self._play_context.remote_user
        if sudoable and self._play_context.become and (allow_same_user
                                                       or not same_user):
            display.debug(
                "_low_level_execute_command(): using become for this command")
            if self._connection.transport != 'network_cli' and self._play_context.become_method != 'enable':
                cmd = self._play_context.make_become_cmd(cmd,
                                                         executable=executable)

        if self._connection.allow_executable:
            if executable is None:
                executable = self._play_context.executable
                # mitigation for SSH race which can drop stdout (https://github.com/ansible/ansible/issues/13876)
                # only applied for the default executable to avoid interfering with the raw action
                cmd = self._connection._shell.append_command(cmd, 'sleep 0')
            if executable:
                cmd = executable + ' -c ' + shlex_quote(cmd)

        display.debug("_low_level_execute_command(): executing: %s" % (cmd, ))

        # Change directory to basedir of task for command execution when connection is local
        if self._connection.transport == 'local':
            cwd = os.getcwd()
            os.chdir(self._loader.get_basedir())
        try:
            rc, stdout, stderr = self._connection.exec_command(
                cmd, in_data=in_data, sudoable=sudoable)
        finally:
            if self._connection.transport == 'local':
                os.chdir(cwd)

        # stdout and stderr may be either a file-like or a bytes object.
        # Convert either one to a text type
        if isinstance(stdout, binary_type):
            out = to_text(stdout, errors=encoding_errors)
        elif not isinstance(stdout, text_type):
            out = to_text(b''.join(stdout.readlines()), errors=encoding_errors)
        else:
            out = stdout

        if isinstance(stderr, binary_type):
            err = to_text(stderr, errors=encoding_errors)
        elif not isinstance(stderr, text_type):
            err = to_text(b''.join(stderr.readlines()), errors=encoding_errors)
        else:
            err = stderr

        if rc is None:
            rc = 0

        # be sure to remove the BECOME-SUCCESS message now
        out = self._strip_success_message(out)

        display.debug(
            u"_low_level_execute_command() done: rc=%d, stdout=%s, stderr=%s" %
            (rc, out, err))
        return dict(rc=rc,
                    stdout=out,
                    stdout_lines=out.splitlines(),
                    stderr=err)

    def _get_diff_data(self, destination, source, task_vars, source_file=True):

        diff = {}
        display.debug("Going to peek to see if file has changed permissions")
        peek_result = self._execute_module(module_name='file',
                                           module_args=dict(path=destination,
                                                            diff_peek=True),
                                           task_vars=task_vars,
                                           persist_files=True)

        if not peek_result.get('failed', False) or peek_result.get('rc', 0) == 0:

            if peek_result.get('state') == 'absent':
                diff['before'] = ''
            elif peek_result.get('appears_binary'):
                diff['dst_binary'] = 1
            elif peek_result.get('size') and C.MAX_FILE_SIZE_FOR_DIFF > 0 and \
                    peek_result['size'] > C.MAX_FILE_SIZE_FOR_DIFF:
                diff['dst_larger'] = C.MAX_FILE_SIZE_FOR_DIFF
            else:
                display.debug("Slurping the file %s" % source)
                dest_result = self._execute_module(
                    module_name='slurp',
                    module_args=dict(path=destination),
                    task_vars=task_vars,
                    persist_files=True)
                if 'content' in dest_result:
                    dest_contents = dest_result['content']
                    if dest_result['encoding'] == 'base64':
                        dest_contents = base64.b64decode(dest_contents)
                    else:
                        raise AnsibleError(
                            "unknown encoding in content option, failed: %s" %
                            dest_result)
                    diff['before_header'] = destination
                    diff['before'] = dest_contents

            if source_file:
                st = os.stat(source)
                if C.MAX_FILE_SIZE_FOR_DIFF > 0 and st[stat.ST_SIZE] > C.MAX_FILE_SIZE_FOR_DIFF:
                    diff['src_larger'] = C.MAX_FILE_SIZE_FOR_DIFF
                else:
                    display.debug("Reading local copy of the file %s" % source)
                    try:
                        with open(source, 'rb') as src:
                            src_contents = src.read()
                    except Exception as e:
                        raise AnsibleError(
                            "Unexpected error while reading source (%s) for diff: %s "
                            % (source, str(e)))

                    if b"\x00" in src_contents:
                        diff['src_binary'] = 1
                    else:
                        diff['after_header'] = source
                        diff['after'] = src_contents
            else:
                display.debug("source of file passed in")
                diff['after_header'] = 'dynamically generated'
                diff['after'] = source

        if self._play_context.no_log:
            if 'before' in diff:
                diff["before"] = ""
            if 'after' in diff:
                diff["after"] = " [[ Diff output has been hidden because 'no_log: true' was specified for this result ]]\n"

        return diff

    def _find_needle(self, dirname, needle):
        '''
            find a needle in a haystack of paths, optionally using 'dirname' as a subdir.
            This will build the ordered list of paths to search and pass them to dwim
            to get back the first existing file found.
        '''

        # dwim already deals with playbook basedirs
        path_stack = self._task.get_search_path()

        # if missing it will raise a file not found exception
        return self._loader.path_dwim_relative_stack(path_stack, dirname,
                                                     needle)
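
For reference, a concrete action plugin built on this base class typically looks like the minimal sketch below. It is hypothetical (the 'ping' module name and the TRANSFERS_FILES value are illustrative, not taken from the code above): it runs the base run() for the shared async/check-mode guards and then delegates to _execute_module().

class ActionModule(ActionBase):

    TRANSFERS_FILES = False  # no remote tmp path is needed before execution

    def run(self, tmp=None, task_vars=None):
        if task_vars is None:
            task_vars = dict()

        # run the shared guards (async/check-mode support) from ActionBase
        result = super(ActionModule, self).run(tmp, task_vars)

        # transfer and execute the real module with the task's own arguments
        result.update(self._execute_module(module_name='ping',
                                           module_args=self._task.args,
                                           task_vars=task_vars,
                                           tmp=tmp))
        return result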
Example #2
class TerminalBase(with_metaclass(ABCMeta, object)):
    '''
    A base class for implementing CLI connections
    '''

    # compiled regular expressions that match the terminal prompt on stdout
    terminal_stdout_re = []

    # compiled regular expressions that match error output on stderr
    terminal_stderr_re = []

    # compiled regular expressions used to strip ANSI codes
    ansi_re = [re.compile(r'(\x1b\[\?1h\x1b=)'), re.compile(r'\x08.')]

    def __init__(self, connection):
        self._connection = connection

    def _exec_cli_command(self, cmd, check_rc=True):
        """Executes a CLI command on the device"""
        rc, out, err = self._connection.exec_command(cmd)
        if check_rc and rc != 0:
            raise AnsibleConnectionFailure(err)
        return rc, out, err

    def _get_prompt(self):
        """ Returns the current prompt from the device"""
        for cmd in ['\n', 'prompt()']:
            rc, out, err = self._exec_cli_command(cmd)
        return out

    def on_open_shell(self):
        """Called after the SSH session is established

        This method is called right after invoke_shell() is called on the
        Paramiko SSHClient instance.  It provides an opportunity to set up
        terminal parameters, such as disabling paging.
        """
        pass

    def on_close_shell(self):
        """Called before the connection is closed

        This method gets called once the connection close has been requested
        but before the connection is actually closed.  It provides an
        opportunity to clean up any terminal resources before the shell is
        actually closed.
        """
        pass

    def on_authorize(self, passwd=None):
        """Called when privilege escalation is requested

        This method is called when privilege escalation is requested by
        setting become to True in the play context.  It is the responsibility
        of the terminal plugin to actually perform the escalation, such as
        entering `enable` mode.
        """
        pass

    def on_deauthorize(self):
        """Called when privilege de-escalation is requested

        This method is called when the privilege changes from escalated
        (become=True) to non-escalated (become=False).  It is the
        responsibility of this method to actually perform the de-escalation
        procedure.
        """
        pass
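
A concrete terminal plugin fills in the class attributes and hooks above. The sketch below is hypothetical, modeled on common network-OS CLIs; the prompt and error regexes and the 'terminal length 0' command are illustrative assumptions, not part of the base class:

class TerminalModule(TerminalBase):

    # match a typical "hostname>" / "hostname#" prompt on stdout
    terminal_stdout_re = [
        re.compile(br'[\r\n]?[\w+\-\.:\/\[\]]+(?:\([^\)]+\)){0,3}(?:>|#) ?$'),
    ]

    # match common CLI error strings on stderr
    terminal_stderr_re = [
        re.compile(br'% ?Error'),
        re.compile(br'% ?Bad secret'),
    ]

    def on_open_shell(self):
        # disable paging so long command output is not stopped at a --More-- prompt
        self._exec_cli_command(b'terminal length 0')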
Example #3
class CLI(with_metaclass(ABCMeta, object)):
    ''' code behind bin/ansible* programs '''

    VALID_ACTIONS = []

    _ITALIC = re.compile(r"I\(([^)]+)\)")
    _BOLD = re.compile(r"B\(([^)]+)\)")
    _MODULE = re.compile(r"M\(([^)]+)\)")
    _URL = re.compile(r"U\(([^)]+)\)")
    _CONST = re.compile(r"C\(([^)]+)\)")

    PAGER = 'less'

    # -F (quit-if-one-screen) -R (allow raw ansi control chars)
    # -S (chop long lines) -X (disable termcap init and de-init)
    LESS_OPTS = 'FRSX'
    SKIP_INVENTORY_DEFAULTS = False

    def __init__(self, args, callback=None):
        """
        Base init method for all command line programs
        """

        self.args = args
        self.options = None
        self.parser = None
        self.action = None
        self.callback = callback

    def set_action(self):
        """
        Get the action the user wants to execute from the sys argv list.
        """
        for i in range(0, len(self.args)):
            arg = self.args[i]
            if arg in self.VALID_ACTIONS:
                self.action = arg
                del self.args[i]
                break

        if not self.action:
            # if we're asked for help or version, we don't need an action.
            # have to use a special purpose Option Parser to figure that out as
            # the standard OptionParser throws an error for unknown options and
            # without knowing action, we only know of a subset of the options
            # that could be legal for this command
            tmp_parser = InvalidOptsParser(self.parser)
            tmp_options, tmp_args = tmp_parser.parse_args(self.args)
            if not (hasattr(tmp_options, 'help') and tmp_options.help) and not (hasattr(tmp_options, 'version') and tmp_options.version):
                raise AnsibleOptionsError("Missing required action")

    def execute(self):
        """
        Actually runs a child defined method using the execute_<action> pattern
        """
        fn = getattr(self, "execute_%s" % self.action)
        fn()

    @abstractmethod
    def run(self):
        """Run the ansible command

        Subclasses must implement this method.  It does the actual work of
        running an Ansible command.
        """

        display.vv(to_text(self.parser.get_version()))

        if C.CONFIG_FILE:
            display.v(u"Using %s as config file" % to_text(C.CONFIG_FILE))
        else:
            display.v(u"No config file found; using defaults")

        # warn about deprecated config options
        for deprecated in C.config.DEPRECATED:
            name = deprecated[0]
            why = deprecated[1]['why']
            if 'alternatives' in deprecated[1]:
                alt = ', use %s instead' % deprecated[1]['alternatives']
            else:
                alt = ''
            ver = deprecated[1]['version']
            display.deprecated("%s option, %s %s" % (name, why, alt), version=ver)

    @staticmethod
    def split_vault_id(vault_id):
        # return (before_@, after_@)
        # if no @, return whole string as after_
        if '@' not in vault_id:
            return (None, vault_id)

        parts = vault_id.split('@', 1)
        ret = tuple(parts)
        return ret

    @staticmethod
    def build_vault_ids(vault_ids, vault_password_files=None,
                        ask_vault_pass=None, create_new_password=None,
                        auto_prompt=True):
        vault_password_files = vault_password_files or []
        vault_ids = vault_ids or []

        # convert vault_password_files into vault_ids slugs
        for password_file in vault_password_files:
            id_slug = u'%s@%s' % (C.DEFAULT_VAULT_IDENTITY, password_file)

            # note this makes --vault-id higher precedence than --vault-password-file
            # if we want to intertwingle them in order we probably need a cli callback to populate vault_ids
            # used by --vault-id and --vault-password-file
            vault_ids.append(id_slug)

        # if an action needs an encrypt password (create_new_password=True) and we don't
        # have other secrets set up, then automatically add a password prompt as well.
        # prompts can't/shouldn't work without a tty, so don't add prompt secrets
        if ask_vault_pass or (not vault_ids and auto_prompt):

            id_slug = u'%s@%s' % (C.DEFAULT_VAULT_IDENTITY, u'prompt_ask_vault_pass')
            vault_ids.append(id_slug)

        return vault_ids

    # TODO: remove the now unused args
    @staticmethod
    def setup_vault_secrets(loader, vault_ids, vault_password_files=None,
                            ask_vault_pass=None, create_new_password=False,
                            auto_prompt=True):
        # list of tuples
        vault_secrets = []

        # Depending on the vault_id value (including how --ask-vault-pass / --vault-password-file create a vault_id)
        # we need to show different prompts. This is for compat with older Towers that expect a
        # certain vault password prompt format, so the 'prompt_ask_vault_pass' vault_id gets the old format.
        prompt_formats = {}

        # If there are configured default vault identities, they are considered 'first'
        # so we prepend them to vault_ids (from cli) here

        vault_password_files = vault_password_files or []
        if C.DEFAULT_VAULT_PASSWORD_FILE:
            vault_password_files.append(C.DEFAULT_VAULT_PASSWORD_FILE)

        if create_new_password:
            prompt_formats['prompt'] = ['New vault password (%(vault_id)s): ',
                                        'Confirm new vault password (%(vault_id)s): ']
            # 2.3 format prompts for --ask-vault-pass
            prompt_formats['prompt_ask_vault_pass'] = ['New Vault password: ',
                                                       'Confirm New Vault password: ']
        else:
            prompt_formats['prompt'] = ['Vault password (%(vault_id)s): ']
            # The format when we use just --ask-vault-pass needs to match 'Vault password:\s*?$'
            prompt_formats['prompt_ask_vault_pass'] = ['Vault password: ']

        vault_ids = CLI.build_vault_ids(vault_ids,
                                        vault_password_files,
                                        ask_vault_pass,
                                        create_new_password,
                                        auto_prompt=auto_prompt)

        for vault_id_slug in vault_ids:
            vault_id_name, vault_id_value = CLI.split_vault_id(vault_id_slug)
            if vault_id_value in ['prompt', 'prompt_ask_vault_pass']:

                # --vault-id some_name@prompt_ask_vault_pass --vault-id other_name@prompt_ask_vault_pass will be a little
                # confusing since it will use the old format without the vault id in the prompt
                built_vault_id = vault_id_name or C.DEFAULT_VAULT_IDENTITY

                # choose the prompt based on --vault-id=prompt or --ask-vault-pass. --ask-vault-pass
                # always gets the old format for Tower compatibility.
                # ie, we used --ask-vault-pass, so we need to use the old vault password prompt
                # format since Tower needs to match on that format.
                prompted_vault_secret = PromptVaultSecret(prompt_formats=prompt_formats[vault_id_value],
                                                          vault_id=built_vault_id)

                # an empty or invalid password from the prompt will warn before
                # the error is re-raised
                try:
                    prompted_vault_secret.load()
                except AnsibleError as exc:
                    display.warning('Error in vault password prompt (%s): %s' % (vault_id_name, exc))
                    raise

                vault_secrets.append((built_vault_id, prompted_vault_secret))

                # update loader with new secrets incrementally, so we can load a vault password
                # that is encrypted with a vault secret provided earlier
                loader.set_vault_secrets(vault_secrets)
                continue

            # assuming anything else is a password file
            display.vvvvv('Reading vault password file: %s' % vault_id_value)
            # read vault_pass from a file
            file_vault_secret = get_file_vault_secret(filename=vault_id_value,
                                                      vault_id=vault_id_name,
                                                      loader=loader)

            # an invalid password file will error globally
            try:
                file_vault_secret.load()
            except AnsibleError as exc:
                display.warning('Error in vault password file loading (%s): %s' % (vault_id_name, exc))
                raise

            if vault_id_name:
                vault_secrets.append((vault_id_name, file_vault_secret))
            else:
                vault_secrets.append((C.DEFAULT_VAULT_IDENTITY, file_vault_secret))

            # update loader with as-yet-known vault secrets
            loader.set_vault_secrets(vault_secrets)

        return vault_secrets
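
    # A minimal usage sketch (the file path and label are hypothetical, and no
    # default vault password file is assumed to be configured); each entry of the
    # returned list pairs a vault id with a loaded secret object:
    #
    #   >>> loader = DataLoader()
    #   >>> CLI.setup_vault_secrets(loader, vault_ids=[u'prod@/tmp/prod_vault.txt'])
    #   [(u'prod', <FileVaultSecret ...>)]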

    def ask_passwords(self):
        ''' prompt for connection and become passwords if needed '''

        op = self.options
        sshpass = None
        becomepass = None
        become_prompt = ''

        become_prompt_method = "BECOME" if C.AGNOSTIC_BECOME_PROMPT else op.become_method.upper()

        try:
            if op.ask_pass:
                sshpass = getpass.getpass(prompt="SSH password: ")
                become_prompt = "%s password[defaults to SSH password]: " % become_prompt_method
                if sshpass:
                    sshpass = to_bytes(sshpass, errors='strict', nonstring='simplerepr')
            else:
                become_prompt = "%s password: " % become_prompt_method

            if op.become_ask_pass:
                becomepass = getpass.getpass(prompt=become_prompt)
                if op.ask_pass and becomepass == '':
                    becomepass = sshpass
                if becomepass:
                    becomepass = to_bytes(becomepass)
        except EOFError:
            pass

        return (sshpass, becomepass)

    def normalize_become_options(self):
        ''' this keeps backwards compatibility with sudo/su self.options '''
        self.options.become_ask_pass = self.options.become_ask_pass or self.options.ask_sudo_pass or self.options.ask_su_pass or C.DEFAULT_BECOME_ASK_PASS
        self.options.become_user = self.options.become_user or self.options.sudo_user or self.options.su_user or C.DEFAULT_BECOME_USER

        def _dep(which):
            display.deprecated('The %s command line option has been deprecated in favor of the "become" command line arguments' % which, '2.9')

        if self.options.become:
            pass
        elif self.options.sudo:
            self.options.become = True
            self.options.become_method = 'sudo'
            _dep('sudo')
        elif self.options.su:
            self.options.become = True
            self.options.become_method = 'su'
            _dep('su')

        # other deprecations:
        if self.options.ask_sudo_pass or self.options.sudo_user:
            _dep('sudo')
        if self.options.ask_su_pass or self.options.su_user:
            _dep('su')

    def validate_conflicts(self, vault_opts=False, runas_opts=False, fork_opts=False, vault_rekey_opts=False):
        ''' check for conflicting options '''

        op = self.options

        if vault_opts:
            # Check for vault related conflicts
            if (op.ask_vault_pass and op.vault_password_files):
                self.parser.error("--ask-vault-pass and --vault-password-file are mutually exclusive")

        if vault_rekey_opts:
            if (op.new_vault_id and op.new_vault_password_file):
                self.parser.error("--new-vault-password-file and --new-vault-id are mutually exclusive")

        if runas_opts:
            # Check for privilege escalation conflicts
            if ((op.su or op.su_user) and (op.sudo or op.sudo_user) or
                    (op.su or op.su_user) and (op.become or op.become_user) or
                    (op.sudo or op.sudo_user) and (op.become or op.become_user)):

                self.parser.error("Sudo arguments ('--sudo', '--sudo-user', and '--ask-sudo-pass') and su arguments ('--su', '--su-user', and '--ask-su-pass') "
                                  "and become arguments ('--become', '--become-user', and '--ask-become-pass') are exclusive of each other")

        if fork_opts:
            if op.forks < 1:
                self.parser.error("The number of processes (--forks) must be >= 1")

    @staticmethod
    def unfrack_paths(option, opt, value, parser):
        paths = getattr(parser.values, option.dest)
        if paths is None:
            paths = []

        if isinstance(value, string_types):
            paths[:0] = [unfrackpath(x) for x in value.split(os.pathsep) if x]
        elif isinstance(value, list):
            paths[:0] = [unfrackpath(x) for x in value if x]
        else:
            pass  # FIXME: should we raise options error?

        setattr(parser.values, option.dest, paths)

    @staticmethod
    def unfrack_path(option, opt, value, parser):
        if value != '-':
            setattr(parser.values, option.dest, unfrackpath(value))
        else:
            setattr(parser.values, option.dest, value)

    @staticmethod
    def base_parser(usage="", output_opts=False, runas_opts=False, meta_opts=False, runtask_opts=False, vault_opts=False, module_opts=False,
                    async_opts=False, connect_opts=False, subset_opts=False, check_opts=False, inventory_opts=False, epilog=None, fork_opts=False,
                    runas_prompt_opts=False, desc=None, basedir_opts=False, vault_rekey_opts=False):
        ''' create an options parser for most ansible scripts '''

        # base opts
        parser = SortedOptParser(usage, version=CLI.version("%prog"), description=desc, epilog=epilog)
        parser.add_option('-v', '--verbose', dest='verbosity', default=C.DEFAULT_VERBOSITY, action="count",
                          help="verbose mode (-vvv for more, -vvvv to enable connection debugging)")

        if inventory_opts:
            parser.add_option('-i', '--inventory', '--inventory-file', dest='inventory', action="append",
                              help="specify inventory host path or comma separated host list. --inventory-file is deprecated")
            parser.add_option('--list-hosts', dest='listhosts', action='store_true',
                              help='outputs a list of matching hosts; does not execute anything else')
            parser.add_option('-l', '--limit', default=C.DEFAULT_SUBSET, dest='subset',
                              help='further limit selected hosts to an additional pattern')

        if module_opts:
            parser.add_option('-M', '--module-path', dest='module_path', default=None,
                              help="prepend colon-separated path(s) to module library (default=%s)" % C.DEFAULT_MODULE_PATH,
                              action="callback", callback=CLI.unfrack_paths, type='str')
        if runtask_opts:
            parser.add_option('-e', '--extra-vars', dest="extra_vars", action="append",
                              help="set additional variables as key=value or YAML/JSON, if filename prepend with @", default=[])

        if fork_opts:
            parser.add_option('-f', '--forks', dest='forks', default=C.DEFAULT_FORKS, type='int',
                              help="specify number of parallel processes to use (default=%s)" % C.DEFAULT_FORKS)

        if vault_opts:
            parser.add_option('--ask-vault-pass', default=C.DEFAULT_ASK_VAULT_PASS, dest='ask_vault_pass', action='store_true',
                              help='ask for vault password')
            parser.add_option('--vault-password-file', default=[], dest='vault_password_files',
                              help="vault password file", action="callback", callback=CLI.unfrack_paths, type='string')
            parser.add_option('--vault-id', default=[], dest='vault_ids', action='append', type='string',
                              help='the vault identity to use')

        if vault_rekey_opts:
            parser.add_option('--new-vault-password-file', default=None, dest='new_vault_password_file',
                              help="new vault password file for rekey", action="callback", callback=CLI.unfrack_path, type='string')
            parser.add_option('--new-vault-id', default=None, dest='new_vault_id', type='string',
                              help='the new vault identity to use for rekey')

        if subset_opts:
            parser.add_option('-t', '--tags', dest='tags', default=C.TAGS_RUN, action='append',
                              help="only run plays and tasks tagged with these values")
            parser.add_option('--skip-tags', dest='skip_tags', default=C.TAGS_SKIP, action='append',
                              help="only run plays and tasks whose tags do not match these values")

        if output_opts:
            parser.add_option('-o', '--one-line', dest='one_line', action='store_true',
                              help='condense output')
            parser.add_option('-t', '--tree', dest='tree', default=None,
                              help='log output to this directory')

        if connect_opts:
            connect_group = optparse.OptionGroup(parser, "Connection Options", "control as whom and how to connect to hosts")
            connect_group.add_option('-k', '--ask-pass', default=C.DEFAULT_ASK_PASS, dest='ask_pass', action='store_true',
                                     help='ask for connection password')
            connect_group.add_option('--private-key', '--key-file', default=C.DEFAULT_PRIVATE_KEY_FILE, dest='private_key_file',
                                     help='use this file to authenticate the connection', action="callback", callback=CLI.unfrack_path, type='string')
            connect_group.add_option('-u', '--user', default=C.DEFAULT_REMOTE_USER, dest='remote_user',
                                     help='connect as this user (default=%s)' % C.DEFAULT_REMOTE_USER)
            connect_group.add_option('-c', '--connection', dest='connection', default=C.DEFAULT_TRANSPORT,
                                     help="connection type to use (default=%s)" % C.DEFAULT_TRANSPORT)
            connect_group.add_option('-T', '--timeout', default=C.DEFAULT_TIMEOUT, type='int', dest='timeout',
                                     help="override the connection timeout in seconds (default=%s)" % C.DEFAULT_TIMEOUT)
            connect_group.add_option('--ssh-common-args', default='', dest='ssh_common_args',
                                     help="specify common arguments to pass to sftp/scp/ssh (e.g. ProxyCommand)")
            connect_group.add_option('--sftp-extra-args', default='', dest='sftp_extra_args',
                                     help="specify extra arguments to pass to sftp only (e.g. -f, -l)")
            connect_group.add_option('--scp-extra-args', default='', dest='scp_extra_args',
                                     help="specify extra arguments to pass to scp only (e.g. -l)")
            connect_group.add_option('--ssh-extra-args', default='', dest='ssh_extra_args',
                                     help="specify extra arguments to pass to ssh only (e.g. -R)")

            parser.add_option_group(connect_group)

        runas_group = None
        rg = optparse.OptionGroup(parser, "Privilege Escalation Options", "control how and which user you become as on target hosts")
        if runas_opts:
            runas_group = rg
            # the privileged user defaults to root later on, so that we can detect here whether the option was explicitly given
            runas_group.add_option("-s", "--sudo", default=C.DEFAULT_SUDO, action="store_true", dest='sudo',
                                   help="run operations with sudo (nopasswd) (deprecated, use become)")
            runas_group.add_option('-U', '--sudo-user', dest='sudo_user', default=None,
                                   help='desired sudo user (default=root) (deprecated, use become)')
            runas_group.add_option('-S', '--su', default=C.DEFAULT_SU, action='store_true',
                                   help='run operations with su (deprecated, use become)')
            runas_group.add_option('-R', '--su-user', default=None,
                                   help='run operations with su as this user (default=%s) (deprecated, use become)' % C.DEFAULT_SU_USER)

            # consolidated privilege escalation (become)
            runas_group.add_option("-b", "--become", default=C.DEFAULT_BECOME, action="store_true", dest='become',
                                   help="run operations with become (does not imply password prompting)")
            runas_group.add_option('--become-method', dest='become_method', default=C.DEFAULT_BECOME_METHOD, type='choice', choices=C.BECOME_METHODS,
                                   help="privilege escalation method to use (default=%s), valid choices: [ %s ]" %
                                   (C.DEFAULT_BECOME_METHOD, ' | '.join(C.BECOME_METHODS)))
            runas_group.add_option('--become-user', default=None, dest='become_user', type='string',
                                   help='run operations as this user (default=%s)' % C.DEFAULT_BECOME_USER)

        if runas_opts or runas_prompt_opts:
            if not runas_group:
                runas_group = rg
            runas_group.add_option('--ask-sudo-pass', default=C.DEFAULT_ASK_SUDO_PASS, dest='ask_sudo_pass', action='store_true',
                                   help='ask for sudo password (deprecated, use become)')
            runas_group.add_option('--ask-su-pass', default=C.DEFAULT_ASK_SU_PASS, dest='ask_su_pass', action='store_true',
                                   help='ask for su password (deprecated, use become)')
            runas_group.add_option('-K', '--ask-become-pass', default=False, dest='become_ask_pass', action='store_true',
                                   help='ask for privilege escalation password')

        if runas_group:
            parser.add_option_group(runas_group)

        if async_opts:
            parser.add_option('-P', '--poll', default=C.DEFAULT_POLL_INTERVAL, type='int', dest='poll_interval',
                              help="set the poll interval if using -B (default=%s)" % C.DEFAULT_POLL_INTERVAL)
            parser.add_option('-B', '--background', dest='seconds', type='int', default=0,
                              help='run asynchronously, failing after X seconds (default=N/A)')

        if check_opts:
            parser.add_option("-C", "--check", default=False, dest='check', action='store_true',
                              help="don't make any changes; instead, try to predict some of the changes that may occur")
            parser.add_option('--syntax-check', dest='syntax', action='store_true',
                              help="perform a syntax check on the playbook, but do not execute it")
            parser.add_option("-D", "--diff", default=C.DIFF_ALWAYS, dest='diff', action='store_true',
                              help="when changing (small) files and templates, show the differences in those files; works great with --check")

        if meta_opts:
            parser.add_option('--force-handlers', default=C.DEFAULT_FORCE_HANDLERS, dest='force_handlers', action='store_true',
                              help="run handlers even if a task fails")
            parser.add_option('--flush-cache', dest='flush_cache', action='store_true',
                              help="clear the fact cache for every host in inventory")

        if basedir_opts:
            parser.add_option('--playbook-dir', default=None, dest='basedir', action='store',
                              help="Since this tool does not use playbooks, use this as a subsitute playbook directory."
                                   "This sets the relative path for many features including roles/ group_vars/ etc.")
        return parser
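
    # Sketch: a subclass typically builds its parser via base_parser and then
    # parses argv (the flags and values shown here are illustrative):
    #
    #   parser = CLI.base_parser(usage="usage: %prog [options]",
    #                            inventory_opts=True, vault_opts=True, fork_opts=True)
    #   options, args = parser.parse_args(['-i', 'hosts.ini', '--forks', '10'])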

    @abstractmethod
    def parse(self):
        """Parse the command line args

        This method parses the command line arguments.  It uses the parser
        stored in the self.parser attribute and saves the args and options in
        self.args and self.options respectively.

        Subclasses need to implement this method.  They will usually create
        a base_parser, add their own options to the base_parser, and then call
        this method to do the actual parsing.  An implementation will look
        something like this::

            def parse(self):
                parser = super(MyCLI, self).base_parser(usage="My Ansible CLI", inventory_opts=True)
                parser.add_option('--my-option', dest='my_option', action='store')
                self.parser = parser
                super(MyCLI, self).parse()
                # If some additional transformations are needed for the
                # arguments and options, do it here.
        """

        self.options, self.args = self.parser.parse_args(self.args[1:])

        # process tags
        if hasattr(self.options, 'tags') and not self.options.tags:
            # optparse defaults don't do what's expected here
            self.options.tags = ['all']
        if hasattr(self.options, 'tags') and self.options.tags:
            tags = set()
            for tag_set in self.options.tags:
                for tag in tag_set.split(u','):
                    tags.add(tag.strip())
            self.options.tags = list(tags)

        # process skip_tags
        if hasattr(self.options, 'skip_tags') and self.options.skip_tags:
            skip_tags = set()
            for tag_set in self.options.skip_tags:
                for tag in tag_set.split(u','):
                    skip_tags.add(tag.strip())
            self.options.skip_tags = list(skip_tags)
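
        # e.g. '--tags foo,bar --tags baz' yields options.tags == ['foo', 'bar', 'baz']
        # (in arbitrary order, since the values pass through a set)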

        # process inventory options except for CLIs that require their own processing
        if hasattr(self.options, 'inventory') and not self.SKIP_INVENTORY_DEFAULTS:

            if self.options.inventory:

                # should always be a list
                if isinstance(self.options.inventory, string_types):
                    self.options.inventory = [self.options.inventory]

                # Ensure full paths when needed
                self.options.inventory = [unfrackpath(opt, follow=False) if ',' not in opt else opt for opt in self.options.inventory]
            else:
                self.options.inventory = C.DEFAULT_HOST_LIST

    @staticmethod
    def version(prog):
        ''' return ansible version '''
        result = "{0} {1}".format(prog, __version__)
        gitinfo = CLI._gitinfo()
        if gitinfo:
            result = result + " {0}".format(gitinfo)
        result += "\n  config file = %s" % C.CONFIG_FILE
        if C.DEFAULT_MODULE_PATH is None:
            cpath = "Default w/o overrides"
        else:
            cpath = C.DEFAULT_MODULE_PATH
        result = result + "\n  configured module search path = %s" % cpath
        result = result + "\n  ansible python module location = %s" % ':'.join(ansible.__path__)
        result = result + "\n  executable location = %s" % sys.argv[0]
        result = result + "\n  python version = %s" % ''.join(sys.version.splitlines())
        return result

    @staticmethod
    def version_info(gitinfo=False):
        ''' return full ansible version info '''
        if gitinfo:
            # expensive call, use with care
            ansible_version_string = CLI.version('')
        else:
            ansible_version_string = __version__
        ansible_version = ansible_version_string.split()[0]
        ansible_versions = ansible_version.split('.')
        for counter in range(len(ansible_versions)):
            if ansible_versions[counter] == "":
                ansible_versions[counter] = 0
            try:
                ansible_versions[counter] = int(ansible_versions[counter])
            except Exception:
                pass
        if len(ansible_versions) < 3:
            for counter in range(len(ansible_versions), 3):
                ansible_versions.append(0)
        return {'string': ansible_version_string.strip(),
                'full': ansible_version,
                'major': ansible_versions[0],
                'minor': ansible_versions[1],
                'revision': ansible_versions[2]}
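
    # e.g. with __version__ == '2.7.0.dev0' this returns roughly:
    #   {'string': '2.7.0.dev0', 'full': '2.7.0.dev0',
    #    'major': 2, 'minor': 7, 'revision': 0}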

    @staticmethod
    def _git_repo_info(repo_path):
        ''' returns a string containing git branch, commit id and commit date '''
        result = None
        if os.path.exists(repo_path):
            # Check whether .git is a file; if so, we are inside a submodule structure.
            if os.path.isfile(repo_path):
                try:
                    gitdir = yaml.safe_load(open(repo_path)).get('gitdir')
                    # the gitdir value in the .git file may be an absolute path
                    if os.path.isabs(gitdir):
                        repo_path = gitdir
                    else:
                        repo_path = os.path.join(repo_path[:-4], gitdir)
                except (IOError, AttributeError):
                    return ''
            f = open(os.path.join(repo_path, "HEAD"))
            line = f.readline().rstrip("\n")
            if line.startswith("ref:"):
                branch_path = os.path.join(repo_path, line[5:])
            else:
                branch_path = None
            f.close()
            if branch_path and os.path.exists(branch_path):
                branch = '/'.join(line.split('/')[2:])
                f = open(branch_path)
                commit = f.readline()[:10]
                f.close()
            else:
                # detached HEAD
                commit = line[:10]
                branch = 'detached HEAD'
                branch_path = os.path.join(repo_path, "HEAD")

            date = time.localtime(os.stat(branch_path).st_mtime)
            if time.daylight == 0:
                offset = time.timezone
            else:
                offset = time.altzone
            result = "({0} {1}) last updated {2} (GMT {3:+04d})".format(branch, commit, time.strftime("%Y/%m/%d %H:%M:%S", date), int(offset / -36))
        else:
            result = ''
        return result
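
    # Given the format string above, the result looks like, for example:
    #   "(devel 8f06ba2bc1) last updated 2019/03/01 10:12:13 (GMT +100)"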

    @staticmethod
    def _gitinfo():
        basedir = os.path.join(os.path.dirname(__file__), '..', '..', '..')
        repo_path = os.path.join(basedir, '.git')
        result = CLI._git_repo_info(repo_path)
        submodules = os.path.join(basedir, '.gitmodules')
        if not os.path.exists(submodules):
            return result
        f = open(submodules)
        for line in f:
            tokens = line.strip().split(' ')
            if tokens[0] == 'path':
                submodule_path = tokens[2]
                submodule_info = CLI._git_repo_info(os.path.join(basedir, submodule_path, '.git'))
                if not submodule_info:
                    submodule_info = ' not found - use git submodule update --init ' + submodule_path
                result += "\n  {0}: {1}".format(submodule_path, submodule_info)
        f.close()
        return result

    def pager(self, text):
        ''' find reasonable way to display text '''
        # this is a much simpler form of what is in pydoc.py
        if not sys.stdout.isatty():
            display.display(text, screen_only=True)
        elif 'PAGER' in os.environ:
            if sys.platform == 'win32':
                display.display(text, screen_only=True)
            else:
                self.pager_pipe(text, os.environ['PAGER'])
        else:
            p = subprocess.Popen('less --version', shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            p.communicate()
            if p.returncode == 0:
                self.pager_pipe(text, 'less')
            else:
                display.display(text, screen_only=True)

    @staticmethod
    def pager_pipe(text, cmd):
        ''' pipe text through a pager '''
        if 'LESS' not in os.environ:
            os.environ['LESS'] = CLI.LESS_OPTS
        try:
            cmd = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, stdout=sys.stdout)
            cmd.communicate(input=to_bytes(text))
        except IOError:
            pass
        except KeyboardInterrupt:
            pass

    @classmethod
    def tty_ify(cls, text):

        t = cls._ITALIC.sub("`" + r"\1" + "'", text)    # I(word) => `word'
        t = cls._BOLD.sub("*" + r"\1" + "*", t)         # B(word) => *word*
        t = cls._MODULE.sub("[" + r"\1" + "]", t)       # M(word) => [word]
        t = cls._URL.sub(r"\1", t)                      # U(word) => word
        t = cls._CONST.sub("`" + r"\1" + "'", t)        # C(word) => `word'

        return t
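
    # For example:
    #   >>> CLI.tty_ify("Use M(copy) with C(src); see U(https://docs.ansible.com)")
    #   "Use [copy] with `src'; see https://docs.ansible.com"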

    @staticmethod
    def _play_prereqs(options):

        # all needs loader
        loader = DataLoader()

        basedir = getattr(options, 'basedir', False)
        if basedir:
            loader.set_basedir(basedir)

        vault_ids = options.vault_ids
        default_vault_ids = C.DEFAULT_VAULT_IDENTITY_LIST
        vault_ids = default_vault_ids + vault_ids

        vault_secrets = CLI.setup_vault_secrets(loader,
                                                vault_ids=vault_ids,
                                                vault_password_files=options.vault_password_files,
                                                ask_vault_pass=options.ask_vault_pass,
                                                auto_prompt=False)
        loader.set_vault_secrets(vault_secrets)

        # create the inventory, and filter it based on the subset specified (if any)
        inventory = InventoryManager(loader=loader, sources=options.inventory)

        # create the variable manager, which will be shared throughout
        # the code, ensuring a consistent view of global variables
        variable_manager = VariableManager(loader=loader, inventory=inventory)

        if hasattr(options, 'basedir'):
            if options.basedir:
                variable_manager.safe_basedir = True
        else:
            variable_manager.safe_basedir = True

        # load vars from cli options
        variable_manager.extra_vars = load_extra_vars(loader=loader, options=options)
        variable_manager.options_vars = load_options_vars(options, CLI.version_info(gitinfo=False))

        return loader, inventory, variable_manager

    @staticmethod
    def get_host_list(inventory, subset, pattern='all'):

        no_hosts = False
        if len(inventory.list_hosts()) == 0:
            # Empty inventory
            if C.LOCALHOST_WARNING:
                display.warning("provided hosts list is empty, only localhost is available. Note that the implicit localhost does not match 'all'")
            no_hosts = True

        inventory.subset(subset)

        hosts = inventory.list_hosts(pattern)
        if len(hosts) == 0 and no_hosts is False:
            raise AnsibleError("Specified hosts and/or --limit does not match any hosts")

        return hosts
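
A minimal sketch of how these helpers are typically combined by a subclass after
option parsing (self.options and the subset value are assumed here, not taken
from this excerpt):

    loader, inventory, variable_manager = CLI._play_prereqs(self.options)
    hosts = CLI.get_host_list(inventory, self.options.subset, pattern='all')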
Example #4
class TerminalBase(with_metaclass(ABCMeta, object)):
    '''
    A base class for implementing cli connections

    .. note:: Unlike most of Ansible, nearly all strings in
        :class:`TerminalBase` plugins are byte strings.  This is because of
        how close to the underlying platform these plugins operate.  Remember
        to mark literal strings as byte string (``b"string"``) and to use
        :func:`~ansible.module_utils._text.to_bytes` and
        :func:`~ansible.module_utils._text.to_text` to avoid unexpected
        problems.
    '''

    #: compiled bytes regular expressions used to match terminal prompts on stdout
    terminal_stdout_re = []

    #: compiled bytes regular expressions used to match error output on stderr
    terminal_stderr_re = []

    #: compiled bytes regular expressions to remove ANSI codes
    ansi_re = [
        re.compile(br'(\x1b\[\?1h\x1b=)'),
        re.compile(br'\x08.')
    ]

    def __init__(self, connection):
        self._connection = connection

    def _exec_cli_command(self, cmd, check_rc=True):
        '''
        Executes the CLI command on the remote device and returns the output

        :arg cmd: Byte string command to be executed
        '''
        return self._connection.exec_command(cmd)

    def _get_prompt(self):
        """
        Returns the current prompt from the device

        :returns: A byte string of the prompt
        """
        # do not send '\n' here, exec_cli_command sends '\r' already,
        # doing so causes double prompts.
        self._exec_cli_command(b'')

        return self._connection._matched_prompt

    def on_open_shell(self):
        """Called after the SSH session is established

        This method is called right after invoke_shell() is called on the
        Paramiko SSHClient instance.  It provides an opportunity to set up
        terminal parameters, such as disabling paging.
        """
        pass

    def on_close_shell(self):
        """Called before the connection is closed

        This method gets called once the connection close has been requested
        but before the connection is actually closed.  It provides an
        opportunity to clean up any terminal resources before the shell is
        actually closed
        """
        pass

    def on_become(self, passwd=None):
        """Called when privilege escalation is requested

        :kwarg passwd: String containing the password

        This method is called when privilege escalation is requested in the
        play context by setting become to True.  It is the responsibility of
        the terminal plugin to actually perform the escalation, such as
        entering `enable` mode.
        """
        pass

    def on_unbecome(self):
        """Called when privilege deescalation is requested

        This method is called when the privilege changes from escalated
        (become=True) to non-escalated (become=False).  It is the responsibility
        of this method to actually perform the deauthorization procedure.
        """
        pass

    def on_authorize(self, passwd=None):
        """Deprecated method for privilege escalation

        :kwarg passwd: String containing the password
        """
        return self.on_become(passwd)

    def on_deauthorize(self):
        """Deprecated method for privilege deescalation
        """
        return self.on_unbecome()
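
As an illustration, a terminal plugin for a hypothetical network OS might subclass
TerminalBase as below; the prompt and error patterns and the paging command are
assumptions modeled on typical network CLIs, not taken from this excerpt:

    class TerminalModule(TerminalBase):
        terminal_stdout_re = [
            re.compile(br'[\r\n]?[\w@+\-\.:\/\[\]]+[>#\$] ?$')
        ]
        terminal_stderr_re = [
            re.compile(br'% ?Error'),
            re.compile(br'% ?Bad secret'),
        ]

        def on_open_shell(self):
            # disable paging so long command output is returned in one response
            self._exec_cli_command(b'terminal length 0')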
Example #5
class AnsibleCollectionLoader(with_metaclass(Singleton, object)):
    def __init__(self, config=None):
        if config:
            self._n_configured_paths = config.get_config_value(
                'COLLECTIONS_PATHS')
        else:
            self._n_configured_paths = os.environ.get(
                'ANSIBLE_COLLECTIONS_PATHS', '').split(os.pathsep)

        if isinstance(self._n_configured_paths, string_types):
            self._n_configured_paths = [self._n_configured_paths]
        elif self._n_configured_paths is None:
            self._n_configured_paths = []

        # expand any placeholders in configured paths
        self._n_configured_paths = [
            to_native(os.path.expanduser(p), errors='surrogate_or_strict')
            for p in self._n_configured_paths
        ]

        self._n_playbook_paths = []
        self._default_collection = None
        # pre-inject grafted package maps so we can force them to use the right loader instead of potentially delegating to a "normal" loader
        for syn_pkg_def in (p for p in iteritems(_SYNTHETIC_PACKAGES)
                            if p[1].get('graft')):
            pkg_name = syn_pkg_def[0]
            pkg_def = syn_pkg_def[1]

            newmod = ModuleType(pkg_name)
            newmod.__package__ = pkg_name
            newmod.__file__ = '<ansible_synthetic_collection_package>'
            pkg_type = pkg_def.get('type')

            # TODO: need to rethink map style so we can just delegate all the loading

            if pkg_type == 'flatmap':
                newmod.__loader__ = AnsibleFlatMapLoader(
                    import_module(pkg_def['flatmap']))
            newmod.__path__ = []

            sys.modules[pkg_name] = newmod

    @property
    def n_collection_paths(self):
        return self._n_playbook_paths + self._n_configured_paths

    def get_collection_path(self, collection_name):
        if not AnsibleCollectionRef.is_valid_collection_name(collection_name):
            raise ValueError('{0} is not a valid collection name'.format(
                to_native(collection_name)))

        m = import_module('ansible_collections.{0}'.format(collection_name))

        return m.__file__

    def set_playbook_paths(self, b_playbook_paths):
        if isinstance(b_playbook_paths, string_types):
            b_playbook_paths = [b_playbook_paths]

        # track visited paths; we have to preserve the dir order as-passed in case there are duplicate collections (first one wins)
        added_paths = set()

        # de-dupe and ensure the paths are native strings (Python seems to do this for package paths etc, so assume it's safe)
        self._n_playbook_paths = [
            os.path.join(to_native(p), 'collections') for p in b_playbook_paths
            if not (p in added_paths or added_paths.add(p))
        ]
        # FIXME: only allow setting this once, or handle any necessary cache/package path invalidations internally?

    # FIXME: is there a better place to store this?
    # FIXME: only allow setting this once
    def set_default_collection(self, collection_name):
        self._default_collection = collection_name

    @property
    def default_collection(self):
        return self._default_collection

    def find_module(self, fullname, path=None):
        if self._find_module(fullname, path, load=False)[0]:
            return self

        return None

    def load_module(self, fullname):
        mod = self._find_module(fullname, None, load=True)[1]

        if not mod:
            raise ImportError('module {0} not found'.format(fullname))

        return mod

    def _find_module(self, fullname, path, load):
        # this loader is only concerned with items under the Ansible Collections namespace hierarchy, ignore others
        if not fullname.startswith(
                'ansible_collections.') and fullname != 'ansible_collections':
            return False, None

        if sys.modules.get(fullname):
            if not load:
                return True, None

            return True, sys.modules[fullname]

        newmod = None

        # this loader implements key functionality for Ansible collections
        # * implicit distributed namespace packages for the root Ansible namespace (no pkgutil.extend_path hackery reqd)
        # * implicit package support for Python 2.7 (no need for __init__.py in collections, except to use standard Py2.7 tooling)
        # * preventing controller-side code injection during collection loading
        # * (default loader would execute arbitrary package code from all __init__.py's)

        parent_pkg_name = '.'.join(fullname.split('.')[:-1])

        parent_pkg = sys.modules.get(parent_pkg_name)

        if parent_pkg_name and not parent_pkg:
            raise ImportError(
                'parent package {0} not found'.format(parent_pkg_name))

        # are we at or below the collection level? eg a.mynamespace.mycollection.something.else
        # if so, we don't want distributed namespace behavior; first mynamespace.mycollection on the path is where
        # we'll load everything from (ie, don't fall back to another mynamespace.mycollection lower on the path)
        sub_collection = fullname.count('.') > 1

        synpkg_def = _SYNTHETIC_PACKAGES.get(fullname)
        synpkg_remainder = ''

        if not synpkg_def:
            # if the parent is a grafted package, we have some special work to do, otherwise just look for stuff on disk
            parent_synpkg_def = _SYNTHETIC_PACKAGES.get(parent_pkg_name)
            if parent_synpkg_def and parent_synpkg_def.get('graft'):
                synpkg_def = parent_synpkg_def
                synpkg_remainder = '.' + fullname.rpartition('.')[2]

        # FUTURE: collapse as much of this back to on-demand as possible (maybe stub packages that get replaced when actually loaded?)
        if synpkg_def:
            pkg_type = synpkg_def.get('type')
            if not pkg_type:
                raise KeyError(
                    'invalid synthetic package type (no package "type" specified)'
                )
            if pkg_type == 'map':
                map_package = synpkg_def.get('map')

                if not map_package:
                    raise KeyError(
                        'invalid synthetic map package definition (no target "map" defined)'
                    )

                if not load:
                    return True, None

                mod = import_module(map_package + synpkg_remainder)

                sys.modules[fullname] = mod

                return True, mod
            elif pkg_type == 'flatmap':
                raise NotImplementedError()
            elif pkg_type == 'pkg_only':
                if not load:
                    return True, None

                newmod = ModuleType(fullname)
                newmod.__package__ = fullname
                newmod.__file__ = '<ansible_synthetic_collection_package>'
                newmod.__loader__ = self
                newmod.__path__ = []

                if not synpkg_def.get('allow_external_subpackages'):
                    # if external subpackages are NOT allowed, we're done
                    sys.modules[fullname] = newmod
                    return True, newmod

                # if external subpackages ARE allowed, check for on-disk implementations and return a normal
                # package if we find one, otherwise return the one we created here

        if not parent_pkg:  # top-level package, look for NS subpackages on all collection paths
            package_paths = [
                self._extend_path_with_ns(p, fullname)
                for p in self.n_collection_paths
            ]
        else:  # subpackage; search in all subpaths (we'll limit later inside a collection)
            package_paths = [
                self._extend_path_with_ns(p, fullname)
                for p in parent_pkg.__path__
            ]

        for candidate_child_path in package_paths:
            code_object = None
            is_package = True
            location = None
            # check for implicit sub-package first
            if os.path.isdir(to_bytes(candidate_child_path)):
                # Py3.x implicit namespace packages don't have a file location, so they don't support get_data
                # (which assumes the parent dir or that the loader has an internal mapping); so we have to provide
                # a bogus leaf file on the __file__ attribute for pkgutil.get_data to strip off
                location = os.path.join(candidate_child_path, '__synthetic__')
            else:
                for source_path in [
                        os.path.join(candidate_child_path, '__init__.py'),
                        candidate_child_path + '.py'
                ]:
                    if not os.path.isfile(to_bytes(source_path)):
                        continue

                    if not load:
                        return True, None

                    with open(to_bytes(source_path), 'rb') as fd:
                        source = fd.read()

                    code_object = compile(source=source,
                                          filename=source_path,
                                          mode='exec',
                                          flags=0,
                                          dont_inherit=True)
                    location = source_path
                    is_package = source_path.endswith('__init__.py')
                    break

                if not location:
                    continue

            newmod = ModuleType(fullname)
            newmod.__file__ = location
            newmod.__loader__ = self

            if is_package:
                if sub_collection:  # we never want to search multiple instances of the same collection; use first found
                    newmod.__path__ = [candidate_child_path]
                else:
                    newmod.__path__ = package_paths

                newmod.__package__ = fullname
            else:
                newmod.__package__ = parent_pkg_name

            sys.modules[fullname] = newmod

            if code_object:
                # FIXME: decide cases where we don't actually want to exec the code?
                exec(code_object, newmod.__dict__)

            return True, newmod

        # even if we didn't find one on disk, fall back to a synthetic package if we have one...
        if newmod:
            sys.modules[fullname] = newmod
            return True, newmod

        # FIXME: need to handle the "no dirs present" case for at least the root and synthetic internal collections like ansible.builtin

        return False, None

    @staticmethod
    def _extend_path_with_ns(path, ns):
        ns_path_add = ns.rsplit('.', 1)[-1]

        return os.path.join(path, ns_path_add)
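
    # e.g. _extend_path_with_ns('/usr/share/ansible/collections/ansible_collections',
    #                           'ansible_collections.my_ns')
    # appends only the last dotted segment, giving:
    #   '/usr/share/ansible/collections/ansible_collections/my_ns'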

    def get_data(self, filename):
        with cs_open(filename, 'rb') as fd:
            return fd.read()
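
A rough sketch of how a PEP 302-style finder/loader such as this one gets
activated; the exact wiring lives elsewhere in the project, so treat the
registration and the import below (namespace and collection names included) as
assumptions:

    import sys
    sys.meta_path.insert(0, AnsibleCollectionLoader())

    # subsequent imports under the ansible_collections namespace now route
    # through the loader's find_module()/load_module() pair:
    from ansible_collections.my_ns.my_coll.plugins.module_utils import helper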
Example #6
class CLI(with_metaclass(ABCMeta, object)):
    ''' code behind bin/ansible* programs '''

    VALID_ACTIONS = []

    _ITALIC = re.compile(r"I\(([^)]+)\)")
    _BOLD = re.compile(r"B\(([^)]+)\)")
    _MODULE = re.compile(r"M\(([^)]+)\)")
    _URL = re.compile(r"U\(([^)]+)\)")
    _CONST = re.compile(r"C\(([^)]+)\)")

    PAGER = 'less'

    # -F (quit-if-one-screen) -R (allow raw ansi control chars)
    # -S (chop long lines) -X (disable termcap init and de-init)
    LESS_OPTS = 'FRSX'

    def __init__(self, args, callback=None):
        """
        Base init method for all command line programs
        """

        self.args = args
        self.options = None
        self.parser = None
        self.action = None
        self.callback = callback

    def set_action(self):
        """
        Get the action the user wants to execute from the sys argv list.
        """
        for i in range(0, len(self.args)):
            arg = self.args[i]
            if arg in self.VALID_ACTIONS:
                self.action = arg
                del self.args[i]
                break

        if not self.action:
            # if we're asked for help or version, we don't need an action.
            # have to use a special purpose Option Parser to figure that out as
            # the standard OptionParser throws an error for unknown options and
            # without knowing action, we only know of a subset of the options
            # that could be legal for this command
            tmp_parser = InvalidOptsParser(self.parser)
            tmp_options, tmp_args = tmp_parser.parse_args(self.args)
            if not ((hasattr(tmp_options, 'help') and tmp_options.help) or
                    (hasattr(tmp_options, 'version') and tmp_options.version)):
                raise AnsibleOptionsError("Missing required action")

    def execute(self):
        """
        Actually runs a child defined method using the execute_<action> pattern
        """
        fn = getattr(self, "execute_%s" % self.action)
        fn()
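
    # e.g. for a hypothetical subclass with VALID_ACTIONS = ['encrypt', 'decrypt'],
    # running 'mycli encrypt ...' makes set_action() store 'encrypt', and
    # execute() then dispatches to self.execute_encrypt()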

    @abstractmethod
    def run(self):
        """Run the ansible command

        Subclasses must implement this method.  It does the actual work of
        running an Ansible command.
        """

        display.vv(self.parser.get_version())

        if C.CONFIG_FILE:
            display.v(u"Using %s as config file" % to_text(C.CONFIG_FILE))
        else:
            display.v(u"No config file found; using defaults")

    @staticmethod
    def ask_vault_passwords():
        ''' prompt for vault password and/or password change '''

        vault_pass = None
        try:
            vault_pass = getpass.getpass(prompt="Vault password: ")
        except EOFError:
            pass

        if vault_pass:
            vault_pass = to_bytes(vault_pass,
                                  errors='surrogate_or_strict',
                                  nonstring='simplerepr').strip()

        return vault_pass

    @staticmethod
    def ask_new_vault_passwords():
        new_vault_pass = None
        try:
            new_vault_pass = getpass.getpass(prompt="New Vault password: ")
            new_vault_pass2 = getpass.getpass(prompt="Confirm New Vault password: ")
            if new_vault_pass != new_vault_pass2:
                raise AnsibleError("Passwords do not match")
        except EOFError:
            pass

        if new_vault_pass:
            new_vault_pass = to_bytes(new_vault_pass,
                                      errors='surrogate_or_strict',
                                      nonstring='simplerepr').strip()

        return new_vault_pass

    def ask_passwords(self):
        ''' prompt for connection and become passwords if needed '''

        op = self.options
        sshpass = None
        becomepass = None
        become_prompt = ''

        try:
            if op.ask_pass:
                sshpass = getpass.getpass(prompt="SSH password: ")
                become_prompt = "%s password[defaults to SSH password]: " % op.become_method.upper()
                if sshpass:
                    sshpass = to_bytes(sshpass,
                                       errors='strict',
                                       nonstring='simplerepr')
            else:
                become_prompt = "%s password: " % op.become_method.upper()

            if op.become_ask_pass:
                becomepass = getpass.getpass(prompt=become_prompt)
                if op.ask_pass and becomepass == '':
                    becomepass = sshpass
                if becomepass:
                    becomepass = to_bytes(becomepass)
        except EOFError:
            pass

        return (sshpass, becomepass)

    def normalize_become_options(self):
        ''' this keeps backwards compatibility with sudo/su self.options '''
        self.options.become_ask_pass = self.options.become_ask_pass or self.options.ask_sudo_pass or self.options.ask_su_pass or C.DEFAULT_BECOME_ASK_PASS
        self.options.become_user = self.options.become_user or self.options.sudo_user or self.options.su_user or C.DEFAULT_BECOME_USER

        def _dep(which):
            display.deprecated('The %s command line option has been deprecated in favor of the "become" command line arguments' % which, '2.6')

        if self.options.become:
            pass
        elif self.options.sudo:
            self.options.become = True
            self.options.become_method = 'sudo'
            _dep('sudo')
        elif self.options.su:
            self.options.become = True
            self.options.become_method = 'su'
            _dep('su')

        # other deprecations:
        if self.options.ask_sudo_pass or self.options.sudo_user:
            _dep('sudo')
        if self.options.ask_su_pass or self.options.su_user:
            _dep('su')

    def validate_conflicts(self,
                           vault_opts=False,
                           runas_opts=False,
                           fork_opts=False):
        ''' check for conflicting options '''

        op = self.options

        if vault_opts:
            # Check for vault related conflicts
            if (op.ask_vault_pass and op.vault_password_file):
                self.parser.error(
                    "--ask-vault-pass and --vault-password-file are mutually exclusive"
                )

        if runas_opts:
            # Check for privilege escalation conflicts
            if ((op.su or op.su_user) and (op.sudo or op.sudo_user)
                    or (op.su or op.su_user) and (op.become or op.become_user)
                    or (op.sudo or op.sudo_user) and
                (op.become or op.become_user)):

                self.parser.error(
                    "Sudo arguments ('--sudo', '--sudo-user', and '--ask-sudo-pass') and su arguments ('--su', '--su-user', and '--ask-su-pass') "
                    "and become arguments ('--become', '--become-user', and '--ask-become-pass') are exclusive of each other"
                )

        if fork_opts:
            if op.forks < 1:
                self.parser.error(
                    "The number of processes (--forks) must be >= 1")

    @staticmethod
    def unfrack_paths(option, opt, value, parser):
        if isinstance(value, string_types):
            setattr(parser.values, option.dest,
                    [unfrackpath(x) for x in value.split(os.pathsep)])
        elif isinstance(value, list):
            setattr(parser.values, option.dest,
                    [unfrackpath(x) for x in value])
        else:
            pass  # FIXME: should we raise options error?

    @staticmethod
    def unfrack_path(option, opt, value, parser):
        setattr(parser.values, option.dest, unfrackpath(value))

    @staticmethod
    def base_parser(usage="",
                    output_opts=False,
                    runas_opts=False,
                    meta_opts=False,
                    runtask_opts=False,
                    vault_opts=False,
                    module_opts=False,
                    async_opts=False,
                    connect_opts=False,
                    subset_opts=False,
                    check_opts=False,
                    inventory_opts=False,
                    epilog=None,
                    fork_opts=False,
                    runas_prompt_opts=False,
                    desc=None):
        ''' create an options parser for most ansible scripts '''

        # base opts
        parser = SortedOptParser(usage,
                                 version=CLI.version("%prog"),
                                 description=desc,
                                 epilog=epilog)
        parser.add_option(
            '-v',
            '--verbose',
            dest='verbosity',
            default=C.DEFAULT_VERBOSITY,
            action="count",
            help=
            "verbose mode (-vvv for more, -vvvv to enable connection debugging)"
        )

        if inventory_opts:
            parser.add_option(
                '-i',
                '--inventory',
                '--inventory-file',
                dest='inventory',
                action="append",
                help=
                "specify inventory host path (default=[%s]) or comma separated host list. "
                "--inventory-file is deprecated" % C.DEFAULT_HOST_LIST)
            parser.add_option(
                '--list-hosts',
                dest='listhosts',
                action='store_true',
                help=
                'outputs a list of matching hosts; does not execute anything else'
            )
            parser.add_option(
                '-l',
                '--limit',
                default=C.DEFAULT_SUBSET,
                dest='subset',
                help='further limit selected hosts to an additional pattern')

        if module_opts:
            parser.add_option(
                '-M',
                '--module-path',
                dest='module_path',
                default=None,
                help="prepend path(s) to module library (default=%s)" %
                C.DEFAULT_MODULE_PATH,
                action="callback",
                callback=CLI.unfrack_path,
                type='str')
        if runtask_opts:
            parser.add_option(
                '-e',
                '--extra-vars',
                dest="extra_vars",
                action="append",
                help="set additional variables as key=value or YAML/JSON",
                default=[])

        if fork_opts:
            parser.add_option(
                '-f',
                '--forks',
                dest='forks',
                default=C.DEFAULT_FORKS,
                type='int',
                help="specify number of parallel processes to use (default=%s)"
                % C.DEFAULT_FORKS)

        if vault_opts:
            parser.add_option('--ask-vault-pass',
                              default=C.DEFAULT_ASK_VAULT_PASS,
                              dest='ask_vault_pass',
                              action='store_true',
                              help='ask for vault password')
            parser.add_option('--vault-password-file',
                              default=C.DEFAULT_VAULT_PASSWORD_FILE,
                              dest='vault_password_file',
                              help="vault password file",
                              action="callback",
                              callback=CLI.unfrack_path,
                              type='string')
            parser.add_option('--new-vault-password-file',
                              dest='new_vault_password_file',
                              help="new vault password file for rekey",
                              action="callback",
                              callback=CLI.unfrack_path,
                              type='string')
            parser.add_option(
                '--output',
                default=None,
                dest='output_file',
                help=
                'output file name for encrypt or decrypt; use - for stdout',
                action="callback",
                callback=CLI.unfrack_path,
                type='string')

        if subset_opts:
            parser.add_option(
                '-t',
                '--tags',
                dest='tags',
                default=[],
                action='append',
                help="only run plays and tasks tagged with these values")
            parser.add_option(
                '--skip-tags',
                dest='skip_tags',
                default=[],
                action='append',
                help=
                "only run plays and tasks whose tags do not match these values"
            )

        if output_opts:
            parser.add_option('-o',
                              '--one-line',
                              dest='one_line',
                              action='store_true',
                              help='condense output')
            parser.add_option('-t',
                              '--tree',
                              dest='tree',
                              default=None,
                              help='log output to this directory')

        if connect_opts:
            connect_group = optparse.OptionGroup(
                parser, "Connection Options",
                "control as whom and how to connect to hosts")
            connect_group.add_option('-k',
                                     '--ask-pass',
                                     default=C.DEFAULT_ASK_PASS,
                                     dest='ask_pass',
                                     action='store_true',
                                     help='ask for connection password')
            connect_group.add_option(
                '--private-key',
                '--key-file',
                default=C.DEFAULT_PRIVATE_KEY_FILE,
                dest='private_key_file',
                help='use this file to authenticate the connection',
                action="callback",
                callback=CLI.unfrack_path,
                type='string')
            connect_group.add_option('-u',
                                     '--user',
                                     default=C.DEFAULT_REMOTE_USER,
                                     dest='remote_user',
                                     help='connect as this user (default=%s)' %
                                     C.DEFAULT_REMOTE_USER)
            connect_group.add_option(
                '-c',
                '--connection',
                dest='connection',
                default=C.DEFAULT_TRANSPORT,
                help="connection type to use (default=%s)" %
                C.DEFAULT_TRANSPORT)
            connect_group.add_option(
                '-T',
                '--timeout',
                default=C.DEFAULT_TIMEOUT,
                type='int',
                dest='timeout',
                help="override the connection timeout in seconds (default=%s)"
                % C.DEFAULT_TIMEOUT)
            connect_group.add_option(
                '--ssh-common-args',
                default='',
                dest='ssh_common_args',
                help=
                "specify common arguments to pass to sftp/scp/ssh (e.g. ProxyCommand)"
            )
            connect_group.add_option(
                '--sftp-extra-args',
                default='',
                dest='sftp_extra_args',
                help=
                "specify extra arguments to pass to sftp only (e.g. -f, -l)")
            connect_group.add_option(
                '--scp-extra-args',
                default='',
                dest='scp_extra_args',
                help="specify extra arguments to pass to scp only (e.g. -l)")
            connect_group.add_option(
                '--ssh-extra-args',
                default='',
                dest='ssh_extra_args',
                help="specify extra arguments to pass to ssh only (e.g. -R)")

            parser.add_option_group(connect_group)

        runas_group = None
        rg = optparse.OptionGroup(
            parser, "Privilege Escalation Options",
            "control how and which user you become as on target hosts")
        if runas_opts:
            runas_group = rg
            # the privileged user defaults to root later on, so that we can detect here whether the option was explicitly given
            runas_group.add_option(
                "-s",
                "--sudo",
                default=C.DEFAULT_SUDO,
                action="store_true",
                dest='sudo',
                help=
                "run operations with sudo (nopasswd) (deprecated, use become)")
            runas_group.add_option(
                '-U',
                '--sudo-user',
                dest='sudo_user',
                default=None,
                help='desired sudo user (default=root) (deprecated, use become)'
            )
            runas_group.add_option(
                '-S',
                '--su',
                default=C.DEFAULT_SU,
                action='store_true',
                help='run operations with su (deprecated, use become)')
            runas_group.add_option(
                '-R',
                '--su-user',
                default=None,
                help=
                'run operations with su as this user (default=%s) (deprecated, use become)'
                % C.DEFAULT_SU_USER)

            # consolidated privilege escalation (become)
            runas_group.add_option(
                "-b",
                "--become",
                default=C.DEFAULT_BECOME,
                action="store_true",
                dest='become',
                help=
                "run operations with become (does not imply password prompting)"
            )
            runas_group.add_option(
                '--become-method',
                dest='become_method',
                default=C.DEFAULT_BECOME_METHOD,
                type='choice',
                choices=C.BECOME_METHODS,
                help=
                "privilege escalation method to use (default=%s), valid choices: [ %s ]"
                % (C.DEFAULT_BECOME_METHOD, ' | '.join(C.BECOME_METHODS)))
            runas_group.add_option(
                '--become-user',
                default=None,
                dest='become_user',
                type='string',
                help='run operations as this user (default=%s)' %
                C.DEFAULT_BECOME_USER)

        if runas_opts or runas_prompt_opts:
            if not runas_group:
                runas_group = rg
            runas_group.add_option(
                '--ask-sudo-pass',
                default=C.DEFAULT_ASK_SUDO_PASS,
                dest='ask_sudo_pass',
                action='store_true',
                help='ask for sudo password (deprecated, use become)')
            runas_group.add_option(
                '--ask-su-pass',
                default=C.DEFAULT_ASK_SU_PASS,
                dest='ask_su_pass',
                action='store_true',
                help='ask for su password (deprecated, use become)')
            runas_group.add_option(
                '-K',
                '--ask-become-pass',
                default=False,
                dest='become_ask_pass',
                action='store_true',
                help='ask for privilege escalation password')

        if runas_group:
            parser.add_option_group(runas_group)

        if async_opts:
            parser.add_option(
                '-P',
                '--poll',
                default=C.DEFAULT_POLL_INTERVAL,
                type='int',
                dest='poll_interval',
                help="set the poll interval if using -B (default=%s)" %
                C.DEFAULT_POLL_INTERVAL)
            parser.add_option(
                '-B',
                '--background',
                dest='seconds',
                type='int',
                default=0,
                help='run asynchronously, failing after X seconds (default=N/A)'
            )

        if check_opts:
            parser.add_option(
                "-C",
                "--check",
                default=False,
                dest='check',
                action='store_true',
                help=
                "don't make any changes; instead, try to predict some of the changes that may occur"
            )
            parser.add_option(
                '--syntax-check',
                dest='syntax',
                action='store_true',
                help=
                "perform a syntax check on the playbook, but do not execute it"
            )
            parser.add_option(
                "-D",
                "--diff",
                default=False,
                dest='diff',
                action='store_true',
                help=
                "when changing (small) files and templates, show the differences in those files; works great with --check"
            )

        if meta_opts:
            parser.add_option('--force-handlers',
                              default=C.DEFAULT_FORCE_HANDLERS,
                              dest='force_handlers',
                              action='store_true',
                              help="run handlers even if a task fails")
            parser.add_option('--flush-cache',
                              dest='flush_cache',
                              action='store_true',
                              help="clear the fact cache")

        return parser

    @abstractmethod
    def parse(self):
        """Parse the command line args

        This method parses the command line arguments.  It uses the parser
        stored in the self.parser attribute and saves the args and options in
        self.args and self.options respectively.

        Subclasses need to implement this method.  They will usually create
        a base_parser, add their own options to the base_parser, and then call
        this method to do the actual parsing.  An implementation will look
        something like this::

            def parse(self):
                parser = super(MyCLI, self).base_parser(usage="My Ansible CLI", inventory_opts=True)
                parser.add_option('--my-option', dest='my_option', action='store')
                self.parser = parser
                super(MyCLI, self).parse()
                # If some additional transformations are needed for the
                # arguments and options, do it here.
        """

        self.options, self.args = self.parser.parse_args(self.args[1:])

        # process tags
        if hasattr(self.options, 'tags') and not self.options.tags:
            # optparse defaults do not do what's expected
            self.options.tags = ['all']
        if hasattr(self.options, 'tags') and self.options.tags:
            if not C.MERGE_MULTIPLE_CLI_TAGS:
                if len(self.options.tags) > 1:
                    display.deprecated(
                        'Specifying --tags multiple times on the command line currently uses the last specified value. '
                        'In 2.4, values will be merged instead.  Set merge_multiple_cli_tags=True in ansible.cfg to get this behavior now.',
                        version=2.5,
                        removed=False)
                    self.options.tags = [self.options.tags[-1]]

            tags = set()
            for tag_set in self.options.tags:
                for tag in tag_set.split(u','):
                    tags.add(tag.strip())
            self.options.tags = list(tags)

        # process skip_tags
        if hasattr(self.options, 'skip_tags') and self.options.skip_tags:
            if not C.MERGE_MULTIPLE_CLI_TAGS:
                if len(self.options.skip_tags) > 1:
                    display.deprecated(
                        'Specifying --skip-tags multiple times on the command line currently uses the last specified value. '
                        'In 2.4, values will be merged instead.  Set merge_multiple_cli_tags=True in ansible.cfg to get this behavior now.',
                        version=2.5,
                        removed=False)
                    self.options.skip_tags = [self.options.skip_tags[-1]]

            skip_tags = set()
            for tag_set in self.options.skip_tags:
                for tag in tag_set.split(u','):
                    skip_tags.add(tag.strip())
            self.options.skip_tags = list(skip_tags)

        # process inventory options
        if hasattr(self.options, 'inventory'):

            if self.options.inventory:

                # this should always be a list
                if isinstance(self.options.inventory, string_types):
                    self.options.inventory = [self.options.inventory]

                # Ensure full paths when needed
                self.options.inventory = [
                    unfrackpath(opt) if ',' not in opt else opt
                    for opt in self.options.inventory
                ]

            else:
                # set default if it exists
                if os.path.exists(C.DEFAULT_HOST_LIST):
                    self.options.inventory = [C.DEFAULT_HOST_LIST]

    @staticmethod
    def version(prog):
        ''' return ansible version '''
        result = "{0} {1}".format(prog, __version__)
        gitinfo = CLI._gitinfo()
        if gitinfo:
            result = result + " {0}".format(gitinfo)
        result += "\n  config file = %s" % C.CONFIG_FILE
        if C.DEFAULT_MODULE_PATH is None:
            cpath = "Default w/o overrides"
        else:
            cpath = C.DEFAULT_MODULE_PATH
        result = result + "\n  configured module search path = %s" % cpath
        result = result + "\n  ansible python module location = %s" % ':'.join(
            ansible.__path__)
        result = result + "\n  executable location = %s" % sys.argv[0]
        result = result + "\n  python version = %s" % ''.join(
            sys.version.splitlines())
        return result

    @staticmethod
    def version_info(gitinfo=False):
        ''' return full ansible version info '''
        if gitinfo:
            # expensive call, use with care
            ansible_version_string = CLI.version('')
        else:
            ansible_version_string = __version__
        ansible_version = ansible_version_string.split()[0]
        ansible_versions = ansible_version.split('.')
        for counter in range(len(ansible_versions)):
            if ansible_versions[counter] == "":
                ansible_versions[counter] = 0
            try:
                ansible_versions[counter] = int(ansible_versions[counter])
            except ValueError:
                pass
        if len(ansible_versions) < 3:
            for counter in range(len(ansible_versions), 3):
                ansible_versions.append(0)
        return {
            'string': ansible_version_string.strip(),
            'full': ansible_version,
            'major': ansible_versions[0],
            'minor': ansible_versions[1],
            'revision': ansible_versions[2]
        }
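
For reference, a minimal self-contained sketch of the same version-string parsing, using a hypothetical string in place of the real __version__:

# Minimal sketch of the parsing above; '2.4.1' is a hypothetical stand-in
# for __version__.
version_string = '2.4.1'
parts = [int(p) if p.isdigit() else 0 for p in version_string.split('.')]
parts += [0] * (3 - len(parts))  # pad to major/minor/revision
print({'string': version_string, 'full': version_string,
       'major': parts[0], 'minor': parts[1], 'revision': parts[2]})
# {'string': '2.4.1', 'full': '2.4.1', 'major': 2, 'minor': 4, 'revision': 1}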

    @staticmethod
    def _git_repo_info(repo_path):
        ''' returns a string containing git branch, commit id and commit date '''
        result = None
        if os.path.exists(repo_path):
            # Check if the .git is a file. If it is a file, it means that we are in a submodule structure.
            if os.path.isfile(repo_path):
                try:
                    gitdir = yaml.safe_load(open(repo_path)).get('gitdir')
                    # The .git file may contain an absolute path to the gitdir.
                    if os.path.isabs(gitdir):
                        repo_path = gitdir
                    else:
                        repo_path = os.path.join(repo_path[:-4], gitdir)
                except (IOError, AttributeError):
                    return ''
            f = open(os.path.join(repo_path, "HEAD"))
            line = f.readline().rstrip("\n")
            if line.startswith("ref:"):
                branch_path = os.path.join(repo_path, line[5:])
            else:
                branch_path = None
            f.close()
            if branch_path and os.path.exists(branch_path):
                branch = '/'.join(line.split('/')[2:])
                f = open(branch_path)
                commit = f.readline()[:10]
                f.close()
            else:
                # detached HEAD
                commit = line[:10]
                branch = 'detached HEAD'
                branch_path = os.path.join(repo_path, "HEAD")

            date = time.localtime(os.stat(branch_path).st_mtime)
            if time.daylight == 0:
                offset = time.timezone
            else:
                offset = time.altzone
            result = "({0} {1}) last updated {2} (GMT {3:+04d})".format(
                branch, commit, time.strftime("%Y/%m/%d %H:%M:%S", date),
                int(offset / -36))
        else:
            result = ''
        return result
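
The string assembled by the final format() call above is easy to eyeball with made-up values (branch, commit and offset here are assumptions for the example):

import time

branch, commit, offset = 'devel', '0123456789', -7200  # hypothetical values
date = time.localtime(0)  # stand-in for os.stat(branch_path).st_mtime
# offset is seconds west of UTC; dividing by -36 yields hours x 100
print("({0} {1}) last updated {2} (GMT {3:+04d})".format(
    branch, commit, time.strftime("%Y/%m/%d %H:%M:%S", date),
    int(offset / -36)))
# e.g. (devel 0123456789) last updated 1970/01/01 01:00:00 (GMT +200)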

    @staticmethod
    def _gitinfo():
        basedir = os.path.join(os.path.dirname(__file__), '..', '..', '..')
        repo_path = os.path.join(basedir, '.git')
        result = CLI._git_repo_info(repo_path)
        submodules = os.path.join(basedir, '.gitmodules')
        if not os.path.exists(submodules):
            return result
        f = open(submodules)
        for line in f:
            tokens = line.strip().split(' ')
            if tokens[0] == 'path':
                submodule_path = tokens[2]
                submodule_info = CLI._git_repo_info(
                    os.path.join(basedir, submodule_path, '.git'))
                if not submodule_info:
                    submodule_info = ' not found - use git submodule update --init ' + submodule_path
                result += "\n  {0}: {1}".format(submodule_path, submodule_info)
        f.close()
        return result

    def pager(self, text):
        ''' find reasonable way to display text '''
        # this is a much simpler form of what is in pydoc.py
        if not sys.stdout.isatty():
            display.display(text, screen_only=True)
        elif 'PAGER' in os.environ:
            if sys.platform == 'win32':
                display.display(text, screen_only=True)
            else:
                self.pager_pipe(text, os.environ['PAGER'])
        else:
            p = subprocess.Popen('less --version',
                                 shell=True,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
            p.communicate()
            if p.returncode == 0:
                self.pager_pipe(text, 'less')
            else:
                display.display(text, screen_only=True)

    @staticmethod
    def pager_pipe(text, cmd):
        ''' pipe text through a pager '''
        if 'LESS' not in os.environ:
            os.environ['LESS'] = CLI.LESS_OPTS
        try:
            cmd = subprocess.Popen(cmd,
                                   shell=True,
                                   stdin=subprocess.PIPE,
                                   stdout=sys.stdout)
            cmd.communicate(input=to_bytes(text))
        except IOError:
            pass
        except KeyboardInterrupt:
            pass

    @classmethod
    def tty_ify(cls, text):

        t = cls._ITALIC.sub("`" + r"\1" + "'", text)  # I(word) => `word'
        t = cls._BOLD.sub("*" + r"\1" + "*", t)  # B(word) => *word*
        t = cls._MODULE.sub("[" + r"\1" + "]", t)  # M(word) => [word]
        t = cls._URL.sub(r"\1", t)  # U(word) => word
        t = cls._CONST.sub("`" + r"\1" + "'", t)  # C(word) => `word'

        return t
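
A hedged usage sketch; the class's _ITALIC/_BOLD/_MODULE/_URL/_CONST patterns are not shown above, so the single-group regexes below are assumptions that match what the inline comments describe:

import re

# Assumed equivalents of the class attributes used by tty_ify().
_ITALIC = re.compile(r"I\(([^)]+)\)")
_BOLD = re.compile(r"B\(([^)]+)\)")
_MODULE = re.compile(r"M\(([^)]+)\)")
_URL = re.compile(r"U\(([^)]+)\)")
_CONST = re.compile(r"C\(([^)]+)\)")

t = "use M(copy) with C(remote_src=True), see U(https://docs.ansible.com)"
t = _ITALIC.sub("`" + r"\1" + "'", t)
t = _BOLD.sub("*" + r"\1" + "*", t)
t = _MODULE.sub("[" + r"\1" + "]", t)
t = _URL.sub(r"\1", t)
t = _CONST.sub("`" + r"\1" + "'", t)
print(t)  # use [copy] with `remote_src=True', see https://docs.ansible.com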

    @staticmethod
    def read_vault_password_file(vault_password_file, loader):
        """
        Read a vault password from a file or if executable, execute the script and
        retrieve password from STDOUT
        """

        this_path = os.path.realpath(os.path.expanduser(vault_password_file))
        if not os.path.exists(this_path):
            raise AnsibleError("The vault password file %s was not found" %
                               this_path)

        if loader.is_executable(this_path):
            try:
                # STDERR not captured to make it easier for users to prompt for input in their scripts
                p = subprocess.Popen(this_path, stdout=subprocess.PIPE)
            except OSError as e:
                raise AnsibleError(
                    "Problem running vault password script %s (%s). If this is not a script, "
                    "remove the executable bit from the file." %
                    (this_path, e))
            stdout, stderr = p.communicate()
            if p.returncode != 0:
                raise AnsibleError(
                    "Vault password script %s returned non-zero (%s)" %
                    (this_path, p.returncode))
            vault_pass = stdout.strip(b'\r\n')
        else:
            try:
                f = open(this_path, "rb")
                vault_pass = f.read().strip()
                f.close()
            except (OSError, IOError) as e:
                raise AnsibleError(
                    "Could not read vault password file %s: %s" %
                    (this_path, e))

        return vault_pass
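
Typical use mirrors what _play_prereqs() does below; a hedged sketch (the path is hypothetical, and the imports assume the 2.4-era module layout):

from ansible.cli import CLI
from ansible.parsing.dataloader import DataLoader

loader = DataLoader()
# '~/.vault_pass.txt' is a hypothetical plain-text password file.
b_vault_pass = CLI.read_vault_password_file('~/.vault_pass.txt', loader=loader)
loader.set_vault_password(b_vault_pass)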

    @staticmethod
    def _play_prereqs(options):

        # everything needs a loader
        loader = DataLoader()

        # vault
        b_vault_pass = None
        if options.vault_password_file:
            # read vault_pass from a file
            b_vault_pass = CLI.read_vault_password_file(
                options.vault_password_file, loader=loader)
        elif options.ask_vault_pass:
            b_vault_pass = CLI.ask_vault_passwords()

        if b_vault_pass is not None:
            loader.set_vault_password(b_vault_pass)

        # create the inventory, and filter it based on the subset specified (if any)
        inventory = InventoryManager(loader=loader, sources=options.inventory)

        # create the variable manager, which will be shared throughout
        # the code, ensuring a consistent view of global variables
        variable_manager = VariableManager(loader=loader, inventory=inventory)

        # load vars from cli options
        variable_manager.extra_vars = load_extra_vars(loader=loader,
                                                      options=options)
        variable_manager.options_vars = load_options_vars(
            options, CLI.version_info(gitinfo=False))

        return loader, inventory, variable_manager
class AnsibleCollectionConfig(with_metaclass(_AnsibleCollectionConfig)):
    pass
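
A hedged sketch of wiring _play_prereqs() (defined above) from a CLI subclass; the options object is a stand-in and must carry the attributes read above plus whatever load_extra_vars()/load_options_vars() expect:

import optparse

from ansible.cli import CLI  # assumption: the base class above lives here

# Hypothetical parsed options; 'localhost,' is an inline host list.
options = optparse.Values({'vault_password_file': None,
                           'ask_vault_pass': False,
                           'inventory': ['localhost,'],
                           'extra_vars': []})
loader, inventory, variable_manager = CLI._play_prereqs(options)
print(inventory.list_hosts('all'))
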
Example #8
class AnsiblePlugin(with_metaclass(ABCMeta, object)):
    def get_option(self, option):
        return C.get_plugin_option(get_plugin_class(self), self.name, option)
Example #9
class NutanixClient(with_metaclass(Singleton, object)):
    def __init__(self, module):
        super(NutanixClient, self).__init__()
        self._config = Configuration()
        self.default_url = module.params.get('default_url')
        self.user_name = module.params.get('user_name')
        self.user_password = module.params.get('user_password')
        self.verify = module.params.get('verify')
        self.validate_params(module)
        self._session = self.get_ntnx_connection_info()

    def raise_for_task_result(self, task_uuid):
        uri = '/tasks/{0}'.format(task_uuid)
        url = '{0}{1}'.format(self.default_url, uri)
        r = self._session.get(url)
        r.raise_for_status()
        task = r.json()
        if task.get('meta_response', None) is not None:
            if task.get('meta_response').get('error_code') > 0:
                raise RequestException(
                    task.get('meta_response').get('error_detail'))

    def open_url(self, method='get', uri=None, payload=None):
        # avoid the shared-mutable-default pitfall: build the dict per call
        payload = payload if payload is not None else {}
        url = '{0}{1}'.format(self.default_url, uri)
        if method == 'get':
            r = getattr(self._session, method)(url, params=payload, timeout=5)

        else:
            r = getattr(self._session, method)(url, json=payload, timeout=5)

        r.raise_for_status()
        data = r.json()
        if data.get('task_uuid', None) is not None:
            self.raise_for_task_result(data.get('task_uuid'))

        return data

    def validate_params(self, module):
        if self.default_url is None:
            if os.environ.get('NUTANIX_DEFAULT_URL'):
                self.default_url = os.environ.get('NUTANIX_DEFAULT_URL')

            elif self._config.default_url:
                self.default_url = self._config.default_url

            else:
                module.fail_json(msg='Undefined default url')

        if self.user_name is None:
            if os.environ.get('NUTANIX_USER_NAME'):
                self.user_name = os.environ.get('NUTANIX_USER_NAME')

            elif self._config.user_name:
                self.user_name = self._config.user_name

            else:
                module.fail_json(msg='Undefined user name')

        if self.user_password is None:
            if os.environ.get('NUTANIX_USER_PASSWORD'):
                self.user_password = os.environ.get('NUTANIX_USER_PASSWORD')

            elif self._config.user_password:
                self.user_password = self._config.user_password

            else:
                module.fail_json(msg='Undefined user password')

    def get_ntnx_connection_info(self):

        # Check module args for credentials, then check environment var

        headers = {'Content-Type': 'application/json'}
        s = requests.Session()
        s.auth = (self.user_name, self.user_password)
        s.verify = self.verify
        s.headers.update(headers)
        return s
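
validate_params() above implements a three-level fallback: explicit module argument, then environment variable, then the Configuration file. The generic pattern, as a minimal sketch with hypothetical names:

import os

def resolve_setting(explicit, env_var, config_value, fail):
    """Return the first available of: explicit value, env var, config."""
    if explicit is not None:
        return explicit
    if os.environ.get(env_var):
        return os.environ.get(env_var)
    if config_value:
        return config_value
    fail('Undefined %s' % env_var.lower())

# e.g.: default_url = resolve_setting(None, 'NUTANIX_DEFAULT_URL',
#                                     config.default_url,
#                                     lambda m: module.fail_json(msg=m))
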
Example #10
class FieldAttributeBase(with_metaclass(BaseMeta, object)):
    def __init__(self):

        # initialize the data loader and variable manager, which will be provided
        # later when the object is actually loaded
        self._loader = None
        self._variable_manager = None

        # other internal params
        self._validated = False
        self._squashed = False
        self._finalized = False

        # every object gets a random uuid:
        self._uuid = get_unique_id()

        # we create a copy of the attributes here because they were
        # initialized as a class param in the meta class, so we need a
        # unique object per instance (all members contained within are
        # unique already).
        self._attributes = self.__class__._attributes.copy()
        self._attr_defaults = self.__class__._attr_defaults.copy()
        for key, value in self._attr_defaults.items():
            if callable(value):
                self._attr_defaults[key] = value()

        # and init vars, avoid using defaults in field declaration as it lives across plays
        self.vars = dict()

    @property
    def finalized(self):
        return self._finalized

    def dump_me(self, depth=0):
        ''' never called from production code; here to be used as a 'complex print' when debugging '''
        if depth == 0:
            display.debug(
                "DUMPING OBJECT ------------------------------------------------------"
            )
        display.debug("%s- %s (%s, id=%s)" %
                      (" " * depth, self.__class__.__name__, self, id(self)))
        if hasattr(self, '_parent') and self._parent:
            self._parent.dump_me(depth + 2)
            dep_chain = self._parent.get_dep_chain()
            if dep_chain:
                for dep in dep_chain:
                    dep.dump_me(depth + 2)
        if hasattr(self, '_play') and self._play:
            self._play.dump_me(depth + 2)

    def preprocess_data(self, ds):
        ''' infrequently used method to do some pre-processing of legacy terms '''
        return ds

    def load_data(self, ds, variable_manager=None, loader=None):
        ''' walk the input datastructure and assign any values '''

        if ds is None:
            raise AnsibleAssertionError(
                'ds (%s) should not be None but it is.' % ds)

        # cache the datastructure internally
        setattr(self, '_ds', ds)

        # the variable manager class is used to manage and merge variables
        # down to a single dictionary for reference in templating, etc.
        self._variable_manager = variable_manager

        # the data loader class is used to parse data from strings and files
        if loader is not None:
            self._loader = loader
        else:
            self._loader = DataLoader()

        # call the preprocess_data() function to massage the data into
        # something we can more easily parse, and then call the validation
        # function on it to ensure there are no incorrect key values
        ds = self.preprocess_data(ds)
        self._validate_attributes(ds)

        # Walk all attributes in the class. We sort them based on their priority
        # so that certain fields can be loaded before others, if they are dependent.
        for name, attr in sorted(iteritems(self._valid_attrs),
                                 key=operator.itemgetter(1)):
            # copy the value over unless a _load_field method is defined
            target_name = name
            if name in self._alias_attrs:
                target_name = self._alias_attrs[name]
            if name in ds:
                method = getattr(self, '_load_%s' % name, None)
                if method:
                    self._attributes[target_name] = method(name, ds[name])
                else:
                    self._attributes[target_name] = ds[name]

        # run early, non-critical validation
        self.validate()

        # return the constructed object
        return self

    def get_ds(self):
        try:
            return getattr(self, '_ds')
        except AttributeError:
            return None

    def get_loader(self):
        return self._loader

    def get_variable_manager(self):
        return self._variable_manager

    def _post_validate_debugger(self, attr, value, templar):
        value = templar.template(value)
        valid_values = frozenset(
            ('always', 'on_failed', 'on_unreachable', 'on_skipped', 'never'))
        if value and isinstance(value,
                                string_types) and value not in valid_values:
            raise AnsibleParserError(
                "'%s' is not a valid value for debugger. Must be one of %s" %
                (value, ', '.join(valid_values)),
                obj=self.get_ds())
        return value

    def _validate_attributes(self, ds):
        '''
        Ensures that there are no keys in the datastructure which do
        not map to attributes for this object.
        '''

        valid_attrs = frozenset(self._valid_attrs.keys())
        for key in ds:
            if key not in valid_attrs:
                raise AnsibleParserError(
                    "'%s' is not a valid attribute for a %s" %
                    (key, self.__class__.__name__),
                    obj=ds)

    def validate(self, all_vars=None):
        ''' validation that is done at parse time, not load time '''
        all_vars = {} if all_vars is None else all_vars

        if not self._validated:
            # walk all fields in the object
            for (name, attribute) in iteritems(self._valid_attrs):

                if name in self._alias_attrs:
                    name = self._alias_attrs[name]

                # run validator only if present
                method = getattr(self, '_validate_%s' % name, None)
                if method:
                    method(attribute, name, getattr(self, name))
                else:
                    # and make sure the attribute is of the type it should be
                    value = self._attributes[name]
                    if value is not None:
                        if attribute.isa == 'string' and isinstance(
                                value, (list, dict)):
                            raise AnsibleParserError(
                                "The field '%s' is supposed to be a string type,"
                                " however the incoming data structure is a %s"
                                % (name, type(value)),
                                obj=self.get_ds())

        self._validated = True

    def _load_module_defaults(self, name, value):
        if value is None:
            return

        if not isinstance(value, list):
            value = [value]

        validated_module_defaults = []
        for defaults_dict in value:
            if not isinstance(defaults_dict, dict):
                raise AnsibleParserError(
                    "The field 'module_defaults' is supposed to be a dictionary or list of dictionaries, "
                    "the keys of which must be static action, module, or group names. Only the values may contain "
                    "templates. For example: {'ping': \"{{ ping_defaults }}\"}"
                )

            validated_defaults_dict = {}
            for defaults_entry, defaults in defaults_dict.items():
                # module_defaults do not use the 'collections' keyword, so actions and
                # action_groups that are not fully qualified are part of the 'ansible.legacy'
                # collection. Update those entries here, so module_defaults contains
                # fully qualified entries.
                if defaults_entry.startswith('group/'):
                    group_name = defaults_entry.split('group/')[-1]

                    # The resolved action_groups cache is saved on the current Play
                    if self.play is not None:
                        group_name, dummy = self._resolve_group(group_name)

                    defaults_entry = 'group/' + group_name
                    validated_defaults_dict[defaults_entry] = defaults

                else:
                    action_names = []
                    if len(defaults_entry.split('.')) < 3:
                        defaults_entry = 'ansible.legacy.' + defaults_entry

                    resolved_action = self._resolve_action(defaults_entry)
                    if resolved_action:
                        validated_defaults_dict[resolved_action] = defaults

                    # If the defaults_entry is an ansible.legacy plugin, these defaults
                    # are inheritable by the 'ansible.builtin' subset, but are not
                    # required to exist.
                    if defaults_entry.startswith('ansible.legacy.'):
                        resolved_action = self._resolve_action(
                            defaults_entry.replace('ansible.legacy.',
                                                   'ansible.builtin.'),
                            mandatory=False)
                        if resolved_action:
                            validated_defaults_dict[resolved_action] = defaults

            validated_module_defaults.append(validated_defaults_dict)

        return validated_module_defaults

    @property
    def play(self):
        if hasattr(self, '_play'):
            play = self._play
        elif hasattr(self, '_parent') and hasattr(self._parent, '_play'):
            play = self._parent._play
        else:
            play = self

        if play.__class__.__name__ != 'Play':
            # Should never happen, but handle gracefully by returning None, just in case
            return None

        return play

    def _resolve_group(self, fq_group_name, mandatory=True):
        if not AnsibleCollectionRef.is_valid_fqcr(fq_group_name):
            collection_name = 'ansible.builtin'
            fq_group_name = collection_name + '.' + fq_group_name
        else:
            collection_name = '.'.join(fq_group_name.split('.')[0:2])

        # Check if the group has already been resolved and cached
        if fq_group_name in self.play._group_actions:
            return fq_group_name, self.play._group_actions[fq_group_name]

        try:
            action_groups = _get_collection_metadata(collection_name).get(
                'action_groups', {})
        except ValueError:
            if not mandatory:
                display.vvvvv(
                    "Error loading module_defaults: could not resolve the module_defaults group %s"
                    % fq_group_name)
                return fq_group_name, []

            raise AnsibleParserError(
                "Error loading module_defaults: could not resolve the module_defaults group %s"
                % fq_group_name)

        # The collection may or may not use the fully qualified name
        # Don't fail if the group doesn't exist in the collection
        resource_name = fq_group_name.split(collection_name + '.')[-1]
        action_group = action_groups.get(fq_group_name,
                                         action_groups.get(resource_name))
        if action_group is None:
            if not mandatory:
                display.vvvvv(
                    "Error loading module_defaults: could not resolve the module_defaults group %s"
                    % fq_group_name)
                return fq_group_name, []
            raise AnsibleParserError(
                "Error loading module_defaults: could not resolve the module_defaults group %s"
                % fq_group_name)

        resolved_actions = []
        include_groups = []

        found_group_metadata = False
        for action in action_group:
            # Everything should be a string except the metadata entry
            if not isinstance(action, string_types):
                _validate_action_group_metadata(action, found_group_metadata,
                                                fq_group_name)

                if isinstance(action['metadata'], dict):
                    found_group_metadata = True

                    include_groups = action['metadata'].get('extend_group', [])
                    if isinstance(include_groups, string_types):
                        include_groups = [include_groups]
                    if not isinstance(include_groups, list):
                        # Bad entries may already have produced a warning above; reset to the acceptable type to prevent tracebacks.
                        include_groups = []
                continue

            # The collection may or may not use the fully qualified name.
            # If not, it's part of the current collection.
            if not AnsibleCollectionRef.is_valid_fqcr(action):
                action = collection_name + '.' + action
            resolved_action = self._resolve_action(action, mandatory=False)
            if resolved_action:
                resolved_actions.append(resolved_action)

        for action in resolved_actions:
            if action not in self.play._action_groups:
                self.play._action_groups[action] = []
            self.play._action_groups[action].append(fq_group_name)

        self.play._group_actions[fq_group_name] = resolved_actions

        # Resolve extended groups last, after caching the group in case they recursively refer to each other
        for include_group in include_groups:
            if not AnsibleCollectionRef.is_valid_fqcr(include_group):
                include_group_collection = collection_name
                include_group = collection_name + '.' + include_group
            else:
                include_group_collection = '.'.join(
                    include_group.split('.')[0:2])

            dummy, group_actions = self._resolve_group(include_group,
                                                       mandatory=False)

            for action in group_actions:
                if action not in self.play._action_groups:
                    self.play._action_groups[action] = []
                self.play._action_groups[action].append(fq_group_name)

            self.play._group_actions[fq_group_name].extend(group_actions)
            resolved_actions.extend(group_actions)

        return fq_group_name, resolved_actions

    def _resolve_action(self, action_name, mandatory=True):
        context = action_loader.find_plugin_with_context(action_name)
        if not context.resolved:
            context = module_loader.find_plugin_with_context(action_name)

        if context.resolved:
            return context.resolved_fqcn
        if mandatory:
            raise AnsibleParserError(
                "Could not resolve action %s in module_defaults" % action_name)
        display.vvvvv("Could not resolve action %s in module_defaults" %
                      action_name)

    def squash(self):
        '''
        Evaluates all attributes and sets them to the evaluated version,
        so that all future accesses of attributes do not need to evaluate
        parent attributes.
        '''
        if not self._squashed:
            for name in self._valid_attrs.keys():
                self._attributes[name] = getattr(self, name)
            self._squashed = True

    def copy(self):
        '''
        Create a copy of this object and return it.
        '''

        try:
            new_me = self.__class__()
        except RuntimeError as e:
            raise AnsibleError(
                "Exceeded maximum object depth. This may have been caused by excessive role recursion",
                orig_exc=e)

        for name in self._valid_attrs.keys():
            if name in self._alias_attrs:
                continue
            new_me._attributes[name] = shallowcopy(self._attributes[name])
            new_me._attr_defaults[name] = shallowcopy(
                self._attr_defaults[name])

        new_me._loader = self._loader
        new_me._variable_manager = self._variable_manager
        new_me._validated = self._validated
        new_me._finalized = self._finalized
        new_me._uuid = self._uuid

        # if the ds value was set on the object, copy it to the new copy too
        if hasattr(self, '_ds'):
            new_me._ds = self._ds

        return new_me

    def get_validated_value(self, name, attribute, value, templar):
        if attribute.isa == 'string':
            value = to_text(value)
        elif attribute.isa == 'int':
            value = int(value)
        elif attribute.isa == 'float':
            value = float(value)
        elif attribute.isa == 'bool':
            value = boolean(value, strict=True)
        elif attribute.isa == 'percent':
            # special value, which may be an integer or float
            # with an optional '%' at the end
            if isinstance(value, string_types) and '%' in value:
                value = value.replace('%', '')
            value = float(value)
        elif attribute.isa == 'list':
            if value is None:
                value = []
            elif not isinstance(value, list):
                value = [value]
            if attribute.listof is not None:
                for item in value:
                    if not isinstance(item, attribute.listof):
                        raise AnsibleParserError(
                            "the field '%s' should be a list of %s, "
                            "but the item '%s' is a %s" %
                            (name, attribute.listof, item, type(item)),
                            obj=self.get_ds())
                    elif attribute.required and attribute.listof == string_types:
                        if item is None or item.strip() == "":
                            raise AnsibleParserError(
                                "the field '%s' is required, and cannot have empty values"
                                % (name, ),
                                obj=self.get_ds())
        elif attribute.isa == 'set':
            if value is None:
                value = set()
            elif not isinstance(value, (list, set)):
                if isinstance(value, string_types):
                    value = value.split(',')
                else:
                    # Making a list like this handles strings of
                    # text and bytes properly
                    value = [value]
            if not isinstance(value, set):
                value = set(value)
        elif attribute.isa == 'dict':
            if value is None:
                value = dict()
            elif not isinstance(value, dict):
                raise TypeError("%s is not a dictionary" % value)
        elif attribute.isa == 'class':
            if not isinstance(value, attribute.class_type):
                raise TypeError("%s is not a valid %s (got a %s instead)" %
                                (name, attribute.class_type, type(value)))
            value.post_validate(templar=templar)
        return value
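
Two of the coercion branches above are easy to verify by hand with plain Python (no Ansible objects needed):

# isa == 'percent': strip an optional '%' and coerce to float
value = '80%'
if isinstance(value, str) and '%' in value:
    value = value.replace('%', '')
print(float(value))  # 80.0

# isa == 'list': None becomes [], a scalar is wrapped in a list
value = 'single'
value = [] if value is None else (value if isinstance(value, list) else [value])
print(value)  # ['single']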

    def post_validate(self, templar):
        '''
        we can't tell that everything is of the right type until we have
        all the variables.  Run basic types (from isa) as well as
        any _post_validate_<foo> functions.
        '''

        # save the omit value for later checking
        omit_value = templar.available_variables.get('omit')

        for (name, attribute) in iteritems(self._valid_attrs):

            if attribute.static:
                value = getattr(self, name)

                # we don't template 'vars', but we do allow templates as values for later use
                if name not in ('vars', ) and templar.is_template(value):
                    display.warning(
                        '"%s" is not templatable, but we found: %s, '
                        'it will not be templated and will be used "as is".' %
                        (name, value))
                continue

            if getattr(self, name) is None:
                if not attribute.required:
                    continue
                else:
                    raise AnsibleParserError(
                        "the field '%s' is required but was not set" % name)
            elif not attribute.always_post_validate and self.__class__.__name__ not in (
                    'Task', 'Handler', 'PlayContext'):
                # Intermediate objects like Play() won't have their fields validated by
                # default, as their values are often inherited by other objects and validated
                # later, so we don't want them to fail out early
                continue

            try:
                # Run the post-validator if present. These methods are responsible for
                # using the given templar to template the values, if required.
                method = getattr(self, '_post_validate_%s' % name, None)
                if method:
                    value = method(attribute, getattr(self, name), templar)
                elif attribute.isa == 'class':
                    value = getattr(self, name)
                else:
                    # if the attribute contains a variable, template it now
                    value = templar.template(getattr(self, name))

                # if this evaluated to the omit value, set the value back to
                # the default specified in the FieldAttribute and move on
                if omit_value is not None and value == omit_value:
                    if callable(attribute.default):
                        setattr(self, name, attribute.default())
                    else:
                        setattr(self, name, attribute.default)
                    continue

                # and make sure the attribute is of the type it should be
                if value is not None:
                    value = self.get_validated_value(name, attribute, value,
                                                     templar)

                # and assign the massaged value back to the attribute field
                setattr(self, name, value)
            except (TypeError, ValueError) as e:
                value = getattr(self, name)
                raise AnsibleParserError(
                    "the field '%s' has an invalid value (%s), and could not be converted to an %s. "
                    "The error was: %s" % (name, value, attribute.isa, e),
                    obj=self.get_ds(),
                    orig_exc=e)
            except (AnsibleUndefinedVariable, UndefinedError) as e:
                if templar._fail_on_undefined_errors and name != 'name':
                    if name == 'args':
                        msg = "The task includes an option with an undefined variable. The error was: %s" % (
                            to_native(e))
                    else:
                        msg = "The field '%s' has an invalid value, which includes an undefined variable. The error was: %s" % (
                            name, to_native(e))
                    raise AnsibleParserError(msg,
                                             obj=self.get_ds(),
                                             orig_exc=e)

        self._finalized = True

    def _load_vars(self, attr, ds):
        '''
        Vars in a play can be specified either as a dictionary directly, or
        as a list of dictionaries. If the latter, this method will turn the
        list into a single dictionary.
        '''
        def _validate_variable_keys(ds):
            for key in ds:
                if not isidentifier(key):
                    raise TypeError("'%s' is not a valid variable name" % key)

        try:
            if isinstance(ds, dict):
                _validate_variable_keys(ds)
                return combine_vars(self.vars, ds)
            elif isinstance(ds, list):
                all_vars = self.vars
                for item in ds:
                    if not isinstance(item, dict):
                        raise ValueError
                    _validate_variable_keys(item)
                    all_vars = combine_vars(all_vars, item)
                return all_vars
            elif ds is None:
                return {}
            else:
                raise ValueError
        except ValueError as e:
            raise AnsibleParserError(
                "Vars in a %s must be specified as a dictionary, or a list of dictionaries"
                % self.__class__.__name__,
                obj=ds,
                orig_exc=e)
        except TypeError as e:
            raise AnsibleParserError(
                "Invalid variable name in vars specified for %s: %s" %
                (self.__class__.__name__, e),
                obj=ds,
                orig_exc=e)
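
Both accepted shapes collapse to a single dictionary; a plain-Python sketch of the folding (combine_vars is simplified here to a dict update, an assumption about its default merge behaviour):

def combine_vars(a, b):
    # simplified stand-in for ansible's combine_vars (replace semantics)
    merged = dict(a)
    merged.update(b)
    return merged

# dict form
print(combine_vars({}, {'x': 1}))  # {'x': 1}

# list-of-dicts form, folded left to right as in _load_vars() above
all_vars = {}
for item in [{'x': 1}, {'y': 2}, {'x': 3}]:
    all_vars = combine_vars(all_vars, item)
print(all_vars)  # {'x': 3, 'y': 2}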

    def _extend_value(self, value, new_value, prepend=False):
        '''
        Extend the given value with new_value, turning both into lists
        if they are not already. Adjacent duplicate values are collapsed
        (via itertools.groupby below) and None entries are dropped.
        '''

        if not isinstance(value, list):
            value = [value]
        if not isinstance(new_value, list):
            new_value = [new_value]

        # Due to where _extend_value may run for some attributes
        # it is possible to end up with Sentinel in the list of values
        # ensure we strip them
        value = [v for v in value if v is not Sentinel]
        new_value = [v for v in new_value if v is not Sentinel]

        if prepend:
            combined = new_value + value
        else:
            combined = value + new_value

        return [i for i, _ in itertools.groupby(combined) if i is not None]
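
A quick plain-Python check of the combine-and-dedupe behaviour above; note that itertools.groupby only collapses adjacent duplicates:

import itertools

value, new_value = ['a', 'b'], ['b', 'c']
combined = value + new_value  # prepend=False
print([i for i, _ in itertools.groupby(combined) if i is not None])
# ['a', 'b', 'c'] -- adjacent duplicates collapsed, order preserved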

    def dump_attrs(self):
        '''
        Dumps all attributes to a dictionary
        '''
        attrs = {}
        for (name, attribute) in iteritems(self._valid_attrs):
            attr = getattr(self, name)
            if attribute.isa == 'class' and hasattr(attr, 'serialize'):
                attrs[name] = attr.serialize()
            else:
                attrs[name] = attr
        return attrs

    def from_attrs(self, attrs):
        '''
        Loads attributes from a dictionary
        '''
        for (attr, value) in iteritems(attrs):
            if attr in self._valid_attrs:
                attribute = self._valid_attrs[attr]
                if attribute.isa == 'class' and isinstance(value, dict):
                    obj = attribute.class_type()
                    obj.deserialize(value)
                    setattr(self, attr, obj)
                else:
                    setattr(self, attr, value)

        # from_attrs is only used to create a finalized task
        # from attrs from the Worker/TaskExecutor
        # Those attrs are finalized and squashed in the TE
        # and controller side use needs to reflect that
        self._finalized = True
        self._squashed = True

    def serialize(self):
        '''
        Serializes the object derived from the base object into
        a dictionary of values. This only serializes the field
        attributes for the object, so this may need to be overridden
        for any classes which wish to add additional items not stored
        as field attributes.
        '''

        repr = self.dump_attrs()

        # serialize the uuid field
        repr['uuid'] = self._uuid
        repr['finalized'] = self._finalized
        repr['squashed'] = self._squashed

        return repr

    def deserialize(self, data):
        '''
        Given a dictionary of values, load up the field attributes for
        this object. As with serialize(), if there are any non-field
        attribute data members, this method will need to be overridden
        and extended.
        '''

        if not isinstance(data, dict):
            raise AnsibleAssertionError(
                'data (%s) should be a dict but is a %s' % (data, type(data)))

        for (name, attribute) in iteritems(self._valid_attrs):
            if name in data:
                setattr(self, name, data[name])
            else:
                if callable(attribute.default):
                    setattr(self, name, attribute.default())
                else:
                    setattr(self, name, attribute.default)

        # restore the UUID field
        setattr(self, '_uuid', data.get('uuid'))
        self._finalized = data.get('finalized', False)
        self._squashed = data.get('squashed', False)
Example #11
class LookupBase(with_metaclass(ABCMeta, object)):
    def __init__(self, loader=None, templar=None, **kwargs):
        self._loader = loader
        self._templar = templar
        # Backwards compat: self._display isn't really needed, just import the global display and use that.
        self._display = display

    def get_basedir(self, variables):
        if 'role_path' in variables:
            return variables['role_path']
        else:
            return self._loader.get_basedir()

    @staticmethod
    def _flatten(terms):
        ret = []
        for term in terms:
            if isinstance(term, (list, tuple)):
                ret.extend(term)
            else:
                ret.append(term)
        return ret

    @staticmethod
    def _combine(a, b):
        results = []
        for x in a:
            for y in b:
                results.append(LookupBase._flatten([x, y]))
        return results
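
_combine() pairs every element of a with every element of b and flattens each pair, i.e. a cartesian product:

from ansible.plugins.lookup import LookupBase  # assumption: the class above lives here

print(LookupBase._combine([[1], [2]], ['a', 'b']))
# [[1, 'a'], [1, 'b'], [2, 'a'], [2, 'b']]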

    @staticmethod
    def _flatten_hash_to_list(terms):
        ret = []
        for key in terms:
            ret.append({'key': key, 'value': terms[key]})
        return ret

    @abstractmethod
    def run(self, terms, variables=None, **kwargs):
        """
        When the playbook specifies a lookup, this method is run.  The
        arguments to the lookup become the arguments to this method.  One
        additional keyword argument named ``variables`` is added to the method
        call.  It contains the variables available to ansible at the time the
        lookup is templated.  For instance::

            "{{ lookup('url', 'https://toshio.fedorapeople.org/one.txt', validate_certs=True) }}"

        would end up calling the lookup plugin named url's run method like this::

            run(['https://toshio.fedorapeople.org/one.txt'], variables=available_variables, validate_certs=True)

        Lookup plugins can be used within playbooks for looping.  When this
        happens, the first argument is a list containing the terms.  Lookup
        plugins can also be called from within playbooks to return their
        values into a variable or parameter.  If the user passes a string in
        this case, it is converted into a list.

        Errors encountered during execution should be returned by raising
        AnsibleError() with a message describing the error.

        Any strings returned by this method that could ever contain non-ascii
        must be converted into python's unicode type as the strings will be run
        through jinja2 which has this requirement.  You can use::

            from ansible.module_utils._text import to_text
            result_string = to_text(result_string)
        """
        pass

    def find_file_in_search_path(self,
                                 myvars,
                                 subdir,
                                 needle,
                                 ignore_missing=False):
        '''
        Return a file (needle) in the task's expected search path.
        '''

        if 'ansible_search_path' in myvars:
            paths = myvars['ansible_search_path']
        else:
            paths = self.get_basedir(myvars)

        result = self._loader.path_dwim_relative_stack(paths, subdir, needle)
        if result is None and not ignore_missing:
            self._display.warning("Unable to find '%s' in expected paths." %
                                  needle)

        return result
Example #12
class CliconfBase(with_metaclass(ABCMeta, object)):
    """
    A base class for implementing cli connections

    .. note:: Unlike most of Ansible, nearly all strings in
        :class:`CliconfBase` plugins are byte strings.  This is because of
        how close to the underlying platform these plugins operate.  Remember
        to mark literal strings as byte string (``b"string"``) and to use
        :func:`~ansible.module_utils._text.to_bytes` and
        :func:`~ansible.module_utils._text.to_text` to avoid unexpected
        problems.

    List of supported rpc's:
        :get_config: Retrieves the specified configuration from the device
        :edit_config: Loads the specified commands into the remote device
        :get: Execute specified command on remote device
        :get_capabilities: Retrieves device information and supported rpc methods
        :commit: Load configuration from candidate to running
        :discard_changes: Discard changes to candidate datastore

    Note: List of supported rpc's for remote device can be extracted from
          output of get_capabilities()

    :returns: Returns output received from remote device as byte string

            Usage:
            from ansible.module_utils.connection import Connection

            conn = Connection()
            conn.get('show lldp neighbors detail')
            conn.get_config('running')
            conn.edit_config(['hostname test', 'netconf ssh'])
    """
    def __init__(self, connection):
        self._connection = connection

    def _alarm_handler(self, signum, frame):
        raise AnsibleConnectionFailure(
            'timeout waiting for command to complete')

    def send_command(self, command, prompt=None, answer=None, sendonly=False):
        """Executes a cli command and returns the results
        This method will execute the CLI command on the connection and return
        the results to the caller.  The command output will be returned as a
        string
        """
        timeout = self._connection._play_context.timeout or 30
        signal.signal(signal.SIGALRM, self._alarm_handler)
        signal.alarm(timeout)
        display.display("command: %s" % command, log_only=True)
        resp = self._connection.send(command, prompt, answer, sendonly)
        signal.alarm(0)
        return resp
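
The SIGALRM pattern used above is the classic Unix way to bound a blocking call; a minimal self-contained sketch (main thread only, as SIGALRM requires; names are illustrative):

import signal

class CommandTimeout(Exception):
    pass

def _alarm_handler(signum, frame):
    raise CommandTimeout('timeout waiting for command to complete')

signal.signal(signal.SIGALRM, _alarm_handler)
signal.alarm(2)  # arm a 2-second alarm
try:
    result = sum(range(1000))  # stand-in for the blocking send() call
finally:
    signal.alarm(0)  # always disarm, even on error
print(result)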

    def get_prompt(self):
        """Returns the current prompt from the device"""
        return self._connection._matched_prompt

    def get_base_rpc(self):
        """Returns list of base rpc method supported by remote device"""
        return ['get_config', 'edit_config', 'get_capabilities', 'get']

    @abstractmethod
    def get_config(self, source='running', format='text'):
        """Retrieves the specified configuration from the device
        This method will retrieve the configuration specified by source and
        return it to the caller as a string.  Subsequent calls to this method
        will retrieve a new configuration from the device
        :args:
            arg[0] source: Datastore from which configuration should be retrieved eg: running/candidate/startup. (optional)
                           default is running.
            arg[1] format: Output format in which configuration is retrieved
                           Note: Specified datastore should be supported by remote device.
        :kwargs:
          Keywords supported
            :command: the command string to execute
            :source: Datastore from which configuration should be retrieved
            :format: Output format in which configuration is retrieved
        :returns: Returns output received from remote device as byte string
        """
        pass

    @abstractmethod
    def edit_config(self, commands):
        """Loads the specified commands into the remote device
        This method will load the commands into the remote device.  This
        method will make sure the device is in the proper context before
        send the commands (eg config mode)
        :args:
            arg[0] command: List of configuration commands
        :kwargs:
          Keywords supported
            :command: the command string to execute
        :returns: Returns output received from remote device as byte string
        """
        pass

    @abstractmethod
    def get(self, *args, **kwargs):
        """Execute specified command on remote device
        This method will retrieve the specified data and
        return it to the caller as a string.
        :args:
             arg[0] command: command in string format to be executed on remote device
             arg[1] prompt: the expected prompt generated by executing command.
                            This can be a string or a list of strings (optional)
             arg[2] answer: the string to respond to the prompt with (optional)
             arg[3] sendonly: bool to disable waiting for response, default is false (optional)
        :kwargs:
            :command: the command string to execute
            :prompt: the expected prompt generated by executing command.
                     This can be a string or a list of strings
            :answer: the string to respond to the prompt with
            :sendonly: bool to disable waiting for response
        :returns: Returns output received from remote device as byte string
        """
        pass

    @abstractmethod
    def get_capabilities(self):
        """Retrieves device information and supported
        rpc methods by device platform and returns the result
        as a string
        :returns: Returns output received from remote device as byte string
        """
        pass

    def commit(self, comment=None):
        """Commit configuration changes"""
        return self._connection.method_not_found(
            "commit is not supported by network_os %s" %
            self._connection._play_context.network_os)

    def discard_changes(self):
        "Discard changes in candidate datastore"
        return self._connection.method_not_found(
            "discard_changes is not supported by network_os %s" %
            self._play_context.network_os)

    def put_file(self, source, destination):
        """Copies file over scp to remote device"""
        pass

    def fetch_file(self, source, destination):
        """Fetch file over scp from remote device"""
        pass
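
A minimal sketch of a concrete platform plugin built on the base class above. The class name and CLI commands are illustrative, not tied to any real network_os; it assumes the base class is named CliconfBase (as in the later examples) and that `json` is imported:

class SketchCliconf(CliconfBase):
    """Illustrative subclass satisfying the four abstract rpc methods."""

    def get_config(self, source='running', format='text'):
        # Map the datastore name onto a hypothetical platform show command.
        cmd = 'show running-config' if source == 'running' else 'show startup-config'
        return self.send_command(cmd)

    def edit_config(self, commands):
        # Enter config mode, push each line, then leave config mode.
        responses = [self.send_command('configure terminal')]
        responses.extend(self.send_command(cmd) for cmd in commands)
        responses.append(self.send_command('end'))
        return responses

    def get(self, command, prompt=None, answer=None, sendonly=False):
        return self.send_command(command, prompt=prompt, answer=answer, sendonly=sendonly)

    def get_capabilities(self):
        return json.dumps({'rpc': self.get_base_rpc(), 'network_api': 'cliconf'})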
Example #13
0
class YumDnf(with_metaclass(ABCMeta, object)):
    """
    Abstract class that handles the population of instance variables that should
    be identical between both YUM and DNF modules because of the feature parity
    and shared argument spec
    """
    def __init__(self, module):

        self.module = module

        self.allow_downgrade = self.module.params['allow_downgrade']
        self.autoremove = self.module.params['autoremove']
        self.bugfix = self.module.params['bugfix']
        self.conf_file = self.module.params['conf_file']
        self.disable_excludes = self.module.params['disable_excludes']
        self.disable_gpg_check = self.module.params['disable_gpg_check']
        self.disable_plugin = self.module.params['disable_plugin']
        self.disablerepo = self.module.params.get('disablerepo', [])
        self.download_only = self.module.params['download_only']
        self.enable_plugin = self.module.params['enable_plugin']
        self.enablerepo = self.module.params.get('enablerepo', [])
        self.exclude = self.module.params['exclude']
        self.installroot = self.module.params['installroot']
        self.install_repoquery = self.module.params['install_repoquery']
        self.list = self.module.params['list']
        self.names = [p.strip() for p in self.module.params['name']]
        self.releasever = self.module.params['releasever']
        self.security = self.module.params['security']
        self.skip_broken = self.module.params['skip_broken']
        self.state = self.module.params['state']
        self.update_only = self.module.params['update_only']
        self.update_cache = self.module.params['update_cache']
        self.validate_certs = self.module.params['validate_certs']

        # It's possible someone passed a comma separated string since it used
        # to be a string type, so we should handle that
        self.names = self.listify_comma_sep_strings_in_list(self.names)
        self.disablerepo = self.listify_comma_sep_strings_in_list(
            self.disablerepo)
        self.enablerepo = self.listify_comma_sep_strings_in_list(
            self.enablerepo)
        self.exclude = self.listify_comma_sep_strings_in_list(self.exclude)

    def listify_comma_sep_strings_in_list(self, some_list):
        """
        Accept a list of strings, find any entries that are themselves
        comma-separated, remove them from the list, and append their
        split elements to the end of the original list
        """
        new_list = []
        remove_from_original_list = []
        for element in some_list:
            if ',' in element:
                remove_from_original_list.append(element)
                new_list.extend([e.strip() for e in element.split(',')])

        for element in remove_from_original_list:
            some_list.remove(element)

        some_list.extend(new_list)

        return some_list

    @abstractmethod
    def run(self):
        raise NotImplementedError
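
For illustration, a standalone re-statement of the normalization above; note that the split parts of comma-separated entries end up appended after the plain entries, so relative order changes:

def flatten_comma_separated(items):
    """Mirror of listify_comma_sep_strings_in_list: comma-separated entries
    are removed and their stripped parts appended to the end of the list."""
    extracted, plain = [], []
    for element in items:
        if ',' in element:
            extracted.extend(e.strip() for e in element.split(','))
        else:
            plain.append(element)
    return plain + extracted

# ['httpd,mod_ssl', 'vim'] -> ['vim', 'httpd', 'mod_ssl']
assert flatten_comma_separated(['httpd,mod_ssl', 'vim']) == ['vim', 'httpd', 'mod_ssl']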
Example #14
0
class CliconfBase(with_metaclass(ABCMeta, object)):
    """
    A base class for implementing cli connections

    .. note:: String inputs to :meth:`send_command` will be cast to byte strings
         within this method and as such are not required to be made byte strings
         beforehand.  Please avoid using literal byte strings (``b'string'``) in
         :class:`CliconfBase` plugins as this can lead to unexpected errors when
         running on Python 3

    List of supported rpc's:
        :get_config: Retrieves the specified configuration from the device
        :edit_config: Loads the specified commands into the remote device
        :get: Execute specified command on remote device
        :get_capabilities: Retrieves device information and supported rpc methods
        :commit: Load configuration from candidate to running
        :discard_changes: Discard changes to candidate datastore

    Note: List of supported rpc's for remote device can be extracted from
          output of get_capabilities()

    :returns: Returns output received from remote device as byte string

            Usage:
            from ansible.module_utils.connection import Connection

            conn = Connection()
            conn.get('show lldp neighbors detail')
            conn.get_config('running')
            conn.edit_config(['hostname test', 'netconf ssh'])
    """

    __rpc__ = [
        'get_config', 'edit_config', 'get_capabilities', 'get',
        'enable_response_logging', 'disable_response_logging'
    ]

    def __init__(self, connection):
        self._connection = connection
        self.history = list()
        self.response_logging = False

    def _alarm_handler(self, signum, frame):
        """Alarm handler raised in case of command timeout """
        display.display('closing shell due to command timeout (%s seconds).' %
                        self._connection._play_context.timeout,
                        log_only=True)
        self.close()

    def send_command(self,
                     command,
                     prompt=None,
                     answer=None,
                     sendonly=False,
                     newline=True,
                     prompt_retry_check=False):
        """Executes a command over the device connection

        This method will execute a command over the device connection and
        return the results to the caller.  This method also records the command
        and its response in the history, redacted unless response logging is enabled.

        :param command: The command to send over the connection to the device
        :param prompt: A regex pattern to evaluate the expected prompt from the command
        :param answer: The answer to respond with if the prompt is matched.
        :param sendonly: Bool value that will send the command but not wait for a result.
        :param newline: Bool value that will append the newline character to the command
        :param prompt_retry_check: Bool value for trying to detect more prompts

        :returns: The output from the device after executing the command
        """
        kwargs = {
            'command': to_bytes(command),
            'sendonly': sendonly,
            'newline': newline,
            'prompt_retry_check': prompt_retry_check
        }

        if prompt is not None:
            kwargs['prompt'] = to_bytes(prompt)
        if answer is not None:
            kwargs['answer'] = to_bytes(answer)

        resp = self._connection.send(**kwargs)

        if not self.response_logging:
            self.history.append(('*****', '*****'))
        else:
            self.history.append((kwargs['command'], resp))

        return resp

    def get_base_rpc(self):
        """Returns list of base rpc method supported by remote device"""
        return self.__rpc__

    def get_history(self):
        """ Returns the history file for all commands

        This will return a log of all the commands that have been sent to
        the device and all of the output received.  By default, all commands
        and output will be redacted unless explicitly configured otherwise.

        :return: An ordered list of command, output pairs
        """
        return self.history

    def reset_history(self):
        """ Resets the history of run commands
        :return: None
        """
        self.history = list()

    def enable_response_logging(self):
        """Enable logging command response"""
        self.response_logging = True

    def disable_response_logging(self):
        """Disable logging command response"""
        self.response_logging = False

    @abstractmethod
    def get_config(self, source='running', filter=None, format='text'):
        """Retrieves the specified configuration from the device

        This method will retrieve the configuration specified by source and
        return it to the caller as a string.  Subsequent calls to this method
        will retrieve a new configuration from the device

        :param source: The configuration source to return from the device.
            This argument accepts either `running` or `startup` as valid values.

        :param filter: For devices that support configuration filtering, this
            keyword argument is used to filter the returned configuration.
            The use of this keyword argument is device dependent and will be
            silently ignored on devices that do not support it.

        :param format: For devices that support fetching different configuration
            format, this keyword argument is used to specify the format in which
            configuration is to be retrieved.

        :return: The device configuration as specified by the source argument.
        """
        pass

    @abstractmethod
    def edit_config(self, candidate, check_mode=False, replace=None):
        """Loads the candidate configuration into the network device

        This method will load the specified candidate config into the device
        and merge with the current configuration unless replace is set to
        True.  If the device does not support config replace, an error
        is returned.

        :param candidate: The configuration to load into the device and merge
            with the current running configuration

        :param check_mode: Boolean value that indicates if the device candidate
            configuration should be pushed to the running configuration or discarded.

        :param replace: Specifies the way in which provided config value should replace
            the configuration running on the remote device. If the device
            doesn't support config replace, an error is returned.

        :return: Returns response of executing the configuration command received
             from remote host
        """
        pass

    @abstractmethod
    def get(self,
            command,
            prompt=None,
            answer=None,
            sendonly=False,
            newline=True):
        """Execute specified command on remote device
        This method will retrieve the specified data and
        return it to the caller as a string.
        :param command: command in string format to be executed on remote device
        :param prompt: the expected prompt generated by executing command, this can
                       be a string or a list of strings
        :param answer: the string to respond to the prompt with
        :param sendonly: bool to disable waiting for response, default is false
        :param newline: bool to indicate if newline should be added at end of answer or not
        :return: The output received from the remote device
        """
        pass

    @abstractmethod
    def get_capabilities(self):
        """Returns the basic capabilities of the network device
        This method will provide some basic facts about the device and
        what capabilities it has to modify the configuration.  The minimum
        return from this method takes the following format.
        eg:
            {

                'rpc': [list of supported rpcs],
                'network_api': <str>,            # the name of the transport
                'device_info': {
                    'network_os': <str>,
                    'network_os_version': <str>,
                    'network_os_model': <str>,
                    'network_os_hostname': <str>,
                    'network_os_image': <str>,
                    'network_os_platform': <str>,
                },
                'device_operations': {
                    'supports_replace': <bool>,             # identify if config replace is supported
                    'supports_commit': <bool>,              # identify if commit is supported by device or not
                    'supports_rollback': <bool>,            # identify if rollback is supported or not
                    'supports_defaults': <bool>,            # identify if fetching running config with defaults is supported
                    'supports_commit_comment': <bool>,      # identify if adding a comment to commit is supported or not
                    'supports_onbox_diff': <bool>,          # identify if on-box diff capability is supported or not
                    'supports_generate_diff': <bool>,       # identify if diff capability is supported within plugin
                    'supports_multiline_delimiter': <bool>, # identify if multiline delimiter is supported within config
                    'support_match': <bool>,                # identify if match is supported
                    'support_diff_ignore_lines': <bool>,    # identify if ignoring lines in diff is supported
                },
                'format': [list of supported configuration formats],
                'match': ['line', 'strict', 'exact', 'none'],
                'replace': ['line', 'block', 'config'],
            }
        :return: capability as json string
        """
        pass

    def commit(self, comment=None):
        """Commit configuration changes

        This method will perform the commit operation on a previously loaded
        candidate configuration that was loaded using `edit_config()`.  If
        there is a candidate configuration, it will be committed to the
        active configuration.  If there is not a candidate configuration, this
        method should just silently return.

        :return: None
        """
        return self._connection.method_not_found(
            "commit is not supported by network_os %s" %
            self._connection._play_context.network_os)

    def discard_changes(self):
        """Discard candidate configuration

        This method will discard the current candidate configuration if one
        is present.  If there is no candidate configuration currently loaded,
        then this method should just silently return

        :returns: None
        """
        return self._connection.method_not_found(
            "discard_changes is not supported by network_os %s" %
            self._connection._play_context.network_os)

    def copy_file(self,
                  source=None,
                  destination=None,
                  proto='scp',
                  timeout=30):
        """Copies file over scp/sftp to remote device

        :param source: Source file path
        :param destination: Destination file path on remote device
        :param proto: Protocol to be used for file transfer,
                      supported protocol: scp and sftp
        :param timeout: Specifies the wait time to receive response from
                        remote host before triggering timeout exception
        :return: None
        """
        ssh = self._connection.paramiko_conn._connect_uncached()
        if proto == 'scp':
            if not HAS_SCP:
                raise AnsibleError(
                    "Required library scp is not installed.  Please install it using `pip install scp`"
                )
            with SCPClient(ssh.get_transport(), socket_timeout=timeout) as scp:
                scp.put(source, destination)
        elif proto == 'sftp':
            with ssh.open_sftp() as sftp:
                sftp.put(source, destination)

    def get_file(self, source=None, destination=None, proto='scp', timeout=30):
        """Fetch file over scp/sftp from remote device
        :param source: Source file path
        :param destination: Destination file path
        :param proto: Protocol to be used for file transfer,
                      supported protocol: scp and sftp
        :param timeout: Specifies the wait time to receive response from
                        remote host before triggering timeout exception
        :return: None
        """
        """Fetch file over scp/sftp from remote device"""
        ssh = self._connection.paramiko_conn._connect_uncached()
        if proto == 'scp':
            if not HAS_SCP:
                raise AnsibleError(
                    "Required library scp is not installed.  Please install it using `pip install scp`"
                )
            with SCPClient(ssh.get_transport(), socket_timeout=timeout) as scp:
                scp.get(source, destination)
        elif proto == 'sftp':
            with ssh.open_sftp() as sftp:
                sftp.get(source, destination)
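
A short usage sketch of the logging switches above, assuming `cliconf` is an instance of a concrete subclass with a live connection:

# Responses are redacted in the history unless response logging is enabled.
cliconf.enable_response_logging()
cliconf.send_command('show version')
print(cliconf.get_history()[-1])   # (b'show version', <device output>)

cliconf.disable_response_logging()
cliconf.send_command('show running-config')
print(cliconf.get_history()[-1])   # ('*****', '*****')

cliconf.reset_history()            # start with a clean log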
Example #15
0
class F5BaseObject(with_metaclass(ABCMeta)):
    """Base abstract class for all F5 objects

    It represents an F5 resource configurable by Ansible.
    """
    def __init__(self, **kwargs):
        """Prepare the parameters needed by this module."""
        super(F5BaseObject, self).__init__()

        # Required params
        self._required_create_params = set()
        self._required_load_params = set()
        self._required_update_params = set()

        # Store and remove BIG-IP and Ansible params
        self._provider = {
            'f5_hostname': kwargs.pop('f5_hostname', None),
            'f5_username': kwargs.pop('f5_username', None),
            'f5_password': kwargs.pop('f5_password', None),
            'f5_port': kwargs.pop('f5_port', None),
            'f5_verify': kwargs.pop('f5_verify', None)
        }
        self._state = kwargs.pop('state', None)
        self._check_mode = kwargs.pop('check_mode', None)
        self._tr = kwargs.pop('tr', None)

        # Convert the params sent to the module from snake_case to camelCase
        self._params = change_dict_naming_convention(kwargs, snake_to_camel)

        # Set CRUD methods
        self._methods = {}
        self._set_crud_methods()

        # Translate conflicting params (e.g. 'state')
        if self._tr is not None:
            for k, v in iteritems(self._tr):
                kc = snake_to_camel(k)
                if kc in self._params:
                    self._params[snake_to_camel(v)] = self._params[kc]
                    del self._params[kc]

        # Call property objects
        for k in self._params:
            try:
                ks = camel_to_snake(k)
                if getattr(self, ks) is not None:
                    self._params[k] = getattr(self, ks)
            except AttributeError:
                pass

        # The object
        self._obj = None

    @abstractmethod
    def _set_crud_methods(self):
        """Set the CRUD methods for this object.

        Any class inheriting from F5BaseObject should implement and override this method.
        """
        pass

    def _check_create_params(self):
        """Params given to create should satisfy required params."""
        check = missing_required_params(self._required_create_params,
                                        self._params)
        if check:
            raise AnsibleF5Error("Missing required create params: %s" % check)

    def _check_load_params(self):
        """Params given to load should satisfy required params."""
        check = missing_required_params(self._required_load_params,
                                        self._params)
        if check:
            raise AnsibleF5Error("Missing required load params: %s" % check)

    def _check_update_params(self):
        """Params given to update should satisfy required params."""
        check = missing_required_params(self._required_update_params,
                                        self._params)
        if check:
            raise AnsibleF5Error("Missing required update params: %s" % check)

    @abstractmethod
    def _read(self):
        """Load an already configured object from the F5 system.

        Any class inheriting from F5BaseObject should implement and override this method.
        """
        pass

    def _update(self):
        """Update an object on the F5 system."""
        # Load the object
        self._obj = self._read()

        # Check params
        self._check_update_params()

        changed = False
        cparams = {}  # The params that have changed

        # Determine if some params have changed
        for key, new_val in iteritems(self._params):
            if new_val is not None:
                if hasattr(self._obj, key):
                    cur_val = convert(getattr(self._obj, key))
                    ddiff = DeepDiff(cur_val,
                                     new_val,
                                     ignore_order=True,
                                     exclude_paths={
                                         "root['nameReference']",
                                         "root['poolReference']"
                                     })
                    if ddiff:
                        cparams[key] = new_val
                else:
                    if new_val:
                        cparams[key] = new_val

        # If changed params, update the object
        if cparams:
            changed = True

            if self._check_mode:
                return changed

            if 'modify' in self._methods:
                self._obj.modify(**cparams)
            else:
                self._obj.update(**cparams)
            self._obj.refresh()

        return changed

    @abstractmethod
    def flush(self):
        """Send the buffered object to the F5 system.

        Any class inheriting from F5BaseObject should implement and override this method."""
        pass
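
A minimal sketch of a subclass wiring up the abstract hooks above; the SDK handle (`api`) and the LTM pool resource path are assumptions standing in for a real F5 SDK client, not verified API names:

class SketchPool(F5BaseObject):
    """Hypothetical pool resource; the SDK attribute names are illustrative."""

    def __init__(self, api, **kwargs):
        self._api = api                      # assumed SDK client; must exist before
        super(SketchPool, self).__init__(**kwargs)  # super(), which calls _set_crud_methods()

    def _set_crud_methods(self):
        pool = self._api.tm.ltm.pools.pool   # illustrative resource path
        self._methods = {'create': pool.create, 'read': pool.load, 'modify': pool.modify}

    def _read(self):
        self._check_load_params()
        return self._methods['read'](name=self._params['name'])

    def flush(self):
        return self._update()                # reuse the base class change detection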
Example #16
0
class Spec(with_metaclass(abc.ABCMeta, object)):
    """
    A source for variables that comprise a connection configuration.
    """

    @abc.abstractmethod
    def transport(self):
        """
        The name of the Ansible plug-in implementing the connection.
        """

    @abc.abstractmethod
    def inventory_name(self):
        """
        The name of the target being connected to as it appears in Ansible's
        inventory.
        """

    @abc.abstractmethod
    def remote_addr(self):
        """
        The network address of the target, or for container and other special
        targets, some other unique identifier.
        """

    @abc.abstractmethod
    def remote_user(self):
        """
        The username of the login account on the target.
        """

    @abc.abstractmethod
    def password(self):
        """
        The password of the login account on the target.
        """

    @abc.abstractmethod
    def become(self):
        """
        :data:`True` if privilege escalation should be active.
        """

    @abc.abstractmethod
    def become_method(self):
        """
        The name of the Ansible become method to use.
        """

    @abc.abstractmethod
    def become_user(self):
        """
        The username of the target account for become.
        """

    @abc.abstractmethod
    def become_pass(self):
        """
        The password of the target account for become.
        """

    @abc.abstractmethod
    def port(self):
        """
        The port of the login service on the target machine.
        """

    @abc.abstractmethod
    def python_path(self):
        """
        Path to the Python interpreter on the target machine.
        """

    @abc.abstractmethod
    def private_key_file(self):
        """
        Path to the SSH private key file to use to login.
        """

    @abc.abstractmethod
    def ssh_executable(self):
        """
        Path to the SSH executable.
        """

    @abc.abstractmethod
    def timeout(self):
        """
        The generic timeout for all connections.
        """

    @abc.abstractmethod
    def ansible_ssh_timeout(self):
        """
        The SSH-specific timeout for a connection.
        """

    @abc.abstractmethod
    def ssh_args(self):
        """
        The list of additional arguments that should be included in an SSH
        invocation.
        """

    @abc.abstractmethod
    def become_exe(self):
        """
        The path to the executable implementing the become method on the remote
        machine.
        """

    @abc.abstractmethod
    def sudo_args(self):
        """
        The list of additional arguments that should be included in a become
        invocation.
        """
        # TODO: split out into sudo_args/become_args.

    @abc.abstractmethod
    def mitogen_via(self):
        """
        The value of the mitogen_via= variable for this connection. Indicates
        the connection should be established via an intermediary.
        """

    @abc.abstractmethod
    def mitogen_kind(self):
        """
        The type of container to use with the "setns" transport.
        """

    @abc.abstractmethod
    def mitogen_mask_remote_name(self):
        """
        Specifies whether to set a fixed "remote_name" field. The remote_name
        is the suffix of `argv[0]` for remote interpreters. By default it
        includes identifying information from the local process, which may be
        undesirable in some circumstances.
        """

    @abc.abstractmethod
    def mitogen_buildah_path(self):
        """
        The path to the "buildah" program for the 'buildah' transport.
        """

    @abc.abstractmethod
    def mitogen_docker_path(self):
        """
        The path to the "docker" program for the 'docker' transport.
        """

    @abc.abstractmethod
    def mitogen_kubectl_path(self):
        """
        The path to the "kubectl" program for the 'docker' transport.
        """

    @abc.abstractmethod
    def mitogen_lxc_path(self):
        """
        The path to the "lxc" program for the 'lxd' transport.
        """

    @abc.abstractmethod
    def mitogen_lxc_attach_path(self):
        """
        The path to the "lxc-attach" program for the 'lxc' transport.
        """

    @abc.abstractmethod
    def mitogen_lxc_info_path(self):
        """
        The path to the "lxc-info" program for the 'lxc' transport.
        """

    @abc.abstractmethod
    def mitogen_machinectl_path(self):
        """
        The path to the "machinectl" program for the 'setns' transport.
        """

    @abc.abstractmethod
    def mitogen_ssh_keepalive_interval(self):
        """
        The SSH ServerAliveInterval.
        """

    @abc.abstractmethod
    def mitogen_ssh_keepalive_count(self):
        """
        The SSH ServerAliveCount.
        """

    @abc.abstractmethod
    def mitogen_ssh_debug_level(self):
        """
        The SSH debug level.
        """

    @abc.abstractmethod
    def mitogen_ssh_compression(self):
        """
        Whether SSH compression is enabled.
        """

    @abc.abstractmethod
    def extra_args(self):
        """
        Connection-specific arguments.
        """

    @abc.abstractmethod
    def ansible_doas_exe(self):
        """
Example #17
0
class NutanixClient(with_metaclass(Singleton, object)):
    def __init__(self, module):
        super(NutanixClient, self).__init__()
        self._module = module
        self._default_url = self._module.params.get('default_url')
        self._user_name = self._module.params.get('user_name')
        self._user_password = self._module.params.get('user_password')
        is_validate = self.validate_params(default_url=self._default_url,
                                           user_name=self._user_name,
                                           user_password=self._user_password)
        if not is_validate:
            module.fail_json(msg='undefined user config parameter')

    '''
    def raise_for_task_result(self, task_uuid):
        uri = '/tasks/{0}'.format(task_uuid)
        url = '{0}{1}'.format(self.default_url, uri)
        r = self._session.get(url)
        #r.raise_for_status()
        task = r.json()
        if task.get('meta_response', None) is not None:
            if task.get('meta_response').get('error_code') > 0:
                raise RequestException(task.get('meta_response').get('error_detail'))
    '''

    def ntnx_open_url(self, method='get', uri=None, data=None):
        try:
            r = open_url(url=self._default_url + uri,
                         method=method,
                         headers={'Content-Type': 'application/json'},
                         url_username=self._user_name,
                         url_password=self._user_password,
                         force_basic_auth=True,
                         data=json.dumps(data or {}))

            return json.loads(r.read())

        except HTTPError as e:
            self._module.fail_json(msg=str(e), exception=traceback.format_exc())

    def validate_params(self,
                        default_url=None,
                        user_name=None,
                        user_password=None):
        config = Configuration()
        is_validate = True

        if default_url is None:
            if os.environ.get('NUTANIX_DEFAULT_URL'):
                self._default_url = os.environ.get('NUTANIX_DEFAULT_URL')

            elif config.default_url:
                self._default_url = config.default_url

            else:
                is_validate = False

        if user_name is None:
            if os.environ.get('NUTANIX_USER_NAME'):
                self._user_name = os.environ.get('NUTANIX_USER_NAME')

            elif config.user_name:
                self._user_name = config.user_name

            else:
                is_validate = False

        if user_password is None:
            if os.environ.get('NUTANIX_USER_PASSWORD'):
                self._user_password = os.environ.get('NUTANIX_USER_PASSWORD')

            elif config.user_password:
                self._user_password = config.user_password

            else:
                is_validate = False

        return is_validate
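
A brief usage sketch of the authenticated helper above; the `/vms/` endpoint and the `entities` response key are assumptions for illustration, not verified Prism API details:

# Hypothetical call: list VMs through the client's authenticated helper.
client = NutanixClient(module)
response = client.ntnx_open_url(method='get', uri='/vms/')
vm_names = [vm.get('name') for vm in response.get('entities', [])]  # assumed response shape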
Example #18
0
class CLI(with_metaclass(ABCMeta, object)):
    ''' code behind bin/ansible* programs '''

    _ITALIC = re.compile(r"I\(([^)]+)\)")
    _BOLD = re.compile(r"B\(([^)]+)\)")
    _MODULE = re.compile(r"M\(([^)]+)\)")
    _URL = re.compile(r"U\(([^)]+)\)")
    _CONST = re.compile(r"C\(([^)]+)\)")

    PAGER = 'less'

    # -F (quit-if-one-screen) -R (allow raw ansi control chars)
    # -S (chop long lines) -X (disable termcap init and de-init)
    LESS_OPTS = 'FRSX'
    SKIP_INVENTORY_DEFAULTS = False

    def __init__(self, args, callback=None):
        """
        Base init method for all command line programs
        """

        if not args:
            raise ValueError('A non-empty list for args is required')

        self.args = args
        self.parser = None
        self.callback = callback

        if C.DEVEL_WARNING and __version__.endswith('dev0'):
            display.warning(
                'You are running the development version of Ansible. You should only run Ansible from "devel" if '
                'you are modifying the Ansible engine, or trying out features under development. This is a rapidly '
                'changing source of code and can become unstable at any point.'
            )

    @abstractmethod
    def run(self):
        """Run the ansible command

        Subclasses must implement this method.  It does the actual work of
        running an Ansible command.
        """
        self.parse()

        display.vv(to_text(opt_help.version(self.parser.prog)))

        if C.CONFIG_FILE:
            display.v(u"Using %s as config file" % to_text(C.CONFIG_FILE))
        else:
            display.v(u"No config file found; using defaults")

        # warn about deprecated config options
        for deprecated in C.config.DEPRECATED:
            name = deprecated[0]
            why = deprecated[1]['why']
            if 'alternatives' in deprecated[1]:
                alt = ', use %s instead' % deprecated[1]['alternatives']
            else:
                alt = ''
            ver = deprecated[1]['version']
            display.deprecated("%s option, %s %s" % (name, why, alt),
                               version=ver)

    @staticmethod
    def split_vault_id(vault_id):
        # return (before_@, after_@)
        # if no @, return whole string as after_
        if '@' not in vault_id:
            return (None, vault_id)

        parts = vault_id.split('@', 1)
        ret = tuple(parts)
        return ret

    @staticmethod
    def build_vault_ids(vault_ids,
                        vault_password_files=None,
                        ask_vault_pass=None,
                        create_new_password=None,
                        auto_prompt=True):
        vault_password_files = vault_password_files or []
        vault_ids = vault_ids or []

        # convert vault_password_files into vault_ids slugs
        for password_file in vault_password_files:
            id_slug = u'%s@%s' % (C.DEFAULT_VAULT_IDENTITY, password_file)

            # note this makes --vault-id higher precedence than --vault-password-file
            # if we want to intertwingle them in order probably need a cli callback to populate vault_ids
            # used by --vault-id and --vault-password-file
            vault_ids.append(id_slug)

        # if an action needs an encrypt password (create_new_password=True) and we don't
        # have other secrets setup, then automatically add a password prompt as well.
        # prompts can't/shouldn't work without a tty, so don't add prompt secrets
        if ask_vault_pass or (not vault_ids and auto_prompt):

            id_slug = u'%s@%s' % (C.DEFAULT_VAULT_IDENTITY,
                                  u'prompt_ask_vault_pass')
            vault_ids.append(id_slug)

        return vault_ids

    # TODO: remove the now unused args
    @staticmethod
    def setup_vault_secrets(loader,
                            vault_ids,
                            vault_password_files=None,
                            ask_vault_pass=None,
                            create_new_password=False,
                            auto_prompt=True):
        # list of tuples
        vault_secrets = []

        # Depending on the vault_id value (including how --ask-vault-pass / --vault-password-file create a vault_id)
        # we need to show different prompts. This is for compat with older Towers that expect a
        # certain vault password prompt format, so the 'prompt_ask_vault_pass' vault_id gets the old format.
        prompt_formats = {}

        # If there are configured default vault identities, they are considered 'first'
        # so we prepend them to vault_ids (from cli) here

        vault_password_files = vault_password_files or []
        if C.DEFAULT_VAULT_PASSWORD_FILE:
            vault_password_files.append(C.DEFAULT_VAULT_PASSWORD_FILE)

        if create_new_password:
            prompt_formats['prompt'] = [
                'New vault password (%(vault_id)s): ',
                'Confirm new vault password (%(vault_id)s): '
            ]
            # 2.3 format prompts for --ask-vault-pass
            prompt_formats['prompt_ask_vault_pass'] = [
                'New Vault password: ',
                'Confirm New Vault password: '
            ]
        else:
            prompt_formats['prompt'] = ['Vault password (%(vault_id)s): ']
            # The format when we use just --ask-vault-pass needs to match 'Vault password:\s*?$'
            prompt_formats['prompt_ask_vault_pass'] = ['Vault password: ']

        vault_ids = CLI.build_vault_ids(vault_ids,
                                        vault_password_files,
                                        ask_vault_pass=ask_vault_pass,
                                        create_new_password=create_new_password,
                                        auto_prompt=auto_prompt)

        for vault_id_slug in vault_ids:
            vault_id_name, vault_id_value = CLI.split_vault_id(vault_id_slug)
            if vault_id_value in ['prompt', 'prompt_ask_vault_pass']:

                # --vault-id some_name@prompt_ask_vault_pass --vault-id other_name@prompt_ask_vault_pass will be a little
                # confusing since it will use the old format without the vault id in the prompt
                built_vault_id = vault_id_name or C.DEFAULT_VAULT_IDENTITY

                # choose the prompt based on --vault-id=prompt or --ask-vault-pass. --ask-vault-pass
                # always gets the old format for Tower compatibility.
                # ie, we used --ask-vault-pass, so we need to use the old vault password prompt
                # format since Tower needs to match on that format.
                prompted_vault_secret = PromptVaultSecret(
                    prompt_formats=prompt_formats[vault_id_value],
                    vault_id=built_vault_id)

                # an empty or invalid password from the prompt will warn and continue to the next
                # without erroring globally
                try:
                    prompted_vault_secret.load()
                except AnsibleError as exc:
                    display.warning('Error in vault password prompt (%s): %s' %
                                    (vault_id_name, exc))
                    raise

                vault_secrets.append((built_vault_id, prompted_vault_secret))

                # update loader with new secrets incrementally, so we can load a vault password
                # that is encrypted with a vault secret provided earlier
                loader.set_vault_secrets(vault_secrets)
                continue

            # assuming anything else is a password file
            display.vvvvv('Reading vault password file: %s' % vault_id_value)
            # read vault_pass from a file
            file_vault_secret = get_file_vault_secret(filename=vault_id_value,
                                                      vault_id=vault_id_name,
                                                      loader=loader)

            # an invalid password file will error globally
            try:
                file_vault_secret.load()
            except AnsibleError as exc:
                display.warning(
                    'Error in vault password file loading (%s): %s' %
                    (vault_id_name, to_text(exc)))
                raise

            if vault_id_name:
                vault_secrets.append((vault_id_name, file_vault_secret))
            else:
                vault_secrets.append(
                    (C.DEFAULT_VAULT_IDENTITY, file_vault_secret))

            # update loader with as-yet-known vault secrets
            loader.set_vault_secrets(vault_secrets)

        return vault_secrets

    @staticmethod
    def ask_passwords():
        ''' prompt for connection and become passwords if needed '''

        op = context.CLIARGS
        sshpass = None
        becomepass = None
        become_prompt = ''

        become_prompt_method = "BECOME" if C.AGNOSTIC_BECOME_PROMPT else op[
            'become_method'].upper()

        try:
            if op['ask_pass']:
                sshpass = getpass.getpass(prompt="SSH password: ")
                become_prompt = "%s password[defaults to SSH password]: " % become_prompt_method
            else:
                become_prompt = "%s password: " % become_prompt_method

            if op['become_ask_pass']:
                becomepass = getpass.getpass(prompt=become_prompt)
                if op['ask_pass'] and becomepass == '':
                    becomepass = sshpass
        except EOFError:
            pass

        return (sshpass, becomepass)

    def validate_conflicts(self, op, runas_opts=False, fork_opts=False):
        ''' check for conflicting options '''
        if fork_opts and op.forks < 1:
            self.parser.error("The number of processes (--forks) must be >= 1")

        return op

    @abstractmethod
    def init_parser(self, usage="", desc=None, epilog=None):
        """
        Create an options parser for most ansible scripts

        Subclasses need to implement this method.  They will usually call the base class's
        init_parser to create a basic version and then add their own options on top of that.

        An implementation will look something like this::

            def init_parser(self):
                super(MyCLI, self).init_parser(usage="My Ansible CLI", inventory_opts=True)
                ansible.arguments.option_helpers.add_runas_options(self.parser)
                self.parser.add_argument('--my-option', dest='my_option', action='store')
        """
        self.parser = opt_help.create_base_parser(
            os.path.basename(self.args[0]),
            usage=usage,
            desc=desc,
            epilog=epilog,
        )

    @abstractmethod
    def post_process_args(self, options):
        """Process the command line args

        Subclasses need to implement this method.  This method validates and transforms the command
        line arguments.  It can be used to check whether conflicting values were given, whether filenames
        exist, etc.

        An implementation will look something like this::

            def post_process_args(self, options):
                options = super(MyCLI, self).post_process_args(options)
                if options.addition and options.subtraction:
                    raise AnsibleOptionsError('Only one of --addition and --subtraction can be specified')
                if isinstance(options.listofhosts, string_types):
                    options.listofhosts = options.listofhosts.split(',')
                return options
        """

        # process tags
        if hasattr(options, 'tags') and not options.tags:
            # optparse defaults does not do what's expected
            options.tags = ['all']
        if hasattr(options, 'tags') and options.tags:
            tags = set()
            for tag_set in options.tags:
                for tag in tag_set.split(u','):
                    tags.add(tag.strip())
            options.tags = list(tags)

        # process skip_tags
        if hasattr(options, 'skip_tags') and options.skip_tags:
            skip_tags = set()
            for tag_set in options.skip_tags:
                for tag in tag_set.split(u','):
                    skip_tags.add(tag.strip())
            options.skip_tags = list(skip_tags)

        # process inventory options except for CLIs that require their own processing
        if hasattr(options, 'inventory') and not self.SKIP_INVENTORY_DEFAULTS:

            if options.inventory:

                # should always be list
                if isinstance(options.inventory, string_types):
                    options.inventory = [options.inventory]

                # Ensure full paths when needed
                options.inventory = [
                    unfrackpath(opt, follow=False) if ',' not in opt else opt
                    for opt in options.inventory
                ]
            else:
                options.inventory = C.DEFAULT_HOST_LIST

        # Dup args set on the root parser and sub parsers results in the root parser ignoring the args. e.g. doing
        # 'ansible-galaxy -vvv init' has no verbosity set but 'ansible-galaxy init -vvv' sets a level of 3. To preserve
        # back compat with pre-argparse changes we manually scan and set verbosity based on the argv values.
        if self.parser.prog in ['ansible-galaxy', 'ansible-vault'
                                ] and not options.verbosity:
            verbosity_arg = next(
                iter([arg for arg in self.args if arg.startswith('-v')]), None)
            if verbosity_arg:
                display.deprecated(
                    "Setting verbosity before the arg sub command is deprecated, set the verbosity "
                    "after the sub command", "2.13")
                options.verbosity = verbosity_arg.count('v')

        return options

    def parse(self):
        """Parse the command line args

        This method parses the command line arguments.  It uses the parser
        stored in the self.parser attribute and saves the args and options in
        context.CLIARGS.

        Subclasses need to implement two helper methods, init_parser() and post_process_args() which
        are called from this function before and after parsing the arguments.
        """
        self.init_parser()

        if HAS_ARGCOMPLETE:
            argcomplete.autocomplete(self.parser)

        options = self.parser.parse_args(self.args[1:])
        options = self.post_process_args(options)
        context._init_global_context(options)

    @staticmethod
    def version_info(gitinfo=False):
        ''' return full ansible version info '''
        if gitinfo:
            # expensive call, use with care
            ansible_version_string = opt_help.version()
        else:
            ansible_version_string = __version__
        ansible_version = ansible_version_string.split()[0]
        ansible_versions = ansible_version.split('.')
        for counter in range(len(ansible_versions)):
            if ansible_versions[counter] == "":
                ansible_versions[counter] = 0
            try:
                ansible_versions[counter] = int(ansible_versions[counter])
            except Exception:
                pass
        if len(ansible_versions) < 3:
            for counter in range(len(ansible_versions), 3):
                ansible_versions.append(0)
        return {
            'string': ansible_version_string.strip(),
            'full': ansible_version,
            'major': ansible_versions[0],
            'minor': ansible_versions[1],
            'revision': ansible_versions[2]
        }

    @staticmethod
    def pager(text):
        ''' find reasonable way to display text '''
        # this is a much simpler form of what is in pydoc.py
        if not sys.stdout.isatty():
            display.display(text, screen_only=True)
        elif 'PAGER' in os.environ:
            if sys.platform == 'win32':
                display.display(text, screen_only=True)
            else:
                CLI.pager_pipe(text, os.environ['PAGER'])
        else:
            p = subprocess.Popen('less --version',
                                 shell=True,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
            p.communicate()
            if p.returncode == 0:
                CLI.pager_pipe(text, 'less')
            else:
                display.display(text, screen_only=True)

    @staticmethod
    def pager_pipe(text, cmd):
        ''' pipe text through a pager '''
        if 'LESS' not in os.environ:
            os.environ['LESS'] = CLI.LESS_OPTS
        try:
            cmd = subprocess.Popen(cmd,
                                   shell=True,
                                   stdin=subprocess.PIPE,
                                   stdout=sys.stdout)
            cmd.communicate(input=to_bytes(text))
        except IOError:
            pass
        except KeyboardInterrupt:
            pass

    @classmethod
    def tty_ify(cls, text):

        t = cls._ITALIC.sub("`" + r"\1" + "'", text)  # I(word) => `word'
        t = cls._BOLD.sub("*" + r"\1" + "*", t)  # B(word) => *word*
        t = cls._MODULE.sub("[" + r"\1" + "]", t)  # M(word) => [word]
        t = cls._URL.sub(r"\1", t)  # U(word) => word
        t = cls._CONST.sub("`" + r"\1" + "'", t)  # C(word) => `word'

        return t

    @staticmethod
    def _play_prereqs():
        options = context.CLIARGS

        # all needs loader
        loader = DataLoader()

        basedir = options.get('basedir', False)
        if basedir:
            loader.set_basedir(basedir)
            add_all_plugin_dirs(basedir)
            set_collection_playbook_paths(basedir)
            default_collection = get_collection_name_from_path(basedir)
            if default_collection:
                display.warning(u'running with default collection {0}'.format(
                    default_collection))
                AnsibleCollectionLoader().set_default_collection(
                    default_collection)

        vault_ids = list(options['vault_ids'])
        default_vault_ids = C.DEFAULT_VAULT_IDENTITY_LIST
        vault_ids = default_vault_ids + vault_ids

        vault_secrets = CLI.setup_vault_secrets(
            loader,
            vault_ids=vault_ids,
            vault_password_files=list(options['vault_password_files']),
            ask_vault_pass=options['ask_vault_pass'],
            auto_prompt=False)
        loader.set_vault_secrets(vault_secrets)

        # create the inventory, and filter it based on the subset specified (if any)
        inventory = InventoryManager(loader=loader,
                                     sources=options['inventory'])

        # create the variable manager, which will be shared throughout
        # the code, ensuring a consistent view of global variables
        variable_manager = VariableManager(
            loader=loader,
            inventory=inventory,
            version_info=CLI.version_info(gitinfo=False))

        return loader, inventory, variable_manager

    @staticmethod
    def get_host_list(inventory, subset, pattern='all'):

        no_hosts = False
        if len(inventory.list_hosts()) == 0:
            # Empty inventory
            if C.LOCALHOST_WARNING and pattern not in C.LOCALHOST:
                display.warning(
                    "provided hosts list is empty, only localhost is available. Note that the implicit localhost does not match 'all'"
                )
            no_hosts = True

        inventory.subset(subset)

        hosts = inventory.list_hosts(pattern)
        if not hosts and no_hosts is False:
            raise AnsibleError(
                "Specified hosts and/or --limit does not match any hosts")

        return hosts
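
A toy subclass showing how the three hooks above fit together; the option name, prog name, and output are illustrative, not from any real ansible command:

class SketchCLI(CLI):
    """Hypothetical CLI wiring init_parser, post_process_args and run."""

    def init_parser(self):
        super(SketchCLI, self).init_parser(usage='usage: sketch [options]')
        self.parser.add_argument('--ping', dest='ping', action='store_true')

    def post_process_args(self, options):
        return super(SketchCLI, self).post_process_args(options)

    def run(self):
        super(SketchCLI, self).run()   # parses argv into context.CLIARGS
        if context.CLIARGS['ping']:
            display.display('pong')

# SketchCLI(['sketch', '--ping']).run()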
Example #19
0
class YumDnf(with_metaclass(ABCMeta, object)):
    """
    Abstract class that handles the population of instance variables that should
    be identical between both YUM and DNF modules because of the feature parity
    and shared argument spec
    """
    def __init__(self, module):

        self.module = module

        self.allow_downgrade = self.module.params['allow_downgrade']
        self.autoremove = self.module.params['autoremove']
        self.bugfix = self.module.params['bugfix']
        self.conf_file = self.module.params['conf_file']
        self.disable_excludes = self.module.params['disable_excludes']
        self.disable_gpg_check = self.module.params['disable_gpg_check']
        self.disable_plugin = self.module.params['disable_plugin']
        self.disablerepo = self.module.params.get('disablerepo', [])
        self.download_only = self.module.params['download_only']
        self.enable_plugin = self.module.params['enable_plugin']
        self.enablerepo = self.module.params.get('enablerepo', [])
        self.exclude = self.module.params['exclude']
        self.installroot = self.module.params['installroot']
        self.install_repoquery = self.module.params['install_repoquery']
        self.list = self.module.params['list']
        self.names = [p.strip() for p in self.module.params['name']]
        self.releasever = self.module.params['releasever']
        self.security = self.module.params['security']
        self.skip_broken = self.module.params['skip_broken']
        self.state = self.module.params['state']
        self.update_only = self.module.params['update_only']
        self.update_cache = self.module.params['update_cache']
        self.validate_certs = self.module.params['validate_certs']
        self.lock_poll = self.module.params['lock_poll']
        self.lock_timeout = self.module.params['lock_timeout']

        # It's possible someone passed a comma separated string since it used
        # to be a string type, so we should handle that
        self.names = self.listify_comma_sep_strings_in_list(self.names)
        self.disablerepo = self.listify_comma_sep_strings_in_list(
            self.disablerepo)
        self.enablerepo = self.listify_comma_sep_strings_in_list(
            self.enablerepo)
        self.exclude = self.listify_comma_sep_strings_in_list(self.exclude)

        # This should really be redefined by both the yum and dnf module but a
        # default isn't a bad idea
        self.lockfile = '/var/run/yum.pid'

    def wait_for_lock(self):
        '''Poll until the lockfile is released, waiting lock_poll seconds between up to lock_timeout checks'''
        if (os.path.isfile(self.lockfile)
                or glob.glob(self.lockfile)) and self.lock_timeout > 0:
            for iteration in range(0, self.lock_timeout):
                time.sleep(self.lock_poll)
                if not os.path.isfile(self.lockfile) or not glob.glob(
                        self.lockfile):
                    break
            if os.path.isfile(self.lockfile) or glob.glob(self.lockfile):
                self.module.fail_json(msg='{0} lockfile was not released'.
                                      format(self.pkg_mgr_name))

    def listify_comma_sep_strings_in_list(self, some_list):
        """
        Accept a list of strings, find any entries that are themselves
        comma-separated, remove them from the list, and append their
        split elements to the end of the original list
        """
        new_list = []
        remove_from_original_list = []
        for element in some_list:
            if ',' in element:
                remove_from_original_list.append(element)
                new_list.extend([e.strip() for e in element.split(',')])

        for element in remove_from_original_list:
            some_list.remove(element)

        some_list.extend(new_list)

        if some_list == [""]:
            return []

        return some_list

    @abstractmethod
    def run(self):
        raise NotImplementedError
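
For illustration, how a concrete module might use the lock helper above before mutating package state; the class is hypothetical, though the dnf lockfile glob matches what the real dnf module uses:

class SketchDnf(YumDnf):
    """Hypothetical subclass; pkg_mgr_name and lockfile are illustrative."""
    pkg_mgr_name = 'dnf'

    def run(self):
        self.lockfile = '/var/cache/dnf/*_lock.pid'  # glob pattern, hence glob.glob above
        self.wait_for_lock()                         # block until the lock is released
        # ... proceed to install/remove/update packages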
Example #20
0
class CliconfBase(with_metaclass(ABCMeta, object)):
    """
    A base class for implementing cli connections

    .. note:: Unlike most of Ansible, nearly all strings in
        :class:`CliconfBase` plugins are byte strings.  This is because of
        how close to the underlying platform these plugins operate.  Remember
        to mark literal strings as byte string (``b"string"``) and to use
        :func:`~ansible.module_utils._text.to_bytes` and
        :func:`~ansible.module_utils._text.to_text` to avoid unexpected
        problems.

    List of supported rpc's:
        :get_config: Retrieves the specified configuration from the device
        :edit_config: Loads the specified commands into the remote device
        :get: Execute specified command on remote device
        :get_capabilities: Retrieves device information and supported rpc methods
        :commit: Load configuration from candidate to running
        :discard_changes: Discard changes to candidate datastore

    Note: List of supported rpc's for remote device can be extracted from
          output of get_capabilities()

    :returns: Returns output received from remote device as byte string

            Usage:
            from ansible.module_utils.connection import Connection

            conn = Connection()
            conn.get('show lldp neighbors detail')
            conn.get_config('running')
            conn.edit_config(['hostname test', 'netconf ssh'])
    """

    def __init__(self, connection):
        self._connection = connection

    def _alarm_handler(self, signum, frame):
        """Alarm handler raised in case of command timeout """
        display.display('closing shell due to command timeout (%s seconds).' % self._connection._play_context.timeout, log_only=True)
        self.close()

    def send_command(self, command, prompt=None, answer=None, sendonly=False, newline=True, prompt_retry_check=False):
        """Executes a cli command and returns the results
        This method will execute the CLI command on the connection and return
        the results to the caller.  The command output will be returned as a
        string
        """
        kwargs = {'command': to_bytes(command), 'sendonly': sendonly,
                  'newline': newline, 'prompt_retry_check': prompt_retry_check}
        if prompt is not None:
            kwargs['prompt'] = to_bytes(prompt)
        if answer is not None:
            kwargs['answer'] = to_bytes(answer)

        resp = self._connection.send(**kwargs)
        return resp

    def get_base_rpc(self):
        """Returns list of base rpc method supported by remote device"""
        return ['get_config', 'edit_config', 'get_capabilities', 'get']

    @abstractmethod
    def get_config(self, source='running', format='text'):
        """Retrieves the specified configuration from the device
        This method will retrieve the configuration specified by source and
        return it to the caller as a string.  Subsequent calls to this method
        will retrieve a new configuration from the device
        :args:
            arg[0] source: Datastore from which configuration should be retrieved eg: running/candidate/startup. (optional)
                           default is running.
            arg[1] format: Output format in which configuration is retrieved
                           Note: Specified datastore should be supported by remote device.
        :kwargs:
          Keywords supported
            :command: the command string to execute
            :source: Datastore from which configuration should be retrieved
            :format: Output format in which configuration is retrieved
        :returns: Returns output received from remote device as byte string
        """
        pass

    @abstractmethod
    def edit_config(self, commands=None):
        """Loads the specified commands into the remote device
        This method will load the commands into the remote device.  This
        method will make sure the device is in the proper context before
        sending the commands (e.g. config mode).
        :args:
            arg[0] command: List of configuration commands
        :kwargs:
          Keywords supported
            :command: the command string to execute
        :returns: Returns output received from remote device as byte string
        """
        pass

    @abstractmethod
    def get(self, command=None, prompt=None, answer=None, sendonly=False, newline=True):
        """Execute specified command on remote device
        This method will retrieve the specified data and
        return it to the caller as a string.
        :args:
             command: command in string format to be executed on the remote device
             prompt: the expected prompt generated by executing the command.
                            This can be a string or a list of strings (optional)
             answer: the string to respond to the prompt with (optional)
             sendonly: bool to disable waiting for a response, default is False (optional)
        :returns: Returns output received from remote device as byte string
        """
        pass

    @abstractmethod
    def get_capabilities(self):
        """Retrieves device information and supported
        rpc methods by device platform and return result
        as a string
        :returns: Returns output received from remote device as byte string
        """
        pass

    def commit(self, comment=None):
        """Commit configuration changes"""
        return self._connection.method_not_found("commit is not supported by network_os %s" % self._play_context.network_os)

    def discard_changes(self):
        "Discard changes in candidate datastore"
        return self._connection.method_not_found("discard_changes is not supported by network_os %s" % self._play_context.network_os)

    def copy_file(self, source=None, destination=None, proto='scp'):
        """Copies file over scp/sftp to remote device"""
        ssh = self._connection.paramiko_conn._connect_uncached()
        if proto == 'scp':
            if not HAS_SCP:
                self._connection.internal_error("Required library scp is not installed.  Please install it using `pip install scp`")
            with SCPClient(ssh.get_transport()) as scp:
                scp.put(source, destination)
        elif proto == 'sftp':
            with ssh.open_sftp() as sftp:
                sftp.put(source, destination)

    def get_file(self, source=None, destination=None, proto='scp'):
        """Fetch file over scp/sftp from remote device"""
        ssh = self._connection.paramiko_conn._connect_uncached()
        if proto == 'scp':
            if not HAS_SCP:
                self._connection.internal_error("Required library scp is not installed.  Please install it using `pip install scp`")
            with SCPClient(ssh.get_transport()) as scp:
                scp.get(source, destination)
        elif proto == 'sftp':
            with ssh.open_sftp() as sftp:
                sftp.get(source, destination)
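
A minimal sketch of what a platform plugin built on this base might look like. Everything below is illustrative: the class name, command strings, and platform behaviour are assumptions, not taken from any real plugin.

import json

class Cliconf(CliconfBase):
    """Hypothetical cliconf plugin for a generic IOS-like CLI."""

    def get_config(self, source='running', format='text'):
        if source not in ('running', 'startup'):
            raise ValueError("fetching configuration from %s is not supported" % source)
        cmd = 'show running-config' if source == 'running' else 'show startup-config'
        return self.send_command(cmd)

    def edit_config(self, commands=None):
        # Enter config mode, replay the commands, then leave it again.
        for cmd in ['configure terminal'] + (commands or []) + ['end']:
            self.send_command(cmd)

    def get(self, command=None, prompt=None, answer=None, sendonly=False, newline=True):
        return self.send_command(command, prompt=prompt, answer=answer,
                                 sendonly=sendonly, newline=newline)

    def get_capabilities(self):
        # send_command() already byte-encodes commands, so plain str is fine above.
        return json.dumps({'rpc': self.get_base_rpc(), 'network_api': 'cliconf'})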
Example #21
class ModuleEntity(with_metaclass(ABCMeta, object)):
    """
    A soft constraint imposed on subclasses is that all their member variables are assigned
    from identically-named constructor parameters, and that the constructor accepts
    an extra "merge_modes" parameter.
    """
    PROPERTY_PLACEHOLDER = "generated-by-aws"

    def __init__(self, merge_modes):
        """
        :param merge_modes: A mapping of property names to merge modes, used to determine whether
                            the parameters' values override or merge with existing values.
        :type merge_modes: typing.Dict[str, str]
        """
        super(ModuleEntity, self).__init__()
        self._merge_modes = merge_modes

    def __eq__(self, other):
        """Performs a per-member comparison."""
        if type(self) != type(other) or other is None:
            return False

        var_names = vars(self).keys()
        left_values = [getattr(self, v) for v in var_names]
        right_values = [getattr(other, v) for v in var_names]
        for left, right in zip(left_values, right_values):
            if left != right:
                return False
        return True

    def __ne__(self, other):
        return not self.__eq__(other)

    @classmethod
    def _numbered_comparison_builtins(cls, us, them):
        """Transforms cmp(us, them) into (-1, 0, 1)"""
        if us == them:
            return 0
        return (int(us > them) * 2) - 1

    @classmethod
    def _comparison_helper(cls, us, them):
        """
        Rich comparison:
            - -1 -> us < them
            - 0 -> us == them
            - 1 -< us > them
        """
        # nulls are less than everything, except other nulls, which are equal
        if us is None and them is None:
            return 0
        elif us is None and them is not None:
            return -1
        elif us is not None and them is None:
            return 1

        # assertion: us and them are not None
        if isinstance(us, list):
            if len(us) != len(them):
                return cls._numbered_comparison_builtins(len(us), len(them))
            for left, right in zip(us, them):
                cmp = cls._numbered_comparison_builtins(left, right)
                if cmp != 0:
                    return cmp

        elif isinstance(us, dict):
            if len(us) != len(them):
                return cls._numbered_comparison_builtins(len(us), len(them))
            us_keys = sorted(us.keys())
            them_keys = sorted(them.keys())
            for left_key, right_key in zip(us_keys, them_keys):
                if left_key != right_key:
                    return cls._numbered_comparison_builtins(
                        left_key, right_key)
                left = us[left_key]
                right = them[right_key]
                cmp = cls._numbered_comparison_builtins(left, right)
                if cmp != 0:
                    return cmp

        elif isinstance(us, ModuleEntity):
            member_names = sorted(vars(us).keys())
            for member in member_names:
                left = getattr(us, member)
                right = getattr(them, member)
                comparison = cls._comparison_helper(left, right)
                if comparison != 0:
                    return comparison

        # includes string, int, and everything else
        else:
            if us != them:
                cmp = cls._numbered_comparison_builtins(us, them)
                if cmp != 0:
                    return cmp
        return 0

    def __lt__(self, other):
        """Maintains an internally-consistent sorting order.

        The sort order computation priority is based on instance member names, ascending.
        Each value is then evaluated until a difference is found.
        Types of member property values are handled as such:
          - nulls are less than everything, except nulls.
          - strings and ints are compared directly
          - lists are compared by size, then sequentially by member if sizes equal
          - dicts are compared with the same rules as ModuleEntity instances,
            but instead of member properties we have keys and their values.
          - instances of ModuleEntity are compared by member properties, then recursively
          - other types are optimistically attempted to be compared with their __lt__ methods

        With this implemented, instances are sortable using python's standard sorting mechanisms.
        functools.total_ordering, as a class decorator, implements all other comparison operations
        for free. It also requires __eq__ to be implemented.

        :type other: ModuleEntity
        :rtype: bool
        """
        if other is None:
            return False
        if type(self) != type(other):
            raise AssertionError(
                "Comparing instances of different classes is not allowed: "
                "{0} < {1}".format(type(self), type(other)))
        return self._comparison_helper(self, other) < 0

    def __str__(self):
        return "{0}({1})".format(
            self.__class__.__name__, ", ".join("{0}={1}".format(k, str(v))
                                               for k, v in vars(self).items()))

    def __repr__(self):
        return str(self)

    @classmethod
    @abstractmethod
    def from_boto_dict(cls, boto_dict):
        """Build a new entity from a boto metadata dictionary."""

    @classmethod
    @abstractmethod
    def from_module_params(cls, module_params):
        """Build a new entity from module parameters."""

    @abstractmethod
    def to_module_return(self):
        """Transform into the module specification."""

    @classmethod
    @abstractmethod
    def properties_modify_prohibited(cls):
        """Properties of this entity that must not be modified.

        Examples are attributes that cannot be changed, such as the description of a security group.

        :rtype: typing.Set[str]
        """

    @classmethod
    @abstractmethod
    def properties_modify_noop(cls):
        """Properties of this entity that can not be modified.

        Examples are values computed by AWS such the primary identifier or a MAC address.

        :rtype: typing.Set[str]
        """

    @abstractmethod
    def _fill_placeholders(self):
        """Fill variables that are computed remotely with placeholders or presumable defaults."""
        pass

    @classmethod
    def _get_common_vars(cls, left, right, include_private=False):
        """Return a list of common variables, taking into account ModuleEntityUnit.

        :type left: ModuleEntity
        :type right: ModuleEntity
        :rtype: typing.List[str]
        """
        if isinstance(left, ModuleEntityUnit):
            all_vars = vars(right)
        else:
            all_vars = vars(left)

        if include_private:
            return all_vars.keys()
        return [v for v in all_vars.keys() if not v.startswith("_")]

    def build_desired_state_with_params(self, params_entity):
        """Build the final requested state.

        Builds the state from this object (the initial state) and the modifications (parameters).
        Values that are None are ignored as updates.

        :type params_entity: ModuleEntity
        :return: a copy of this entity with the requested modifications.
        :rtype: ModuleEntity
        """
        nonprivate_vars = ModuleEntity._get_common_vars(self, params_entity)
        constructor_args = {"merge_modes": {}}
        for v in nonprivate_vars:
            value_before = getattr(self, v, None)
            value_after = getattr(params_entity, v, None)

            if value_before is None:
                # no other choice
                constructor_args[v] = value_after
            elif value_after is None:
                # don't update; clearing a value is expressed with an empty container, not None
                constructor_args[v] = value_before
            elif isinstance(value_after, ModuleEntity):
                constructor_args[
                    v] = value_before.build_desired_state_with_params(
                        value_after)
            elif isinstance(value_after, list):
                if params_entity._merge_modes[
                        v] == ContainerMergeMode.OVERRIDE:
                    constructor_args[v] = value_after
                else:
                    new_elements = set(value_before)
                    new_elements.update(set(value_after))
                    constructor_args[v] = list(new_elements)
            elif isinstance(value_after, dict):
                if params_entity._merge_modes[
                        v] == ContainerMergeMode.OVERRIDE:
                    constructor_args[v] = value_after
                else:
                    new_dict = copy.deepcopy(value_before)
                    new_dict.update(value_after)
                    constructor_args[v] = new_dict
            else:
                constructor_args[v] = value_after

        if isinstance(self, ModuleEntityUnit):
            constructor = params_entity.__class__
        else:
            constructor = self.__class__
        return constructor(**constructor_args)

    @classmethod
    def _attributes_differ(cls, us, them, strict_list_order):
        # special processing for special cases
        if us is not None and them is not None:
            if isinstance(us,
                          ModuleEntity) and us.differing_properties_to(them):
                return True
            elif isinstance(us, list):
                if not strict_list_order:
                    return set(us) != set(them)
        return us != them

    def differing_properties_to(self,
                                other,
                                return_noops=False,
                                strict_list_order=False):
        """Returns properties that differ between this and other.

        Non-strict list ordering only works for items that are hashable.

        :type other: ModuleEntity
        :param return_noops: if False, this does not return properties with modifications
                             specified as noops in self.properties_modify_noop().
        :type return_noops: bool
        :param strict_list_order: if True, lists are compared as ordered,
                                  otherwise order does not matter
        :type strict_list_order: bool
        :rtype: typing.Set[str]
        """
        differing = set()
        nonprivate_vars = ModuleEntity._get_common_vars(self, other)
        for prop in nonprivate_vars:
            us = getattr(self, prop, None)
            them = getattr(other, prop, None)
            if ModuleEntity._attributes_differ(us, them, strict_list_order):
                differing.add(prop)

        if return_noops:
            return differing
        else:
            return differing - self.properties_modify_noop()

    def deepcopy(self):
        """
        :rtype: ModuleEntity
        """
        all_vars = ModuleEntity._get_common_vars(self, self)
        constructor_args = dict(merge_modes=self._merge_modes)
        for name in all_vars:
            val = getattr(self, name)
            if isinstance(val, ModuleEntity):
                constructor_args[name] = val.deepcopy()
            else:
                constructor_args[name] = copy.deepcopy(val)
        return self.__class__(**constructor_args)

    def with_placeholders(self):
        dupe = self.deepcopy()
        dupe._fill_placeholders()
        return dupe

    @classmethod
    def validate_prohibited_differing_properties(cls, props):
        """Validates permitted changes between two entities.

        :type props: typing.Iterable[str]
        :raises errors.ValidationError: when a diff validation fails.
        """
        modified_but_prohibited = set(props).intersection(
            cls.properties_modify_prohibited())
        if modified_but_prohibited:
            raise errors.ValidationError(
                "Cannot update property: "
                "{0}".format(", ".join(modified_but_prohibited)))

    def ansible_diff_to(self, other):
        """
        :type other: ModuleEntity
        :rtype: typing.Dict[typing.Literal["before", "after"], dict]
        """
        return {
            "before": self.to_module_return(),
            "after": other.to_module_return()
        }
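
To make the contract above concrete, here is a sketch of a subclass and the typical diff/validate flow. Everything is illustrative: the entity, its properties, and the ContainerMergeMode.MERGE member (only OVERRIDE is referenced above) are assumptions; the copy module is assumed imported as in the snippet.

class SecurityGroupEntity(ModuleEntity):
    """Hypothetical entity wrapping an AWS security group."""

    def __init__(self, merge_modes, name=None, description=None, tags=None):
        super(SecurityGroupEntity, self).__init__(merge_modes)
        self.name = name
        self.description = description
        self.tags = tags

    @classmethod
    def from_boto_dict(cls, boto_dict):
        return cls(merge_modes={},
                   name=boto_dict.get("GroupName"),
                   description=boto_dict.get("Description"),
                   tags=dict((t["Key"], t["Value"]) for t in boto_dict.get("Tags", [])))

    @classmethod
    def from_module_params(cls, module_params):
        # MERGE is assumed to be the non-OVERRIDE member of ContainerMergeMode.
        return cls(merge_modes={"tags": ContainerMergeMode.MERGE},
                   name=module_params.get("name"),
                   description=module_params.get("description"),
                   tags=module_params.get("tags"))

    def to_module_return(self):
        return {"name": self.name, "description": self.description, "tags": self.tags}

    @classmethod
    def properties_modify_prohibited(cls):
        # A security group description cannot be changed after creation.
        return {"description"}

    @classmethod
    def properties_modify_noop(cls):
        return set()

    def _fill_placeholders(self):
        if self.name is None:
            self.name = self.PROPERTY_PLACEHOLDER

# Diff the current state against the requested one, then make sure no
# prohibited property would change:
current = SecurityGroupEntity.from_boto_dict(
    {"GroupName": "web", "Description": "web tier", "Tags": []})
desired = current.build_desired_state_with_params(
    SecurityGroupEntity.from_module_params(
        {"name": "web", "tags": {"env": "prod"}}))
SecurityGroupEntity.validate_prohibited_differing_properties(
    current.differing_properties_to(desired))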
Example #22
class YumDnf(with_metaclass(ABCMeta, object)):
    """
    Abstract class that handles the population of instance variables that should
    be identical between both YUM and DNF modules because of the feature parity
    and shared argument spec
    """

    def __init__(self, module):

        self.module = module

        self.allow_downgrade = self.module.params['allow_downgrade']
        self.autoremove = self.module.params['autoremove']
        self.bugfix = self.module.params['bugfix']
        self.cacheonly = self.module.params['cacheonly']
        self.conf_file = self.module.params['conf_file']
        self.disable_excludes = self.module.params['disable_excludes']
        self.disable_gpg_check = self.module.params['disable_gpg_check']
        self.disable_plugin = self.module.params['disable_plugin']
        self.disablerepo = self.module.params.get('disablerepo', [])
        self.download_only = self.module.params['download_only']
        self.download_dir = self.module.params['download_dir']
        self.enable_plugin = self.module.params['enable_plugin']
        self.enablerepo = self.module.params.get('enablerepo', [])
        self.exclude = self.module.params['exclude']
        self.installroot = self.module.params['installroot']
        self.install_repoquery = self.module.params['install_repoquery']
        self.install_weak_deps = self.module.params['install_weak_deps']
        self.list = self.module.params['list']
        self.names = [p.strip() for p in self.module.params['name']]
        self.releasever = self.module.params['releasever']
        self.security = self.module.params['security']
        self.skip_broken = self.module.params['skip_broken']
        self.state = self.module.params['state']
        self.update_only = self.module.params['update_only']
        self.update_cache = self.module.params['update_cache']
        self.validate_certs = self.module.params['validate_certs']
        self.sslverify = self.module.params['sslverify']
        self.lock_timeout = self.module.params['lock_timeout']

        # It's possible someone passed a comma separated string since it used
        # to be a string type, so we should handle that
        self.names = self.listify_comma_sep_strings_in_list(self.names)
        self.disablerepo = self.listify_comma_sep_strings_in_list(self.disablerepo)
        self.enablerepo = self.listify_comma_sep_strings_in_list(self.enablerepo)
        self.exclude = self.listify_comma_sep_strings_in_list(self.exclude)

        # Fail if someone passed a space separated string
        # https://github.com/ansible/ansible/issues/46301
        for name in self.names:
            if ' ' in name and not any(spec in name for spec in ['@', '>', '<', '=']):
                module.fail_json(
                    msg='It appears that a space separated string of packages was passed in '
                        'as an argument. To operate on several packages, pass a comma separated '
                        'string of packages or a list of packages.'
                )

        # Sanity checking for autoremove
        if self.state is None:
            if self.autoremove:
                self.state = "absent"
            else:
                self.state = "present"

        if self.autoremove and (self.state != "absent"):
            self.module.fail_json(
                msg="Autoremove should be used alone or with state=absent",
                results=[],
            )

        # This should really be redefined by both the yum and dnf module but a
        # default isn't a bad idea
        self.lockfile = '/var/run/yum.pid'

    @abstractmethod
    def is_lockfile_pid_valid(self):
        return

    def _is_lockfile_present(self):
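        # os.path.isfile covers a literal lockfile path; glob.glob covers
        # subclasses that set a wildcard pattern as their lockfile path.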
        return (os.path.isfile(self.lockfile) or glob.glob(self.lockfile)) and self.is_lockfile_pid_valid()

    def wait_for_lock(self):
        '''Poll until the lock is removed if timeout is a positive number'''

        if not self._is_lockfile_present():
            return

        if self.lock_timeout > 0:
            for iteration in range(0, self.lock_timeout):
                time.sleep(1)
                if not self._is_lockfile_present():
                    return

        self.module.fail_json(msg='{0} lockfile is held by another process'.format(self.pkg_mgr_name))

    def listify_comma_sep_strings_in_list(self, some_list):
        """
        method to accept a list of strings as the parameter, find any strings
        in that list that are comma separated, remove them from the list and add
        their comma separated elements to the original list
        """
        new_list = []
        remove_from_original_list = []
        for element in some_list:
            if ',' in element:
                remove_from_original_list.append(element)
                new_list.extend([e.strip() for e in element.split(',')])

        for element in remove_from_original_list:
            some_list.remove(element)

        some_list.extend(new_list)

        if some_list == [""]:
            return []

        return some_list

    @abstractmethod
    def run(self):
        raise NotImplementedError
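
A backend only has to supply the two abstract hooks. A minimal sketch; the class name, lockfile pattern, and behaviour are illustrative assumptions:

class DnfBackend(YumDnf):
    """Hypothetical backend built on the shared argument handling above."""

    def __init__(self, module):
        super(DnfBackend, self).__init__(module)
        self.pkg_mgr_name = 'dnf'  # used in wait_for_lock()'s failure message
        self.lockfile = '/var/cache/dnf/*_lock.pid'  # glob patterns are allowed

    def is_lockfile_pid_valid(self):
        # A real implementation would read the PID from the lockfile and
        # check whether that process is still alive; assume it is here.
        return True

    def run(self):
        self.wait_for_lock()
        self.module.exit_json(
            changed=False,
            msg='would handle state=%s for %s' % (self.state, self.names))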
Example #23
class ConnectionBase(with_metaclass(ABCMeta, object)):
    '''
    A base class for connections to contain common code.
    '''

    has_pipelining = False
    has_native_async = False # eg, winrm
    always_pipeline_modules = False # eg, winrm
    become_methods = C.BECOME_METHODS
    # When running over this connection type, prefer modules written in a certain language
    # as discovered by the specified file extension.  An empty string as the
    # language means any language.
    module_implementation_preferences = ('',)
    allow_executable = True

    def __init__(self, play_context, new_stdin, *args, **kwargs):
        # All these hasattrs allow subclasses to override these parameters
        if not hasattr(self, '_play_context'):
            self._play_context = play_context
        if not hasattr(self, '_new_stdin'):
            self._new_stdin = new_stdin
        # Backwards compat: self._display isn't really needed, just import the global display and use that.
        if not hasattr(self, '_display'):
            self._display = display
        if not hasattr(self, '_connected'):
            self._connected = False

        self.success_key = None
        self.prompt = None
        self._connected = False

        # load the shell plugin for this action/connection
        if play_context.shell:
            shell_type = play_context.shell
        elif hasattr(self, '_shell_type'):
            shell_type = getattr(self, '_shell_type')
        else:
            shell_type = 'sh'
            shell_filename = os.path.basename(self._play_context.executable)
            for shell in shell_loader.all():
                if shell_filename in shell.COMPATIBLE_SHELLS:
                    shell_type = shell.SHELL_FAMILY
                    break

        self._shell = shell_loader.get(shell_type)
        if not self._shell:
            raise AnsibleError("Invalid shell type specified (%s), or the plugin for that shell type is missing." % shell_type)

    @property
    def connected(self):
        '''Read-only property holding whether the connection to the remote host is active or closed.'''
        return self._connected

    def _become_method_supported(self):
        ''' Checks if the current class supports this privilege escalation method '''

        if self._play_context.become_method in self.become_methods:
            return True

        raise AnsibleError("Internal Error: this connection module does not support running commands via %s" % self._play_context.become_method)

    def set_host_overrides(self, host, hostvars=None):
        '''
        An optional method, which can be used to set connection plugin parameters
        from variables set on the host (or groups to which the host belongs)

        Any connection plugin using this should first initialize its attributes in
        an overridden `def __init__(self):`, and then use `host.get_vars()` to find
        variables which may be used to set those attributes in this method.
        '''
        pass

    @staticmethod
    def _split_ssh_args(argstring):
        """
        Takes a string like '-o Foo=1 -o Bar="foo bar"' and returns a
        list ['-o', 'Foo=1', '-o', 'Bar=foo bar'] that can be added to
        the argument list. The list will not contain any empty elements.
        """
        try:
            # Python 2.6.x shlex doesn't handle unicode type so we have to
            # convert args to byte string for that case.  More efficient to
            # try without conversion first but python2.6 doesn't throw an
            # exception, it merely mangles the output:
            # >>> shlex.split(u't e')
            # ['t\x00\x00\x00', '\x00\x00\x00e\x00\x00\x00']
            return [to_text(x.strip()) for x in shlex.split(to_bytes(argstring)) if x.strip()]
        except AttributeError:
            # In Python3, shlex.split doesn't work on a byte string.
            return [to_text(x.strip()) for x in shlex.split(argstring) if x.strip()]

    @abstractproperty
    def transport(self):
        """String used to identify this Connection class from other classes"""
        pass

    @abstractmethod
    def _connect(self):
        """Connect to the host we've been initialized with"""

        # Check if PE is supported
        if self._play_context.become:
            self._become_method_supported()

    @ensure_connect
    @abstractmethod
    def exec_command(self, cmd, in_data=None, sudoable=True):
        """Run a command on the remote host.

        :arg cmd: byte string containing the command
        :kwarg in_data: If set, this data is passed to the command's stdin.
            This is used to implement pipelining.  Currently not all
            connection plugins implement pipelining.
        :kwarg sudoable: Tell the connection plugin if we're executing
            a command via a privilege escalation mechanism.  This may affect
            how the connection plugin returns data.  Note that not all
            connections can handle privilege escalation.
        :returns: a tuple of (return code, stdout, stderr)  The return code is
            an int while stdout and stderr are both byte strings.

        When a command is executed, it passes through a chain of wrapping
        commands before it runs.  The chain looks approximately like this::

            [LocalShell] ConnectionCommand [UsersLoginShell (*)] ANSIBLE_SHELL_EXECUTABLE [(BecomeCommand ANSIBLE_SHELL_EXECUTABLE)] Command

        :LocalShell: Is optional.  It is run locally to invoke the
            ``ConnectionCommand``.  In most instances, the
            ``ConnectionCommand`` can be invoked directly instead.  The ssh
            connection plugin, whose ssh_args values may need local expansion,
            is the sole known exception to this.  Shell metacharacters in the
            command itself should be processed on the remote machine, not the
            local machine, so no shell is needed locally.  (Example, ``/bin/sh``)
        :ConnectionCommand: This is the command that connects us to the remote
            machine to run the rest of the command.  ``ansible_ssh_user``,
            ``ansible_ssh_host`` and so forth are fed to this piece of the
            command to connect to the correct host (Examples ``ssh``,
            ``chroot``)
        :UsersLoginShell: This shell may or may not be created depending on
            the ConnectionCommand used by the connection plugin.  This is the
            shell that the ``ansible_ssh_user`` has configured as their login
            shell.  In traditional UNIX parlance, this is the last field of
            a user's ``/etc/passwd`` entry.  We do not specifically try to run
            the ``UsersLoginShell`` when we connect.  Instead it is implicit
            in the actions that the ``ConnectionCommand`` takes when it
            connects to a remote machine.  ``ansible_shell_type`` may be set
            to inform ansible of differences in how the ``UsersLoginShell``
            handles things like quoting if a shell has different semantics
            than the Bourne shell.
        :ANSIBLE_SHELL_EXECUTABLE: This is the shell set via the inventory var
            ``ansible_shell_executable`` or via
            ``constants.DEFAULT_EXECUTABLE`` if the inventory var is not set.
            We explicitly invoke this shell so that we have predictable
            quoting rules at this point.  ``ANSIBLE_SHELL_EXECUTABLE`` is only
            settable by the user because some sudo setups may only allow
            invoking a specific shell.  (For instance, ``/bin/bash`` may be
            allowed but ``/bin/sh``, our default, may not).  We invoke this
            twice, once after the ``ConnectionCommand`` and once after the
            ``BecomeCommand``.  After the ConnectionCommand, this is run by
            the ``UsersLoginShell``.  After the ``BecomeCommand`` we specify
            that the ``ANSIBLE_SHELL_EXECUTABLE`` is being invoked directly.
        :BecomeCommand ANSIBLE_SHELL_EXECUTABLE: Is the command that performs
            privilege escalation.  Setting this up is performed by the action
            plugin prior to running ``exec_command``. So we just get passed
            :param:`cmd` which has the BecomeCommand already added.
            (Examples: sudo, su)  If we have a BecomeCommand then we will
            invoke an ANSIBLE_SHELL_EXECUTABLE shell inside of it so that we
            have a consistent view of quoting.
        :Command: Is the command we're actually trying to run remotely.
            (Examples: mkdir -p $HOME/.ansible, python $HOME/.ansible/tmp-script-file)
        """
        pass

    @ensure_connect
    @abstractmethod
    def put_file(self, in_path, out_path):
        """Transfer a file from local to remote"""
        pass

    @ensure_connect
    @abstractmethod
    def fetch_file(self, in_path, out_path):
        """Fetch a file from remote to local"""
        pass

    @abstractmethod
    def close(self):
        """Terminate the connection"""
        pass

    def check_become_success(self, b_output):
        b_success_key = to_bytes(self._play_context.success_key)
        for b_line in b_output.splitlines(True):
            if b_success_key == b_line.rstrip():
                return True
        return False

    def check_password_prompt(self, b_output):
        if self._play_context.prompt is None:
            return False
        elif isinstance(self._play_context.prompt, string_types):
            b_prompt = to_bytes(self._play_context.prompt).strip()
            b_lines = b_output.splitlines(True)
            if not b_lines:
                return False
            return b_lines[-1].strip().endswith(b_prompt) or b_lines[0].strip().endswith(b_prompt)

    def check_incorrect_password(self, b_output):
        b_incorrect_password = to_bytes(gettext.dgettext(self._play_context.become_method, C.BECOME_ERROR_STRINGS[self._play_context.become_method]))
        return b_incorrect_password and b_incorrect_password in b_output

    def check_missing_password(self, b_output):
        b_missing_password = to_bytes(gettext.dgettext(self._play_context.become_method, C.BECOME_MISSING_STRINGS[self._play_context.become_method]))
        return b_missing_password and b_missing_password in b_output

    def connection_lock(self):
        f = self._play_context.connection_lockfd
        display.vvvv('CONNECTION: pid %d waiting for lock on %d' % (os.getpid(), f), host=self._play_context.remote_addr)
        fcntl.lockf(f, fcntl.LOCK_EX)
        display.vvvv('CONNECTION: pid %d acquired lock on %d' % (os.getpid(), f), host=self._play_context.remote_addr)

    def connection_unlock(self):
        f = self._play_context.connection_lockfd
        fcntl.lockf(f, fcntl.LOCK_UN)
        display.vvvv('CONNECTION: pid %d released lock on %d' % (os.getpid(), f), host=self._play_context.remote_addr)

    def reset(self):
        display.warning("Reset is not implemented for this connection")
Example #24
class NetconfBase(with_metaclass(ABCMeta, object)):
    """
    A base class for implementing Netconf connections

    .. note:: Unlike most of Ansible, nearly all strings in
        :class:`NetconfBase` plugins are byte strings.  This is because of
        how close to the underlying platform these plugins operate.  Remember
        to mark literal strings as byte string (``b"string"``) and to use
        :func:`~ansible.module_utils._text.to_bytes` and
        :func:`~ansible.module_utils._text.to_text` to avoid unexpected
        problems.

        List of supported RPCs:
            :get_config: Retrieves the specified configuration from the device
            :edit_config: Loads the specified commands into the remote device
            :get: Execute specified command on remote device
            :get_capabilities: Retrieves device information and supported rpc methods
            :commit: Load configuration from candidate to running
            :discard_changes: Discard changes to candidate datastore
            :validate: Validate the contents of the specified configuration.
            :lock: Allows the client to lock the configuration system of a device.
            :unlock: Release a configuration lock, previously obtained with the lock operation.
            :copy_config: create or replace an entire configuration datastore with the contents of another complete
                          configuration datastore.
            For JUNOS:
            :execute_rpc: RPC to be executed on the remote device
            :load_configuration: Loads the given configuration on the device

        Note: RPC support depends on the capabilities of the remote device.

        :returns: Returns output received from remote device as byte string
        Note: the 'result' or 'error' from the response should be converted to an
              ElementTree object using 'fromstring' to parse the output as an XML doc

              'get_capabilities()' returns 'result' as a json string.

            Usage:
            from ansible.module_utils.connection import Connection

            conn = Connection()
            data = conn.execute_rpc(rpc)
            reply = fromstring(data)

            data = conn.get_capabilities()
            json.loads(data)

            conn.load_configuration(config=['set system ntp server 1.1.1.1'], action='set', format='text')
    """
    def __init__(self, connection):
        self._connection = connection
        self.m = self._connection._manager

    @ensure_connected
    def get_config(self, *args, **kwargs):
        """Retrieve all or part of a specified configuration.
           :source: name of the configuration datastore being queried
           :filter: specifies the portion of the configuration to retrieve
           (by default entire configuration is retrieved)"""
        return self.m.get_config(*args, **kwargs).data_xml

    @ensure_connected
    def get(self, *args, **kwargs):
        """Retrieve running configuration and device state information.
        *filter* specifies the portion of the configuration to retrieve
        (by default entire configuration is retrieved)
        """
        return self.m.get(*args, **kwargs).data_xml

    @ensure_connected
    def edit_config(self, *args, **kwargs):
        """Loads all or part of the specified *config* to the *target* configuration datastore.

            :target: is the name of the configuration datastore being edited
            :config: is the configuration, which must be rooted in the `config` element.
                        It can be specified either as a string or an :class:`~xml.etree.ElementTree.Element`.
            :default_operation: if specified must be one of { `"merge"`, `"replace"`, or `"none"` }
            :test_option: if specified must be one of { `"test_then_set"`, `"set"` }
            :error_option: if specified must be one of { `"stop-on-error"`, `"continue-on-error"`, `"rollback-on-error"` }
            The `"rollback-on-error"` *error_option* depends on the `:rollback-on-error` capability.
        """
        try:
            return self.m.edit_config(*args, **kwargs).data_xml
        except RPCError as exc:
            raise Exception(to_xml(exc.xml))

    @ensure_connected
    def validate(self, *args, **kwargs):
        """Validate the contents of the specified configuration.
        :source: is the name of the configuration datastore being validated or `config`
        element containing the configuration subtree to be validated
        """
        return self.m.validate(*args, **kwargs).data_xml

    @ensure_connected
    def copy_config(self, *args, **kwargs):
        """Create or replace an entire configuration datastore with the contents of another complete
        configuration datastore.
        :source: is the name of the configuration datastore to use as the source of the
                 copy operation or `config` element containing the configuration subtree to copy
        :target: is the name of the configuration datastore to use as the destination of the copy operation"""
        return self.m.copy_config(*args, **kwargs).data_xml

    @ensure_connected
    def lock(self, *args, **kwargs):
        """Allows the client to lock the configuration system of a device.
        *target* is the name of the configuration datastore to lock
        """
        return self.m.lock(*args, **kwargs).data_xml

    @ensure_connected
    def unlock(self, *args, **kwargs):
        """Release a configuration lock, previously obtained with the lock operation.
        :target: is the name of the configuration datastore to unlock
        """
        return self.m.unlock(*args, **kwargs).data_xml

    @ensure_connected
    def discard_changes(self, *args, **kwargs):
        """Revert the candidate configuration to the currently running configuration.
        Any uncommitted changes are discarded."""
        return self.m.discard_changes(*args, **kwargs).data_xml

    @ensure_connected
    def commit(self, *args, **kwargs):
        """Commit the candidate configuration as the device's new current configuration.
           Depends on the `:candidate` capability.
           A confirmed commit (i.e. if *confirmed* is `True`) is reverted if there is no
           followup commit within the *timeout* interval. If no timeout is specified the
           confirm timeout defaults to 600 seconds (10 minutes).
           A confirming commit may have the *confirmed* parameter but this is not required.
           Depends on the `:confirmed-commit` capability.
        :confirmed: whether this is a confirmed commit
        :timeout: specifies the confirm timeout in seconds
        """
        try:
            return self.m.commit(*args, **kwargs).data_xml
        except RPCError as exc:
            raise Exception(to_xml(exc.xml))

    @abstractmethod
    def get_capabilities(self, commands):
        """Retrieves device information and supported
        rpc methods by device platform and return result
        as a string
        """
        pass

    @staticmethod
    def guess_network_os(obj):
        """Identifies the operating system of
            network device.
        """
        pass

    def get_base_rpc(self):
        """Returns list of base rpc method supported by remote device"""
        return ['get_config', 'edit_config', 'get_capabilities', 'get']

    def put_file(self, source, destination):
        """Copies file over scp to remote device"""
        pass

    def fetch_file(self, source, destination):
        """Fetch file over scp from remote device"""
        pass
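
A sketch of how a module might drive these RPCs, following the parsing notes in the docstring (capabilities come back as JSON, everything else as XML). The candidate-datastore workflow assumes the device advertises the :candidate capability; a real module passes its socket path to Connection().

import json
from xml.etree.ElementTree import fromstring
from ansible.module_utils.connection import Connection

conn = Connection()
capabilities = json.loads(conn.get_capabilities())

conn.lock(target='candidate')
try:
    conn.edit_config(target='candidate', config='<config/>')  # placeholder payload
    conn.validate(source='candidate')
    conn.commit()
    reply = fromstring(conn.get_config(source='running'))
except Exception:
    conn.discard_changes()
    raise
finally:
    conn.unlock(target='candidate')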
Example #25
class YumDnf(with_metaclass(ABCMeta, object)):
    """
    Abstract class that handles the population of instance variables that should
    be identical between both YUM and DNF modules because of the feature parity
    and shared argument spec
    """
    def __init__(self, module):

        self.module = module

        self.allow_downgrade = self.module.params['allow_downgrade']
        self.autoremove = self.module.params['autoremove']
        self.bugfix = self.module.params['bugfix']
        self.conf_file = self.module.params['conf_file']
        self.disable_excludes = self.module.params['disable_excludes']
        self.disable_gpg_check = self.module.params['disable_gpg_check']
        self.disable_plugin = self.module.params['disable_plugin']
        self.disablerepo = self.module.params.get('disablerepo', [])
        self.download_only = self.module.params['download_only']
        self.enable_plugin = self.module.params['enable_plugin']
        self.enablerepo = self.module.params.get('enablerepo', [])
        self.exclude = self.module.params['exclude']
        self.installroot = self.module.params['installroot']
        self.install_repoquery = self.module.params['install_repoquery']
        self.list = self.module.params['list']
        self.names = [p.strip() for p in self.module.params['name']]
        self.releasever = self.module.params['releasever']
        self.security = self.module.params['security']
        self.skip_broken = self.module.params['skip_broken']
        self.state = self.module.params['state']
        self.update_only = self.module.params['update_only']
        self.update_cache = self.module.params['update_cache']
        self.validate_certs = self.module.params['validate_certs']

        # It's possible someone passed a comma separated string since it used
        # to be a string type, so we should handle that
        self.names = self.listify_comma_sep_strings_in_list(self.names)
        self.disablerepo = self.listify_comma_sep_strings_in_list(
            self.disablerepo)
        self.enablerepo = self.listify_comma_sep_strings_in_list(
            self.enablerepo)
        self.exclude = self.listify_comma_sep_strings_in_list(self.exclude)

        # Fail if someone passed a space separated string
        # https://github.com/ansible/ansible/issues/46301
        if any(' ' in name and '@' not in name and '==' not in name
               for name in self.names):
            module.fail_json(
                msg='It appears that a space separated string of packages was passed in '
                    'as an argument. To operate on several packages, pass a comma separated '
                    'string of packages or a list of packages.')

        # Sanity checking for autoremove
        if self.state is None:
            if self.autoremove:
                self.state = "absent"
            else:
                self.state = "present"

        if self.autoremove and (self.state != "absent"):
            self.module.fail_json(
                msg="Autoremove should be used alone or with state=absent",
                results=[],
            )

    def listify_comma_sep_strings_in_list(self, some_list):
        """
        method to accept a list of strings as the parameter, find any strings
        in that list that are comma separated, remove them from the list and add
        their comma separated elements to the original list
        """
        new_list = []
        remove_from_original_list = []
        for element in some_list:
            if ',' in element:
                remove_from_original_list.append(element)
                new_list.extend([e.strip() for e in element.split(',')])

        for element in remove_from_original_list:
            some_list.remove(element)

        some_list.extend(new_list)

        if some_list == [""]:
            return []

        return some_list

    @abstractmethod
    def run(self):
        raise NotImplementedError
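
Because the helper never reads self, it can be exercised unbound (Python 3) to see the normalization; the package names are made up:

print(YumDnf.listify_comma_sep_strings_in_list(None, ['vim', 'git, curl', 'httpd']))
# -> ['vim', 'httpd', 'git', 'curl']: the comma element is removed and its
#    stripped parts are appended; only an input of exactly [""] collapses to [].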
Example #26
class Display(with_metaclass(Singleton, object)):
    def __init__(self, verbosity=0):

        self.columns = None
        self.verbosity = verbosity

        # list of all deprecation messages to prevent duplicate display
        self._deprecations = {}
        self._warns = {}
        self._errors = {}

        self.b_cowsay = None
        self.noncow = C.ANSIBLE_COW_SELECTION

        self.set_cowsay_info()

        if self.b_cowsay:
            try:
                cmd = subprocess.Popen([self.b_cowsay, "-l"],
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE)
                (out, err) = cmd.communicate()
                self.cows_available = set([to_text(c) for c in out.split()])
                if C.ANSIBLE_COW_WHITELIST and any(C.ANSIBLE_COW_WHITELIST):
                    self.cows_available = set(
                        C.ANSIBLE_COW_WHITELIST).intersection(
                            self.cows_available)
            except Exception:
                # could not execute cowsay for some reason
                self.b_cowsay = False

        self._set_column_width()

    def set_cowsay_info(self):
        if C.ANSIBLE_NOCOWS:
            return

        if C.ANSIBLE_COW_PATH:
            self.b_cowsay = C.ANSIBLE_COW_PATH
        else:
            for b_cow_path in b_COW_PATHS:
                if os.path.exists(b_cow_path):
                    self.b_cowsay = b_cow_path

    def display(self,
                msg,
                color=None,
                stderr=False,
                screen_only=False,
                log_only=False):
        """ Display a message to the user

        Note: msg *must* be a unicode string to prevent UnicodeError tracebacks.
        """

        nocolor = msg
        if color:
            msg = stringc(msg, color)

        if not log_only:
            if not msg.endswith(u'\n'):
                msg2 = msg + u'\n'
            else:
                msg2 = msg

            msg2 = to_bytes(msg2,
                            encoding=self._output_encoding(stderr=stderr))
            if sys.version_info >= (3, ):
                # Convert back to text string on python3
                # We first convert to a byte string so that we get rid of
                # characters that are invalid in the user's locale
                msg2 = to_text(msg2,
                               self._output_encoding(stderr=stderr),
                               errors='replace')

            # Note: After Display() class is refactored need to update the log capture
            # code in 'bin/ansible-connection' (and other relevant places).
            if not stderr:
                fileobj = sys.stdout
            else:
                fileobj = sys.stderr

            fileobj.write(msg2)

            try:
                fileobj.flush()
            except IOError as e:
                # Ignore EPIPE in case fileobj has been prematurely closed, eg.
                # when piping to "head -n1"
                if e.errno != errno.EPIPE:
                    raise

        if logger and not screen_only:
            # We first convert to a byte string so that we get rid of
            # color and characters that are invalid in the user's locale
            msg2 = to_bytes(nocolor.lstrip(u'\n'))

            if sys.version_info >= (3, ):
                # Convert back to text string on python3
                msg2 = to_text(msg2, self._output_encoding(stderr=stderr))

            lvl = logging.INFO
            if color:
                # set logger level based on color (not great)
                try:
                    lvl = color_to_log_level[color]
                except KeyError:
                    # this should not happen, but JIC
                    raise AnsibleAssertionError(
                        'Invalid color supplied to display: %s' % color)
            # actually log
            logger.log(lvl, msg2)

    def v(self, msg, host=None):
        return self.verbose(msg, host=host, caplevel=0)

    def vv(self, msg, host=None):
        return self.verbose(msg, host=host, caplevel=1)

    def vvv(self, msg, host=None):
        return self.verbose(msg, host=host, caplevel=2)

    def vvvv(self, msg, host=None):
        return self.verbose(msg, host=host, caplevel=3)

    def vvvvv(self, msg, host=None):
        return self.verbose(msg, host=host, caplevel=4)

    def vvvvvv(self, msg, host=None):
        return self.verbose(msg, host=host, caplevel=5)

    def debug(self, msg, host=None):
        if C.DEFAULT_DEBUG:
            if host is None:
                self.display("%6d %0.5f: %s" % (os.getpid(), time.time(), msg),
                             color=C.COLOR_DEBUG)
            else:
                self.display("%6d %0.5f [%s]: %s" %
                             (os.getpid(), time.time(), host, msg),
                             color=C.COLOR_DEBUG)

    def verbose(self, msg, host=None, caplevel=2):

        to_stderr = C.VERBOSE_TO_STDERR
        if self.verbosity > caplevel:
            if host is None:
                self.display(msg, color=C.COLOR_VERBOSE, stderr=to_stderr)
            else:
                self.display("<%s> %s" % (host, msg),
                             color=C.COLOR_VERBOSE,
                             stderr=to_stderr)

    def deprecated(self, msg, version=None, removed=False):
        ''' used to print out a deprecation message.'''

        if not removed and not C.DEPRECATION_WARNINGS:
            return

        if not removed:
            if version:
                new_msg = "[DEPRECATION WARNING]: %s. This feature will be removed in version %s." % (
                    msg, version)
            else:
                new_msg = "[DEPRECATION WARNING]: %s. This feature will be removed in a future release." % (
                    msg)
            new_msg = new_msg + " Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg.\n\n"
        else:
            raise AnsibleError(
                "[DEPRECATED]: %s.\nPlease update your playbooks." % msg)

        wrapped = textwrap.wrap(new_msg, self.columns, drop_whitespace=False)
        new_msg = "\n".join(wrapped) + "\n"

        if new_msg not in self._deprecations:
            self.display(new_msg.strip(), color=C.COLOR_DEPRECATE, stderr=True)
            self._deprecations[new_msg] = 1

    def warning(self, msg, formatted=False):

        if not formatted:
            new_msg = "\n[WARNING]: %s" % msg
            wrapped = textwrap.wrap(new_msg, self.columns)
            new_msg = "\n".join(wrapped) + "\n"
        else:
            new_msg = "\n[WARNING]: \n%s" % msg

        if new_msg not in self._warns:
            self.display(new_msg, color=C.COLOR_WARN, stderr=True)
            self._warns[new_msg] = 1

    def system_warning(self, msg):
        if C.SYSTEM_WARNINGS:
            self.warning(msg)

    def banner(self, msg, color=None, cows=True):
        '''
        Prints a header-looking line, using cowsay if available, or a row of stars
        whose length depends on the terminal width (minimum 3)
        '''
        if self.b_cowsay and cows:
            try:
                self.banner_cowsay(msg)
                return
            except OSError:
                self.warning(
                    "somebody cleverly deleted cowsay or something during the PB run.  heh."
                )

        msg = msg.strip()
        star_len = self.columns - len(msg)
        if star_len <= 3:
            star_len = 3
        stars = u"*" * star_len
        self.display(u"\n%s %s" % (msg, stars), color=color)

    def banner_cowsay(self, msg, color=None):
        if u": [" in msg:
            msg = msg.replace(u"[", u"")
            if msg.endswith(u"]"):
                msg = msg[:-1]
        runcmd = [self.b_cowsay, b"-W", b"60"]
        if self.noncow:
            thecow = self.noncow
            if thecow == 'random':
                thecow = random.choice(list(self.cows_available))
            runcmd.append(b'-f')
            runcmd.append(to_bytes(thecow))
        runcmd.append(to_bytes(msg))
        cmd = subprocess.Popen(runcmd,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
        (out, err) = cmd.communicate()
        self.display(u"%s\n" % to_text(out), color=color)

    def error(self, msg, wrap_text=True):
        if wrap_text:
            new_msg = u"\n[ERROR]: %s" % msg
            wrapped = textwrap.wrap(new_msg, self.columns)
            new_msg = u"\n".join(wrapped) + u"\n"
        else:
            new_msg = u"ERROR! %s" % msg
        if new_msg not in self._errors:
            self.display(new_msg, color=C.COLOR_ERROR, stderr=True)
            self._errors[new_msg] = 1

    @staticmethod
    def prompt(msg, private=False):
        prompt_string = to_bytes(msg, encoding=Display._output_encoding())
        if sys.version_info >= (3, ):
            # Convert back into text on python3.  We do this double conversion
            # to get rid of characters that are illegal in the user's locale
            prompt_string = to_text(prompt_string)

        if private:
            return getpass.getpass(prompt_string)
        else:
            return input(prompt_string)

    def do_var_prompt(self,
                      varname,
                      private=True,
                      prompt=None,
                      encrypt=None,
                      confirm=False,
                      salt_size=None,
                      salt=None,
                      default=None,
                      unsafe=None):

        result = None
        if sys.__stdin__.isatty():

            do_prompt = self.prompt

            if prompt and default is not None:
                msg = "%s [%s]: " % (prompt, default)
            elif prompt:
                msg = "%s: " % prompt
            else:
                msg = 'input for %s: ' % varname

            if confirm:
                while True:
                    result = do_prompt(msg, private)
                    second = do_prompt("confirm " + msg, private)
                    if result == second:
                        break
                    self.display("***** VALUES ENTERED DO NOT MATCH ****")
            else:
                result = do_prompt(msg, private)
        else:
            result = None
            self.warning("Not prompting as we are not in interactive mode")

        # if result is false and default is not None
        if not result and default is not None:
            result = default

        if encrypt:
            # Circular import because encrypt needs a display class
            from ansible.utils.encrypt import do_encrypt
            result = do_encrypt(result, encrypt, salt_size, salt)

        # handle utf-8 chars
        result = to_text(result, errors='surrogate_or_strict')

        if unsafe:
            result = wrap_var(result)
        return result

    @staticmethod
    def _output_encoding(stderr=False):
        encoding = locale.getpreferredencoding()
        # https://bugs.python.org/issue6202
        # Python2 hardcodes an obsolete value on Mac.  Use MacOSX defaults
        # instead.
        if encoding in ('mac-roman', ):
            encoding = 'utf-8'
        return encoding

    def _set_column_width(self):
        if os.isatty(0):
            tty_size = unpack(
                'HHHH', fcntl.ioctl(0, TIOCGWINSZ, pack('HHHH', 0, 0, 0,
                                                        0)))[1]
        else:
            tty_size = 0
        self.columns = max(79, tty_size - 1)
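
The caplevel mapping means that with verbosity=2 (i.e. -vv) the v() and vv() helpers print while vvv() and up stay silent, since verbose() only displays when self.verbosity > caplevel. A quick sketch, assuming the ansible imports this class relies on are available:

display = Display(verbosity=2)  # Singleton metaclass: later calls reuse this instance
display.v('shown: caplevel 0 < verbosity 2')
display.vv('shown: caplevel 1 < verbosity 2')
display.vvv('suppressed: verbosity 2 is not greater than caplevel 2')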
Example #27
class NetconfBase(with_metaclass(ABCMeta, object)):
    """
    A base class for implementing Netconf connections

    .. note:: Unlike most of Ansible, nearly all strings in
        :class:`NetconfBase` plugins are byte strings.  This is because of
        how close to the underlying platform these plugins operate.  Remember
        to mark literal strings as byte string (``b"string"``) and to use
        :func:`~ansible.module_utils._text.to_bytes` and
        :func:`~ansible.module_utils._text.to_text` to avoid unexpected
        problems.

        List of supported RPCs:
            :get: Retrieves running configuration and device state information
            :get_config: Retrieves the specified configuration from the device
            :edit_config: Loads the specified commands into the remote device
            :commit: Load configuration from candidate to running
            :discard_changes: Discard changes to candidate datastore
            :validate: Validate the contents of the specified configuration.
            :lock: Allows the client to lock the configuration system of a device.
            :unlock: Release a configuration lock, previously obtained with the lock operation.
            :copy_config: Create or replace an entire configuration datastore with the contents of another complete
                          configuration datastore.
            :get_schema: Retrieves the required schema from the device
            :get_capabilities: Retrieves device information and supported rpc methods

            For JUNOS:
            :execute_rpc: RPC to be executed on the remote device
            :load_configuration: Loads given configuration on device

        Note: rpc support depends on the capabilities of the remote device.

        :returns: Returns output received from remote device as byte string
        Note: the 'result' or 'error' from the response should be converted to an
              ElementTree object using 'fromstring' to parse the output as an xml doc

              'get_capabilities()' returns 'result' as a json string.

            Usage:
            from ansible.module_utils.connection import Connection

            conn = Connection()
            data = conn.execute_rpc(rpc)
            reply = fromstring(data)

            data = conn.get_capabilities()
            json.loads(data)

            conn.load_configuration(config=['set system ntp server 1.1.1.1'], action='set', format='text')
    """
    def __init__(self, connection):
        self._connection = connection
        self.m = self._connection._manager

    @ensure_connected
    def rpc(self, name):
        """RPC to be execute on remote device
           :name: Name of rpc in string format"""
        try:
            obj = to_ele(name)
            resp = self.m.rpc(obj)
            return resp.data_xml if hasattr(resp, 'data_xml') else resp.xml
        except RPCError as exc:
            msg = exc.xml
            raise Exception(to_xml(msg))
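    # Note: ncclient returns a GetReply (which has .data_xml) for data-bearing
    # replies and a plain RPCReply (which has .xml) otherwise; that is why the
    # hasattr(resp, 'data_xml') fallback recurs throughout this class.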

    @ensure_connected
    def get_config(self, source=None, filter=None):
        """Retrieve all or part of a specified configuration
           (by default entire configuration is retrieved).

        :param source: Name of the configuration datastore being queried, defaults to running datastore
        :param filter: This argument specifies the portion of the configuration data to retrieve
        :return: Returns xml string containing the RPC response received from remote host
        """
        if isinstance(filter, list):
            filter = tuple(filter)

        if not source:
            source = 'running'
        resp = self.m.get_config(source=source, filter=filter)
        return resp.data_xml if hasattr(resp, 'data_xml') else resp.xml
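    # Usage sketch (illustrative, not from the source): ncclient expects the
    # filter as a (type, criteria) tuple, which is why a list is coerced to a
    # tuple above, e.g.:
    #   conn.get_config(source='candidate', filter=('subtree', '<interfaces/>'))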

    @ensure_connected
    def get(self, filter=None):
        """Retrieve device configuration and state information.

        :param filter: This argument specifies the portion of the state data to retrieve
                        (by default entire state data is retrieved)
        :return: Returns xml string containing the RPC response received from remote host
        """
        if isinstance(filter, list):
            filter = tuple(filter)
        resp = self.m.get(filter=filter)
        response = resp.data_xml if hasattr(resp, 'data_xml') else resp.xml
        return response

    @ensure_connected
    def edit_config(self, *args, **kwargs):
        """Loads all or part of the specified *config* to the *target* configuration datastore.

            :target: is the name of the configuration datastore being edited
            :config: is the configuration, which must be rooted in the `config` element.
                        It can be specified either as a string or an :class:`~xml.etree.ElementTree.Element`.
            :default_operation: if specified must be one of { `"merge"`, `"replace"`, or `"none"` }
            :test_option: if specified must be one of { `"test_then_set"`, `"set"` }
            :error_option: if specified must be one of { `"stop-on-error"`, `"continue-on-error"`, `"rollback-on-error"` }
            The `"rollback-on-error"` *error_option* depends on the `:rollback-on-error` capability.
        """
        resp = self.m.edit_config(*args, **kwargs)
        return resp.data_xml if hasattr(resp, 'data_xml') else resp.xml

    @ensure_connected
    def validate(self, *args, **kwargs):
        """Validate the contents of the specified configuration.
        :source: is the name of the configuration datastore being validated or `config`
        element containing the configuration subtree to be validated
        """
        resp = self.m.validate(*args, **kwargs)
        return resp.data_xml if hasattr(resp, 'data_xml') else resp.xml

    @ensure_connected
    def copy_config(self, *args, **kwargs):
        """Create or replace an entire configuration datastore with the contents of another complete
        configuration datastore.
        :source: is the name of the configuration datastore to use as the source of the
                 copy operation or `config` element containing the configuration subtree to copy
        :target: is the name of the configuration datastore to use as the destination of the copy operation"""
        resp = self.m.copy_config(*args, **kwargs)
        return resp.data_xml if hasattr(resp, 'data_xml') else resp.xml

    @ensure_connected
    def dispatch(self, request):
        """Execute operation on the remote device
        :request: is the rpc request including attributes as XML string
        """
        req = fromstring(request)
        resp = self.m.dispatch(req)
        return resp.data_xml if hasattr(resp, 'data_xml') else resp.xml

    @ensure_connected
    def lock(self, target=None):
        """
        Allows the client to lock the configuration system of a device.
        :param target: is the name of the configuration datastore to lock,
                        defaults to candidate datastore
        :return: Returns xml string containing the RPC response received from remote host
        """
        if not target:
            target = 'candidate'
        resp = self.m.lock(target=target)
        return resp.data_xml if hasattr(resp, 'data_xml') else resp.xml

    @ensure_connected
    def unlock(self, target=None):
        """
        Release a configuration lock, previously obtained with the lock operation.
        :param target: is the name of the configuration datastore to unlock,
                       defaults to candidate datastore
        :return: Returns xml string containing the RPC response received from remote host
        """
        """Release a configuration lock, previously obtained with the lock operation.
        :target: is the name of the configuration datastore to unlock
        """
        if not target:
            target = 'candidate'
        resp = self.m.unlock(target=target)
        return resp.data_xml if hasattr(resp, 'data_xml') else resp.xml

    @ensure_connected
    def discard_changes(self):
        """
        Revert the candidate configuration to the currently running configuration.
        Any uncommitted changes are discarded.
        :return: Returns xml string containing the RPC response received from remote host
        """
        resp = self.m.discard_changes()
        return resp.data_xml if hasattr(resp, 'data_xml') else resp.xml

    @ensure_connected
    def commit(self, *args, **kwargs):
        """Commit the candidate configuration as the device's new current configuration.
           Depends on the `:candidate` capability.
           A confirmed commit (i.e. if *confirmed* is `True`) is reverted if there is no
           followup commit within the *timeout* interval. If no timeout is specified the
           confirm timeout defaults to 600 seconds (10 minutes).
           A confirming commit may have the *confirmed* parameter but this is not required.
           Depends on the `:confirmed-commit` capability.
        :confirmed: whether this is a confirmed commit
        :timeout: specifies the confirm timeout in seconds
        """
        resp = self.m.commit(*args, **kwargs)
        return resp.data_xml if hasattr(resp, 'data_xml') else resp.xml

    @ensure_connected
    def get_schema(self, *args, **kwargs):
        """Retrieves the required schema from the device
        """
        resp = self.m.get_schema(*args, **kwargs)
        return resp.data_xml if hasattr(resp, 'data_xml') else resp.xml

    @ensure_connected
    def locked(self, *args, **kwargs):
        resp = self.m.locked(*args, **kwargs)
        return resp.data_xml if hasattr(resp, 'data_xml') else resp.xml

    @abstractmethod
    def get_capabilities(self):
        """Retrieves device information and supported
        rpc methods by device platform and return result
        as a string
        """
        pass

    @staticmethod
    def guess_network_os(obj):
        """Identifies the operating system of
            network device.
        """
        pass

    def get_base_rpc(self):
        """Returns list of base rpc method supported by remote device"""
        return ['get_config', 'edit_config', 'get_capabilities', 'get']

    def put_file(self, source, destination):
        """Copies file over scp to remote device"""
        pass

    def fetch_file(self, source, destination):
        """Fetch file over scp from remote device"""
        pass

    def get_device_operations(self, server_capabilities):
        operations = {}
        capabilities = '\n'.join(server_capabilities)
        operations['supports_commit'] = ':candidate' in capabilities
        operations['supports_defaults'] = ':with-defaults' in capabilities
        operations[
            'supports_confirm_commit'] = ':confirmed-commit' in capabilities
        operations['supports_startup'] = ':startup' in capabilities
        operations['supports_xpath'] = ':xpath' in capabilities
        operations[
            'supports_writable_running'] = ':writable-running' in capabilities

        operations['lock_datastore'] = []
        if operations['supports_writable_running']:
            operations['lock_datastore'].append('running')

        if operations['supports_commit']:
            operations['lock_datastore'].append('candidate')

        if operations['supports_startup']:
            operations['lock_datastore'].append('startup')

        operations['supports_lock'] = bool(operations['lock_datastore'])

        return operations
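
The capability-to-flag mapping above can be exercised directly, since get_device_operations never touches self. A hedged sketch using standard IETF capability URIs (the list of capabilities is illustrative; calling the method unbound with a None self assumes Python 3, where unbound methods are plain functions):

caps = [
    'urn:ietf:params:netconf:base:1.1',
    'urn:ietf:params:netconf:capability:candidate:1.0',
    'urn:ietf:params:netconf:capability:confirmed-commit:1.1',
    'urn:ietf:params:netconf:capability:startup:1.0',
]

ops = NetconfBase.get_device_operations(None, caps)  # self is unused here
assert ops['supports_commit']                        # ':candidate' matched
assert ops['supports_confirm_commit']
assert not ops['supports_writable_running']
assert ops['lock_datastore'] == ['candidate', 'startup']
assert ops['supports_lock']
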
Example #28
class FieldAttributeBase(with_metaclass(BaseMeta, object)):
    def __init__(self):

        # initialize the data loader and variable manager, which will be provided
        # later when the object is actually loaded
        self._loader = None
        self._variable_manager = None

        # other internal params
        self._validated = False
        self._squashed = False
        self._finalized = False

        # every object gets a random uuid:
        self._uuid = get_unique_id()

        # we create a copy of the attributes here due to the fact that
        # it was initialized as a class param in the meta class, so we
        # need a unique object here (all members contained within are
        # unique already).
        self._attributes = self._attributes.copy()

        # and init vars, avoid using defaults in field declaration as it lives across plays
        self.vars = dict()

    def dump_me(self, depth=0):
        ''' this is never called from production code; it is here to be used when debugging as a 'complex print' '''
        if depth == 0:
            display.debug(
                "DUMPING OBJECT ------------------------------------------------------"
            )
        display.debug("%s- %s (%s, id=%s)" %
                      (" " * depth, self.__class__.__name__, self, id(self)))
        if hasattr(self, '_parent') and self._parent:
            self._parent.dump_me(depth + 2)
            dep_chain = self._parent.get_dep_chain()
            if dep_chain:
                for dep in dep_chain:
                    dep.dump_me(depth + 2)
        if hasattr(self, '_play') and self._play:
            self._play.dump_me(depth + 2)

    def preprocess_data(self, ds):
        ''' infrequently used method to do some pre-processing of legacy terms '''

        for base_class in self.__class__.mro():
            method = getattr(
                self, "_preprocess_data_%s" % base_class.__name__.lower(),
                None)
            if method:
                return method(ds)
        return ds
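    # The MRO walk above lets a subclass ship a hook named
    # _preprocess_data_<classname> (lowercased), e.g. _preprocess_data_task
    # for Task; the first matching hook found wins and its return value
    # replaces ds.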

    def load_data(self, ds, variable_manager=None, loader=None):
        ''' walk the input datastructure and assign any values '''

        if ds is None:
            raise AnsibleAssertionError(
                'ds (%s) should not be None but it is.' % ds)

        # cache the datastructure internally
        setattr(self, '_ds', ds)

        # the variable manager class is used to manage and merge variables
        # down to a single dictionary for reference in templating, etc.
        self._variable_manager = variable_manager

        # the data loader class is used to parse data from strings and files
        if loader is not None:
            self._loader = loader
        else:
            self._loader = DataLoader()

        # call the preprocess_data() function to massage the data into
        # something we can more easily parse, and then call the validation
        # function on it to ensure there are no incorrect key values
        ds = self.preprocess_data(ds)
        self._validate_attributes(ds)

        # Walk all attributes in the class. We sort them based on their priority
        # so that certain fields can be loaded before others, if they are dependent;
        # Attribute defines its comparison operators over priority, which is what
        # makes sorting on operator.itemgetter(1) meaningful.
        for name, attr in sorted(iteritems(self._valid_attrs),
                                 key=operator.itemgetter(1)):
            # copy the value over unless a _load_field method is defined
            target_name = name
            if name in self._alias_attrs:
                target_name = self._alias_attrs[name]
            if name in ds:
                method = getattr(self, '_load_%s' % name, None)
                if method:
                    self._attributes[target_name] = method(name, ds[name])
                else:
                    self._attributes[target_name] = ds[name]

        # run early, non-critical validation
        self.validate()

        # return the constructed object
        return self

    def get_ds(self):
        try:
            return getattr(self, '_ds')
        except AttributeError:
            return None

    def get_loader(self):
        return self._loader

    def get_variable_manager(self):
        return self._variable_manager

    def _validate_debugger(self, attr, name, value):
        valid_values = frozenset(
            ('always', 'on_failed', 'on_unreachable', 'on_skipped', 'never'))
        if value and isinstance(value,
                                string_types) and value not in valid_values:
            raise AnsibleParserError(
                "'%s' is not a valid value for debugger. Must be one of %s" %
                (value, ', '.join(valid_values)),
                obj=self.get_ds())
        return value

    def _validate_attributes(self, ds):
        '''
        Ensures that there are no keys in the datastructure which do
        not map to attributes for this object.
        '''

        valid_attrs = frozenset(self._valid_attrs.keys())
        for key in ds:
            if key not in valid_attrs:
                raise AnsibleParserError(
                    "'%s' is not a valid attribute for a %s" %
                    (key, self.__class__.__name__),
                    obj=ds)

    def validate(self, all_vars=None):
        ''' validation that is done at parse time, not load time '''
        all_vars = {} if all_vars is None else all_vars

        if not self._validated:
            # walk all fields in the object
            for (name, attribute) in iteritems(self._valid_attrs):

                if name in self._alias_attrs:
                    name = self._alias_attrs[name]

                # run validator only if present
                method = getattr(self, '_validate_%s' % name, None)
                if method:
                    method(attribute, name, getattr(self, name))
                else:
                    # and make sure the attribute is of the type it should be
                    value = self._attributes[name]
                    if value is not None:
                        if attribute.isa == 'string' and isinstance(
                                value, (list, dict)):
                            raise AnsibleParserError(
                                "The field '%s' is supposed to be a string type,"
                                " however the incoming data structure is a %s"
                                % (name, type(value)),
                                obj=self.get_ds())

        self._validated = True

    def squash(self):
        '''
        Evaluates all attributes and sets them to the evaluated version,
        so that all future accesses of attributes do not need to evaluate
        parent attributes.
        '''
        if not self._squashed:
            for name in self._valid_attrs.keys():
                self._attributes[name] = getattr(self, name)
            self._squashed = True

    def copy(self):
        '''
        Create a copy of this object and return it.
        '''

        new_me = self.__class__()

        for name in self._valid_attrs.keys():
            if name in self._alias_attrs:
                continue
            new_me._attributes[name] = shallowcopy(self._attributes[name])

        new_me._loader = self._loader
        new_me._variable_manager = self._variable_manager
        new_me._validated = self._validated
        new_me._finalized = self._finalized
        new_me._uuid = self._uuid

        # if the ds value was set on the object, copy it to the new copy too
        if hasattr(self, '_ds'):
            new_me._ds = self._ds

        return new_me

    def post_validate(self, templar):
        '''
        we can't tell that everything is of the right type until we have
        all the variables.  Run basic types (from isa) as well as
        any _post_validate_<foo> functions.
        '''

        # save the omit value for later checking
        omit_value = templar._available_variables.get('omit')

        for (name, attribute) in iteritems(self._valid_attrs):

            if getattr(self, name) is None:
                if not attribute.required:
                    continue
                else:
                    raise AnsibleParserError(
                        "the field '%s' is required but was not set" % name)
            elif not attribute.always_post_validate and self.__class__.__name__ not in (
                    'Task', 'Handler', 'PlayContext'):
                # Intermediate objects like Play() won't have their fields validated by
                # default, as their values are often inherited by other objects and validated
                # later, so we don't want them to fail out early
                continue

            try:
                # Run the post-validator if present. These methods are responsible for
                # using the given templar to template the values, if required.
                method = getattr(self, '_post_validate_%s' % name, None)
                if method:
                    value = method(attribute, getattr(self, name), templar)
                elif attribute.isa == 'class':
                    value = getattr(self, name)
                else:
                    # if the attribute contains a variable, template it now
                    value = templar.template(getattr(self, name))

                # if this evaluated to the omit value, set the value back to
                # the default specified in the FieldAttribute and move on
                if omit_value is not None and value == omit_value:
                    setattr(self, name, attribute.default)
                    continue

                # and make sure the attribute is of the type it should be
                if value is not None:
                    if attribute.isa == 'string':
                        value = to_text(value)
                    elif attribute.isa == 'int':
                        value = int(value)
                    elif attribute.isa == 'float':
                        value = float(value)
                    elif attribute.isa == 'bool':
                        value = boolean(value, strict=False)
                    elif attribute.isa == 'percent':
                        # special value, which may be an integer or float
                        # with an optional '%' at the end
                        if isinstance(value, string_types) and '%' in value:
                            value = value.replace('%', '')
                        value = float(value)
                    elif attribute.isa in ('list', 'barelist'):
                        if value is None:
                            value = []
                        elif not isinstance(value, list):
                            if isinstance(value, string_types
                                          ) and attribute.isa == 'barelist':
                                display.deprecated(
                                    "Using comma separated values for a list has been deprecated. "
                                    "You should instead use the correct YAML syntax for lists. "
                                )
                                value = value.split(',')
                            else:
                                value = [value]
                        if attribute.listof is not None:
                            for item in value:
                                if not isinstance(item, attribute.listof):
                                    raise AnsibleParserError(
                                        "the field '%s' should be a list of %s, "
                                        "but the item '%s' is a %s" %
                                        (name, attribute.listof, item,
                                         type(item)),
                                        obj=self.get_ds())
                                elif attribute.required and attribute.listof == string_types:
                                    if item is None or item.strip() == "":
                                        raise AnsibleParserError(
                                            "the field '%s' is required, and cannot have empty values"
                                            % (name, ),
                                            obj=self.get_ds())
                    elif attribute.isa == 'set':
                        if value is None:
                            value = set()
                        elif not isinstance(value, (list, set)):
                            if isinstance(value, string_types):
                                value = value.split(',')
                            else:
                                # Making a list like this handles strings of
                                # text and bytes properly
                                value = [value]
                        if not isinstance(value, set):
                            value = set(value)
                    elif attribute.isa == 'dict':
                        if value is None:
                            value = dict()
                        elif not isinstance(value, dict):
                            raise TypeError("%s is not a dictionary" % value)
                    elif attribute.isa == 'class':
                        if not isinstance(value, attribute.class_type):
                            raise TypeError(
                                "%s is not a valid %s (got a %s instead)" %
                                (name, attribute.class_type, type(value)))
                        value.post_validate(templar=templar)

                # and assign the massaged value back to the attribute field
                setattr(self, name, value)
            except (TypeError, ValueError) as e:
                raise AnsibleParserError(
                    "the field '%s' has an invalid value (%s), and could not be converted to an %s. "
                    "The error was: %s" % (name, value, attribute.isa, e),
                    obj=self.get_ds(),
                    orig_exc=e)
            except (AnsibleUndefinedVariable, UndefinedError) as e:
                if templar._fail_on_undefined_errors and name != 'name':
                    if name == 'args':
                        msg = "The task includes an option with an undefined variable. The error was: %s" % (
                            to_native(e))
                    else:
                        msg = "The field '%s' has an invalid value, which includes an undefined variable. The error was: %s" % (
                            name, to_native(e))
                    raise AnsibleParserError(msg,
                                             obj=self.get_ds(),
                                             orig_exc=e)

        self._finalized = True

    def _load_vars(self, attr, ds):
        '''
        Vars in a play can be specified either as a dictionary directly, or
        as a list of dictionaries. If the latter, this method will turn the
        list into a single dictionary.
        '''
        def _validate_variable_keys(ds):
            for key in ds:
                if not isidentifier(key):
                    raise TypeError("'%s' is not a valid variable name" % key)

        try:
            if isinstance(ds, dict):
                _validate_variable_keys(ds)
                return combine_vars(self.vars, ds)
            elif isinstance(ds, list):
                all_vars = self.vars
                for item in ds:
                    if not isinstance(item, dict):
                        raise ValueError
                    _validate_variable_keys(item)
                    all_vars = combine_vars(all_vars, item)
                return all_vars
            elif ds is None:
                return {}
            else:
                raise ValueError
        except ValueError as e:
            raise AnsibleParserError(
                "Vars in a %s must be specified as a dictionary, or a list of dictionaries"
                % self.__class__.__name__,
                obj=ds,
                orig_exc=e)
        except TypeError as e:
            raise AnsibleParserError(
                "Invalid variable name in vars specified for %s: %s" %
                (self.__class__.__name__, e),
                obj=ds,
                orig_exc=e)

    def _extend_value(self, value, new_value, prepend=False):
        '''
        Will extend the value given with new_value (and will turn both
        into lists if they are not so already). Adjacent duplicate values
        are collapsed (via itertools.groupby) and None values are dropped.
        '''

        if not isinstance(value, list):
            value = [value]
        if not isinstance(new_value, list):
            new_value = [new_value]

        if prepend:
            combined = new_value + value
        else:
            combined = value + new_value

        return [i for i, _ in itertools.groupby(combined) if i is not None]
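    # Worked examples (illustrative): groupby only collapses *adjacent*
    # duplicates, so runs are deduplicated but repeats separated by other
    # values survive, and None entries are dropped by the comprehension:
    #   _extend_value(['a'], ['a', 'b'])   -> ['a', 'b']
    #   _extend_value(['a', 'b'], ['a'])   -> ['a', 'b', 'a']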

    def dump_attrs(self):
        '''
        Dumps all attributes to a dictionary
        '''
        attrs = dict()
        for (name, attribute) in iteritems(self._valid_attrs):
            attr = getattr(self, name)
            if attribute.isa == 'class' and attr is not None and hasattr(
                    attr, 'serialize'):
                attrs[name] = attr.serialize()
            else:
                attrs[name] = attr
        return attrs

    def from_attrs(self, attrs):
        '''
        Loads attributes from a dictionary
        '''
        for (attr, value) in iteritems(attrs):
            if attr in self._valid_attrs:
                attribute = self._valid_attrs[attr]
                if attribute.isa == 'class' and isinstance(value, dict):
                    obj = attribute.class_type()
                    obj.deserialize(value)
                    setattr(self, attr, obj)
                else:
                    setattr(self, attr, value)

    def serialize(self):
        '''
        Serializes the object derived from the base object into
        a dictionary of values. This only serializes the field
        attributes for the object, so this may need to be overridden
        for any classes which wish to add additional items not stored
        as field attributes.
        '''

        data = self.dump_attrs()

        # serialize the uuid and bookkeeping fields
        data['uuid'] = self._uuid
        data['finalized'] = self._finalized
        data['squashed'] = self._squashed

        return data

    def deserialize(self, data):
        '''
        Given a dictionary of values, load up the field attributes for
        this object. As with serialize(), if there are any non-field
        attribute data members, this method will need to be overridden
        and extended.
        '''

        if not isinstance(data, dict):
            raise AnsibleAssertionError(
                'data (%s) should be a dict but is a %s' % (data, type(data)))

        for (name, attribute) in iteritems(self._valid_attrs):
            if name in data:
                setattr(self, name, data[name])
            else:
                setattr(self, name, attribute.default)

        # restore the UUID field
        setattr(self, '_uuid', data.get('uuid'))
        self._finalized = data.get('finalized', False)
        self._squashed = data.get('squashed', False)
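
A hedged round-trip sketch of serialize()/deserialize() under the 2.x playbook API. Timer and its _interval field are invented for illustration; Base is the concrete FieldAttributeBase wired to the BaseMeta machinery, so the serialized dict also carries all of Base's own fields plus the 'uuid', 'finalized', and 'squashed' bookkeeping keys:

from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base

class Timer(Base):                       # hypothetical subclass
    _interval = FieldAttribute(isa='int', default=10)

t = Timer()
t.interval = 30
blob = t.serialize()                     # a plain dict, safe to pickle

t2 = Timer()
t2.deserialize(blob)
assert t2.interval == 30
assert t2._uuid == t._uuid               # uuid is restored, not regenerated
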
Example #29
class TerminalBase(with_metaclass(ABCMeta, object)):
    '''
    A base class for implementing cli connections

    .. note:: Unlike most of Ansible, nearly all strings in
        :class:`TerminalBase` plugins are byte strings.  This is because of
        how close to the underlying platform these plugins operate.  Remember
        to mark literal strings as byte string (``b"string"``) and to use
        :func:`~ansible.module_utils._text.to_bytes` and
        :func:`~ansible.module_utils._text.to_text` to avoid unexpected
        problems.
    '''

    #: compiled bytes regular expressions that signal the end of stdout output
    terminal_stdout_re = []

    #: compiled bytes regular expressions that match error output on stderr
    terminal_stderr_re = []

    #: compiled bytes regular expressions to remove ANSI codes
    ansi_re = [
        re.compile(br'(\x1b\[\?1h\x1b=)'),
        re.compile(br'\x08.')
    ]
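    # The first pattern strips the cursor-key/keypad application-mode toggle
    # (ESC [ ? 1 h, ESC =) some devices emit on login; the second removes a
    # backspace together with the character that follows it.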

    def __init__(self, connection):
        self._connection = connection

    def _exec_cli_command(self, cmd, check_rc=True):
        """
        Executes a CLI command on the device

        :arg cmd: Byte string consisting of the command to execute
        :kwarg check_rc: If True, the default, raise an
            :exc:`AnsibleConnectionFailure` if the return code from the
            command is nonzero
        :returns: A tuple of return code, stdout, and stderr from running the
            command.  stdout and stderr are both byte strings.
        """
        rc, out, err = self._connection.exec_command(cmd)
        if check_rc and rc != 0:
            raise AnsibleConnectionFailure(err)
        return rc, out, err

    def _get_prompt(self):
        """
        Returns the current prompt from the device

        :returns: A byte string of the prompt
        """
        for cmd in (b'\n', b'prompt()'):
            rc, out, err = self._exec_cli_command(cmd)
        return out

    def on_open_shell(self):
        """Called after the SSH session is established

        This method is called right after the invoke_shell() is called from
        the Paramiko SSHClient instance.  It provides an opportunity to setup
        terminal parameters such as disabling paging, for instance.
        """
        pass

    def on_close_shell(self):
        """Called before the connection is closed

        This method gets called once the connection close has been requested
        but before the connection is actually closed.  It provides an
        opportunity to clean up any terminal resources before the shell is
        actually closed
        """
        pass

    def on_authorize(self, passwd=None):
        """Called when privilege escalation is requested

        :kwarg passwd: String containing the password

        This method is called when the privilege is requested to be elevated
        in the play context by setting become to True.  It is the responsibility
        of the terminal plugin to actually do the privilege escalation such
        as entering `enable` mode for instance
        """
        pass

    def on_deauthorize(self):
        """Called when privilege deescalation is requested

        This method is called when the privilege changed from escalated
        (become=True) to non escalated (become=False).  It is the responsibility
        of this method to actually perform the deauthorization procedure
        """
        pass
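
The hooks above are meant to be overridden per network platform. A hedged sketch of a subclass (the platform, prompt/error patterns, and CLI commands are illustrative, not taken from any real plugin):

import re

from ansible.errors import AnsibleConnectionFailure
from ansible.plugins.terminal import TerminalBase

class TerminalModule(TerminalBase):
    # byte-string patterns, per the class docstring
    terminal_stdout_re = [
        re.compile(br'[\r\n]?[\w\-.:/\[\]]+(?:\([^)]+\))?[>#] ?$'),
    ]
    terminal_stderr_re = [
        re.compile(br'% ?Error'),
        re.compile(br'% ?Invalid input'),
    ]

    def on_open_shell(self):
        try:
            # disable paging so long output comes back in one piece
            self._exec_cli_command(b'terminal length 0')
        except AnsibleConnectionFailure:
            raise AnsibleConnectionFailure('unable to set terminal parameters')

    def on_authorize(self, passwd=None):
        if self._get_prompt().strip().endswith(b'#'):
            return  # already privileged
        # a real plugin would also answer the password prompt here
        self._exec_cli_command(b'enable')
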