Example #1
    def test_plugins_connection_ssh_put_file(self, mock_ospe, mock_sleep):
        pc = PlayContext()
        new_stdin = StringIO()
        conn = connection_loader.get('ssh', pc, new_stdin)
        conn._build_command = MagicMock()
        conn._bare_run = MagicMock()

        mock_ospe.return_value = True
        conn._build_command.return_value = 'some command to run'
        conn._bare_run.return_value = (0, '', '')
        conn.host = "some_host"

        C.ANSIBLE_SSH_RETRIES = 9

        # Test with C.DEFAULT_SCP_IF_SSH set to smart
        # Test when SFTP works
        C.DEFAULT_SCP_IF_SSH = 'smart'
        expected_in_data = b' '.join((b'put', to_bytes(shlex_quote('/path/to/in/file')), to_bytes(shlex_quote('/path/to/dest/file')))) + b'\n'
        conn.put_file('/path/to/in/file', '/path/to/dest/file')
        conn._bare_run.assert_called_with('some command to run', expected_in_data, checkrc=False)

        # Test when SFTP doesn't work but SCP does
        conn._bare_run.side_effect = [(1, 'stdout', 'some errors'), (0, '', '')]
        conn.put_file('/path/to/in/file', '/path/to/dest/file')
        conn._bare_run.assert_called_with('some command to run', None, checkrc=False)
        conn._bare_run.side_effect = None

        # test with C.DEFAULT_SCP_IF_SSH enabled
        C.DEFAULT_SCP_IF_SSH = True
        conn.put_file('/path/to/in/file', '/path/to/dest/file')
        conn._bare_run.assert_called_with('some command to run', None, checkrc=False)

        conn.put_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
        conn._bare_run.assert_called_with('some command to run', None, checkrc=False)

        # test with C.DEFAULT_SCP_IF_SSH disabled
        C.DEFAULT_SCP_IF_SSH = False
        expected_in_data = b' '.join((b'put', to_bytes(shlex_quote('/path/to/in/file')), to_bytes(shlex_quote('/path/to/dest/file')))) + b'\n'
        conn.put_file('/path/to/in/file', '/path/to/dest/file')
        conn._bare_run.assert_called_with('some command to run', expected_in_data, checkrc=False)

        expected_in_data = b' '.join((b'put',
                                      to_bytes(shlex_quote('/path/to/in/file/with/unicode-fö〩')),
                                      to_bytes(shlex_quote('/path/to/dest/file/with/unicode-fö〩')))) + b'\n'
        conn.put_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
        conn._bare_run.assert_called_with('some command to run', expected_in_data, checkrc=False)

        # test that a non-zero rc raises an error
        conn._bare_run.return_value = (1, 'stdout', 'some errors')
        self.assertRaises(AnsibleError, conn.put_file, '/path/to/bad/file', '/remote/path/to/file')

        # test that a not-found path raises an error
        mock_ospe.return_value = False
        conn._bare_run.return_value = (0, 'stdout', '')
        self.assertRaises(AnsibleFileNotFound, conn.put_file, '/path/to/bad/file', '/remote/path/to/file')
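The expected_in_data values above mirror how the ssh plugin builds its sftp batch line; a minimal standalone sketch of that quoting, using stdlib shlex.quote in place of Ansible's shlex_quote import (function name and paths are illustrative):

from shlex import quote as shlex_quote

def sftp_put_line(in_path, out_path):
    # Quote both paths so spaces and shell metacharacters survive the sftp batch command.
    return ' '.join(('put', shlex_quote(in_path), shlex_quote(out_path))).encode('utf-8') + b'\n'

print(sftp_put_line('/path/with space/file', '/dest/file'))
# b"put '/path/with space/file' /dest/file\n"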
Example #2
def query_package(module, name, depot=None):
    """ Returns whether a package is installed or not and version. """

    cmd_list = '/usr/sbin/swlist -a revision -l product'
    if depot:
        rc, stdout, stderr = module.run_command("%s -s %s %s | grep %s" % (cmd_list, shlex_quote(depot), shlex_quote(name), shlex_quote(name)),
                                                use_unsafe_shell=True)
    else:
        rc, stdout, stderr = module.run_command("%s %s | grep %s" % (cmd_list, shlex_quote(name), shlex_quote(name)), use_unsafe_shell=True)
    if rc == 0:
        version = re.sub(r"\s\s+|\t", " ", stdout).strip().split()[1]
    else:
        version = None

    return rc, version
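Because the swlist query runs with use_unsafe_shell=True, the quoting is what keeps an unusual package name from being interpreted by the shell; a quick sketch of the command string it builds (the name is deliberately hostile for illustration):

from shlex import quote as shlex_quote

cmd_list = '/usr/sbin/swlist -a revision -l product'
name = 'foo; rm -rf /'  # hostile input, for illustration only
print("%s %s | grep %s" % (cmd_list, shlex_quote(name), shlex_quote(name)))
# /usr/sbin/swlist -a revision -l product 'foo; rm -rf /' | grep 'foo; rm -rf /'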
Example #3
    def checksum(self, path, python_interp):
        # In the following test, each condition is a check and logical
        # comparison (|| or &&) that sets the rc value.  Every check is run so
        # the last check in the series to fail will be the rc that is returned.
        #
        # If a check fails we error before invoking the hash functions because
        # hash functions may successfully take the hash of a directory on BSDs
        # (UFS filesystem?) which is not what the rest of the ansible code expects
        #
        # If all of the available hashing methods fail we fail with an rc of 0.
        # This logic is added to the end of the cmd at the bottom of this function.

        # Return codes:
        # checksum: success!
        # 0: Unknown error
        # 1: Remote file does not exist
        # 2: No read permissions on the file
        # 3: File is a directory
        # 4: No python interpreter

        # Quoting gets complex here.  We're writing a python string that's
        # used by a variety of shells on the remote host to invoke a python
        # "one-liner".
        shell_escaped_path = shlex_quote(path)
        test = "rc=flag; [ -r %(p)s ] %(shell_or)s rc=2; [ -f %(p)s ] %(shell_or)s rc=1; [ -d %(p)s ] %(shell_and)s rc=3; %(i)s -V 2>/dev/null %(shell_or)s rc=4; [ x\"$rc\" != \"xflag\" ] %(shell_and)s echo \"${rc}  \"%(p)s %(shell_and)s exit 0" % dict(p=shell_escaped_path, i=python_interp, shell_and=self._SHELL_AND, shell_or=self._SHELL_OR)  # NOQA
        csums = [
            u"({0} -c 'import hashlib; BLOCKSIZE = 65536; hasher = hashlib.sha1();{2}afile = open(\"'{1}'\", \"rb\"){2}buf = afile.read(BLOCKSIZE){2}while len(buf) > 0:{2}\thasher.update(buf){2}\tbuf = afile.read(BLOCKSIZE){2}afile.close(){2}print(hasher.hexdigest())' 2>/dev/null)".format(python_interp, shell_escaped_path, self._SHELL_EMBEDDED_PY_EOL),  # NOQA  Python > 2.4 (including python3)
            u"({0} -c 'import sha; BLOCKSIZE = 65536; hasher = sha.sha();{2}afile = open(\"'{1}'\", \"rb\"){2}buf = afile.read(BLOCKSIZE){2}while len(buf) > 0:{2}\thasher.update(buf){2}\tbuf = afile.read(BLOCKSIZE){2}afile.close(){2}print(hasher.hexdigest())' 2>/dev/null)".format(python_interp, shell_escaped_path, self._SHELL_EMBEDDED_PY_EOL),  # NOQA  Python == 2.4
        ]

        cmd = (" %s " % self._SHELL_OR).join(csums)
        cmd = "%s; %s %s (echo \'0  \'%s)" % (test, cmd, self._SHELL_OR, shell_escaped_path)
        return cmd
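As the comment notes, quoting gets complex because the escaped path is re-embedded inside a single-quoted python one-liner; here is what shlex_quote actually produces for an awkward path (stdlib shlex.quote shown as a stand-in):

from shlex import quote as shlex_quote

path = "/tmp/file with 'quotes'"
print(shlex_quote(path))
# '/tmp/file with '"'"'quotes'"'"''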
Example #4
    def put_file(self, in_path, out_path):
        """ Transfer a file from local to docker container """
        super(Connection, self).put_file(in_path, out_path)
        display.vvv("PUT %s TO %s" % (in_path, out_path), host=self._play_context.remote_addr)

        out_path = self._prefix_login_path(out_path)
        if not os.path.exists(to_bytes(in_path, errors='surrogate_or_strict')):
            raise AnsibleFileNotFound(
                "file or module does not exist: %s" % to_native(in_path))

        out_path = shlex_quote(out_path)
        # Older docker doesn't have native support for copying files into
        # running containers, so we use docker exec to implement this
        # Although docker version 1.8 and later provide support, the
        # owner and group of the files are always set to root
        args = self._build_exec_cmd([self._play_context.executable, "-c", "dd of=%s bs=%s" % (out_path, BUFSIZE)])
        args = [to_bytes(i, errors='surrogate_or_strict') for i in args]
        with open(to_bytes(in_path, errors='surrogate_or_strict'), 'rb') as in_file:
            try:
                p = subprocess.Popen(args, stdin=in_file,
                                     stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            except OSError:
                raise AnsibleError("docker connection requires dd command in the container to put files")
            stdout, stderr = p.communicate()

            if p.returncode != 0:
                raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" %
                                   (to_native(in_path), to_native(out_path), to_native(stdout), to_native(stderr)))
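The dd invocation above only stays safe because out_path is quoted before being spliced into the exec command; a small sketch of the resulting string (the BUFSIZE value is assumed for illustration):

from shlex import quote as shlex_quote

BUFSIZE = 65536  # assumed; the real constant comes from the connection plugin
out_path = '/opt/dest dir/file.txt'
print("dd of=%s bs=%s" % (shlex_quote(out_path), BUFSIZE))
# dd of='/opt/dest dir/file.txt' bs=65536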
Example #5
def remove_packages(module, packages):

    remove_c = 0
    pkg_glob_path = module.get_bin_path('pkg_glob', True)

    # If pkg_delete not found, we assume pkgng
    pkg_delete_path = module.get_bin_path('pkg_delete', False)
    if not pkg_delete_path:
        pkg_delete_path = module.get_bin_path('pkg', True)
        pkg_delete_path = pkg_delete_path + " delete -y"

    # Using a for loop in case of error, we can report the package that failed
    for package in packages:
        # Query the package first, to see if we even need to remove
        if not query_package(module, package):
            continue

        rc, out, err = module.run_command("%s `%s %s`" % (pkg_delete_path, pkg_glob_path, shlex_quote(package)), use_unsafe_shell=True)

        if query_package(module, package):
            name_without_digits = re.sub('[0-9]', '', package)
            rc, out, err = module.run_command("%s `%s %s`" % (pkg_delete_path, pkg_glob_path, shlex_quote(name_without_digits)),
                                              use_unsafe_shell=True)
            if query_package(module, package):
                module.fail_json(msg="failed to remove %s: %s" % (package, out))

        remove_c += 1

    if remove_c > 0:

        module.exit_json(changed=True, msg="removed %s package(s)" % remove_c)

    module.exit_json(changed=False, msg="package(s) already absent")
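Only the package name passes through shlex_quote here; the backtick-wrapped pkg_glob expansion is left to the shell, which is why use_unsafe_shell=True is required. A sketch of the string handed to the shell (binary paths are made up):

from shlex import quote as shlex_quote

pkg_delete_path = '/usr/sbin/pkg_delete'    # hypothetical path
pkg_glob_path = '/usr/local/sbin/pkg_glob'  # hypothetical path
package = 'mysql-client>=5.5'
print("%s `%s %s`" % (pkg_delete_path, pkg_glob_path, shlex_quote(package)))
# /usr/sbin/pkg_delete `/usr/local/sbin/pkg_glob 'mysql-client>=5.5'`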
Example #6
    def set_user_facl(self, paths, user, mode):
        """Only sets acls for users as that's really all we need"""
        cmd = ['setfacl', '-m', 'u:%s:%s' % (user, mode)]
        cmd.extend(paths)
        cmd = [shlex_quote(c) for c in cmd]

        return ' '.join(cmd)
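A quick check of the command string this helper returns, re-created here with stdlib shlex.quote for illustration:

from shlex import quote as shlex_quote

cmd = ['setfacl', '-m', 'u:%s:%s' % ('deploy', 'r-x')] + ['/tmp/a file']
print(' '.join(shlex_quote(c) for c in cmd))
# setfacl -m u:deploy:r-x '/tmp/a file'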
Example #7
def query_package(module, name):

    pkg_info_path = module.get_bin_path('pkg_info', False)

    # Assume that if we have pkg_info, we haven't upgraded to pkgng
    if pkg_info_path:
        pkgng = False
        pkg_glob_path = module.get_bin_path('pkg_glob', True)
        rc, out, err = module.run_command("%s -e `%s %s`" % (pkg_info_path, pkg_glob_path, shlex_quote(name)), use_unsafe_shell=True)
    else:
        pkgng = True
        pkg_info_path = module.get_bin_path('pkg', True)
        pkg_info_path = pkg_info_path + " info"
        rc, out, err = module.run_command("%s %s" % (pkg_info_path, name))

    found = rc == 0

    if not found:
        # databases/mysql55-client installs as mysql-client, so try solving
        # that the ugly way. Pity FreeBSD doesn't have a foolproof way of checking
        # whether a package is installed
        name_without_digits = re.sub('[0-9]', '', name)
        if name != name_without_digits:
            if pkgng:
                rc, out, err = module.run_command("%s %s" % (pkg_info_path, name_without_digits))
            else:
                rc, out, err = module.run_command("%s %s" % (pkg_info_path, name_without_digits))

        found = rc == 0

    return found
Example #8
def query_package(module, port_path, name, state="present"):
    """ Returns whether a package is installed or not. """

    if state == "present":

        rc, out, err = module.run_command("%s installed | grep -q ^.*%s" % (shlex_quote(port_path), shlex_quote(name)), use_unsafe_shell=True)
        if rc == 0:
            return True

        return False

    elif state == "active":

        rc, out, err = module.run_command("%s installed %s | grep -q active" % (shlex_quote(port_path), shlex_quote(name)), use_unsafe_shell=True)

        if rc == 0:
            return True

        return False
Example #9
def query_package(module, opkg_path, name, state="present"):
    """ Returns whether a package is installed or not. """

    if state == "present":

        rc, out, err = module.run_command("%s list-installed | grep -q \"^%s \"" % (shlex_quote(opkg_path), shlex_quote(name)), use_unsafe_shell=True)
        if rc == 0:
            return True

        return False
Example #10
    def build_module_command(self, env_string, shebang, cmd, arg_path=None, rm_tmp=None):
        # don't quote the cmd if it's an empty string, because this will break pipelining mode
        if cmd.strip() != '':
            cmd = shlex_quote(cmd)
        cmd_parts = [env_string.strip(), shebang.replace("#!", "").strip(), cmd]
        if arg_path is not None:
            cmd_parts.append(arg_path)
        new_cmd = " ".join(cmd_parts)
        if rm_tmp:
            new_cmd = 'begin ; %s; rm -rf "%s" %s ; end' % (new_cmd, rm_tmp, self._SHELL_REDIRECT_ALLNULL)
        return new_cmd
Example #11
    def build_module_command(self, env_string, shebang, cmd, arg_path=None):
        # don't quote the cmd if it's an empty string, because this will break pipelining mode
        if cmd.strip() != '':
            cmd = shlex_quote(cmd)

        cmd_parts = []
        if shebang:
            shebang = shebang.replace("#!", "").strip()
        else:
            shebang = ""
        cmd_parts.extend([env_string.strip(), shebang, cmd])
        if arg_path is not None:
            cmd_parts.append(arg_path)
        new_cmd = " ".join(cmd_parts)
        return new_cmd
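What the joined command looks like for a non-pipelined module whose remote path contains a space (values below are illustrative, not taken from a real run):

from shlex import quote as shlex_quote

env_string = 'LANG=C LC_ALL=C '
shebang = '#!/usr/bin/python'
cmd = '/home/user/.ansible/tmp/my module.py'  # hypothetical remote module path
print(' '.join([env_string.strip(), shebang.replace('#!', '').strip(), shlex_quote(cmd)]))
# LANG=C LC_ALL=C /usr/bin/python '/home/user/.ansible/tmp/my module.py'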
Example #12
    def expand_user(self, user_home_path, username=''):
        ''' Return a command to expand tildes in a path

        It can be either "~" or "~username". We just ignore $HOME
        We use the POSIX definition of a username:
            http://pubs.opengroup.org/onlinepubs/000095399/basedefs/xbd_chap03.html#tag_03_426
            http://pubs.opengroup.org/onlinepubs/000095399/basedefs/xbd_chap03.html#tag_03_276

            Falls back to 'current working directory' as we assume 'home is where the remote user ends up'
        '''

        # Check that the user_path to expand is safe
        if user_home_path != '~':
            if not _USER_HOME_PATH_RE.match(user_home_path):
                # shlex_quote will make the shell return the string verbatim
                user_home_path = shlex_quote(user_home_path)
        elif username:
            # if present the user name is appended to resolve "that user's home"
            user_home_path += username

        return 'echo %s' % user_home_path
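The effect of the safety check: a plain '~' or '~user' is passed through so the remote shell can expand it, while anything else is quoted and therefore echoed back verbatim. A sketch with an assumed username pattern (the real _USER_HOME_PATH_RE is defined elsewhere in the plugin):

import re
from shlex import quote as shlex_quote

_USER_HOME_PATH_RE = re.compile(r'^~[_.A-Za-z0-9][-_.A-Za-z0-9]*$')  # assumed pattern

for p in ('~', '~deploy', '~$(reboot)'):
    if p != '~' and not _USER_HOME_PATH_RE.match(p):
        p = shlex_quote(p)
    print('echo %s' % p)
# echo ~
# echo ~deploy
# echo '~$(reboot)'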
Example #13
    def put_file(self, in_path, out_path):
        ''' transfer a file from local to jail '''
        super(Connection, self).put_file(in_path, out_path)
        display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.jail)

        out_path = shlex_quote(self._prefix_login_path(out_path))
        try:
            with open(to_bytes(in_path, errors='surrogate_or_strict'), 'rb') as in_file:
                try:
                    p = self._buffered_exec_command('dd of=%s bs=%s' % (out_path, BUFSIZE), stdin=in_file)
                except OSError:
                    raise AnsibleError("jail connection requires dd command in the jail")
                try:
                    stdout, stderr = p.communicate()
                except Exception:
                    traceback.print_exc()
                    raise AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
                if p.returncode != 0:
                    raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, to_native(stdout), to_native(stderr)))
        except IOError:
            raise AnsibleError("file or module does not exist at: %s" % in_path)
Example #14
    def fetch_file(self, in_path, out_path):
        ''' fetch a file from jail to local '''
        super(Connection, self).fetch_file(in_path, out_path)
        display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.jail)

        in_path = shlex_quote(self._prefix_login_path(in_path))
        try:
            p = self._buffered_exec_command('dd if=%s bs=%s' % (in_path, BUFSIZE))
        except OSError:
            raise AnsibleError("jail connection requires dd command in the jail")

        with open(to_bytes(out_path, errors='surrogate_or_strict'), 'wb+') as out_file:
            try:
                chunk = p.stdout.read(BUFSIZE)
                while chunk:
                    out_file.write(chunk)
                    chunk = p.stdout.read(BUFSIZE)
            except Exception:
                traceback.print_exc()
                raise AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
            stdout, stderr = p.communicate()
            if p.returncode != 0:
                raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, to_native(stdout), to_native(stderr)))
Example #15
    def _low_level_execute_command(self, cmd, sudoable=True, in_data=None, executable=None, encoding_errors='surrogate_then_replace', chdir=None):
        '''
        This is the function which executes the low level shell command, which
        may be commands to create/remove directories for temporary files, or to
        run the module code or python directly when pipelining.

        :kwarg encoding_errors: If the value returned by the command isn't
            utf-8 then we have to figure out how to transform it to unicode.
            If the value is just going to be displayed to the user (or
            discarded) then the default of 'replace' is fine.  If the data is
            used as a key or is going to be written back out to a file
            verbatim, then this won't work.  May have to use some sort of
            replacement strategy (python3 could use surrogateescape)
        :kwarg chdir: cd into this directory before executing the command.
        '''

        display.debug("_low_level_execute_command(): starting")
#        if not cmd:
#            # this can happen with powershell modules when there is no analog to a Windows command (like chmod)
#            display.debug("_low_level_execute_command(): no command, exiting")
#           return dict(stdout='', stderr='', rc=254)

        if chdir:
            display.debug("_low_level_execute_command(): changing cwd to %s for this command" % chdir)
            cmd = self._connection._shell.append_command('cd %s' % chdir, cmd)

        allow_same_user = C.BECOME_ALLOW_SAME_USER
        same_user = self._play_context.become_user == self._play_context.remote_user
        if sudoable and self._play_context.become and (allow_same_user or not same_user):
            display.debug("_low_level_execute_command(): using become for this command")
            cmd = self._play_context.make_become_cmd(cmd, executable=executable)

        if self._connection.allow_executable:
            if executable is None:
                executable = self._play_context.executable
                # mitigation for SSH race which can drop stdout (https://github.com/ansible/ansible/issues/13876)
                # only applied for the default executable to avoid interfering with the raw action
                cmd = self._connection._shell.append_command(cmd, 'sleep 0')
            if executable:
                cmd = executable + ' -c ' + shlex_quote(cmd)

        display.debug("_low_level_execute_command(): executing: %s" % (cmd,))

        # Change directory to basedir of task for command execution when connection is local
        if self._connection.transport == 'local':
            cwd = os.getcwd()
            os.chdir(self._loader.get_basedir())
        try:
            rc, stdout, stderr = self._connection.exec_command(cmd, in_data=in_data, sudoable=sudoable)
        finally:
            if self._connection.transport == 'local':
                os.chdir(cwd)

        # stdout and stderr may be either a file-like or a bytes object.
        # Convert either one to a text type
        if isinstance(stdout, binary_type):
            out = to_text(stdout, errors=encoding_errors)
        elif not isinstance(stdout, text_type):
            out = to_text(b''.join(stdout.readlines()), errors=encoding_errors)
        else:
            out = stdout

        if isinstance(stderr, binary_type):
            err = to_text(stderr, errors=encoding_errors)
        elif not isinstance(stderr, text_type):
            err = to_text(b''.join(stderr.readlines()), errors=encoding_errors)
        else:
            err = stderr

        if rc is None:
            rc = 0

        # be sure to remove the BECOME-SUCCESS message now
        out = self._strip_success_message(out)

        display.debug(u"_low_level_execute_command() done: rc=%d, stdout=%s, stderr=%s" % (rc, out, err))
        return dict(rc=rc, stdout=out, stdout_lines=out.splitlines(), stderr=err)
Example #16
    def env_prefix(**args):
        return ' '.join([
            '%s=%s' % (k, shlex_quote(text_type(v)))
            for k, v in args.items()
        ])
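A usage sketch: only values containing shell metacharacters end up quoted (str stands in for text_type, and the values are made up):

from shlex import quote as shlex_quote

print(' '.join('%s=%s' % (k, shlex_quote(str(v)))
               for k, v in dict(http_proxy='http://proxy:3128', MSG="it's fine").items()))
# http_proxy=http://proxy:3128 MSG='it'"'"'s fine'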
Example #17
    def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=None, persist_files=False, delete_remote_tmp=True, wrap_async=False):
        '''
        Transfer and run a module along with its arguments.
        '''
        if task_vars is None:
            task_vars = dict()

        remote_module_path = None
        args_file_path = None
        remote_files = []

        # if a module name was not specified for this execution, use the action from the task
        if module_name is None:
            module_name = self._task.action
        if module_args is None:
            module_args = self._task.args

        self._update_module_args(module_name, module_args, task_vars)

        # FUTURE: refactor this along with module build process to better encapsulate "smart wrapper" functionality
        (module_style, shebang, module_data, module_path) = self._configure_module(module_name=module_name, module_args=module_args, task_vars=task_vars)
        display.vvv("Using module file %s" % module_path)
        if not shebang and module_style != 'binary':
            raise AnsibleError("module (%s) is missing interpreter line" % module_name)

        if not self._is_pipelining_enabled(module_style, wrap_async):

            # we might need remote tmp dir
            if not tmp or 'tmp' not in tmp:
                tmp = self._make_tmp_path()

            remote_module_filename = self._connection._shell.get_remote_filename(module_path)
            remote_module_path = self._connection._shell.join_path(tmp, remote_module_filename)

        if module_style in ('old', 'non_native_want_json', 'binary'):
            # we'll also need a temp file to hold our module arguments
            args_file_path = self._connection._shell.join_path(tmp, 'args')

        if remote_module_path or module_style != 'new':
            display.debug("transferring module to remote %s" % remote_module_path)
            if module_style == 'binary':
                self._transfer_file(module_path, remote_module_path)
            else:
                self._transfer_data(remote_module_path, module_data)
            if module_style == 'old':
                # we need to dump the module args to a k=v string in a file on
                # the remote system, which can be read and parsed by the module
                args_data = ""
                for k, v in iteritems(module_args):
                    args_data += '%s=%s ' % (k, shlex_quote(text_type(v)))
                self._transfer_data(args_file_path, args_data)
            elif module_style in ('non_native_want_json', 'binary'):
                self._transfer_data(args_file_path, json.dumps(module_args))
            display.debug("done transferring module to remote")

        environment_string = self._compute_environment_string()

        if tmp and remote_module_path:
            remote_files = [tmp, remote_module_path]

        if args_file_path:
            remote_files.append(args_file_path)

        sudoable = True
        in_data = None
        cmd = ""

        if wrap_async:
            # configure, upload, and chmod the async_wrapper module
            (async_module_style, shebang, async_module_data, async_module_path) = self._configure_module(module_name='async_wrapper', module_args=dict(),
                                                                                                         task_vars=task_vars)
            async_module_remote_filename = self._connection._shell.get_remote_filename(async_module_path)
            remote_async_module_path = self._connection._shell.join_path(tmp, async_module_remote_filename)
            self._transfer_data(remote_async_module_path, async_module_data)
            remote_files.append(remote_async_module_path)

            async_limit = self._task.async
Example #18
    def _execute_module(self,
                        module_name=None,
                        module_args=None,
                        tmp=None,
                        task_vars=None,
                        persist_files=False,
                        delete_remote_tmp=True,
                        wrap_async=False):
        '''
        Transfer and run a module along with its arguments.
        '''
        if task_vars is None:
            task_vars = dict()

        remote_module_path = None
        args_file_path = None
        remote_files = []

        # if a module name was not specified for this execution, use the action from the task
        if module_name is None:
            module_name = self._task.action
        if module_args is None:
            module_args = self._task.args

        self._update_module_args(module_name, module_args, task_vars)

        # FUTURE: refactor this along with module build process to better encapsulate "smart wrapper" functionality
        (module_style, shebang, module_data,
         module_path) = self._configure_module(module_name=module_name,
                                               module_args=module_args,
                                               task_vars=task_vars)
        display.vvv("Using module file %s" % module_path)
        if not shebang and module_style != 'binary':
            raise AnsibleError("module (%s) is missing interpreter line" %
                               module_name)

        if not self._is_pipelining_enabled(module_style, wrap_async):

            # we might need remote tmp dir
            if not tmp:
                if not self._connection._shell.tempdir or tmp is None or 'tmp' not in tmp:
                    tmp = self._make_tmp_path()
                else:
                    tmp = self._connection._shell.tempdir

            remote_module_filename = self._connection._shell.get_remote_filename(
                module_path)
            remote_module_path = self._connection._shell.join_path(
                tmp, remote_module_filename)

        if module_style in ('old', 'non_native_want_json', 'binary'):
            # we'll also need a temp file to hold our module arguments
            args_file_path = self._connection._shell.join_path(tmp, 'args')

        if remote_module_path or module_style != 'new':
            display.debug("transferring module to remote %s" %
                          remote_module_path)
            if module_style == 'binary':
                self._transfer_file(module_path, remote_module_path)
            else:
                self._transfer_data(remote_module_path, module_data)
            if module_style == 'old':
                # we need to dump the module args to a k=v string in a file on
                # the remote system, which can be read and parsed by the module
                args_data = ""
                for k, v in iteritems(module_args):
                    args_data += '%s=%s ' % (k, shlex_quote(text_type(v)))
                self._transfer_data(args_file_path, args_data)
            elif module_style in ('non_native_want_json', 'binary'):
                self._transfer_data(args_file_path, json.dumps(module_args))
            display.debug("done transferring module to remote")

        environment_string = self._compute_environment_string()

        if tmp and remote_module_path:
            remote_files = [tmp, remote_module_path]

        if args_file_path:
            remote_files.append(args_file_path)

        sudoable = True
        in_data = None
        cmd = ""

        if wrap_async and not self._connection.always_pipeline_modules:
            # configure, upload, and chmod the async_wrapper module
            (async_module_style, shebang, async_module_data,
             async_module_path) = self._configure_module(
                 module_name='async_wrapper',
                 module_args=dict(),
                 task_vars=task_vars)
            async_module_remote_filename = self._connection._shell.get_remote_filename(
                async_module_path)
            remote_async_module_path = self._connection._shell.join_path(
                tmp, async_module_remote_filename)
            self._transfer_data(remote_async_module_path, async_module_data)
            remote_files.append(remote_async_module_path)

            async_limit = self._task.async_val
            async_jid = str(random.randint(0, 999999999999))

            # call the interpreter for async_wrapper directly
            # this permits use of a script for an interpreter on non-Linux platforms
            # TODO: re-implement async_wrapper as a regular module to avoid this special case
            interpreter = shebang.replace('#!', '').strip()
            async_cmd = [
                interpreter, remote_async_module_path, async_jid, async_limit,
                remote_module_path
            ]

            if environment_string:
                async_cmd.insert(0, environment_string)

            if args_file_path:
                async_cmd.append(args_file_path)
            else:
                # maintain a fixed number of positional parameters for async_wrapper
                async_cmd.append('_')

            if not self._should_remove_tmp_path(tmp):
                async_cmd.append("-preserve_tmp")

            cmd = " ".join(to_text(x) for x in async_cmd)

        else:

            if self._is_pipelining_enabled(module_style):
                in_data = module_data
            else:
                cmd = remote_module_path

            cmd = self._connection._shell.build_module_command(
                environment_string, shebang, cmd,
                arg_path=args_file_path).strip()

        # Fix permissions of the tmp path and tmp files. This should be called after all files have been transferred.
        if remote_files:
            # remove none/empty
            remote_files = [x for x in remote_files if x]
            self._fixup_perms2(remote_files, self._play_context.remote_user)

        # actually execute
        res = self._low_level_execute_command(cmd,
                                              sudoable=sudoable,
                                              in_data=in_data)

        # parse the main result
        data = self._parse_returned_data(res)

        # NOTE: INTERNAL KEYS ONLY ACCESSIBLE HERE
        # get internal info before cleaning
        if data.pop("_ansible_suppress_tmpdir_delete", False):
            self._cleanup_remote_tmp = False

        # remove internal keys
        remove_internal_keys(data)

        # FIXME: for backwards compat, figure out if still makes sense
        if wrap_async:
            data['changed'] = True

        # pre-split stdout/stderr into lines if needed
        if 'stdout' in data and 'stdout_lines' not in data:
            # if the value is 'False', a default won't catch it.
            txt = data.get('stdout', None) or u''
            data['stdout_lines'] = txt.splitlines()
        if 'stderr' in data and 'stderr_lines' not in data:
            # if the value is 'False', a default won't catch it.
            txt = data.get('stderr', None) or u''
            data['stderr_lines'] = txt.splitlines()

        display.debug("done with _execute_module (%s, %s)" %
                      (module_name, module_args))
        return data
Example #19
    def env_prefix(self, **kwargs):
        env = self.env.copy()
        env.update(kwargs)
        return ' '.join(['set -lx %s %s;' % (k, shlex_quote(text_type(v))) for k, v in env.items()])
Example #20
    def remove(self, path, recurse=False):
        path = shlex_quote(path)
        cmd = 'rm -f '
        if recurse:
            cmd += '-r '
        return cmd + "%s %s" % (path, self._SHELL_REDIRECT_ALLNULL)
Example #21
    def _low_level_execute_command(self,
                                   cmd,
                                   sudoable=True,
                                   in_data=None,
                                   executable=None,
                                   encoding_errors='surrogate_then_replace',
                                   chdir=None):
        '''
        This is the function which executes the low level shell command, which
        may be commands to create/remove directories for temporary files, or to
        run the module code or python directly when pipelining.

        :kwarg encoding_errors: If the value returned by the command isn't
            utf-8 then we have to figure out how to transform it to unicode.
            If the value is just going to be displayed to the user (or
            discarded) then the default of 'replace' is fine.  If the data is
            used as a key or is going to be written back out to a file
            verbatim, then this won't work.  May have to use some sort of
            replacement strategy (python3 could use surrogateescape)
        :kwarg chdir: cd into this directory before executing the command.
        '''

        display.debug("_low_level_execute_command(): starting")
        #        if not cmd:
        #            # this can happen with powershell modules when there is no analog to a Windows command (like chmod)
        #            display.debug("_low_level_execute_command(): no command, exiting")
        #           return dict(stdout='', stderr='', rc=254)

        if chdir:
            display.debug(
                "_low_level_execute_command(): changing cwd to %s for this command"
                % chdir)
            cmd = self._connection._shell.append_command('cd %s' % chdir, cmd)

        allow_same_user = C.BECOME_ALLOW_SAME_USER
        same_user = self._play_context.become_user == self._play_context.remote_user
        if sudoable and self._play_context.become and (allow_same_user
                                                       or not same_user):
            display.debug(
                "_low_level_execute_command(): using become for this command")
            if self._connection.transport != 'network_cli' and self._play_context.become_method != 'enable':
                cmd = self._play_context.make_become_cmd(cmd,
                                                         executable=executable)

        if self._connection.allow_executable:
            if executable is None:
                executable = self._play_context.executable
                # mitigation for SSH race which can drop stdout (https://github.com/ansible/ansible/issues/13876)
                # only applied for the default executable to avoid interfering with the raw action
                cmd = self._connection._shell.append_command(cmd, 'sleep 0')
            if executable:
                cmd = executable + ' -c ' + shlex_quote(cmd)

        display.debug("_low_level_execute_command(): executing: %s" % (cmd, ))

        # Change directory to basedir of task for command execution when connection is local
        if self._connection.transport == 'local':
            cwd = os.getcwd()
            os.chdir(self._loader.get_basedir())
        try:
            rc, stdout, stderr = self._connection.exec_command(
                cmd, in_data=in_data, sudoable=sudoable)
        finally:
            if self._connection.transport == 'local':
                os.chdir(cwd)

        # stdout and stderr may be either a file-like or a bytes object.
        # Convert either one to a text type
        if isinstance(stdout, binary_type):
            out = to_text(stdout, errors=encoding_errors)
        elif not isinstance(stdout, text_type):
            out = to_text(b''.join(stdout.readlines()), errors=encoding_errors)
        else:
            out = stdout

        if isinstance(stderr, binary_type):
            err = to_text(stderr, errors=encoding_errors)
        elif not isinstance(stderr, text_type):
            err = to_text(b''.join(stderr.readlines()), errors=encoding_errors)
        else:
            err = stderr

        if rc is None:
            rc = 0

        # be sure to remove the BECOME-SUCCESS message now
        out = self._strip_success_message(out)

        display.debug(
            u"_low_level_execute_command() done: rc=%d, stdout=%s, stderr=%s" %
            (rc, out, err))
        return dict(rc=rc,
                    stdout=out,
                    stdout_lines=out.splitlines(),
                    stderr=err)
Example #22
    def exists(self, path):
        cmd = ['test', '-e', shlex_quote(path)]
        return ' '.join(cmd)
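Usage sketch for the test helper above (path is illustrative):

from shlex import quote as shlex_quote

print(' '.join(['test', '-e', shlex_quote('/etc/file name.conf')]))
# test -e '/etc/file name.conf'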
Example #23
    def remove(self, path, recurse=False):
        path = shlex_quote(path)
        cmd = 'rm -f '
        if recurse:
            cmd += '-r '
        return cmd + "%s %s" % (path, self._SHELL_REDIRECT_ALLNULL)
Example #24
def main():
    module = AnsibleModule(
        argument_spec=dict(list_all=dict(required=False,
                                         type='bool',
                                         default=False),
                           name=dict(type='str'),
                           repo=dict(type='path'),
                           scope=dict(required=False,
                                      type='str',
                                      choices=['local', 'global', 'system']),
                           state=dict(required=False,
                                      type='str',
                                      default='present',
                                      choices=['present', 'absent']),
                           value=dict(required=False)),
        mutually_exclusive=[['list_all', 'name'], ['list_all', 'value'],
                            ['list_all', 'state']],
        required_if=[('scope', 'local', ['repo'])],
        required_one_of=[['list_all', 'name']],
        supports_check_mode=True,
    )
    git_path = module.get_bin_path('git', True)

    params = module.params
    # We check error message for a pattern, so we need to make sure the messages appear in the form we're expecting.
    # Set the locale to C to ensure consistent messages.
    module.run_command_environ_update = dict(LANG='C',
                                             LC_ALL='C',
                                             LC_MESSAGES='C',
                                             LC_CTYPE='C')

    if params['name']:
        name = params['name']
    else:
        name = None

    if params['scope']:
        scope = params['scope']
    elif params['list_all']:
        scope = None
    else:
        scope = 'system'

    if params['state'] == 'absent':
        unset = 'unset'
        params['value'] = None
    else:
        unset = None

    if params['value']:
        new_value = params['value']
    else:
        new_value = None

    args = [git_path, "config", "--includes"]
    if params['list_all']:
        args.append('-l')
    if scope:
        args.append("--" + scope)
    if name:
        args.append(name)

    if scope == 'local':
        dir = params['repo']
    elif params['list_all'] and params['repo']:
        # Include local settings from a specific repo when listing all available settings
        dir = params['repo']
    else:
        # Run from root directory to avoid accidentally picking up any local config settings
        dir = "/"

    (rc, out, err) = module.run_command(' '.join(args), cwd=dir)
    if params['list_all'] and scope and rc == 128 and 'unable to read config file' in err:
        # This just means nothing has been set at the given scope
        module.exit_json(changed=False, msg='', config_values={})
    elif rc >= 2:
        # If the return code is 1, it just means the option hasn't been set yet, which is fine.
        module.fail_json(rc=rc, msg=err, cmd=' '.join(args))

    if params['list_all']:
        values = out.rstrip().splitlines()
        config_values = {}
        for value in values:
            k, v = value.split('=', 1)
            config_values[k] = v
        module.exit_json(changed=False, msg='', config_values=config_values)
    elif not new_value and not unset:
        module.exit_json(changed=False, msg='', config_value=out.rstrip())
    elif unset and not out:
        module.exit_json(changed=False, msg='no setting to unset')
    else:
        old_value = out.rstrip()
        if old_value == new_value:
            module.exit_json(changed=False, msg="")

    if not module.check_mode:
        if unset:
            args.insert(len(args) - 1, "--" + unset)
            cmd = ' '.join(args)
        else:
            new_value_quoted = shlex_quote(new_value)
            cmd = ' '.join(args + [new_value_quoted])
        (rc, out, err) = module.run_command(cmd, cwd=dir)
        if err:
            module.fail_json(rc=rc, msg=err, cmd=cmd)

    module.exit_json(msg='setting changed',
                     diff=dict(before_header=' '.join(args),
                               before=old_value + "\n",
                               after_header=' '.join(args),
                               after=(new_value or '') + "\n"),
                     changed=True)
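Because the module joins args into a single string for run_command, the new value has to be quoted before it is appended; a sketch of the final write command (scope and value are made up):

from shlex import quote as shlex_quote

args = ['git', 'config', '--includes', '--global', 'user.name']
new_value = "Jane O'Example"
print(' '.join(args + [shlex_quote(new_value)]))
# git config --includes --global user.name 'Jane O'"'"'Example'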
Example #25
    def env_prefix(**args):
        return ' '.join(['%s=%s' % (k, shlex_quote(text_type(v))) for k, v in args.items()])
Example #26
def quote(a):
    ''' return its argument quoted for shell usage '''
    return shlex_quote(to_text(a))
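Usage sketch of this quote filter, with to_text skipped and stdlib shlex.quote standing in (the path is made up):

from shlex import quote as shlex_quote

unsafe = "/srv/$(whoami)'s files"
print("rm -rf %s" % shlex_quote(unsafe))
# rm -rf '/srv/$(whoami)'"'"'s files'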
Example #27
    def _file_transport_command(self, in_path, out_path, sftp_action):
        # scp and sftp require square brackets for IPv6 addresses, but
        # accept them for hostnames and IPv4 addresses too.
        host = '[%s]' % self.host

        # Transfer methods to try
        methods = []

        # Use the transfer_method option if set, otherwise use scp_if_ssh
        ssh_transfer_method = self._play_context.ssh_transfer_method
        if ssh_transfer_method is not None:
            if not (ssh_transfer_method in ('smart', 'sftp', 'scp', 'piped')):
                raise AnsibleOptionsError(
                    'transfer_method needs to be one of [smart|sftp|scp|piped]'
                )
            if ssh_transfer_method == 'smart':
                methods = ['sftp', 'scp', 'piped']
            else:
                methods = [ssh_transfer_method]
        else:
            # since this can be a non-bool now, we need to handle it correctly
            scp_if_ssh = C.DEFAULT_SCP_IF_SSH
            if not isinstance(scp_if_ssh, bool):
                scp_if_ssh = scp_if_ssh.lower()
                if scp_if_ssh in BOOLEANS:
                    scp_if_ssh = boolean(scp_if_ssh, strict=False)
                elif scp_if_ssh != 'smart':
                    raise AnsibleOptionsError(
                        'scp_if_ssh needs to be one of [smart|True|False]')
            if scp_if_ssh == 'smart':
                methods = ['sftp', 'scp', 'piped']
            elif scp_if_ssh is True:
                methods = ['scp']
            else:
                methods = ['sftp']

        success = False
        for method in methods:
            returncode = stdout = stderr = None
            if method == 'sftp':
                cmd = self._build_command('sftp', to_bytes(host))
                in_data = u"{0} {1} {2}\n".format(sftp_action,
                                                  shlex_quote(in_path),
                                                  shlex_quote(out_path))
                in_data = to_bytes(in_data, nonstring='passthru')
                (returncode, stdout, stderr) = self._run(cmd,
                                                         in_data,
                                                         checkrc=False)
            elif method == 'scp':
                if sftp_action == 'get':
                    cmd = self._build_command(
                        'scp', u'{0}:{1}'.format(host, shlex_quote(in_path)),
                        out_path)
                else:
                    cmd = self._build_command(
                        'scp', in_path,
                        u'{0}:{1}'.format(host, shlex_quote(out_path)))
                in_data = None
                (returncode, stdout, stderr) = self._run(cmd,
                                                         in_data,
                                                         checkrc=False)
            elif method == 'piped':
                if sftp_action == 'get':
                    # we pass sudoable=False to disable pty allocation, which
                    # would end up mixing stdout/stderr and screwing with newlines
                    (returncode, stdout, stderr) = self.exec_command(
                        'dd if=%s bs=%s' % (in_path, BUFSIZE), sudoable=False)
                    out_file = open(
                        to_bytes(out_path, errors='surrogate_or_strict'),
                        'wb+')
                    out_file.write(stdout)
                    out_file.close()
                else:
                    in_data = open(
                        to_bytes(in_path, errors='surrogate_or_strict'),
                        'rb').read()
                    in_data = to_bytes(in_data, nonstring='passthru')
                    (returncode, stdout, stderr) = self.exec_command(
                        'dd of=%s bs=%s' % (out_path, BUFSIZE),
                        in_data=in_data)

            # Check the return code and rollover to next method if failed
            if returncode == 0:
                return (returncode, stdout, stderr)
            else:
                # If not in smart mode, the data will be printed by the raise below
                if len(methods) > 1:
                    display.warning(
                        msg=
                        '%s transfer mechanism failed on %s. Use ANSIBLE_DEBUG=1 to see detailed information'
                        % (method, host))
                    display.debug(msg='%s' % to_native(stdout))
                    display.debug(msg='%s' % to_native(stderr))

        if returncode == 255:
            raise AnsibleConnectionFailure(
                "Failed to connect to the host via %s: %s" %
                (method, to_native(stderr)))
        else:
            raise AnsibleError("failed to transfer file to %s %s:\n%s\n%s" %
                               (to_native(in_path), to_native(out_path),
                                to_native(stdout), to_native(stderr)))
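For the scp branch only the remote side of the host:path pair is quoted, which keeps the IPv6 brackets intact while protecting spaced paths; a minimal illustration (the address is made up):

from shlex import quote as shlex_quote

host = '[fe80::1]'  # hypothetical IPv6 address, bracketed as scp/sftp require
print(u'{0}:{1}'.format(host, shlex_quote('/remote/path with space')))
# [fe80::1]:'/remote/path with space'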
Example #28
    def exists(self, path):
        cmd = ['test', '-e', shlex_quote(path)]
        return ' '.join(cmd)
Example #29
    def make_become_cmd(self, cmd, executable=None):
        """ helper function to create privilege escalation commands """

        prompt = None
        success_key = None
        self.prompt = None

        if self.become:

            if not executable:
                executable = self.executable

            becomecmd = None
            randbits = ''.join(
                random.choice(string.ascii_lowercase) for x in range(32))
            success_key = 'BECOME-SUCCESS-%s' % randbits
            success_cmd = shlex_quote('echo %s; %s' % (success_key, cmd))

            if executable:
                command = '%s -c %s' % (executable, success_cmd)
            else:
                command = success_cmd

            # set executable to use for the privilege escalation method, with various overrides
            exe = self.become_exe or getattr(
                self, '%s_exe' % self.become_method, self.become_method)

            # set flags to use for the privilege escalation method, with various overrides
            flags = self.become_flags or getattr(
                self, '%s_flags' % self.become_method, '')

            if self.become_method == 'sudo':
                # If we have a password, we run sudo with a randomly-generated
                # prompt set using -p. Otherwise we run it with default -n, which makes
                # it fail if it would have prompted for a password.
                # Cannot rely on -n as it can be removed from defaults, which should be
                # done for older versions of sudo that do not support the option.
                #
                # Passing a quoted compound command to sudo (or sudo -s)
                # directly doesn't work, so we shellquote it with shlex_quote()
                # and pass the quoted string to the user's shell.

                # force quick error if password is required but not supplied, should prevent sudo hangs.
                if self.become_pass:
                    prompt = '[sudo via ansible, key=%s] password: ' % randbits
                    becomecmd = '%s %s -p "%s" -u %s %s' % (
                        exe, flags.replace('-n', ''), prompt, self.become_user, command)
                else:
                    becomecmd = '%s %s -u %s %s' % (exe, flags,
                                                    self.become_user, command)

            elif self.become_method == 'su':

                # passing code ref to examine prompt as simple string comparison isn't good enough with su
                def detect_su_prompt(b_data):
                    b_password_string = b"|".join(
                        [b'(\w+\'s )?' + x for x in b_SU_PROMPT_LOCALIZATIONS])
                    # Colon or unicode fullwidth colon
                    b_password_string = b_password_string + to_bytes(
                        u' ?(:|:) ?')
                    b_SU_PROMPT_LOCALIZATIONS_RE = re.compile(
                        b_password_string, flags=re.IGNORECASE)
                    return bool(b_SU_PROMPT_LOCALIZATIONS_RE.match(b_data))

                prompt = detect_su_prompt

                becomecmd = '%s %s %s -c %s' % (exe, flags, self.become_user,
                                                shlex_quote(command))

            elif self.become_method == 'pbrun':

                prompt = 'Password:'
                becomecmd = '%s %s -u %s %s' % (exe, flags, self.become_user,
                                                success_cmd)

            elif self.become_method == 'ksu':

                def detect_ksu_prompt(b_data):
                    return re.match(b"Kerberos password for .*@.*:", b_data)

                prompt = detect_ksu_prompt
                becomecmd = '%s %s %s -e %s' % (exe, self.become_user, flags,
                                                command)

            elif self.become_method == 'pfexec':

                # No user as it uses its own exec_attr to figure it out
                becomecmd = '%s %s "%s"' % (exe, flags, success_cmd)

            elif self.become_method == 'runas':
                # become is handled inside the WinRM connection plugin
                display.warning(
                    "The Windows 'runas' become method is experimental, and may change significantly in future Ansible releases."
                )

                if not self.become_user:
                    raise AnsibleError((
                        "The 'runas' become method requires a username "
                        "(specify with the '--become-user' CLI arg, the 'become_user' keyword, or the 'ansible_become_user' variable)"
                    ))
                becomecmd = cmd

            elif self.become_method == 'doas':

                prompt = 'doas (%s@' % self.remote_user
                exe = self.become_exe or 'doas'

                if not self.become_pass:
                    flags += ' -n '

                if self.become_user:
                    flags += ' -u %s ' % self.become_user

                # FIXME: make shell independent
                becomecmd = '%s %s echo %s && %s %s env ANSIBLE=true %s' % (
                    exe, flags, success_key, exe, flags, cmd)

            elif self.become_method == 'dzdo':

                exe = self.become_exe or 'dzdo'
                if self.become_pass:
                    prompt = '[dzdo via ansible, key=%s] password: ' % randbits
                    becomecmd = '%s -p %s -u %s %s' % (
                        exe, shlex_quote(prompt), self.become_user, command)
                else:
                    becomecmd = '%s -u %s %s' % (exe, self.become_user,
                                                 command)

            elif self.become_method == 'pmrun':

                exe = self.become_exe or 'pmrun'

                prompt = 'Enter UPM user password:'
                becomecmd = '%s %s %s' % (exe, flags, shlex_quote(command))

            else:
                raise AnsibleError(
                    "Privilege escalation method not found: %s" %
                    self.become_method)

            if self.become_pass:
                self.prompt = prompt
            self.success_key = success_key
            return becomecmd

        return cmd
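The sudo branch above wraps the echo-plus-module command in shlex_quote so the user's shell receives it as a single argument; a sketch with illustrative flags and a shortened random key:

from shlex import quote as shlex_quote

success_key = 'BECOME-SUCCESS-abc123'  # randbits shortened for the sketch
cmd = "echo 'hello world'"             # stands in for the built module command
success_cmd = shlex_quote('echo %s; %s' % (success_key, cmd))
print('sudo -H -S -n -u root /bin/sh -c %s' % success_cmd)
# sudo -H -S -n -u root /bin/sh -c 'echo BECOME-SUCCESS-abc123; echo '"'"'hello world'"'"''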
Example #30
def main():
    module = AnsibleModule(
        argument_spec=dict(
            src=dict(type='str', required=True),
            dest=dict(type='str', required=True),
            dest_port=dict(type='int'),
            delete=dict(type='bool', default=False),
            private_key=dict(type='path'),
            rsync_path=dict(type='str'),
            _local_rsync_path=dict(type='path', default='rsync'),
            _local_rsync_password=dict(type='str', no_log=True),
            _substitute_controller=dict(type='bool', default=False),
            archive=dict(type='bool', default=True),
            checksum=dict(type='bool', default=False),
            compress=dict(type='bool', default=True),
            existing_only=dict(type='bool', default=False),
            dirs=dict(type='bool', default=False),
            recursive=dict(type='bool'),
            links=dict(type='bool'),
            copy_links=dict(type='bool', default=False),
            perms=dict(type='bool'),
            times=dict(type='bool'),
            owner=dict(type='bool'),
            group=dict(type='bool'),
            set_remote_user=dict(type='bool', default=True),
            rsync_timeout=dict(type='int', default=0),
            rsync_opts=dict(type='list', default=[], elements='str'),
            ssh_args=dict(type='str'),
            ssh_connection_multiplexing=dict(type='bool', default=False),
            partial=dict(type='bool', default=False),
            verify_host=dict(type='bool', default=False),
            delay_updates=dict(type='bool', default=True),
            mode=dict(type='str', default='push', choices=['pull', 'push']),
            link_dest=dict(type='list', elements='str'),
        ),
        supports_check_mode=True,
    )

    if module.params['_substitute_controller']:
        try:
            source = substitute_controller(module.params['src'])
            dest = substitute_controller(module.params['dest'])
        except ValueError:
            module.fail_json(
                msg=
                'Could not determine controller hostname for rsync to send to')
    else:
        source = module.params['src']
        dest = module.params['dest']
    dest_port = module.params['dest_port']
    delete = module.params['delete']
    private_key = module.params['private_key']
    rsync_path = module.params['rsync_path']
    rsync = module.params.get('_local_rsync_path', 'rsync')
    rsync_password = module.params.get('_local_rsync_password')
    rsync_timeout = module.params.get('rsync_timeout', 'rsync_timeout')
    archive = module.params['archive']
    checksum = module.params['checksum']
    compress = module.params['compress']
    existing_only = module.params['existing_only']
    dirs = module.params['dirs']
    partial = module.params['partial']
    # the default of these params depends on the value of archive
    recursive = module.params['recursive']
    links = module.params['links']
    copy_links = module.params['copy_links']
    perms = module.params['perms']
    times = module.params['times']
    owner = module.params['owner']
    group = module.params['group']
    rsync_opts = module.params['rsync_opts']
    ssh_args = module.params['ssh_args']
    ssh_connection_multiplexing = module.params['ssh_connection_multiplexing']
    verify_host = module.params['verify_host']
    link_dest = module.params['link_dest']
    delay_updates = module.params['delay_updates']

    if '/' not in rsync:
        rsync = module.get_bin_path(rsync, required=True)

    cmd = [rsync]
    _sshpass_pipe = None
    if rsync_password:
        try:
            module.run_command(["sshpass"])
        except OSError:
            module.fail_json(
                msg=
                "to use rsync connection with passwords, you must install the sshpass program"
            )
        _sshpass_pipe = os.pipe()
        cmd = [
            'sshpass',
            '-d' + to_native(_sshpass_pipe[0], errors='surrogate_or_strict')
        ] + cmd
    if delay_updates:
        cmd.append('--delay-updates')
        cmd.append('-F')
    if compress:
        cmd.append('--compress')
    if rsync_timeout:
        cmd.append('--timeout=%s' % rsync_timeout)
    if module.check_mode:
        cmd.append('--dry-run')
    if delete:
        cmd.append('--delete-after')
    if existing_only:
        cmd.append('--existing')
    if checksum:
        cmd.append('--checksum')
    if copy_links:
        cmd.append('--copy-links')
    if archive:
        cmd.append('--archive')
        if recursive is False:
            cmd.append('--no-recursive')
        if links is False:
            cmd.append('--no-links')
        if perms is False:
            cmd.append('--no-perms')
        if times is False:
            cmd.append('--no-times')
        if owner is False:
            cmd.append('--no-owner')
        if group is False:
            cmd.append('--no-group')
    else:
        if recursive is True:
            cmd.append('--recursive')
        if links is True:
            cmd.append('--links')
        if perms is True:
            cmd.append('--perms')
        if times is True:
            cmd.append('--times')
        if owner is True:
            cmd.append('--owner')
        if group is True:
            cmd.append('--group')
    if dirs:
        cmd.append('--dirs')

    if source.startswith('rsync://') and dest.startswith('rsync://'):
        module.fail_json(msg='either src or dest must be a localhost', rc=1)

    if is_rsh_needed(source, dest):

        # https://github.com/ansible/ansible/issues/15907
        has_rsh = False
        for rsync_opt in rsync_opts:
            if '--rsh' in rsync_opt:
                has_rsh = True
                break

        # if the user has not supplied an --rsh option go ahead and add ours
        if not has_rsh:
            ssh_cmd = [module.get_bin_path('ssh', required=True)]
            if not ssh_connection_multiplexing:
                ssh_cmd.extend(['-S', 'none'])
            if private_key is not None:
                ssh_cmd.extend(['-i', private_key])
            # If the user specified a port value
            # Note:  The action plugin takes care of setting this to a port from
            # inventory if the user didn't specify an explicit dest_port
            if dest_port is not None:
                ssh_cmd.extend(['-o', 'Port=%s' % dest_port])
            if not verify_host:
                ssh_cmd.extend([
                    '-o', 'StrictHostKeyChecking=no', '-o',
                    'UserKnownHostsFile=/dev/null'
                ])
            ssh_cmd_str = ' '.join(shlex_quote(arg) for arg in ssh_cmd)
            if ssh_args:
                ssh_cmd_str += ' %s' % ssh_args
            cmd.append(shlex_quote('--rsh=%s' % ssh_cmd_str))

    if rsync_path:
        cmd.append(shlex_quote('--rsync-path=%s' % rsync_path))

    if rsync_opts:
        if '' in rsync_opts:
            module.warn(
                'The empty string is present in rsync_opts which will cause rsync to'
                ' transfer the current working directory. If this is intended, use "."'
                ' instead to get rid of this warning. If this is unintended, check for'
                ' problems in your playbook leading to empty string in rsync_opts.'
            )
        cmd.extend(rsync_opts)

    if partial:
        cmd.append('--partial')

    if link_dest:
        cmd.append('-H')
        # verbose required because rsync does not believe that adding a
        # hardlink is actually a change
        cmd.append('-vv')
        for x in link_dest:
            link_path = os.path.abspath(os.path.expanduser(x))
            destination_path = os.path.abspath(os.path.dirname(dest))
            if destination_path.find(link_path) == 0:
                module.fail_json(
                    msg='Hardlinking into a subdirectory of the source would cause recursion. %s and %s'
                        % (destination_path, dest))
            cmd.append('--link-dest=%s' % link_path)

    changed_marker = '<<CHANGED>>'
    cmd.append(shlex_quote('--out-format=' + changed_marker + '%i %n%L'))

    # expand the paths
    if '@' not in source:
        source = os.path.expanduser(source)
    if '@' not in dest:
        dest = os.path.expanduser(dest)

    cmd.append(source)
    cmd.append(dest)
    cmdstr = ' '.join(cmd)

    # If we are using password authentication, write the password into the pipe
    if rsync_password:

        def _write_password_to_pipe(proc):
            os.close(_sshpass_pipe[0])
            try:
                os.write(_sshpass_pipe[1], to_bytes(rsync_password) + b'\n')
            except OSError as exc:
                # Ignore broken pipe errors if the sshpass process has exited.
                if exc.errno != errno.EPIPE or proc.poll() is None:
                    raise

        (rc, out, err) = module.run_command(
            cmdstr,
            pass_fds=_sshpass_pipe,
            before_communicate_callback=_write_password_to_pipe)
    else:
        (rc, out, err) = module.run_command(cmdstr)

    if rc:
        return module.fail_json(msg=err, rc=rc, cmd=cmdstr)

    if link_dest:
        # a leading period indicates no change
        changed = (changed_marker + '.') not in out
    else:
        changed = changed_marker in out

    out_clean = out.replace(changed_marker, '')
    out_lines = out_clean.split('\n')
    while '' in out_lines:
        out_lines.remove('')
    if module._diff:
        diff = {'prepared': out_clean}
        return module.exit_json(changed=changed,
                                msg=out_clean,
                                rc=rc,
                                cmd=cmdstr,
                                stdout_lines=out_lines,
                                diff=diff)

    return module.exit_json(changed=changed,
                            msg=out_clean,
                            rc=rc,
                            cmd=cmdstr,
                            stdout_lines=out_lines)
Ejemplo n.º 31
0
    def quote(self, cmd):
        """Returns a shell-escaped string that can be safely used as one token in a shell command line"""
        return shlex_quote(cmd)
Ejemplo n.º 32
0
def db_restore(module,
               target,
               target_opts="",
               db=None,
               user=None,
               password=None,
               host=None,
               port=None,
               **kw):

    flags = login_flags(db, host, port, user)
    comp_prog_path = None
    cmd = module.get_bin_path('psql', True)

    if os.path.splitext(target)[-1] == '.sql':
        flags.append(' --file={0}'.format(target))

    elif os.path.splitext(target)[-1] == '.tar':
        flags.append(' --format=Tar')
        cmd = module.get_bin_path('pg_restore', True)

    elif os.path.splitext(target)[-1] == '.pgc':
        flags.append(' --format=Custom')
        cmd = module.get_bin_path('pg_restore', True)

    elif os.path.splitext(target)[-1] == '.gz':
        comp_prog_path = module.get_bin_path('zcat', True)

    elif os.path.splitext(target)[-1] == '.bz2':
        comp_prog_path = module.get_bin_path('bzcat', True)

    elif os.path.splitext(target)[-1] == '.xz':
        comp_prog_path = module.get_bin_path('xzcat', True)

    cmd += "".join(flags)
    if target_opts:
        cmd += " {0} ".format(target_opts)

    if comp_prog_path:
        env = os.environ.copy()
        if password:
            env = {"PGPASSWORD": password}
        p1 = subprocess.Popen([comp_prog_path, target],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
        p2 = subprocess.Popen(cmd,
                              stdin=p1.stdout,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE,
                              shell=True,
                              env=env)
        (stdout2, stderr2) = p2.communicate()
        p1.stdout.close()
        p1.wait()
        if p1.returncode != 0:
            stderr1 = p1.stderr.read()
            return p1.returncode, '', stderr1, 'cmd: ****'
        else:
            return p2.returncode, '', stderr2, 'cmd: ****'
    else:
        cmd = '{0} < {1}'.format(cmd, shlex_quote(target))

    return do_with_password(module, cmd, password)
Ejemplo n.º 33
0
    def _file_transport_command(self, in_path, out_path, sftp_action):
        # scp and sftp require square brackets for IPv6 addresses, but
        # accept them for hostnames and IPv4 addresses too.
        host = '[%s]' % self.host

        # Transfer methods to try
        methods = []

        # Use the transfer_method option if set, otherwise use scp_if_ssh
        ssh_transfer_method = self._play_context.ssh_transfer_method
        if ssh_transfer_method is not None:
            if ssh_transfer_method not in ('smart', 'sftp', 'scp', 'piped'):
                raise AnsibleOptionsError('transfer_method needs to be one of [smart|sftp|scp|piped]')
            if ssh_transfer_method == 'smart':
                methods = ['sftp', 'scp', 'piped']
            else:
                methods = [ssh_transfer_method]
        else:
            # since this can be a non-bool now, we need to handle it correctly
            scp_if_ssh = C.DEFAULT_SCP_IF_SSH
            if not isinstance(scp_if_ssh, bool):
                scp_if_ssh = scp_if_ssh.lower()
                if scp_if_ssh in BOOLEANS:
                    scp_if_ssh = boolean(scp_if_ssh, strict=False)
                elif scp_if_ssh != 'smart':
                    raise AnsibleOptionsError('scp_if_ssh needs to be one of [smart|True|False]')
            if scp_if_ssh == 'smart':
                methods = ['sftp', 'scp', 'piped']
            elif scp_if_ssh is True:
                methods = ['scp']
            else:
                methods = ['sftp']

        for method in methods:
            returncode = stdout = stderr = None
            if method == 'sftp':
                cmd = self._build_command('sftp', to_bytes(host))
                in_data = u"{0} {1} {2}\n".format(sftp_action, shlex_quote(in_path), shlex_quote(out_path))
                in_data = to_bytes(in_data, nonstring='passthru')
                (returncode, stdout, stderr) = self._bare_run(cmd, in_data, checkrc=False)
            elif method == 'scp':
                if sftp_action == 'get':
                    cmd = self._build_command('scp', u'{0}:{1}'.format(host, shlex_quote(in_path)), out_path)
                else:
                    cmd = self._build_command('scp', in_path, u'{0}:{1}'.format(host, shlex_quote(out_path)))
                in_data = None
                (returncode, stdout, stderr) = self._bare_run(cmd, in_data, checkrc=False)
            elif method == 'piped':
                if sftp_action == 'get':
                    # we pass sudoable=False to disable pty allocation, which
                    # would end up mixing stdout/stderr and screwing with newlines
                    (returncode, stdout, stderr) = self.exec_command('dd if=%s bs=%s' % (in_path, BUFSIZE), sudoable=False)
                    out_file = open(to_bytes(out_path, errors='surrogate_or_strict'), 'wb+')
                    out_file.write(stdout)
                    out_file.close()
                else:
                    in_data = open(to_bytes(in_path, errors='surrogate_or_strict'), 'rb').read()
                    in_data = to_bytes(in_data, nonstring='passthru')
                    (returncode, stdout, stderr) = self.exec_command('dd of=%s bs=%s' % (out_path, BUFSIZE), in_data=in_data)

            # Check the return code and rollover to next method if failed
            if returncode == 0:
                return (returncode, stdout, stderr)
            else:
                # If not in smart mode, the data will be printed by the raise below
                if len(methods) > 1:
                    display.warning(msg='%s transfer mechanism failed on %s. Use ANSIBLE_DEBUG=1 to see detailed information' % (method, host))
                    display.debug(msg='%s' % to_native(stdout))
                    display.debug(msg='%s' % to_native(stderr))

        if returncode == 255:
            raise AnsibleConnectionFailure("Failed to connect to the host via %s: %s" % (method, to_native(stderr)))
        else:
            raise AnsibleError("failed to transfer file to %s %s:\n%s\n%s" %
                               (to_native(in_path), to_native(out_path), to_native(stdout), to_native(stderr)))
Ejemplo n.º 34
0
    def chown(self, paths, user):
        cmd = ['chown', user]
        cmd.extend(paths)
        cmd = [shlex_quote(c) for c in cmd]

        return ' '.join(cmd)
Ejemplo n.º 35
0
def zuul_run_command(self, args, zuul_log_id, check_rc=False, close_fds=True, executable=None, data=None, binary_data=False, path_prefix=None, cwd=None,
                     use_unsafe_shell=False, prompt_regex=None, environ_update=None, umask=None, encoding='utf-8', errors='surrogate_or_strict'):
    '''
    Execute a command, returns rc, stdout, and stderr.

    :arg args: is the command to run
        * If args is a list, the command will be run with shell=False.
        * If args is a string and use_unsafe_shell=False it will split args to a list and run with shell=False
        * If args is a string and use_unsafe_shell=True it runs with shell=True.
    :kw check_rc: Whether to call fail_json in case of non zero RC.
        Default False
    :kw close_fds: See documentation for subprocess.Popen(). Default True
    :kw executable: See documentation for subprocess.Popen(). Default None
    :kw data: If given, information to write to the stdin of the command
    :kw binary_data: If False, append a newline to the data.  Default False
    :kw path_prefix: If given, additional path to find the command in.
        This adds to the PATH environment variable so helper commands in
        the same directory can also be found
    :kw cwd: If given, working directory to run the command inside
    :kw use_unsafe_shell: See `args` parameter.  Default False
    :kw prompt_regex: Regex string (not a compiled regex) which can be
        used to detect prompts in the stdout which would otherwise cause
        the execution to hang (especially if no input data is specified)
    :kw environ_update: dictionary to *update* os.environ with
    :kw umask: Umask to be used when running the command. Default None
    :kw encoding: Since we return native strings, on python3 we need to
        know the encoding to use to transform from bytes to text.  If you
        want to always get bytes back, use encoding=None.  The default is
        "utf-8".  This does not affect transformation of strings given as
        args.
    :kw errors: Since we return native strings, on python3 we need to
        transform stdout and stderr from bytes to text.  If the bytes are
        undecodable in the ``encoding`` specified, then use this error
        handler to deal with them.  The default is ``surrogate_or_strict``
        which means that the bytes will be decoded using the
        surrogateescape error handler if available (available on all
        python3 versions we support) otherwise a UnicodeError traceback
        will be raised.  This does not affect transformations of strings
        given as args.
    :returns: A 3-tuple of return code (integer), stdout (native string),
        and stderr (native string).  On python2, stdout and stderr are both
        byte strings.  On python3, stdout and stderr are text strings converted
        according to the encoding and errors parameters.  If you want byte
        strings on python3, use encoding=None to turn decoding to text off.
    '''

    if not isinstance(args, (list, binary_type, text_type)):
        msg = "Argument 'args' to run_command must be list or string"
        self.fail_json(rc=257, cmd=args, msg=msg)

    shell = False
    if use_unsafe_shell:

        # stringify args for unsafe/direct shell usage
        if isinstance(args, list):
            args = " ".join([shlex_quote(x) for x in args])

        # not set explicitly, check if set by controller
        if executable:
            args = [executable, '-c', args]
        elif self._shell not in (None, '/bin/sh'):
            args = [self._shell, '-c', args]
        else:
            shell = True
    else:
        # ensure args are a list
        if isinstance(args, (binary_type, text_type)):
            # On python2.6 and below, shlex has problems with text type
            # On python3, shlex needs a text type.
            if PY2:
                args = to_bytes(args, errors='surrogate_or_strict')
            elif PY3:
                args = to_text(args, errors='surrogateescape')
            args = shlex.split(args)

        # expand shellisms
        args = [os.path.expanduser(os.path.expandvars(x)) for x in args if x is not None]

    prompt_re = None
    if prompt_regex:
        if isinstance(prompt_regex, text_type):
            if PY3:
                prompt_regex = to_bytes(prompt_regex, errors='surrogateescape')
            elif PY2:
                prompt_regex = to_bytes(prompt_regex, errors='surrogate_or_strict')
        try:
            prompt_re = re.compile(prompt_regex, re.MULTILINE)
        except re.error:
            self.fail_json(msg="invalid prompt regular expression given to run_command")

    rc = 0
    msg = None
    st_in = None

    # Manipulate the environ we'll send to the new process
    old_env_vals = {}
    # We can set this from both an attribute and per call
    for key, val in self.run_command_environ_update.items():
        old_env_vals[key] = os.environ.get(key, None)
        os.environ[key] = val
    if environ_update:
        for key, val in environ_update.items():
            old_env_vals[key] = os.environ.get(key, None)
            os.environ[key] = val
    if path_prefix:
        old_env_vals['PATH'] = os.environ['PATH']
        os.environ['PATH'] = "%s:%s" % (path_prefix, os.environ['PATH'])

    # If using test-module and explode, the remote lib path will resemble ...
    #   /tmp/test_module_scratch/debug_dir/ansible/module_utils/basic.py
    # If using ansible or ansible-playbook with a remote system ...
    #   /tmp/ansible_vmweLQ/ansible_modlib.zip/ansible/module_utils/basic.py

    # Clean out python paths set by ansiballz
    if 'PYTHONPATH' in os.environ:
        pypaths = os.environ['PYTHONPATH'].split(':')
        pypaths = [x for x in pypaths
                   if not x.endswith('/ansible_modlib.zip') and
                   not x.endswith('/debug_dir')]
        os.environ['PYTHONPATH'] = ':'.join(pypaths)
        if not os.environ['PYTHONPATH']:
            del os.environ['PYTHONPATH']

    # create a printable version of the command for use
    # in reporting later, which strips out things like
    # passwords from the args list
    to_clean_args = args
    if PY2:
        if isinstance(args, text_type):
            to_clean_args = to_bytes(args)
    else:
        if isinstance(args, binary_type):
            to_clean_args = to_text(args)
    if isinstance(args, (text_type, binary_type)):
        to_clean_args = shlex.split(to_clean_args)

    clean_args = []
    is_passwd = False
    for arg in (to_native(a) for a in to_clean_args):
        if is_passwd:
            is_passwd = False
            clean_args.append('********')
            continue
        if PASSWD_ARG_RE.match(arg):
            sep_idx = arg.find('=')
            if sep_idx > -1:
                clean_args.append('%s=********' % arg[:sep_idx])
                continue
            else:
                is_passwd = True
        arg = heuristic_log_sanitize(arg, self.no_log_values)
        clean_args.append(arg)
    clean_args = ' '.join(shlex_quote(arg) for arg in clean_args)

    if data:
        st_in = subprocess.PIPE

    # ZUUL: changed stderr to follow stdout
    kwargs = dict(
        executable=executable,
        shell=shell,
        close_fds=close_fds,
        stdin=st_in,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )

    # store the pwd
    prev_dir = os.getcwd()

    # make sure we're in the right working directory
    if cwd and os.path.isdir(cwd):
        cwd = os.path.abspath(os.path.expanduser(cwd))
        kwargs['cwd'] = cwd
        try:
            os.chdir(cwd)
        except (OSError, IOError) as e:
            self.fail_json(rc=e.errno, msg="Could not open %s, %s" % (cwd, to_native(e)),
                           exception=traceback.format_exc())

    old_umask = None
    if umask:
        old_umask = os.umask(umask)

    t = None
    fail_json_kwargs = None

    try:
        if self._debug:
            self.log('Executing: ' + clean_args)

        # ZUUL: Replaced the execution loop with the zuul_runner run function

        cmd = subprocess.Popen(args, **kwargs)
        if self.no_log:
            t = None
        else:
            t = threading.Thread(target=follow, args=(cmd.stdout, zuul_log_id))
            t.daemon = True
            t.start()

        # ZUUL: Our log thread will catch the output so don't do that here.

        # # the communication logic here is essentially taken from that
        # # of the _communicate() function in ssh.py
        #
        # stdout = b('')
        # stderr = b('')
        #
        # # ZUUL: stderr follows stdout
        # rpipes = [cmd.stdout]
        #
        # if data:
        #     if not binary_data:
        #         data += '\n'
        #     if isinstance(data, text_type):
        #         data = to_bytes(data)
        #     cmd.stdin.write(data)
        #     cmd.stdin.close()
        #
        # while True:
        #     rfds, wfds, efds = select.select(rpipes, [], rpipes, 1)
        #     stdout += self._read_from_pipes(rpipes, rfds, cmd.stdout)
        #
        #     # ZUUL: stderr follows stdout
        #     # stderr += self._read_from_pipes(rpipes, rfds, cmd.stderr)
        #
        #     # if we're checking for prompts, do it now
        #     if prompt_re:
        #         if prompt_re.search(stdout) and not data:
        #             if encoding:
        #                 stdout = to_native(stdout, encoding=encoding, errors=errors)
        #             else:
        #                 stdout = stdout
        #             return (257, stdout, "A prompt was encountered while running a command, but no input data was specified")
        #     # only break out if no pipes are left to read or
        #     # the pipes are completely read and
        #     # the process is terminated
        #     if (not rpipes or not rfds) and cmd.poll() is not None:
        #         break
        #     # No pipes are left to read but process is not yet terminated
        #     # Only then it is safe to wait for the process to be finished
        #     # NOTE: Actually cmd.poll() is always None here if rpipes is empty
        #     elif not rpipes and cmd.poll() is None:
        #         cmd.wait()
        #         # The process is terminated. Since no pipes to read from are
        #         # left, there is no need to call select() again.
        #         break

        # ZUUL: If the console log follow thread *is* stuck in readline,
        # we can't close stdout (attempting to do so raises an
        # exception) , so this is disabled.
        # cmd.stdout.close()
        # cmd.stderr.close()

        rc = cmd.wait()

        # Give the thread that is writing the console log up to 10 seconds
        # to catch up and exit.  If it hasn't done so by then, it is very
        # likely stuck in readline() because it spawned a child that is
        # holding stdout or stderr open.
        if t:
            t.join(10)
            with Console(zuul_log_id) as console:
                if t.is_alive():
                    console.addLine("[Zuul] standard output/error still open "
                                    "after child exited")
            # ZUUL: stdout and stderr are in the console log file
            # ZUUL: return the saved log lines so we can ship them back
            stdout = b('').join(_log_lines)
        else:
            stdout = b('')
        stderr = b('')

    except (OSError, IOError) as e:
        self.log("Error Executing CMD:%s Exception:%s" % (clean_args, to_native(e)))
        # ZUUL: store fail_json_kwargs and fail later in finally
        fail_json_kwargs = dict(rc=e.errno, msg=to_native(e), cmd=clean_args)
    except Exception as e:
        self.log("Error Executing CMD:%s Exception:%s" % (clean_args, to_native(traceback.format_exc())))
        # ZUUL: store fail_json_kwargs and fail later in finally
        fail_json_kwargs = dict(rc=257, msg=to_native(e), exception=traceback.format_exc(), cmd=clean_args)
    finally:
        if t:
            with Console(zuul_log_id) as console:
                if t.is_alive():
                    console.addLine("[Zuul] standard output/error still open "
                                    "after child exited")
                if fail_json_kwargs:
                    # we hit an exception and need to use the rc from
                    # fail_json_kwargs
                    rc = fail_json_kwargs['rc']

                console.addLine("[Zuul] Task exit code: %s\n" % rc)

        if fail_json_kwargs:
            self.fail_json(**fail_json_kwargs)

    # Restore env settings
    for key, val in old_env_vals.items():
        if val is None:
            del os.environ[key]
        else:
            os.environ[key] = val

    if old_umask:
        os.umask(old_umask)

    if rc != 0 and check_rc:
        msg = heuristic_log_sanitize(stderr.rstrip(), self.no_log_values)
        self.fail_json(cmd=clean_args, rc=rc, stdout=stdout, stderr=stderr, msg=msg)

    # reset the pwd
    os.chdir(prev_dir)

    if encoding is not None:
        return (rc, to_native(stdout, encoding=encoding, errors=errors),
                to_native(stderr, encoding=encoding, errors=errors))
    return (rc, stdout, stderr)
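
The docstring above describes how `args` is normalized before execution. Below is a small standalone sketch of that behavior using only the standard library; the helper name normalize_args and the sample commands are invented for illustration, not part of the module.

import os
import shlex
from shlex import quote as shlex_quote

def normalize_args(args, use_unsafe_shell=False):
    # string + unsafe shell -> run as-is with shell=True;
    # list + unsafe shell   -> quote each token and join first
    if use_unsafe_shell:
        if isinstance(args, list):
            args = " ".join(shlex_quote(x) for x in args)
        return args, True
    # otherwise split strings into a list, expand ~ and $VARS, and run with shell=False
    if isinstance(args, str):
        args = shlex.split(args)
    args = [os.path.expanduser(os.path.expandvars(x)) for x in args if x is not None]
    return args, False

print(normalize_args('ls -l ~/tmp'))                    # (['ls', '-l', '/home/you/tmp'], False)
print(normalize_args(['grep', 'a b', 'file'], True))    # ("grep 'a b' file", True)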
Ejemplo n.º 36
0
def quote(a):
    ''' return its argument quoted for shell usage '''
    return shlex_quote(a)
Ejemplo n.º 37
0
    def make_become_cmd(self, cmd, executable=None):
        """ helper function to create privilege escalation commands """

        prompt = None
        success_key = None
        self.prompt = None

        if self.become:

            if not executable:
                executable = self.executable

            becomecmd = None
            randbits = ''.join(random.choice(string.ascii_lowercase) for x in range(32))
            success_key = 'BECOME-SUCCESS-%s' % randbits
            success_cmd = shlex_quote('echo %s; %s' % (success_key, cmd))

            if executable:
                command = '%s -c %s' % (executable, success_cmd)
            else:
                command = success_cmd

            # set executable to use for the privilege escalation method, with various overrides
            exe = self.become_exe or getattr(self, '%s_exe' % self.become_method, self.become_method)

            # set flags to use for the privilege escalation method, with various overrides
            flags = self.become_flags or getattr(self, '%s_flags' % self.become_method, '')

            if self.become_method == 'sudo':
                # If we have a password, we run sudo with a randomly-generated
                # prompt set using -p. Otherwise we run it with default -n, which makes
                # it fail if it would have prompted for a password.
                # Cannot rely on -n as it can be removed from defaults, which should be
                # done for older versions of sudo that do not support the option.
                #
                # Passing a quoted compound command to sudo (or sudo -s)
                # directly doesn't work, so we shellquote it with shlex_quote()
                # and pass the quoted string to the user's shell.

                # force quick error if password is required but not supplied, should prevent sudo hangs.
                if self.become_pass:
                    prompt = '[sudo via ansible, key=%s] password: ' % randbits
                    becomecmd = '%s %s -p "%s" -u %s %s' % (exe, flags.replace('-n', ''), prompt, self.become_user, command)
                else:
                    becomecmd = '%s %s -u %s %s' % (exe, flags, self.become_user, command)

            elif self.become_method == 'su':

                # passing code ref to examine prompt as simple string comparison isn't good enough with su
                def detect_su_prompt(b_data):
                    b_password_string = b"|".join([br'(\w+\'s )?' + x for x in b_SU_PROMPT_LOCALIZATIONS])
                    # Colon or unicode fullwidth colon
                    b_password_string = b_password_string + to_bytes(u' ?(:|:) ?')
                    b_SU_PROMPT_LOCALIZATIONS_RE = re.compile(b_password_string, flags=re.IGNORECASE)
                    return bool(b_SU_PROMPT_LOCALIZATIONS_RE.match(b_data))
                prompt = detect_su_prompt

                becomecmd = '%s %s %s -c %s' % (exe, flags, self.become_user, shlex_quote(command))

            elif self.become_method == 'pbrun':

                prompt = 'Password:'
                becomecmd = '%s %s -u %s %s' % (exe, flags, self.become_user, success_cmd)

            elif self.become_method == 'ksu':
                def detect_ksu_prompt(b_data):
                    return re.match(b"Kerberos password for .*@.*:", b_data)

                prompt = detect_ksu_prompt
                becomecmd = '%s %s %s -e %s' % (exe, self.become_user, flags, command)

            elif self.become_method == 'pfexec':

                # No user as it uses its own exec_attr to figure it out
                becomecmd = '%s %s "%s"' % (exe, flags, success_cmd)

            elif self.become_method == 'runas':
                # become is handled inside the WinRM connection plugin
                if not self.become_user:
                    raise AnsibleError(("The 'runas' become method requires a username "
                                        "(specify with the '--become-user' CLI arg, the 'become_user' keyword, or the 'ansible_become_user' variable)"))
                becomecmd = cmd

            elif self.become_method == 'doas':

                prompt = 'doas (%s@' % self.remote_user
                exe = self.become_exe or 'doas'

                if not self.become_pass:
                    flags += ' -n '

                if self.become_user:
                    flags += ' -u %s ' % self.become_user

                # FIXME: make shell independent
                becomecmd = '%s %s %s -c %s' % (exe, flags, executable, success_cmd)

            elif self.become_method == 'dzdo':

                exe = self.become_exe or 'dzdo'
                if self.become_pass:
                    prompt = '[dzdo via ansible, key=%s] password: ' % randbits
                    becomecmd = '%s %s -p %s -u %s %s' % (exe, flags, shlex_quote(prompt), self.become_user, command)
                else:
                    becomecmd = '%s %s -u %s %s' % (exe, flags, self.become_user, command)

            elif self.become_method == 'pmrun':

                exe = self.become_exe or 'pmrun'

                prompt = 'Enter UPM user password:'
                becomecmd = '%s %s %s' % (exe, flags, shlex_quote(command))

            else:
                raise AnsibleError("Privilege escalation method not found: %s" % self.become_method)

            if self.become_pass:
                self.prompt = prompt
            self.success_key = success_key
            return becomecmd

        return cmd
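
A small illustrative sketch of what the sudo branch above produces (the key, command, and sudo flags are invented for clarity; they are not the values the class would generate): the compound 'echo <key>; <cmd>' string is shlex_quote()d once and handed to the login shell via '<executable> -c'.

from shlex import quote as shlex_quote

success_key = 'BECOME-SUCCESS-abc123'            # normally a random string
cmd = '/bin/foo --bar'
executable = '/bin/bash'
flags = '-H -S -n'                               # illustrative sudo flags only

success_cmd = shlex_quote('echo %s; %s' % (success_key, cmd))
command = '%s -c %s' % (executable, success_cmd)
becomecmd = 'sudo %s -u root %s' % (flags, command)
print(becomecmd)
# sudo -H -S -n -u root /bin/bash -c 'echo BECOME-SUCCESS-abc123; /bin/foo --bar'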
Ejemplo n.º 38
0
    def chmod(self, paths, mode):
        cmd = ['chmod', mode]
        cmd.extend(paths)
        cmd = [shlex_quote(c) for c in cmd]

        return ' '.join(cmd)
Ejemplo n.º 39
0
def main():
    module = AnsibleModule(
        argument_spec=dict(
            src=dict(type='str', required=True),
            dest=dict(type='str', required=True),
            dest_port=dict(type='int'),
            delete=dict(type='bool', default=False),
            private_key=dict(type='path'),
            rsync_path=dict(type='str'),
            _local_rsync_path=dict(type='path', default='rsync'),
            _substitute_controller=dict(type='bool', default=False),
            archive=dict(type='bool', default=True),
            checksum=dict(type='bool', default=False),
            compress=dict(type='bool', default=True),
            existing_only=dict(type='bool', default=False),
            dirs=dict(type='bool', default=False),
            recursive=dict(type='bool'),
            links=dict(type='bool'),
            copy_links=dict(type='bool', default=False),
            perms=dict(type='bool'),
            times=dict(type='bool'),
            owner=dict(type='bool'),
            group=dict(type='bool'),
            set_remote_user=dict(type='bool', default=True),
            rsync_timeout=dict(type='int', default=0),
            rsync_opts=dict(type='list'),
            ssh_args=dict(type='str'),
            partial=dict(type='bool', default=False),
            verify_host=dict(type='bool', default=False),
            mode=dict(type='str', default='push', choices=['pull', 'push']),
            link_dest=dict(type='list')
        ),
        supports_check_mode=True,
    )

    if module.params['_substitute_controller']:
        try:
            source = substitute_controller(module.params['src'])
            dest = substitute_controller(module.params['dest'])
        except ValueError:
            module.fail_json(msg='Could not determine controller hostname for rsync to send to')
    else:
        source = module.params['src']
        dest = module.params['dest']
    dest_port = module.params['dest_port']
    delete = module.params['delete']
    private_key = module.params['private_key']
    rsync_path = module.params['rsync_path']
    rsync = module.params.get('_local_rsync_path', 'rsync')
    rsync_timeout = module.params['rsync_timeout']
    archive = module.params['archive']
    checksum = module.params['checksum']
    compress = module.params['compress']
    existing_only = module.params['existing_only']
    dirs = module.params['dirs']
    partial = module.params['partial']
    # the default of these params depends on the value of archive
    recursive = module.params['recursive']
    links = module.params['links']
    copy_links = module.params['copy_links']
    perms = module.params['perms']
    times = module.params['times']
    owner = module.params['owner']
    group = module.params['group']
    rsync_opts = module.params['rsync_opts']
    ssh_args = module.params['ssh_args']
    verify_host = module.params['verify_host']
    link_dest = module.params['link_dest']

    if '/' not in rsync:
        rsync = module.get_bin_path(rsync, required=True)

    cmd = [rsync, '--delay-updates', '-F']
    if compress:
        cmd.append('--compress')
    if rsync_timeout:
        cmd.append('--timeout=%s' % rsync_timeout)
    if module.check_mode:
        cmd.append('--dry-run')
    if delete:
        cmd.append('--delete-after')
    if existing_only:
        cmd.append('--existing')
    if checksum:
        cmd.append('--checksum')
    if copy_links:
        cmd.append('--copy-links')
    if archive:
        cmd.append('--archive')
        if recursive is False:
            cmd.append('--no-recursive')
        if links is False:
            cmd.append('--no-links')
        if perms is False:
            cmd.append('--no-perms')
        if times is False:
            cmd.append('--no-times')
        if owner is False:
            cmd.append('--no-owner')
        if group is False:
            cmd.append('--no-group')
    else:
        if recursive is True:
            cmd.append('--recursive')
        if links is True:
            cmd.append('--links')
        if perms is True:
            cmd.append('--perms')
        if times is True:
            cmd.append('--times')
        if owner is True:
            cmd.append('--owner')
        if group is True:
            cmd.append('--group')
    if dirs:
        cmd.append('--dirs')

    if source.startswith('rsync://') and dest.startswith('rsync://'):
        module.fail_json(msg='either src or dest must be a localhost', rc=1)

    if is_rsh_needed(source, dest):

        # https://github.com/ansible/ansible/issues/15907
        has_rsh = False
        for rsync_opt in rsync_opts:
            if '--rsh' in rsync_opt:
                has_rsh = True
                break

        # if the user has not supplied an --rsh option go ahead and add ours
        if not has_rsh:
            ssh_cmd = [module.get_bin_path('ssh', required=True), '-S', 'none']
            if private_key is not None:
                ssh_cmd.extend(['-i', private_key])
            # If the user specified a port value
            # Note:  The action plugin takes care of setting this to a port from
            # inventory if the user didn't specify an explicit dest_port
            if dest_port is not None:
                ssh_cmd.extend(['-o', 'Port=%s' % dest_port])
            if not verify_host:
                ssh_cmd.extend(['-o', 'StrictHostKeyChecking=no', '-o', 'UserKnownHostsFile=/dev/null'])
            ssh_cmd_str = ' '.join(shlex_quote(arg) for arg in ssh_cmd)
            if ssh_args:
                ssh_cmd_str += ' %s' % ssh_args
            cmd.append('--rsh=%s' % ssh_cmd_str)

    if rsync_path:
        cmd.append('--rsync-path=%s' % rsync_path)

    if rsync_opts:
        cmd.extend(rsync_opts)

    if partial:
        cmd.append('--partial')

    if link_dest:
        cmd.append('-H')
        # verbose required because rsync does not believe that adding a
        # hardlink is actually a change
        cmd.append('-vv')
        for x in link_dest:
            link_path = os.path.abspath(os.path.expanduser(x))
            destination_path = os.path.abspath(os.path.dirname(dest))
            if destination_path.find(link_path) == 0:
                module.fail_json(msg='Hardlinking into a subdirectory of the source would cause recursion. %s and %s' % (destination_path, dest))
            cmd.append('--link-dest=%s' % link_path)

    changed_marker = '<<CHANGED>>'
    cmd.append('--out-format=' + changed_marker + '%i %n%L')

    # expand the paths
    if '@' not in source:
        source = os.path.expanduser(source)
    if '@' not in dest:
        dest = os.path.expanduser(dest)

    cmd.append(source)
    cmd.append(dest)
    cmdstr = ' '.join(cmd)
    (rc, out, err) = module.run_command(cmd)
    if rc:
        return module.fail_json(msg=err, rc=rc, cmd=cmdstr)

    if link_dest:
        # a leading period indicates no change
        changed = (changed_marker + '.') not in out
    else:
        changed = changed_marker in out

    out_clean = out.replace(changed_marker, '')
    out_lines = out_clean.split('\n')
    while '' in out_lines:
        out_lines.remove('')
    if module._diff:
        diff = {'prepared': out_clean}
        return module.exit_json(changed=changed, msg=out_clean,
                                rc=rc, cmd=cmdstr, stdout_lines=out_lines,
                                diff=diff)

    return module.exit_json(changed=changed, msg=out_clean,
                            rc=rc, cmd=cmdstr, stdout_lines=out_lines)
Ejemplo n.º 40
0
    def chown(self, paths, user):
        cmd = ['chown', user]
        cmd.extend(paths)
        cmd = [shlex_quote(c) for c in cmd]

        return ' '.join(cmd)
Ejemplo n.º 41
0
def test_play_context_make_become_cmd(parser):
    (options, args) = parser.parse_args([])
    play_context = PlayContext(options=options)

    default_cmd = "/bin/foo"
    default_exe = "/bin/bash"
    sudo_exe = C.DEFAULT_SUDO_EXE or 'sudo'
    sudo_flags = C.DEFAULT_SUDO_FLAGS
    su_exe = C.DEFAULT_SU_EXE or 'su'
    su_flags = C.DEFAULT_SU_FLAGS or ''
    pbrun_exe = 'pbrun'
    pbrun_flags = ''
    pfexec_exe = 'pfexec'
    pfexec_flags = ''
    doas_exe = 'doas'
    doas_flags = ' -n  -u foo '
    ksu_exe = 'ksu'
    ksu_flags = ''
    dzdo_exe = 'dzdo'

    cmd = play_context.make_become_cmd(cmd=default_cmd, executable=default_exe)
    assert cmd == default_cmd

    play_context.become = True
    play_context.become_user = 'foo'

    play_context.become_method = 'sudo'
    cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
    assert (cmd == """%s %s -u %s %s -c 'echo %s; %s'""" % (sudo_exe, sudo_flags, play_context.become_user,
                                                            default_exe, play_context.success_key, default_cmd))

    play_context.become_pass = '******'
    cmd = play_context.make_become_cmd(cmd=default_cmd, executable=default_exe)
    assert (cmd == """%s %s -p "%s" -u %s %s -c 'echo %s; %s'""" % (sudo_exe, sudo_flags.replace('-n', ''),
                                                                    play_context.prompt, play_context.become_user, default_exe,
                                                                    play_context.success_key, default_cmd))

    play_context.become_pass = None
    play_context.become_method = 'su'
    cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
    assert (cmd == """%s  %s -c '%s -c '"'"'echo %s; %s'"'"''""" % (su_exe, play_context.become_user, default_exe,
                                                                    play_context.success_key, default_cmd))

    play_context.become_method = 'pbrun'
    cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
    assert cmd == """%s %s -u %s 'echo %s; %s'""" % (pbrun_exe, pbrun_flags, play_context.become_user, play_context.success_key, default_cmd)

    play_context.become_method = 'pfexec'
    cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
    assert cmd == '''%s %s "'echo %s; %s'"''' % (pfexec_exe, pfexec_flags, play_context.success_key, default_cmd)

    play_context.become_method = 'doas'
    cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
    assert (cmd == """%s %s echo %s && %s %s env ANSIBLE=true %s""" % (doas_exe, doas_flags, play_context.
                                                                       success_key, doas_exe, doas_flags, default_cmd))

    play_context.become_method = 'ksu'
    cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
    assert (cmd == """%s %s %s -e %s -c 'echo %s; %s'""" % (ksu_exe, play_context.become_user, ksu_flags,
                                                            default_exe, play_context.success_key, default_cmd))

    play_context.become_method = 'bad'
    with pytest.raises(AnsibleError):
        play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")

    play_context.become_method = 'dzdo'
    cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
    assert cmd == """%s -u %s %s -c 'echo %s; %s'""" % (dzdo_exe, play_context.become_user, default_exe, play_context.success_key, default_cmd)

    play_context.become_pass = '******'
    play_context.become_method = 'dzdo'
    cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
    assert (cmd == """%s -p %s -u %s %s -c 'echo %s; %s'""" % (dzdo_exe, shlex_quote(play_context.prompt),
                                                               play_context.become_user, default_exe,
                                                               play_context.success_key, default_cmd))
Ejemplo n.º 42
0
def main():
    module = AnsibleModule(
        argument_spec=dict(
            list_all=dict(required=False, type='bool', default=False),
            name=dict(type='str'),
            repo=dict(type='path'),
            scope=dict(required=False, type='str', choices=['local', 'global', 'system']),
            value=dict(required=False)
        ),
        mutually_exclusive=[['list_all', 'name'], ['list_all', 'value']],
        required_if=[('scope', 'local', ['repo'])],
        required_one_of=[['list_all', 'name']],
        supports_check_mode=True,
    )
    git_path = module.get_bin_path('git', True)

    params = module.params
    # We check error message for a pattern, so we need to make sure the messages appear in the form we're expecting.
    # Set the locale to C to ensure consistent messages.
    module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C', LC_CTYPE='C')

    if params['name']:
        name = params['name']
    else:
        name = None

    if params['scope']:
        scope = params['scope']
    elif params['list_all']:
        scope = None
    else:
        scope = 'system'

    if params['value']:
        new_value = params['value']
    else:
        new_value = None

    args = [git_path, "config", "--includes"]
    if params['list_all']:
        args.append('-l')
    if scope:
        args.append("--" + scope)
    if name:
        args.append(name)

    if scope == 'local':
        dir = params['repo']
    elif params['list_all'] and params['repo']:
        # Include local settings from a specific repo when listing all available settings
        dir = params['repo']
    else:
        # Run from root directory to avoid accidentally picking up any local config settings
        dir = "/"

    (rc, out, err) = module.run_command(' '.join(args), cwd=dir)
    if params['list_all'] and scope and rc == 128 and 'unable to read config file' in err:
        # This just means nothing has been set at the given scope
        module.exit_json(changed=False, msg='', config_values={})
    elif rc >= 2:
        # If the return code is 1, it just means the option hasn't been set yet, which is fine.
        module.fail_json(rc=rc, msg=err, cmd=' '.join(args))

    if params['list_all']:
        values = out.rstrip().splitlines()
        config_values = {}
        for value in values:
            k, v = value.split('=', 1)
            config_values[k] = v
        module.exit_json(changed=False, msg='', config_values=config_values)
    elif not new_value:
        module.exit_json(changed=False, msg='', config_value=out.rstrip())
    else:
        old_value = out.rstrip()
        if old_value == new_value:
            module.exit_json(changed=False, msg="")

    if not module.check_mode:
        new_value_quoted = shlex_quote(new_value)
        cmd = ' '.join(args + [new_value_quoted])
        (rc, out, err) = module.run_command(cmd, cwd=dir)
        if err:
            module.fail_json(rc=rc, msg=err, cmd=cmd)

    module.exit_json(
        msg='setting changed',
        diff=dict(
            before_header=' '.join(args),
            before=old_value + "\n",
            after_header=' '.join(args),
            after=new_value + "\n"
        ),
        changed=True
    )
Ejemplo n.º 43
0
    def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=None, persist_files=False, delete_remote_tmp=None, wrap_async=False):
        '''
        Transfer and run a module along with its arguments.
        '''
        if tmp is not None:
            display.warning('_execute_module no longer honors the tmp parameter. Action plugins'
                            ' should set self._connection._shell.tmpdir to share the tmpdir')
        del tmp  # No longer used
        if delete_remote_tmp is not None:
            display.warning('_execute_module no longer honors the delete_remote_tmp parameter.'
                            ' Action plugins should check self._connection._shell.tmpdir to'
                            ' see if a tmpdir existed before they were called to determine'
                            ' if they are responsible for removing it.')
        del delete_remote_tmp  # No longer used

        if task_vars is None:
            task_vars = dict()

        # if a module name was not specified for this execution, use the action from the task
        if module_name is None:
            module_name = self._task.action
        if module_args is None:
            module_args = self._task.args

        self._update_module_args(module_name, module_args, task_vars)

        # FUTURE: refactor this along with module build process to better encapsulate "smart wrapper" functionality
        (module_style, shebang, module_data, module_path) = self._configure_module(module_name=module_name, module_args=module_args, task_vars=task_vars)
        display.vvv("Using module file %s" % module_path)
        if not shebang and module_style != 'binary':
            raise AnsibleError("module (%s) is missing interpreter line" % module_name)

        tmpdir = self._connection._shell.tmpdir
        remote_module_path = None

        if not self._is_pipelining_enabled(module_style, wrap_async):
            # we might need remote tmp dir
            if tmpdir is None:
                self._make_tmp_path()
                tmpdir = self._connection._shell.tmpdir

            remote_module_filename = self._connection._shell.get_remote_filename(module_path)
            remote_module_path = self._connection._shell.join_path(tmpdir, remote_module_filename)

        args_file_path = None
        if module_style in ('old', 'non_native_want_json', 'binary'):
            # we'll also need a tmp file to hold our module arguments
            args_file_path = self._connection._shell.join_path(tmpdir, 'args')

        if remote_module_path or module_style != 'new':
            display.debug("transferring module to remote %s" % remote_module_path)
            if module_style == 'binary':
                self._transfer_file(module_path, remote_module_path)
            else:
                self._transfer_data(remote_module_path, module_data)
            if module_style == 'old':
                # we need to dump the module args to a k=v string in a file on
                # the remote system, which can be read and parsed by the module
                args_data = ""
                for k, v in iteritems(module_args):
                    args_data += '%s=%s ' % (k, shlex_quote(text_type(v)))
                self._transfer_data(args_file_path, args_data)
            elif module_style in ('non_native_want_json', 'binary'):
                self._transfer_data(args_file_path, json.dumps(module_args))
            display.debug("done transferring module to remote")

        environment_string = self._compute_environment_string()

        remote_files = []
        if tmpdir and remote_module_path:
            remote_files = [tmpdir, remote_module_path]

        if args_file_path:
            remote_files.append(args_file_path)

        sudoable = True
        in_data = None
        cmd = ""

        if wrap_async and not self._connection.always_pipeline_modules:
            # configure, upload, and chmod the async_wrapper module
            (async_module_style, shebang, async_module_data, async_module_path) = self._configure_module(module_name='async_wrapper', module_args=dict(),
                                                                                                         task_vars=task_vars)
            async_module_remote_filename = self._connection._shell.get_remote_filename(async_module_path)
            remote_async_module_path = self._connection._shell.join_path(tmpdir, async_module_remote_filename)
            self._transfer_data(remote_async_module_path, async_module_data)
            remote_files.append(remote_async_module_path)

            async_limit = self._task.async_val
            async_jid = str(random.randint(0, 999999999999))

            # call the interpreter for async_wrapper directly
            # this permits use of a script for an interpreter on non-Linux platforms
            # TODO: re-implement async_wrapper as a regular module to avoid this special case
            interpreter = shebang.replace('#!', '').strip()
            async_cmd = [interpreter, remote_async_module_path, async_jid, async_limit, remote_module_path]

            if environment_string:
                async_cmd.insert(0, environment_string)

            if args_file_path:
                async_cmd.append(args_file_path)
            else:
                # maintain a fixed number of positional parameters for async_wrapper
                async_cmd.append('_')

            if not self._should_remove_tmp_path(tmpdir):
                async_cmd.append("-preserve_tmp")

            cmd = " ".join(to_text(x) for x in async_cmd)

        else:

            if self._is_pipelining_enabled(module_style):
                in_data = module_data
            else:
                cmd = remote_module_path

            cmd = self._connection._shell.build_module_command(environment_string, shebang, cmd, arg_path=args_file_path).strip()

        # Fix permissions of the tmpdir path and tmpdir files. This should be called after all
        # files have been transferred.
        if remote_files:
            # remove none/empty
            remote_files = [x for x in remote_files if x]
            self._fixup_perms2(remote_files, self._play_context.remote_user)

        # actually execute
        res = self._low_level_execute_command(cmd, sudoable=sudoable, in_data=in_data)

        # parse the main result
        data = self._parse_returned_data(res)

        # NOTE: INTERNAL KEYS ONLY ACCESSIBLE HERE
        # get internal info before cleaning
        if data.pop("_ansible_suppress_tmpdir_delete", False):
            self._cleanup_remote_tmp = False

        # remove internal keys
        remove_internal_keys(data)

        if wrap_async:
            # async_wrapper will clean up its tmpdir on its own so we want the controller side to
            # forget about it now
            self._connection._shell.tmpdir = None

            # FIXME: for backwards compat, figure out if still makes sense
            data['changed'] = True

        # pre-split stdout/stderr into lines if needed
        if 'stdout' in data and 'stdout_lines' not in data:
            # if the value is 'False', a default won't catch it.
            txt = data.get('stdout', None) or u''
            data['stdout_lines'] = txt.splitlines()
        if 'stderr' in data and 'stderr_lines' not in data:
            # if the value is 'False', a default won't catch it.
            txt = data.get('stderr', None) or u''
            data['stderr_lines'] = txt.splitlines()

        display.debug("done with _execute_module (%s, %s)" % (module_name, module_args))
        return data
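
A short standalone sketch (the argument values are invented) of the "old style" k=v argument encoding used above: each value is shlex_quote()d so the args file can be re-parsed safely by the remote module.

from shlex import quote as shlex_quote

module_args = {'path': '/tmp/some file', 'state': 'present'}
args_data = ''
for k, v in module_args.items():
    args_data += '%s=%s ' % (k, shlex_quote(str(v)))
print(args_data)   # path='/tmp/some file' state=present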
Ejemplo n.º 44
0
    def chmod(self, paths, mode):
        cmd = ['chmod', mode]
        cmd.extend(paths)
        cmd = [shlex_quote(c) for c in cmd]

        return ' '.join(cmd)