def test_plugins_connection_ssh_put_file(self, mock_ospe, mock_sleep): pc = PlayContext() new_stdin = StringIO() conn = ssh.Connection(pc, new_stdin) conn._build_command = MagicMock() conn._run = MagicMock() mock_ospe.return_value = True conn._build_command.return_value = 'some command to run' conn._run.return_value = (0, '', '') conn.host = "some_host" C.ANSIBLE_SSH_RETRIES = 9 # Test with C.DEFAULT_SCP_IF_SSH set to smart # Test when SFTP works C.DEFAULT_SCP_IF_SSH = 'smart' expected_in_data = b' '.join((b'put', to_bytes(shlex_quote('/path/to/in/file')), to_bytes(shlex_quote('/path/to/dest/file')))) + b'\n' conn.put_file('/path/to/in/file', '/path/to/dest/file') conn._run.assert_called_with('some command to run', expected_in_data, checkrc=False) # Test when SFTP doesn't work but SCP does conn._run.side_effect = [(1, 'stdout', 'some errors'), (0, '', '')] conn.put_file('/path/to/in/file', '/path/to/dest/file') conn._run.assert_called_with('some command to run', None, checkrc=False) conn._run.side_effect = None # test with C.DEFAULT_SCP_IF_SSH enabled C.DEFAULT_SCP_IF_SSH = True conn.put_file('/path/to/in/file', '/path/to/dest/file') conn._run.assert_called_with('some command to run', None, checkrc=False) conn.put_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩') conn._run.assert_called_with('some command to run', None, checkrc=False) # test with C.DEFAULT_SCP_IF_SSH disabled C.DEFAULT_SCP_IF_SSH = False expected_in_data = b' '.join((b'put', to_bytes(shlex_quote('/path/to/in/file')), to_bytes(shlex_quote('/path/to/dest/file')))) + b'\n' conn.put_file('/path/to/in/file', '/path/to/dest/file') conn._run.assert_called_with('some command to run', expected_in_data, checkrc=False) expected_in_data = b' '.join((b'put', to_bytes(shlex_quote('/path/to/in/file/with/unicode-fö〩')), to_bytes(shlex_quote('/path/to/dest/file/with/unicode-fö〩')))) + b'\n' conn.put_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩') conn._run.assert_called_with('some command to run', expected_in_data, checkrc=False) # test that a non-zero rc raises an error conn._run.return_value = (1, 'stdout', 'some errors') self.assertRaises(AnsibleError, conn.put_file, '/path/to/bad/file', '/remote/path/to/file') # test that a not-found path raises an error mock_ospe.return_value = False conn._run.return_value = (0, 'stdout', '') self.assertRaises(AnsibleFileNotFound, conn.put_file, '/path/to/bad/file', '/remote/path/to/file')
def _file_transport_command(self, in_path, out_path, sftp_action): # scp and sftp require square brackets for IPv6 addresses, but # accept them for hostnames and IPv4 addresses too. host = '[%s]' % self.host # since this can be a non-bool now, we need to handle it correctly scp_if_ssh = C.DEFAULT_SCP_IF_SSH if not isinstance(scp_if_ssh, bool): scp_if_ssh = scp_if_ssh.lower() if scp_if_ssh in BOOLEANS: scp_if_ssh = boolean(scp_if_ssh) elif scp_if_ssh != 'smart': raise AnsibleOptionsError('scp_if_ssh needs to be one of [smart|True|False]') # create a list of commands to use based on config options methods = ['sftp'] if scp_if_ssh == 'smart': methods.append('scp') elif scp_if_ssh: methods = ['scp'] success = False res = None for method in methods: if method == 'sftp': cmd = self._build_command('sftp', to_bytes(host)) in_data = u"{0} {1} {2}\n".format(sftp_action, shlex_quote(in_path), shlex_quote(out_path)) elif method == 'scp': if sftp_action == 'get': cmd = self._build_command('scp', u'{0}:{1}'.format(host, shlex_quote(in_path)), out_path) else: cmd = self._build_command('scp', in_path, u'{0}:{1}'.format(host, shlex_quote(out_path))) in_data = None in_data = to_bytes(in_data, nonstring='passthru') (returncode, stdout, stderr) = self._run(cmd, in_data, checkrc=False) # Check the return code and rollover to next method if failed if returncode == 0: success = True break else: # If not in smart mode, the data will be printed by the raise below if scp_if_ssh == 'smart': display.warning(msg='%s transfer mechanism failed on %s. Use ANSIBLE_DEBUG=1 to see detailed information' % (method, host)) display.debug(msg='%s' % to_native(stdout)) display.debug(msg='%s' % to_native(stderr)) res = (returncode, stdout, stderr) if not success: raise AnsibleError("failed to transfer file to {0}:\n{1}\n{2}"\ .format(to_native(out_path), to_native(res[1]), to_native(res[2])))
def put_file(self, in_path, out_path): """ Transfer a file from local to docker container """ super(Connection, self).put_file(in_path, out_path) display.vvv("PUT %s TO %s" % (in_path, out_path), host=self._play_context.remote_addr) out_path = self._prefix_login_path(out_path) if not os.path.exists(to_bytes(in_path, errors='surrogate_or_strict')): raise AnsibleFileNotFound( "file or module does not exist: %s" % in_path) out_path = shlex_quote(out_path) # Older docker doesn't have native support for copying files into # running containers, so we use docker exec to implement this # Although docker version 1.8 and later provide support, the # owner and group of the files are always set to root args = self._build_exec_cmd([self._play_context.executable, "-c", "dd of=%s bs=%s" % (out_path, BUFSIZE)]) args = [to_bytes(i, errors='surrogate_or_strict') for i in args] with open(to_bytes(in_path, errors='surrogate_or_strict'), 'rb') as in_file: try: p = subprocess.Popen(args, stdin=in_file, stdout=subprocess.PIPE, stderr=subprocess.PIPE) except OSError: raise AnsibleError("docker connection requires dd command in the container to put files") stdout, stderr = p.communicate() if p.returncode != 0: raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
def fetch_file(self, in_path, out_path):
    ''' fetch a file from jail to local '''
    super(Connection, self).fetch_file(in_path, out_path)
    display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.jail)

    in_path = shlex_quote(self._prefix_login_path(in_path))
    try:
        p = self._buffered_exec_command('dd if=%s bs=%s' % (in_path, BUFSIZE))
    except OSError:
        raise AnsibleError("jail connection requires dd command in the jail")

    with open(to_bytes(out_path, errors='surrogate_or_strict'), 'wb+') as out_file:
        try:
            chunk = p.stdout.read(BUFSIZE)
            while chunk:
                out_file.write(chunk)
                chunk = p.stdout.read(BUFSIZE)
        except:
            traceback.print_exc()
            raise AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
        stdout, stderr = p.communicate()
        if p.returncode != 0:
            raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
def set_user_facl(self, paths, user, mode):
    """Only sets acls for users as that's really all we need"""
    cmd = ['setfacl', '-m', 'u:%s:%s' % (user, mode)]
    cmd.extend(paths)
    cmd = [shlex_quote(c) for c in cmd]
    return ' '.join(cmd)
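# Illustration only: a standalone sketch of the command string the method above
# builds, using the stdlib shlex.quote (assumed equivalent to the shlex_quote in use).
# The paths, user, and mode values are hypothetical.
from shlex import quote as shlex_quote

def set_user_facl_cmd(paths, user, mode):
    cmd = ['setfacl', '-m', 'u:%s:%s' % (user, mode)]
    cmd.extend(paths)
    return ' '.join(shlex_quote(c) for c in cmd)

print(set_user_facl_cmd(['/tmp/ansible tmp/module.py'], 'deploy', 'r-x'))
# -> setfacl -m u:deploy:r-x '/tmp/ansible tmp/module.py'
# Only the path needs quoting; the other arguments contain no shell-special characters.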
def env_prefix(self, **kwargs):
    env = self.env.copy()
    env.update(kwargs)
    return ' '.join(['set -lx %s %s;' % (k, shlex_quote(text_type(v))) for k, v in env.items()])
def put_file(self, in_path, out_path):
    ''' transfer a file from local to jail '''
    super(Connection, self).put_file(in_path, out_path)
    display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.jail)

    out_path = shlex_quote(self._prefix_login_path(out_path))
    try:
        with open(to_bytes(in_path, errors='surrogate_or_strict'), 'rb') as in_file:
            try:
                p = self._buffered_exec_command('dd of=%s bs=%s' % (out_path, BUFSIZE), stdin=in_file)
            except OSError:
                raise AnsibleError("jail connection requires dd command in the jail")
            try:
                stdout, stderr = p.communicate()
            except:
                traceback.print_exc()
                raise AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
            if p.returncode != 0:
                raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
    except IOError:
        raise AnsibleError("file or module does not exist at: %s" % in_path)
def exec_command(host, cmd):
    play_context = host.get_play_context()
    connection = host.get_connection(play_context)

    in_data = None
    sudoable = True
    executable = None

    allow_same_user = C.BECOME_ALLOW_SAME_USER
    same_user = play_context.become_user == play_context.remote_user
    if sudoable and play_context.become and (allow_same_user or not same_user):
        cmd = play_context.make_become_cmd(cmd, executable=executable)

    if connection.allow_executable:
        if executable is None:
            executable = play_context.executable
            # mitigation for SSH race which can drop stdout (https://github.com/ansible/ansible/issues/13876)
            # only applied for the default executable to avoid interfering with the raw action
            cmd = connection._shell.append_command(cmd, 'sleep 0')
        if executable:
            cmd = executable + ' -c ' + shlex_quote(cmd)

    returncode, stdout, stderr = connection.exec_command(cmd, in_data=in_data, sudoable=sudoable)
    print()  # Required for spacing
    return returncode, stdout, stderr
def build_module_command(self, env_string, shebang, cmd, arg_path=None, rm_tmp=None):
    # don't quote the cmd if it's an empty string, because this will break pipelining mode
    if cmd.strip() != '':
        cmd = shlex_quote(cmd)
    cmd_parts = [env_string.strip(), shebang.replace("#!", "").strip(), cmd]
    if arg_path is not None:
        cmd_parts.append(arg_path)
    new_cmd = " ".join(cmd_parts)
    if rm_tmp:
        new_cmd = 'begin ; %s; rm -rf "%s" %s ; end' % (new_cmd, rm_tmp, self._SHELL_REDIRECT_ALLNULL)
    return new_cmd
def expand_user(self, user_home_path):
    ''' Return a command to expand tildes in a path

    It can be either "~" or "~username". We use the POSIX definition of a
    username:
        http://pubs.opengroup.org/onlinepubs/000095399/basedefs/xbd_chap03.html#tag_03_426
        http://pubs.opengroup.org/onlinepubs/000095399/basedefs/xbd_chap03.html#tag_03_276
    '''

    # Check that the user_path to expand is safe
    if user_home_path != '~':
        if not _USER_HOME_PATH_RE.match(user_home_path):
            # shlex_quote will make the shell return the string verbatim
            user_home_path = shlex_quote(user_home_path)
    return 'echo %s' % user_home_path
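# Minimal standalone sketch of the quoting decision above: tilde expressions that
# look like "~" or "~username" are passed through unquoted so the shell expands
# them; anything else is quoted so the shell echoes it verbatim. The regex below
# is an assumed stand-in for the plugin's _USER_HOME_PATH_RE; shlex.quote is
# assumed to behave like the shlex_quote import used elsewhere.
import re
from shlex import quote as shlex_quote

_USER_HOME_PATH_RE = re.compile(r'^~[_.A-Za-z0-9][-_.A-Za-z0-9]*$')  # assumption

def expand_user_cmd(user_home_path):
    if user_home_path != '~' and not _USER_HOME_PATH_RE.match(user_home_path):
        # quoting makes the shell return the string verbatim instead of expanding it
        user_home_path = shlex_quote(user_home_path)
    return 'echo %s' % user_home_path

print(expand_user_cmd('~alice'))            # echo ~alice  (tilde expansion allowed)
print(expand_user_cmd('~alice; rm -rf /'))  # echo '~alice; rm -rf /'  (returned verbatim)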
def checksum(self, path, python_interp): # The following test is fish-compliant. # # In the following test, each condition is a check and logical # comparison (or or and) that sets the rc value. Every check is run so # the last check in the series to fail will be the rc that is # returned. # # If a check fails we error before invoking the hash functions because # hash functions may successfully take the hash of a directory on BSDs # (UFS filesystem?) which is not what the rest of the ansible code # expects # # If all of the available hashing methods fail we fail with an rc of # 0. This logic is added to the end of the cmd at the bottom of this # function. # Return codes: # checksum: success! # 0: Unknown error # 1: Remote file does not exist # 2: No read permissions on the file # 3: File is a directory # 4: No python interpreter # Quoting gets complex here. We're writing a python string that's # used by a variety of shells on the remote host to invoke a python # "one-liner". shell_escaped_path = shlex_quote(path) test = "set rc flag; [ -r %(p)s ] %(shell_or)s set rc 2; [ -f %(p)s ] %(shell_or)s set rc 1; [ -d %(p)s ] %(shell_and)s set rc 3; %(i)s -V 2>/dev/null %(shell_or)s set rc 4; [ x\"$rc\" != \"xflag\" ] %(shell_and)s echo \"$rc \"%(p)s %(shell_and)s exit 0" % dict( p=shell_escaped_path, i=python_interp, shell_and=self._SHELL_AND, shell_or=self._SHELL_OR) # NOQA csums = [ u"({0} -c 'import hashlib; BLOCKSIZE = 65536; hasher = hashlib.sha1();{2}afile = open(\"'{1}'\", \"rb\"){2}buf = afile.read(BLOCKSIZE){2}while len(buf) > 0:{2}\thasher.update(buf){2}\tbuf = afile.read(BLOCKSIZE){2}afile.close(){2}print(hasher.hexdigest())' 2>/dev/null)" .format(python_interp, shell_escaped_path, self._SHELL_EMBEDDED_PY_EOL ), # NOQA Python > 2.4 (including python3) u"({0} -c 'import sha; BLOCKSIZE = 65536; hasher = sha.sha();{2}afile = open(\"'{1}'\", \"rb\"){2}buf = afile.read(BLOCKSIZE){2}while len(buf) > 0:{2}\thasher.update(buf){2}\tbuf = afile.read(BLOCKSIZE){2}afile.close(){2}print(hasher.hexdigest())' 2>/dev/null)" .format(python_interp, shell_escaped_path, self._SHELL_EMBEDDED_PY_EOL), # NOQA Python == 2.4 ] cmd = (" %s " % self._SHELL_OR).join(csums) cmd = "%s; %s %s (echo \'0 \'%s)" % (test, cmd, self._SHELL_OR, shell_escaped_path) return cmd
def put_file(self, in_path, out_path):
    ''' transfer a file from local to chroot '''
    super(Connection, self).put_file(in_path, out_path)
    display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.chroot)

    out_path = shlex_quote(self._prefix_login_path(out_path))
    try:
        with open(to_bytes(in_path, errors='surrogate_or_strict'), 'rb') as in_file:
            try:
                p = self._buffered_exec_command('dd of=%s bs=%s' % (out_path, BUFSIZE), stdin=in_file)
            except OSError:
                raise AnsibleError("chroot connection requires dd command in the chroot")
            try:
                stdout, stderr = p.communicate()
            except:
                traceback.print_exc()
                raise AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
            if p.returncode != 0:
                raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
    except IOError:
        raise AnsibleError("file or module does not exist at: %s" % in_path)
def checksum(self, path, python_interp): # The following test needs to be SH-compliant. BASH-isms will # not work if /bin/sh points to a non-BASH shell. # # In the following test, each condition is a check and logical # comparison (|| or &&) that sets the rc value. Every check is run so # the last check in the series to fail will be the rc that is # returned. # # If a check fails we error before invoking the hash functions because # hash functions may successfully take the hash of a directory on BSDs # (UFS filesystem?) which is not what the rest of the ansible code # expects # # If all of the available hashing methods fail we fail with an rc of # 0. This logic is added to the end of the cmd at the bottom of this # function. # Return codes: # checksum: success! # 0: Unknown error # 1: Remote file does not exist # 2: No read permissions on the file # 3: File is a directory # 4: No python interpreter # Quoting gets complex here. We're writing a python string that's # used by a variety of shells on the remote host to invoke a python # "one-liner". shell_escaped_path = shlex_quote(path) test = "rc=flag; [ -r %(p)s ] %(shell_or)s rc=2; [ -f %(p)s ] %(shell_or)s rc=1; [ -d %(p)s ] %(shell_and)s rc=3; %(i)s -V 2>/dev/null %(shell_or)s rc=4; [ x\"$rc\" != \"xflag\" ] %(shell_and)s echo \"${rc} \"%(p)s %(shell_and)s exit 0" % dict(p=shell_escaped_path, i=python_interp, shell_and=self._SHELL_AND, shell_or=self._SHELL_OR) csums = [ u"({0} -c 'import hashlib; BLOCKSIZE = 65536; hasher = hashlib.sha1();{2}afile = open(\"'{1}'\", \"rb\"){2}buf = afile.read(BLOCKSIZE){2}while len(buf) > 0:{2}\thasher.update(buf){2}\tbuf = afile.read(BLOCKSIZE){2}afile.close(){2}print(hasher.hexdigest())' 2>/dev/null)".format(python_interp, shell_escaped_path, self._SHELL_EMBEDDED_PY_EOL), # Python > 2.4 (including python3) u"({0} -c 'import sha; BLOCKSIZE = 65536; hasher = sha.sha();{2}afile = open(\"'{1}'\", \"rb\"){2}buf = afile.read(BLOCKSIZE){2}while len(buf) > 0:{2}\thasher.update(buf){2}\tbuf = afile.read(BLOCKSIZE){2}afile.close(){2}print(hasher.hexdigest())' 2>/dev/null)".format(python_interp, shell_escaped_path, self._SHELL_EMBEDDED_PY_EOL), # Python == 2.4 ] cmd = (" %s " % self._SHELL_OR).join(csums) cmd = "%s; %s %s (echo \'0 \'%s)" % (test, cmd, self._SHELL_OR, shell_escaped_path) return cmd
def fetch_file(self, in_path, out_path):
    ''' fetch a file from chroot to local '''
    super(Connection, self).fetch_file(in_path, out_path)
    display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.chroot)

    in_path = shlex_quote(self._prefix_login_path(in_path))
    try:
        p = self._buffered_exec_command('dd if=%s bs=%s' % (in_path, BUFSIZE))
    except OSError:
        raise AnsibleError("chroot connection requires dd command in the chroot")

    with open(to_bytes(out_path, errors='surrogate_or_strict'), 'wb+') as out_file:
        try:
            chunk = p.stdout.read(BUFSIZE)
            while chunk:
                out_file.write(chunk)
                chunk = p.stdout.read(BUFSIZE)
        except:
            traceback.print_exc()
            raise AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
        stdout, stderr = p.communicate()
        if p.returncode != 0:
            raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
def _file_transport_command(self, in_path, out_path, sftp_action): # scp and sftp require square brackets for IPv6 addresses, but # accept them for hostnames and IPv4 addresses too. host = '[%s]' % self.host # Transfer methods to try methods = [] # Use the transfer_method option if set, otherwise use scp_if_ssh ssh_transfer_method = self._play_context.ssh_transfer_method if ssh_transfer_method is not None: if not (ssh_transfer_method in ('smart', 'sftp', 'scp', 'piped')): raise AnsibleOptionsError( 'transfer_method needs to be one of [smart|sftp|scp|piped]' ) if ssh_transfer_method == 'smart': methods = ['sftp', 'scp', 'piped'] else: methods = [ssh_transfer_method] else: # since this can be a non-bool now, we need to handle it correctly scp_if_ssh = C.DEFAULT_SCP_IF_SSH if not isinstance(scp_if_ssh, bool): scp_if_ssh = scp_if_ssh.lower() if scp_if_ssh in BOOLEANS: scp_if_ssh = boolean(scp_if_ssh) elif scp_if_ssh != 'smart': raise AnsibleOptionsError( 'scp_if_ssh needs to be one of [smart|True|False]') if scp_if_ssh == 'smart': methods = ['sftp', 'scp', 'piped'] elif scp_if_ssh is True: methods = ['scp'] else: methods = ['sftp'] success = False for method in methods: returncode = stdout = stderr = None if method == 'sftp': cmd = self._build_command('sftp', to_bytes(host)) in_data = u"{0} {1} {2}\n".format(sftp_action, shlex_quote(in_path), shlex_quote(out_path)) in_data = to_bytes(in_data, nonstring='passthru') (returncode, stdout, stderr) = self._bare_run(cmd, in_data, checkrc=False) elif method == 'scp': if sftp_action == 'get': cmd = self._build_command( 'scp', u'{0}:{1}'.format(host, shlex_quote(in_path)), out_path) else: cmd = self._build_command( 'scp', in_path, u'{0}:{1}'.format(host, shlex_quote(out_path))) in_data = None (returncode, stdout, stderr) = self._bare_run(cmd, in_data, checkrc=False) elif method == 'piped': if sftp_action == 'get': # we pass sudoable=False to disable pty allocation, which # would end up mixing stdout/stderr and screwing with newlines (returncode, stdout, stderr) = self.exec_command( 'dd if=%s bs=%s' % (in_path, BUFSIZE), sudoable=False) out_file = open( to_bytes(out_path, errors='surrogate_or_strict'), 'wb+') out_file.write(stdout) out_file.close() else: in_data = open( to_bytes(in_path, errors='surrogate_or_strict'), 'rb').read() in_data = to_bytes(in_data, nonstring='passthru') (returncode, stdout, stderr) = self.exec_command( 'dd of=%s bs=%s' % (out_path, BUFSIZE), in_data=in_data) # Check the return code and rollover to next method if failed if returncode == 0: return (returncode, stdout, stderr) else: # If not in smart mode, the data will be printed by the raise below if len(methods) > 1: display.warning( msg= '%s transfer mechanism failed on %s. Use ANSIBLE_DEBUG=1 to see detailed information' % (method, host)) display.debug(msg='%s' % to_native(stdout)) display.debug(msg='%s' % to_native(stderr)) if returncode == 255: raise AnsibleConnectionFailure( "Failed to connect to the host via %s: %s" % (method, to_native(stderr))) else: raise AnsibleError("failed to transfer file to {0} {1}:\n{2}\n{3}"\ .format(to_native(in_path), to_native(out_path), to_native(stdout), to_native(stderr)))
def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=None, persist_files=False, delete_remote_tmp=True): ''' Transfer and run a module along with its arguments. ''' if task_vars is None: task_vars = dict() # if a module name was not specified for this execution, use # the action from the task if module_name is None: module_name = self._task.action if module_args is None: module_args = self._task.args # set check mode in the module arguments, if required if self._play_context.check_mode: if not self._supports_check_mode: raise AnsibleError("check mode is not supported for this operation") module_args['_ansible_check_mode'] = True else: module_args['_ansible_check_mode'] = False # Get the connection user for permission checks remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user # set no log in the module arguments, if required module_args['_ansible_no_log'] = self._play_context.no_log or C.DEFAULT_NO_TARGET_SYSLOG # set debug in the module arguments, if required module_args['_ansible_debug'] = C.DEFAULT_DEBUG # let module know we are in diff mode module_args['_ansible_diff'] = self._play_context.diff # let module know our verbosity module_args['_ansible_verbosity'] = display.verbosity # give the module information about the ansible version module_args['_ansible_version'] = __version__ # give the module information about its name module_args['_ansible_module_name'] = module_name # set the syslog facility to be used in the module module_args['_ansible_syslog_facility'] = task_vars.get('ansible_syslog_facility', C.DEFAULT_SYSLOG_FACILITY) # let module know about filesystems that selinux treats specially module_args['_ansible_selinux_special_fs'] = C.DEFAULT_SELINUX_SPECIAL_FS (module_style, shebang, module_data, module_path) = self._configure_module(module_name=module_name, module_args=module_args, task_vars=task_vars) display.vvv("Using module file %s" % module_path) if not shebang and module_style != 'binary': raise AnsibleError("module (%s) is missing interpreter line" % module_name) # a remote tmp path may be necessary and not already created remote_module_path = None args_file_path = None if not tmp and self._late_needs_tmp_path(tmp, module_style): tmp = self._make_tmp_path(remote_user) if tmp and \ (module_style != 'new' or \ not self._connection.has_pipelining or \ not self._play_context.pipelining or \ C.DEFAULT_KEEP_REMOTE_FILES or \ self._play_context.become_method == 'su'): remote_module_filename = self._connection._shell.get_remote_filename(module_path) remote_module_path = self._connection._shell.join_path(tmp, remote_module_filename) if module_style in ('old', 'non_native_want_json', 'binary'): # we'll also need a temp file to hold our module arguments args_file_path = self._connection._shell.join_path(tmp, 'args') if remote_module_path or module_style != 'new': display.debug("transferring module to remote %s" % remote_module_path) if module_style == 'binary': self._transfer_file(module_path, remote_module_path) else: self._transfer_data(remote_module_path, module_data) if module_style == 'old': # we need to dump the module args to a k=v string in a file on # the remote system, which can be read and parsed by the module args_data = "" for k,v in iteritems(module_args): args_data += '%s=%s ' % (k, shlex_quote(text_type(v))) self._transfer_data(args_file_path, args_data) elif module_style in ('non_native_want_json', 'binary'): self._transfer_data(args_file_path, json.dumps(module_args)) display.debug("done transferring module 
to remote") environment_string = self._compute_environment_string() remote_files = None if args_file_path: remote_files = tmp, remote_module_path, args_file_path elif remote_module_path: remote_files = tmp, remote_module_path # Fix permissions of the tmp path and tmp files. This should be # called after all files have been transferred. if remote_files: self._fixup_perms2(remote_files, remote_user) cmd = "" in_data = None if self._connection.has_pipelining and self._play_context.pipelining and not C.DEFAULT_KEEP_REMOTE_FILES and module_style == 'new': in_data = module_data else: if remote_module_path: cmd = remote_module_path rm_tmp = None if tmp and "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES and not persist_files and delete_remote_tmp: if not self._play_context.become or self._play_context.become_user == 'root': # not sudoing or sudoing to root, so can cleanup files in the same step rm_tmp = tmp cmd = self._connection._shell.build_module_command(environment_string, shebang, cmd, arg_path=args_file_path, rm_tmp=rm_tmp) cmd = cmd.strip() sudoable = True if module_name == "accelerate": # always run the accelerate module as the user # specified in the play, not the sudo_user sudoable = False res = self._low_level_execute_command(cmd, sudoable=sudoable, in_data=in_data) if tmp and "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES and not persist_files and delete_remote_tmp: if self._play_context.become and self._play_context.become_user != 'root': # not sudoing to root, so maybe can't delete files as that other user # have to clean up temp files as original user in a second step tmp_rm_cmd = self._connection._shell.remove(tmp, recurse=True) tmp_rm_res = self._low_level_execute_command(tmp_rm_cmd, sudoable=False) tmp_rm_data = self._parse_returned_data(tmp_rm_res) if tmp_rm_data.get('rc', 0) != 0: display.warning('Error deleting remote temporary files (rc: {0}, stderr: {1})'.format(tmp_rm_res.get('rc'), tmp_rm_res.get('stderr', 'No error string available.'))) # parse the main result data = self._parse_returned_data(res) # pre-split stdout into lines, if stdout is in the data and there # isn't already a stdout_lines value there if 'stdout' in data and 'stdout_lines' not in data: data['stdout_lines'] = data.get('stdout', u'').splitlines() display.debug("done with _execute_module (%s, %s)" % (module_name, module_args)) return data
def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=None, persist_files=False, delete_remote_tmp=True, wrap_async=False): ''' Transfer and run a module along with its arguments. ''' if task_vars is None: task_vars = dict() remote_module_path = None args_file_path = None remote_files = [] # if a module name was not specified for this execution, use the action from the task if module_name is None: module_name = self._task.action if module_args is None: module_args = self._task.args self._update_module_args(module_name, module_args, task_vars) # FUTURE: refactor this along with module build process to better encapsulate "smart wrapper" functionality (module_style, shebang, module_data, module_path) = self._configure_module(module_name=module_name, module_args=module_args, task_vars=task_vars) display.vvv("Using module file %s" % module_path) if not shebang and module_style != 'binary': raise AnsibleError("module (%s) is missing interpreter line" % module_name) if not self._is_pipelining_enabled(module_style, wrap_async): # we might need remote tmp dir if not tmp or not 'tmp' in tmp: tmp = self._make_tmp_path() remote_module_filename = self._connection._shell.get_remote_filename(module_path) remote_module_path = self._connection._shell.join_path(tmp, remote_module_filename) if module_style in ('old', 'non_native_want_json', 'binary'): # we'll also need a temp file to hold our module arguments args_file_path = self._connection._shell.join_path(tmp, 'args') if remote_module_path or module_style != 'new': display.debug("transferring module to remote %s" % remote_module_path) if module_style == 'binary': self._transfer_file(module_path, remote_module_path) else: self._transfer_data(remote_module_path, module_data) if module_style == 'old': # we need to dump the module args to a k=v string in a file on # the remote system, which can be read and parsed by the module args_data = "" for k,v in iteritems(module_args): args_data += '%s=%s ' % (k, shlex_quote(text_type(v))) self._transfer_data(args_file_path, args_data) elif module_style in ('non_native_want_json', 'binary'): self._transfer_data(args_file_path, json.dumps(module_args)) display.debug("done transferring module to remote") environment_string = self._compute_environment_string() if tmp and remote_module_path: remote_files = [tmp, remote_module_path] if args_file_path: remote_files.append(args_file_path) sudoable = True in_data = None cmd = "" if wrap_async: # configure, upload, and chmod the async_wrapper module (async_module_style, shebang, async_module_data, async_module_path) = self._configure_module(module_name='async_wrapper', module_args=dict(), task_vars=task_vars) async_module_remote_filename = self._connection._shell.get_remote_filename(async_module_path) remote_async_module_path = self._connection._shell.join_path(tmp, async_module_remote_filename) self._transfer_data(remote_async_module_path, async_module_data) remote_files.append(remote_async_module_path) async_limit = self._task.async
def exists(self, path):
    cmd = ['test', '-e', shlex_quote(path)]
    return ' '.join(cmd)
def _low_level_execute_command(self, cmd, sudoable=True, in_data=None, executable=None, encoding_errors='surrogate_or_replace'): ''' This is the function which executes the low level shell command, which may be commands to create/remove directories for temporary files, or to run the module code or python directly when pipelining. :kwarg encoding_errors: If the value returned by the command isn't utf-8 then we have to figure out how to transform it to unicode. If the value is just going to be displayed to the user (or discarded) then the default of 'replace' is fine. If the data is used as a key or is going to be written back out to a file verbatim, then this won't work. May have to use some sort of replacement strategy (python3 could use surrogateescape) ''' display.debug("_low_level_execute_command(): starting") if not cmd: # this can happen with powershell modules when there is no analog to a Windows command (like chmod) display.debug("_low_level_execute_command(): no command, exiting") return dict(stdout='', stderr='', rc=254) allow_same_user = C.BECOME_ALLOW_SAME_USER same_user = self._play_context.become_user == self._play_context.remote_user if sudoable and self._play_context.become and (allow_same_user or not same_user): display.debug("_low_level_execute_command(): using become for this command") cmd = self._play_context.make_become_cmd(cmd, executable=executable) if self._connection.allow_executable: if executable is None: executable = self._play_context.executable # mitigation for SSH race which can drop stdout (https://github.com/ansible/ansible/issues/13876) # only applied for the default executable to avoid interfering with the raw action cmd = self._connection._shell.append_command(cmd, 'sleep 0') if executable: cmd = executable + ' -c ' + shlex_quote(cmd) display.debug("_low_level_execute_command(): executing: %s" % (cmd,)) # Change directory to basedir of task for command execution when connection is local if self._connection.transport == 'local': cwd = os.getcwd() os.chdir(self._loader.get_basedir()) try: rc, stdout, stderr = self._connection.exec_command(cmd, in_data=in_data, sudoable=sudoable) finally: if self._connection.transport == 'local': os.chdir(cwd) # stdout and stderr may be either a file-like or a bytes object. # Convert either one to a text type if isinstance(stdout, binary_type): out = to_text(stdout, errors=encoding_errors) elif not isinstance(stdout, text_type): out = to_text(b''.join(stdout.readlines()), errors=encoding_errors) else: out = stdout if isinstance(stderr, binary_type): err = to_text(stderr, errors=encoding_errors) elif not isinstance(stderr, text_type): err = to_text(b''.join(stderr.readlines()), errors=encoding_errors) else: err = stderr if rc is None: rc = 0 # be sure to remove the BECOME-SUCCESS message now out = self._strip_success_message(out) display.debug(u"_low_level_execute_command() done: rc=%d, stdout=%s, stderr=%s" % (rc, out, err)) return dict(rc=rc, stdout=out, stdout_lines=out.splitlines(), stderr=err)
def remove(self, path, recurse=False):
    path = shlex_quote(path)
    cmd = 'rm -f '
    if recurse:
        cmd += '-r '
    return cmd + "%s %s" % (path, self._SHELL_REDIRECT_ALLNULL)
def chmod(self, paths, mode):
    cmd = ['chmod', mode]
    cmd.extend(paths)
    cmd = [shlex_quote(c) for c in cmd]
    return ' '.join(cmd)
def chown(self, paths, user):
    cmd = ['chown', user]
    cmd.extend(paths)
    cmd = [shlex_quote(c) for c in cmd]
    return ' '.join(cmd)
def run(self, tmp=None, task_vars=None): """ transfer the given module name, plus the async module, then run it """ if task_vars is None: task_vars = dict() result = super(ActionModule, self).run(tmp, task_vars) if self._play_context.check_mode: result["skipped"] = True result["msg"] = "check mode not supported for this module" return result remote_user = task_vars.get("ansible_ssh_user") or self._play_context.remote_user if not tmp: tmp = self._make_tmp_path(remote_user) self._cleanup_remote_tmp = True module_name = self._task.action env_string = self._compute_environment_string() module_args = self._task.args.copy() if self._play_context.no_log or C.DEFAULT_NO_TARGET_SYSLOG: module_args["_ansible_no_log"] = True # configure, upload, and chmod the target module (module_style, shebang, module_data, module_path) = self._configure_module( module_name=module_name, module_args=module_args, task_vars=task_vars ) remote_module_filename = self._connection._shell.get_remote_filename(module_path) remote_module_path = self._connection._shell.join_path(tmp, remote_module_filename) if module_style == "binary": self._transfer_file(module_path, remote_module_path) else: self._transfer_data(remote_module_path, module_data) # configure, upload, and chmod the async_wrapper module (async_module_style, shebang, async_module_data, async_module_path) = self._configure_module( module_name="async_wrapper", module_args=dict(), task_vars=task_vars ) async_module_remote_filename = self._connection._shell.get_remote_filename(async_module_path) remote_async_module_path = self._connection._shell.join_path(tmp, async_module_remote_filename) self._transfer_data(remote_async_module_path, async_module_data) argsfile = None if module_style in ("non_native_want_json", "binary"): argsfile = self._transfer_data(self._connection._shell.join_path(tmp, "arguments"), json.dumps(module_args)) elif module_style == "old": args_data = "" for k, v in iteritems(module_args): args_data += '%s="%s" ' % (k, shlex_quote(to_text(v))) argsfile = self._transfer_data(self._connection._shell.join_path(tmp, "arguments"), args_data) remote_paths = tmp, remote_module_path, remote_async_module_path # argsfile doesn't need to be executable, but this saves an extra call to the remote host if argsfile: remote_paths += (argsfile,) self._fixup_perms2(remote_paths, remote_user, execute=True) async_limit = self._task.async
def make_become_cmd(self, cmd, executable=None):
    """ helper function to create privilege escalation commands """

    prompt = None
    success_key = None
    self.prompt = None

    if self.become:

        if not executable:
            executable = self.executable

        becomecmd = None
        randbits = ''.join(random.choice(string.ascii_lowercase) for x in range(32))
        success_key = 'BECOME-SUCCESS-%s' % randbits
        success_cmd = shlex_quote('echo %s; %s' % (success_key, cmd))

        if executable:
            command = '%s -c %s' % (executable, success_cmd)
        else:
            command = success_cmd

        # set executable to use for the privilege escalation method, with various overrides
        exe = self.become_exe or \
              getattr(self, '%s_exe' % self.become_method, None) or \
              C.DEFAULT_BECOME_EXE or \
              getattr(C, 'DEFAULT_%s_EXE' % self.become_method.upper(), None) or \
              self.become_method

        # set flags to use for the privilege escalation method, with various overrides
        flags = self.become_flags or \
                getattr(self, '%s_flags' % self.become_method, None) or \
                C.DEFAULT_BECOME_FLAGS or \
                getattr(C, 'DEFAULT_%s_FLAGS' % self.become_method.upper(), None) or \
                ''

        if self.become_method == 'sudo':
            # If we have a password, we run sudo with a randomly-generated
            # prompt set using -p. Otherwise we run it with default -n, which makes
            # it fail if it would have prompted for a password.
            # Cannot rely on -n as it can be removed from defaults, which should be
            # done for older versions of sudo that do not support the option.
            #
            # Passing a quoted compound command to sudo (or sudo -s)
            # directly doesn't work, so we shellquote it with shlex_quote()
            # and pass the quoted string to the user's shell.

            # force quick error if password is required but not supplied, should prevent sudo hangs.
            if self.become_pass:
                prompt = '[sudo via ansible, key=%s] password: ' % randbits
                becomecmd = '%s %s -p "%s" -u %s %s' % (exe, flags.replace('-n', ''), prompt, self.become_user, command)
            else:
                becomecmd = '%s %s -u %s %s' % (exe, flags, self.become_user, command)

        elif self.become_method == 'su':

            # passing code ref to examine prompt as simple string comparisson isn't good enough with su
            def detect_su_prompt(b_data):
                b_password_string = b"|".join([b'(\w+\'s )?' + x for x in b_SU_PROMPT_LOCALIZATIONS])
                # Colon or unicode fullwidth colon
                b_password_string = b_password_string + to_bytes(u' ?(:|：) ?')
                b_SU_PROMPT_LOCALIZATIONS_RE = re.compile(b_password_string, flags=re.IGNORECASE)
                return bool(b_SU_PROMPT_LOCALIZATIONS_RE.match(b_data))
            prompt = detect_su_prompt

            becomecmd = '%s %s %s -c %s' % (exe, flags, self.become_user, shlex_quote(command))

        elif self.become_method == 'pbrun':

            prompt = 'Password:'
            becomecmd = '%s %s -u %s %s' % (exe, flags, self.become_user, success_cmd)

        elif self.become_method == 'ksu':

            def detect_ksu_prompt(b_data):
                return re.match(b"Kerberos password for .*@.*:", b_data)
            prompt = detect_ksu_prompt

            becomecmd = '%s %s %s -e %s' % (exe, self.become_user, flags, command)

        elif self.become_method == 'pfexec':

            # No user as it uses it's own exec_attr to figure it out
            becomecmd = '%s %s "%s"' % (exe, flags, success_cmd)

        elif self.become_method == 'runas':
            # become is handled inside the WinRM connection plugin
            becomecmd = cmd

        elif self.become_method == 'doas':

            prompt = 'doas (%s@' % self.remote_user
            exe = self.become_exe or 'doas'

            if not self.become_pass:
                flags += ' -n '

            if self.become_user:
                flags += ' -u %s ' % self.become_user

            # FIXME: make shell independent
            becomecmd = '%s %s echo %s && %s %s env ANSIBLE=true %s' % (exe, flags, success_key, exe, flags, cmd)

        elif self.become_method == 'dzdo':

            exe = self.become_exe or 'dzdo'

            if self.become_pass:
                prompt = '[dzdo via ansible, key=%s] password: ' % randbits
                becomecmd = '%s -p %s -u %s %s' % (exe, shlex_quote(prompt), self.become_user, command)
            else:
                becomecmd = '%s -u %s %s' % (exe, self.become_user, command)

        else:
            raise AnsibleError("Privilege escalation method not found: %s" % self.become_method)

        if self.become_pass:
            self.prompt = prompt
        self.success_key = success_key
        return becomecmd

    return cmd
def test_play_context_make_become_cmd(self):
    (options, args) = self._parser.parse_args([])
    play_context = PlayContext(options=options)

    default_cmd = "/bin/foo"
    default_exe = "/bin/bash"
    sudo_exe = C.DEFAULT_SUDO_EXE or 'sudo'
    sudo_flags = C.DEFAULT_SUDO_FLAGS
    su_exe = C.DEFAULT_SU_EXE or 'su'
    su_flags = C.DEFAULT_SU_FLAGS or ''
    pbrun_exe = 'pbrun'
    pbrun_flags = ''
    pfexec_exe = 'pfexec'
    pfexec_flags = ''
    doas_exe = 'doas'
    doas_flags = ' -n -u foo '
    ksu_exe = 'ksu'
    ksu_flags = ''
    dzdo_exe = 'dzdo'

    cmd = play_context.make_become_cmd(cmd=default_cmd, executable=default_exe)
    self.assertEqual(cmd, default_cmd)

    play_context.become = True
    play_context.become_user = 'foo'

    play_context.become_method = 'sudo'
    cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
    self.assertEqual(cmd, """%s %s -u %s %s -c 'echo %s; %s'""" % (sudo_exe, sudo_flags, play_context.become_user, default_exe, play_context.success_key, default_cmd))

    play_context.become_pass = 'testpass'  # any non-empty value works here; the original literal was redacted in the source
    cmd = play_context.make_become_cmd(cmd=default_cmd, executable=default_exe)
    self.assertEqual(cmd, """%s %s -p "%s" -u %s %s -c 'echo %s; %s'""" % (sudo_exe, sudo_flags.replace('-n', ''), play_context.prompt, play_context.become_user, default_exe, play_context.success_key, default_cmd))

    play_context.become_pass = None
    play_context.become_method = 'su'
    cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
    self.assertEqual(cmd, """%s %s -c '%s -c '"'"'echo %s; %s'"'"''""" % (su_exe, play_context.become_user, default_exe, play_context.success_key, default_cmd))

    play_context.become_method = 'pbrun'
    cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
    self.assertEqual(cmd, """%s %s -u %s 'echo %s; %s'""" % (pbrun_exe, pbrun_flags, play_context.become_user, play_context.success_key, default_cmd))

    play_context.become_method = 'pfexec'
    cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
    self.assertEqual(cmd, '''%s %s "'echo %s; %s'"''' % (pfexec_exe, pfexec_flags, play_context.success_key, default_cmd))

    play_context.become_method = 'doas'
    cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
    self.assertEqual(cmd, """%s %s echo %s && %s %s env ANSIBLE=true %s""" % (doas_exe, doas_flags, play_context.success_key, doas_exe, doas_flags, default_cmd))

    play_context.become_method = 'ksu'
    cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
    self.assertEqual(cmd, """%s %s %s -e %s -c 'echo %s; %s'""" % (ksu_exe, play_context.become_user, ksu_flags, default_exe, play_context.success_key, default_cmd))

    play_context.become_method = 'bad'
    self.assertRaises(AnsibleError, play_context.make_become_cmd, cmd=default_cmd, executable="/bin/bash")

    play_context.become_method = 'dzdo'
    cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
    self.assertEqual(cmd, """%s -u %s %s -c 'echo %s; %s'""" % (dzdo_exe, play_context.become_user, default_exe, play_context.success_key, default_cmd))

    play_context.become_pass = 'testpass'  # any non-empty value works here; the original literal was redacted in the source
    play_context.become_method = 'dzdo'
    cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
    self.assertEqual(cmd, """%s -p %s -u %s %s -c 'echo %s; %s'""" % (dzdo_exe, shlex_quote(play_context.prompt), play_context.become_user, default_exe, play_context.success_key, default_cmd))
def quote(a):
    ''' return its argument quoted for shell usage '''
    return shlex_quote(a)
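# Quick illustration of what the quote filter returns, assuming shlex_quote behaves
# like the stdlib shlex.quote it is aliased from (Python 3.3+).
from shlex import quote as shlex_quote

print(shlex_quote('simple_path'))      # simple_path        (no unsafe characters, returned unchanged)
print(shlex_quote("it's a file"))      # 'it'"'"'s a file'  (embedded single quote escaped for POSIX shells)
print(shlex_quote('a;b && rm -rf /'))  # 'a;b && rm -rf /'  (shell metacharacters neutralised by quoting)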
def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=None, persist_files=False, delete_remote_tmp=True): ''' Transfer and run a module along with its arguments. ''' if task_vars is None: task_vars = dict() # if a module name was not specified for this execution, use # the action from the task if module_name is None: module_name = self._task.action if module_args is None: module_args = self._task.args # Get the connection user for permission checks remote_user = task_vars.get( 'ansible_ssh_user') or self._play_context.remote_user self._update_module_args(module_args, task_vars) (module_style, shebang, module_data, module_path) = self._configure_module(module_name=module_name, module_args=module_args, task_vars=task_vars) display.vvv("Using module file %s" % module_path) if not shebang and module_style != 'binary': raise AnsibleError("module (%s) is missing interpreter line" % module_name) # a remote tmp path may be necessary and not already created remote_module_path = None args_file_path = None if not tmp and self._late_needs_tmp_path(tmp, module_style): tmp = self._make_tmp_path(remote_user) if tmp and \ (module_style != 'new' or \ not self._connection.has_pipelining or \ not self._play_context.pipelining or \ C.DEFAULT_KEEP_REMOTE_FILES or \ self._play_context.become_method == 'su'): remote_module_filename = self._connection._shell.get_remote_filename( module_path) remote_module_path = self._connection._shell.join_path( tmp, remote_module_filename) if module_style in ('old', 'non_native_want_json', 'binary'): # we'll also need a temp file to hold our module arguments args_file_path = self._connection._shell.join_path(tmp, 'args') if remote_module_path or module_style != 'new': display.debug("transferring module to remote %s" % remote_module_path) if module_style == 'binary': self._transfer_file(module_path, remote_module_path) else: self._transfer_data(remote_module_path, module_data) if module_style == 'old': # we need to dump the module args to a k=v string in a file on # the remote system, which can be read and parsed by the module args_data = "" for k, v in iteritems(module_args): args_data += '%s=%s ' % (k, shlex_quote(text_type(v))) self._transfer_data(args_file_path, args_data) elif module_style in ('non_native_want_json', 'binary'): self._transfer_data(args_file_path, json.dumps(module_args)) display.debug("done transferring module to remote") environment_string = self._compute_environment_string() remote_files = None if args_file_path: remote_files = tmp, remote_module_path, args_file_path elif remote_module_path: remote_files = tmp, remote_module_path # Fix permissions of the tmp path and tmp files. This should be # called after all files have been transferred. 
if remote_files: self._fixup_perms2(remote_files, remote_user) cmd = "" in_data = None if self._connection.has_pipelining and self._play_context.pipelining and not C.DEFAULT_KEEP_REMOTE_FILES and module_style == 'new': in_data = module_data else: if remote_module_path: cmd = remote_module_path rm_tmp = None if tmp and "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES and not persist_files and delete_remote_tmp: if not self._play_context.become or self._play_context.become_user == 'root': # not sudoing or sudoing to root, so can cleanup files in the same step rm_tmp = tmp cmd = self._connection._shell.build_module_command( environment_string, shebang, cmd, arg_path=args_file_path, rm_tmp=rm_tmp) cmd = cmd.strip() sudoable = True if module_name == "accelerate": # always run the accelerate module as the user # specified in the play, not the sudo_user sudoable = False res = self._low_level_execute_command(cmd, sudoable=sudoable, in_data=in_data) if tmp and "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES and not persist_files and delete_remote_tmp: if self._play_context.become and self._play_context.become_user != 'root': # not sudoing to root, so maybe can't delete files as that other user # have to clean up temp files as original user in a second step tmp_rm_cmd = self._connection._shell.remove(tmp, recurse=True) tmp_rm_res = self._low_level_execute_command(tmp_rm_cmd, sudoable=False) tmp_rm_data = self._parse_returned_data(tmp_rm_res) if tmp_rm_data.get('rc', 0) != 0: display.warning( 'Error deleting remote temporary files (rc: {0}, stderr: {1})' .format( tmp_rm_res.get('rc'), tmp_rm_res.get('stderr', 'No error string available.'))) # parse the main result data = self._parse_returned_data(res) # pre-split stdout into lines, if stdout is in the data and there # isn't already a stdout_lines value there if 'stdout' in data and 'stdout_lines' not in data: data['stdout_lines'] = data.get('stdout', u'').splitlines() display.debug("done with _execute_module (%s, %s)" % (module_name, module_args)) return data
def env_prefix(**args):
    return ' '.join(['%s=%s' % (k, shlex_quote(text_type(v))) for k, v in args.items()])
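# Usage sketch (assumption, for illustration only; not part of the original source):
# env_prefix() renders keyword arguments into a shell-safe "KEY=value" prefix for a
# remote command line; each value goes through shlex_quote().
if __name__ == '__main__':
    print(env_prefix(LANG='C', GREETING='hello world'))
    # -> LANG=C GREETING='hello world'   (keyword order is preserved on Python 3.6+)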
def run(self, tmp=None, task_vars=None):
    ''' transfer the given module name, plus the async module, then run it '''

    if task_vars is None:
        task_vars = dict()

    result = super(ActionModule, self).run(tmp, task_vars)

    if self._play_context.check_mode:
        result['skipped'] = True
        result['msg'] = 'check mode not supported for this module'
        return result

    remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user
    if not tmp:
        tmp = self._make_tmp_path(remote_user)
        self._cleanup_remote_tmp = True

    module_name = self._task.action

    env_string = self._compute_environment_string()

    module_args = self._task.args.copy()
    if self._play_context.no_log or C.DEFAULT_NO_TARGET_SYSLOG:
        module_args['_ansible_no_log'] = True

    # configure, upload, and chmod the target module
    (module_style, shebang, module_data, module_path) = self._configure_module(module_name=module_name, module_args=module_args, task_vars=task_vars)
    remote_module_filename = self._connection._shell.get_remote_filename(module_path)
    remote_module_path = self._connection._shell.join_path(tmp, remote_module_filename)
    if module_style == 'binary':
        self._transfer_file(module_path, remote_module_path)
    else:
        self._transfer_data(remote_module_path, module_data)

    # configure, upload, and chmod the async_wrapper module
    (async_module_style, shebang, async_module_data, async_module_path) = self._configure_module(module_name='async_wrapper', module_args=dict(), task_vars=task_vars)
    async_module_remote_filename = self._connection._shell.get_remote_filename(async_module_path)
    remote_async_module_path = self._connection._shell.join_path(tmp, async_module_remote_filename)
    self._transfer_data(remote_async_module_path, async_module_data)

    argsfile = None
    if module_style in ('non_native_want_json', 'binary'):
        argsfile = self._transfer_data(self._connection._shell.join_path(tmp, 'arguments'), json.dumps(module_args))
    elif module_style == 'old':
        args_data = ""
        for k, v in iteritems(module_args):
            args_data += '%s="%s" ' % (k, shlex_quote(to_text(v)))
        argsfile = self._transfer_data(self._connection._shell.join_path(tmp, 'arguments'), args_data)

    remote_paths = tmp, remote_module_path, remote_async_module_path

    # argsfile doesn't need to be executable, but this saves an extra call to the remote host
    if argsfile:
        remote_paths += argsfile,

    self._fixup_perms2(remote_paths, remote_user, execute=True)

    async_limit = self._task.async
def make_become_cmd(self, cmd, executable=None):
    """ helper function to create privilege escalation commands """

    prompt = None
    success_key = None
    self.prompt = None

    if self.become:

        if not executable:
            executable = self.executable

        becomecmd = None
        randbits = ''.join(random.choice(string.ascii_lowercase) for x in range(32))
        success_key = 'BECOME-SUCCESS-%s' % randbits
        success_cmd = shlex_quote('echo %s; %s' % (success_key, cmd))

        if executable:
            command = '%s -c %s' % (executable, success_cmd)
        else:
            command = success_cmd

        # set executable to use for the privilege escalation method, with various overrides
        exe = self.become_exe or \
            getattr(self, '%s_exe' % self.become_method, None) or \
            C.DEFAULT_BECOME_EXE or \
            getattr(C, 'DEFAULT_%s_EXE' % self.become_method.upper(), None) or \
            self.become_method

        # set flags to use for the privilege escalation method, with various overrides
        flags = self.become_flags or \
            getattr(self, '%s_flags' % self.become_method, None) or \
            C.DEFAULT_BECOME_FLAGS or \
            getattr(C, 'DEFAULT_%s_FLAGS' % self.become_method.upper(), None) or \
            ''

        if self.become_method == 'sudo':
            # If we have a password, we run sudo with a randomly-generated
            # prompt set using -p. Otherwise we run it with default -n, which makes
            # it fail if it would have prompted for a password.
            # Cannot rely on -n as it can be removed from defaults, which should be
            # done for older versions of sudo that do not support the option.
            #
            # Passing a quoted compound command to sudo (or sudo -s)
            # directly doesn't work, so we shellquote it with shlex_quote()
            # and pass the quoted string to the user's shell.

            # force quick error if password is required but not supplied, should prevent sudo hangs.
            if self.become_pass:
                prompt = '[sudo via ansible, key=%s] password: ' % randbits
                becomecmd = '%s %s -p "%s" -u %s %s' % (exe, flags.replace('-n', ''), prompt, self.become_user, command)
            else:
                becomecmd = '%s %s -u %s %s' % (exe, flags, self.become_user, command)

        elif self.become_method == 'su':

            # passing code ref to examine prompt as simple string comparison isn't good enough with su
            def detect_su_prompt(b_data):
                b_password_string = b"|".join([b'(\w+\'s )?' + x for x in b_SU_PROMPT_LOCALIZATIONS])
                # Colon or unicode fullwidth colon
                b_password_string = b_password_string + to_bytes(u' ?(:|：) ?')
                b_SU_PROMPT_LOCALIZATIONS_RE = re.compile(b_password_string, flags=re.IGNORECASE)
                return bool(b_SU_PROMPT_LOCALIZATIONS_RE.match(b_data))

            prompt = detect_su_prompt
            becomecmd = '%s %s %s -c %s' % (exe, flags, self.become_user, shlex_quote(command))

        elif self.become_method == 'pbrun':

            prompt = 'Password:'
            becomecmd = '%s %s -u %s %s' % (exe, flags, self.become_user, success_cmd)

        elif self.become_method == 'ksu':

            def detect_ksu_prompt(b_data):
                return re.match(b"Kerberos password for .*@.*:", b_data)

            prompt = detect_ksu_prompt
            becomecmd = '%s %s %s -e %s' % (exe, self.become_user, flags, command)

        elif self.become_method == 'pfexec':

            # No user as it uses its own exec_attr to figure it out
            becomecmd = '%s %s "%s"' % (exe, flags, success_cmd)

        elif self.become_method == 'runas':
            raise AnsibleError("'runas' is not yet implemented")
            # FIXME: figure out prompt
            # this is not for use with winrm plugin but if they ever get ssh native on windoez
            becomecmd = '%s %s /user:%s "%s"' % (exe, flags, self.become_user, success_cmd)

        elif self.become_method == 'doas':

            prompt = 'doas (%s@' % self.remote_user
            exe = self.become_exe or 'doas'

            if not self.become_pass:
                flags += ' -n '

            if self.become_user:
                flags += ' -u %s ' % self.become_user

            # FIXME: make shell independent
            becomecmd = '%s %s echo %s && %s %s env ANSIBLE=true %s' % (exe, flags, success_key, exe, flags, cmd)

        elif self.become_method == 'dzdo':

            exe = self.become_exe or 'dzdo'
            if self.become_pass:
                prompt = '[dzdo via ansible, key=%s] password: ' % randbits
                becomecmd = '%s -p %s -u %s %s' % (exe, shlex_quote(prompt), self.become_user, command)
            else:
                becomecmd = '%s -u %s %s' % (exe, self.become_user, command)

        else:
            raise AnsibleError("Privilege escalation method not found: %s" % self.become_method)

        if self.become_pass:
            self.prompt = prompt
        self.success_key = success_key
        return becomecmd

    return cmd
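# Usage sketch (assumption, for illustration only; not part of the original source):
# building a sudo-wrapped command line from a PlayContext, the way the connection
# layer consumes make_become_cmd(). The command and password below are made up.
pc = PlayContext()
pc.become = True
pc.become_method = 'sudo'
pc.become_user = 'root'
pc.become_pass = 'secret'   # forces the randomized "-p" prompt branch above
wrapped = pc.make_become_cmd('/bin/echo hello', executable='/bin/sh')
# wrapped looks roughly like:
#   sudo <flags> -p "[sudo via ansible, key=<randbits>] password: " -u root /bin/sh -c 'echo BECOME-SUCCESS-<randbits>; /bin/echo hello'
# and pc.prompt / pc.success_key are set so the connection plugin can match the
# password prompt and the success marker in the remote output.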