def test_plugins_connection_ssh_put_file(self, mock_ospe, mock_sleep):
    """put_file() picks SFTP or SCP per DEFAULT_SCP_IF_SSH and raises on errors."""
    play_ctx = PlayContext()
    fake_stdin = StringIO()
    ssh_conn = connection_loader.get('ssh', play_ctx, fake_stdin)
    ssh_conn._build_command = MagicMock()
    ssh_conn._bare_run = MagicMock()
    mock_ospe.return_value = True
    ssh_conn._build_command.return_value = 'some command to run'
    ssh_conn._bare_run.return_value = (0, '', '')
    ssh_conn.host = "some_host"

    C.ANSIBLE_SSH_RETRIES = 9

    # 'smart' mode: SFTP is tried first and succeeds, so the batch command
    # is piped on stdin.
    C.DEFAULT_SCP_IF_SSH = 'smart'
    want_in_data = b' '.join((b'put', to_bytes(shlex_quote('/path/to/in/file')), to_bytes(shlex_quote('/path/to/dest/file')))) + b'\n'
    ssh_conn.put_file('/path/to/in/file', '/path/to/dest/file')
    ssh_conn._bare_run.assert_called_with('some command to run', want_in_data, checkrc=False)

    # Still 'smart': SFTP fails once, transfer falls back to SCP (no stdin).
    ssh_conn._bare_run.side_effect = [(1, 'stdout', 'some errors'), (0, '', '')]
    ssh_conn.put_file('/path/to/in/file', '/path/to/dest/file')
    ssh_conn._bare_run.assert_called_with('some command to run', None, checkrc=False)
    ssh_conn._bare_run.side_effect = None

    # SCP forced on: never any stdin payload, ASCII and unicode paths alike.
    C.DEFAULT_SCP_IF_SSH = True
    ssh_conn.put_file('/path/to/in/file', '/path/to/dest/file')
    ssh_conn._bare_run.assert_called_with('some command to run', None, checkrc=False)
    ssh_conn.put_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
    ssh_conn._bare_run.assert_called_with('some command to run', None, checkrc=False)

    # SCP forced off: SFTP batch commands are always fed via stdin.
    C.DEFAULT_SCP_IF_SSH = False
    want_in_data = b' '.join((b'put', to_bytes(shlex_quote('/path/to/in/file')), to_bytes(shlex_quote('/path/to/dest/file')))) + b'\n'
    ssh_conn.put_file('/path/to/in/file', '/path/to/dest/file')
    ssh_conn._bare_run.assert_called_with('some command to run', want_in_data, checkrc=False)
    want_in_data = b' '.join((b'put', to_bytes(shlex_quote('/path/to/in/file/with/unicode-fö〩')), to_bytes(shlex_quote('/path/to/dest/file/with/unicode-fö〩')))) + b'\n'
    ssh_conn.put_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
    ssh_conn._bare_run.assert_called_with('some command to run', want_in_data, checkrc=False)

    # A non-zero return code from the transfer raises AnsibleError.
    ssh_conn._bare_run.return_value = (1, 'stdout', 'some errors')
    self.assertRaises(AnsibleError, ssh_conn.put_file, '/path/to/bad/file', '/remote/path/to/file')

    # A missing local path raises AnsibleFileNotFound.
    mock_ospe.return_value = False
    ssh_conn._bare_run.return_value = (0, 'stdout', '')
    self.assertRaises(AnsibleFileNotFound, ssh_conn.put_file, '/path/to/bad/file', '/remote/path/to/file')
def _connect(self):
    '''
    Connects to the remote device and starts the terminal.

    Builds a paramiko transport, opens an interactive shell on it, loads
    the terminal and cliconf plugins for the host's network_os, consumes
    the initial prompt, and optionally escalates privileges via 'enable'.

    :raises AnsibleConnectionFailure: if network_os is unset or has no
        terminal plugin.
    :return: self, now in the connected state.
    '''
    if self.connected:
        return  # already connected; nothing to do

    # The underlying transport is paramiko; route its log to our channel.
    self.paramiko_conn = connection_loader.get('paramiko', self._play_context, '/dev/null')
    self.paramiko_conn._set_log_channel(self._get_log_channel())
    # Only skip key lookup when a password is given and no key file is set.
    self.paramiko_conn.set_options(direct={'look_for_keys': not bool(self._play_context.password and not self._play_context.private_key_file)})
    self.paramiko_conn.force_persistence = self.force_persistence
    ssh = self.paramiko_conn._connect()

    display.vvvv('ssh connection done, setting terminal', host=self._play_context.remote_addr)

    # Open an interactive shell and apply the play timeout to it.
    self._ssh_shell = ssh.ssh.invoke_shell()
    self._ssh_shell.settimeout(self._play_context.timeout)

    network_os = self._play_context.network_os
    if not network_os:
        raise AnsibleConnectionFailure(
            'Unable to automatically determine host network os. Please '
            'manually configure ansible_network_os value for this host'
        )

    self._terminal = terminal_loader.get(network_os, self)
    if not self._terminal:
        raise AnsibleConnectionFailure('network os %s is not supported' % network_os)

    display.vvvv('loaded terminal plugin for network_os %s' % network_os, host=self._play_context.remote_addr)

    # cliconf is optional; absence is logged but not fatal.
    self._cliconf = cliconf_loader.get(network_os, self)
    if self._cliconf:
        display.vvvv('loaded cliconf plugin for network_os %s' % network_os, host=self._play_context.remote_addr)
    else:
        display.vvvv('unable to load cliconf for network_os %s' % network_os)

    # Consume the device's initial prompt ('inital' is the plugin API's
    # historical spelling of the attribute, not a typo to fix here).
    self.receive(prompts=self._terminal.terminal_initial_prompt, answer=self._terminal.terminal_initial_answer, newline=self._terminal.terminal_inital_prompt_newline)

    display.vvvv('firing event: on_open_shell()', host=self._play_context.remote_addr)
    self._terminal.on_open_shell()

    # Privilege escalation is only supported via the 'enable' become method.
    if self._play_context.become and self._play_context.become_method == 'enable':
        display.vvvv('firing event: on_become', host=self._play_context.remote_addr)
        auth_pass = self._play_context.become_pass
        self._terminal.on_become(passwd=auth_pass)

    display.vvvv('ssh connection has completed successfully',
                 host=self._play_context.remote_addr)
    self._connected = True

    return self
def test_plugins_connection_ssh_exec_command(self):
    """exec_command() succeeds both without and with in-band data."""
    play_ctx = PlayContext()
    fake_stdin = StringIO()
    ssh_conn = connection_loader.get('ssh', play_ctx, fake_stdin)

    ssh_conn._build_command = MagicMock()
    ssh_conn._build_command.return_value = 'ssh something something'
    ssh_conn._run = MagicMock()
    ssh_conn._run.return_value = (0, 'stdout', 'stderr')
    ssh_conn.get_option = MagicMock()
    ssh_conn.get_option.return_value = True

    res, stdout, stderr = ssh_conn.exec_command('ssh')
    res, stdout, stderr = ssh_conn.exec_command('ssh', 'this is some data')
def test_netconf__connect(self, mock_netconf_loader):
    """_connect() reports rc 0 and the netconf session id on success."""
    play_ctx = PlayContext()
    nc_conn = connection_loader.get("ansible.netcommon.netconf", play_ctx, "/dev/null")

    manager_mock = MagicMock()
    manager_mock.session_id = "123456789"
    netconf.manager.connect = MagicMock(return_value=manager_mock)

    rc, out, err = nc_conn._connect()

    self.assertEqual(0, rc)
    self.assertEqual(b"123456789", out)
    self.assertEqual(b"", err)
    self.assertTrue(nc_conn._connected)
def test_netconf_exec_command_invalid_request(self):
    """An unparseable request yields an error string instead of raising."""
    play_ctx = PlayContext()
    nc_conn = connection_loader.get("ansible.netcommon.netconf", play_ctx, "/dev/null")
    nc_conn._connected = True

    manager_mock = MagicMock(name="self._manager")
    nc_conn._manager = manager_mock
    netconf.to_ele.return_value = None  # simulate XML parse failure

    result = nc_conn.exec_command("test string")
    self.assertEqual("unable to parse request", result)
def test_set_options(self, options, direct, expected, kerb):
    """Verify winrm connection attributes after set_options()/_build_winrm_kwargs().

    Parametrized: `options`/`direct` feed set_options(), `expected` maps
    attribute name -> expected value, `kerb` toggles winrm.HAVE_KERBEROS.
    """
    winrm.HAVE_KERBEROS = kerb

    pc = PlayContext()
    new_stdin = StringIO()

    conn = connection_loader.get('winrm', pc, new_stdin)
    conn.set_options(var_options=options, direct=direct)
    conn._build_winrm_kwargs()

    # BUGFIX: the loop variable previously shadowed the `expected` dict it
    # was iterating; use a distinct name for each expected value.
    for attr, expected_value in expected.items():
        actual = getattr(conn, attr)
        assert actual == expected_value, \
            "winrm attr '%s', actual '%s' != expected '%s'"\
            % (attr, actual, expected_value)
def __init__(self, play_context, new_stdin, *args, **kwargs):
    """Set up base network connection state and a helper 'local' connection."""
    super(NetworkConnectionBase, self).__init__(play_context, new_stdin, *args, **kwargs)
    self._network_os = self._play_context.network_os  # may be None

    # Local connection used for controller-side operations.
    self._local = connection_loader.get('local', play_context, '/dev/null')
    self._local.set_options()

    # presumably filled in later by subclasses/plugin loading — not set here
    self._implementation_plugins = []
    self._cached_variables = (None, None, None)

    # reconstruct the socket_path and set instance values accordingly
    self._ansible_playbook_pid = kwargs.get('ansible_playbook_pid')
    self._update_connection_state()
def test_network_cli_close(self, mocked_super):
    """close() tears down the shell, the paramiko link and the terminal."""
    play_ctx = PlayContext()
    play_ctx.network_os = 'ios'
    cli_conn = connection_loader.get('network_cli', play_ctx, '/dev/null')

    terminal_mock = MagicMock(supports_multiplexing=False)
    cli_conn._terminal = terminal_mock
    cli_conn._ssh_shell = MagicMock()
    cli_conn.paramiko_conn = MagicMock()
    cli_conn._connected = True

    cli_conn.close()

    self.assertTrue(terminal_mock.on_close_shell.called)
    self.assertIsNone(cli_conn._ssh_shell)
    self.assertIsNone(cli_conn.paramiko_conn)
def test_netconf_exec_command():
    """exec_command() returns the rpc reply's data_xml."""
    play_ctx = PlayContext()
    nc_conn = connection_loader.get("ansible.netcommon.netconf", play_ctx, "/dev/null")
    nc_conn._connected = True

    reply_mock = MagicMock(name="reply")
    type(reply_mock).data_xml = PropertyMock(return_value="<test/>")

    manager_mock = MagicMock(name="self._manager")
    manager_mock.rpc.return_value = reply_mock
    nc_conn._manager = manager_mock

    result = nc_conn.exec_command("<test/>")
    assert result == "<test/>"
def test_libssh_put_file(self, mocked_super, mock_exists):
    """put_file() hands byte paths to the sftp channel's put()."""
    play_ctx = PlayContext()
    libssh_conn = connection_loader.get(
        "ansible.netcommon.libssh", play_ctx, "/dev/null"
    )

    sftp_mock = MagicMock()
    libssh_conn.ssh = MagicMock(**{"sftp.return_value": sftp_mock})

    path = "test_libssh.py"
    libssh_conn.put_file(in_path=path, out_path=path)

    sftp_mock.put.assert_called_with(
        to_bytes(path), to_bytes(path)
    )
def test_libssh_fetch_file(self, mocked_super, mock_session):
    """fetch_file() retrieves via sftp.get() with byte paths."""
    play_ctx = PlayContext()
    play_ctx.remote_addr = "localhost"
    libssh_conn = connection_loader.get(
        "ansible.netcommon.libssh", play_ctx, "/dev/null"
    )

    libssh_conn.ssh = mock_session
    libssh_conn.ssh.connect = MagicMock()

    path = "test_libssh.py"
    libssh_conn.fetch_file(in_path=path, out_path=path)

    libssh_conn.sftp.get.assert_called_with(
        to_bytes(path), to_bytes(path)
    )
def test_set_invalid_extras_options(self, monkeypatch):
    """Unknown ansible_psrp_* extras trigger a warning from _build_kwargs()."""
    play_ctx = PlayContext()
    fake_stdin = StringIO()
    psrp_conn = connection_loader.get('psrp', play_ctx, fake_stdin)
    psrp_conn.set_options(
        var_options={'_extras': {'ansible_psrp_mock_test3': True}})

    warn_mock = MagicMock()
    monkeypatch.setattr(Display, "warning", warn_mock)

    psrp_conn._build_kwargs()

    assert warn_mock.call_args[0][0] == \
        'ansible_psrp_mock_test3 is unsupported by the current psrp version installed'
def test_netconf_exec_command(self):
    """exec_command() returns the rpc reply's data_xml."""
    play_ctx = PlayContext()
    nc_conn = connection_loader.get('netconf', play_ctx, '/dev/null')
    nc_conn._connected = True

    reply_mock = MagicMock(name='reply')
    type(reply_mock).data_xml = PropertyMock(return_value='<test/>')

    manager_mock = MagicMock(name='self._manager')
    manager_mock.rpc.return_value = reply_mock
    nc_conn._manager = manager_mock

    self.assertEqual('<test/>', nc_conn.exec_command('<test/>'))
def start(self, options):
    """Set up the persistent connection and its local domain socket.

    Loads the connection plugin named by the play context, applies
    *options*, registers it with the RPC server (self.srv), and listens
    on self.socket_path.  The outcome (messages plus any error/exception)
    is always written to self.fd as JSON, and self.fd is closed.
    """
    messages = list()
    result = {}

    try:
        messages.append(
            ('vvvv', 'control socket path is %s' % self.socket_path))

        # If this is a relative path (~ gets expanded later) then plug the
        # key's path on to the directory we originally came from, so we can
        # find it now that our cwd is /
        if self.play_context.private_key_file and self.play_context.private_key_file[
                0] not in '~/':
            self.play_context.private_key_file = os.path.join(
                self.original_path, self.play_context.private_key_file)
        self.connection = connection_loader.get(
            self.play_context.connection,
            self.play_context,
            '/dev/null',
            task_uuid=self._task_uuid,
            ansible_playbook_pid=self._ansible_playbook_pid)
        try:
            self.connection.set_options(direct=options)
        except ConnectionError as exc:
            # Keep the original failure in the debug log, then surface a
            # generic error to the caller.
            messages.append(('debug', to_text(exc)))
            raise ConnectionError(
                'Unable to decode JSON from response set_options. See the debug log for more information.'
            )
        self.connection._socket_path = self.socket_path
        self.srv.register(self.connection)
        # Anything the plugin printed during setup becomes vvvv messages.
        messages.extend([('vvvv', msg)
                         for msg in sys.stdout.getvalue().splitlines()])

        # Unix domain socket the persistent connection listens on.
        self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        self.sock.bind(self.socket_path)
        self.sock.listen(1)
        messages.append(
            ('vvvv', 'local domain socket listeners started successfully'))
    except Exception as exc:
        # NOTE(review): if the failure occurs before self.connection is
        # assigned above, pop_messages() would raise AttributeError —
        # confirm callers pre-set self.connection or that path is unreachable.
        messages.extend(self.connection.pop_messages())
        result['error'] = to_text(exc)
        result['exception'] = traceback.format_exc()
    finally:
        # The result document is always emitted, success or failure.
        result['messages'] = messages
        self.fd.write(json.dumps(result, cls=AnsibleJSONEncoder))
        self.fd.close()
def test_netconf__connect(self, mock_netconf_loader):
    """_connect() returns (0, session id, b'') and marks connected."""
    play_ctx = PlayContext()
    fake_stdin = StringIO()
    nc_conn = connection_loader.get('netconf', play_ctx, fake_stdin)

    manager_mock = MagicMock()
    manager_mock.session_id = '123456789'
    netconf.manager.connect = MagicMock(return_value=manager_mock)

    nc_conn._play_context.network_os = 'default'
    rc, out, err = nc_conn._connect()

    self.assertEqual(0, rc)
    self.assertEqual(b'123456789', out)
    self.assertEqual(b'', err)
    self.assertTrue(nc_conn._connected)
def test_network_cli_close_libssh(self, mocked_super):
    """close() with the libssh transport clears the shell and transport conn."""
    play_ctx = PlayContext()
    play_ctx.network_os = "ios"
    cli_conn = connection_loader.get(
        "ansible.netcommon.network_cli", play_ctx, "/dev/null"
    )
    cli_conn._ssh_type = "libssh"

    terminal_mock = MagicMock(supports_multiplexing=False)
    cli_conn._terminal = terminal_mock
    cli_conn._ssh_shell = MagicMock()
    cli_conn._ssh_type_conn = MagicMock()
    cli_conn._connected = True

    cli_conn.close()

    self.assertTrue(terminal_mock.on_close_shell.called)
    self.assertIsNone(cli_conn._ssh_shell)
    self.assertIsNone(cli_conn._ssh_type_conn)
def __init__(self, play_context, new_stdin, *args, **kwargs):
    """Set up base network connection state and a helper 'local' connection."""
    super(NetworkConnectionBase, self).__init__(play_context, new_stdin, *args, **kwargs)
    self._messages = []          # queued (level, text) display messages
    self._conn_closed = False
    self._network_os = self._play_context.network_os  # may be None

    # Local connection used for controller-side operations.
    self._local = connection_loader.get("local", play_context, "/dev/null")
    self._local.set_options()

    # presumably populated later with the transport sub-plugin — not set here
    self._sub_plugin = {}
    self._cached_variables = (None, None, None)

    # reconstruct the socket_path and set instance values accordingly
    self._ansible_playbook_pid = kwargs.get("ansible_playbook_pid")
    self._update_connection_state()
def test_plugins_connection_aws_ssm_close(self, s_check_output):
    """close() runs to completion with session, client and options mocked."""
    play_ctx = PlayContext()
    fake_stdin = StringIO()
    ssm_conn = connection_loader.get('community.aws.aws_ssm', play_ctx, fake_stdin)
    ssm_conn.instance_id = "i-12345"
    ssm_conn._session_id = True

    ssm_conn.get_option = MagicMock()
    ssm_conn.get_option.side_effect = ["/abc", "pqr"]

    ssm_conn._session = MagicMock()
    ssm_conn._session.terminate = MagicMock()
    ssm_conn._session.communicate = MagicMock()
    ssm_conn._terminate_session = MagicMock()
    ssm_conn._terminate_session.return_value = ''
    ssm_conn._session_id = MagicMock()
    ssm_conn._session_id.return_value = 'a'
    ssm_conn._client = MagicMock()

    ssm_conn.close()
def _update_connection_state(self):
    '''
    Reconstruct the connection socket_path and check if it exists

    If the socket path exists then the connection is active and set
    both the _socket_path value to the path and the _connected value
    to True.  If the socket path doesn't exist, leave the socket path
    value to None and the _connected value to False
    '''
    # Rebuild the ControlPath the ssh plugin would compute for this
    # host/port/user, under the persistent control-path directory.
    ssh = connection_loader.get('ssh', class_only=True)
    cp = ssh._create_control_path(self._play_context.remote_addr, self._play_context.port, self._play_context.remote_user)

    tmp_path = unfrackpath(C.PERSISTENT_CONTROL_PATH_DIR)
    socket_path = unfrackpath(cp % dict(directory=tmp_path))

    if os.path.exists(socket_path):
        self._connected = True
        self._socket_path = socket_path
def test_kinit_with_missing_executable_subprocess(self, monkeypatch):
    """A missing kinit binary raises AnsibleConnectionFailure (subprocess path)."""
    expected_err = "[Errno 2] No such file or directory: " \
                   "'/fake/kinit': '/fake/kinit'"
    popen_mock = MagicMock(side_effect=OSError(expected_err))
    monkeypatch.setattr("subprocess.Popen", popen_mock)

    winrm.HAS_PEXPECT = False  # force the subprocess code path
    play_ctx = PlayContext()
    fake_stdin = StringIO()
    winrm_conn = connection_loader.get('winrm', play_ctx, fake_stdin)
    winrm_conn.set_options(
        var_options={"_extras": {}, "ansible_winrm_kinit_cmd": "/fake/kinit"})

    with pytest.raises(AnsibleConnectionFailure) as err:
        winrm_conn._kerb_auth("user@domain", "pass")

    assert str(err.value) == "Kerberos auth failure when calling " \
        "kinit cmd '/fake/kinit': %s" % expected_err
def test_network_cli_exec_command(self, mocked_super):
    """exec_command() forwards plain and JSON-wrapped commands to send()."""
    play_ctx = PlayContext()
    play_ctx.network_os = "ios"
    cli_conn = connection_loader.get("network_cli", play_ctx, "/dev/null")

    send_mock = MagicMock(return_value=b"command response")
    cli_conn.send = send_mock
    cli_conn._ssh_shell = MagicMock()

    # A bare string command is passed through as bytes.
    self.assertEqual(cli_conn.exec_command("command"), b"command response")
    send_mock.assert_called_with(command=b"command")

    # A JSON request is unwrapped into the same send() call.
    self.assertEqual(
        cli_conn.exec_command(json.dumps({"command": "command"})),
        b"command response",
    )
    send_mock.assert_called_with(command=b"command")
def test_network_cli_exec_command(self, mocked_super):
    """exec_command() forwards plain and JSON-wrapped commands to send()."""
    play_ctx = PlayContext()
    play_ctx.network_os = 'ios'
    cli_conn = connection_loader.get('network_cli', play_ctx, '/dev/null')

    send_mock = MagicMock(return_value=b'command response')
    cli_conn.send = send_mock
    cli_conn._ssh_shell = MagicMock()

    # A bare string command is passed through as bytes.
    self.assertEqual(cli_conn.exec_command('command'), b'command response')
    send_mock.assert_called_with(command=b'command')

    # A JSON request is unwrapped into the same send() call.
    self.assertEqual(
        cli_conn.exec_command(json.dumps({'command': 'command'})),
        b'command response',
    )
    send_mock.assert_called_with(command=b'command')
def ssh_type_conn(self):
    """Lazily create and return the backing ssh transport connection plugin.

    Picks the plugin from self.ssh_type ('libssh' or 'paramiko'); any
    other value raises AnsibleConnectionFailure.
    """
    if self._ssh_type_conn is None:
        if self.ssh_type == "libssh":
            connection_plugin = "ansible.netcommon.libssh"
        elif self.ssh_type == "paramiko":
            # NOTE: This MUST be paramiko or things will break
            connection_plugin = "paramiko"
        else:
            raise AnsibleConnectionFailure(
                "Invalid value '%s' set for ssh_type option."
                " Expected value is either 'libssh' or 'paramiko'"
                % self._ssh_type
            )

        self._ssh_type_conn = connection_loader.get(
            connection_plugin, self._play_context, "/dev/null"
        )

    return self._ssh_type_conn
def test_network_cli__connect(self, mocked_super, mocked_terminal_loader):
    """_connect() fires on_open_shell, and on_become only when becoming."""
    pc = PlayContext()
    pc.network_os = 'ios'
    conn = connection_loader.get('network_cli', pc, '/dev/null')
    conn.ssh = MagicMock()
    conn.receive = MagicMock()

    conn._connect()
    self.assertTrue(conn._terminal.on_open_shell.called)
    self.assertFalse(conn._terminal.on_become.called)

    # BUGFIX: become_pass was set to '******' while the assertion expected
    # 'password', so on_become() could never match. Use one shared value.
    become_pass = 'password'
    conn._play_context.become = True
    conn._play_context.become_method = 'enable'
    conn._play_context.become_pass = become_pass
    conn._connected = False

    conn._connect()
    conn._terminal.on_become.assert_called_with(passwd=become_pass)
def _update_connection_state(self):
    '''
    Reconstruct the connection socket_path and check if it exists

    If the socket path exists then the connection is active and set
    both the _socket_path value to the path and the _connected value
    to True.  If the socket path doesn't exist, leave the socket path
    value to None and the _connected value to False
    '''
    # Rebuild the ControlPath the ssh plugin would compute for this
    # host/port/user/connection/playbook-pid combination.
    ssh = connection_loader.get('ssh', class_only=True)
    cp = ssh._create_control_path(self._play_context.remote_addr, self._play_context.port, self._play_context.remote_user, self._play_context.connection, self._ansible_playbook_pid)

    tmp_path = unfrackpath(C.PERSISTENT_CONTROL_PATH_DIR)
    socket_path = unfrackpath(cp % dict(directory=tmp_path))

    if os.path.exists(socket_path):
        self._connected = True
        self._socket_path = socket_path
def test_kinit_error(self, use_pexpect):
    """kinit failure raises AnsibleConnectionFailure (subprocess and pexpect paths)."""
    mechanism = "subprocess"
    expected_err = "kinit: krb5_parse_name: " \
        "Configuration file does not specify default realm"

    def mock_popen_communicate(input=None, timeout=None):
        # Simulate kinit writing the error on stderr.
        return b"", to_bytes(expected_err)

    # subprocess path: non-zero returncode plus the stderr text above.
    mock_subprocess = MagicMock()
    mock_subprocess.Popen.return_value.communicate = mock_popen_communicate
    mock_subprocess.Popen.return_value.returncode = 1

    mock_pexpect = None
    if use_pexpect:
        # pexpect path: expect() raises and the error is read from output.
        mechanism = "pexpect"
        expected_err = "Configuration file does not specify default realm"
        mock_pexpect = MagicMock()
        mock_pexpect.spawn.return_value.expect = MagicMock(
            side_effect=OSError)
        mock_pexpect.spawn.return_value.read.return_value = to_bytes(
            expected_err)
        mock_pexpect.spawn.return_value.exitstatus = 1

    modules = {
        'pexpect': mock_pexpect,
        'subprocess': mock_subprocess,
    }

    with patch.dict(sys.modules, modules):
        pc = PlayContext()
        new_stdin = StringIO()

        # Drop the loader cache so the winrm plugin re-imports against the
        # patched modules above.
        connection_loader._module_cache = {}
        conn = connection_loader.get('winrm', pc, new_stdin)
        conn.set_options(var_options={"_extras": {}})

        with pytest.raises(AnsibleConnectionFailure) as err:
            conn._kerb_auth("invaliduser", "pass")

        assert str(err.value) == "Kerberos auth failure for principal " \
            "invaliduser with %s: %s" % (mechanism, expected_err)
def test_kinit_error_pass_in_output_subprocess(self, monkeypatch):
    """A password echoed in kinit output is redacted in the error message."""
    def fake_communicate(input=None, timeout=None):
        # Echo the piped-in password back on stderr, as kinit might.
        return b"", b"Error with kinit\n" + input

    popen_mock = MagicMock()
    popen_mock.return_value.communicate = fake_communicate
    popen_mock.return_value.returncode = 1
    monkeypatch.setattr("subprocess.Popen", popen_mock)

    winrm.HAS_PEXPECT = False  # force the subprocess code path
    play_ctx = PlayContext()
    fake_stdin = StringIO()
    winrm_conn = connection_loader.get('winrm', play_ctx, fake_stdin)
    winrm_conn.set_options(var_options={"_extras": {}})

    with pytest.raises(AnsibleConnectionFailure) as err:
        winrm_conn._kerb_auth("username", "password")

    assert str(err.value) == \
        "Kerberos auth failure for principal username with subprocess: " \
        "Error with kinit\n<redacted>"
def test_plugins_connection_file_transport_command(self, boto_client, s_check_output):
    """Smoke-test exec_command on the aws_ssm connection with mocked helpers.

    NOTE(review): exec_command itself is replaced by a MagicMock below and
    nothing is asserted afterwards, so this only verifies the calls do not
    raise — confirm whether assertions were intended here.
    """
    pc = PlayContext()
    new_stdin = StringIO()
    conn = connection_loader.get('community.aws.aws_ssm', pc, new_stdin)
    conn.get_option = MagicMock()
    conn.get_option.side_effect = ['1', '2', '3', '4', '5']
    conn._get_url = MagicMock()
    conn._get_url.side_effect = ['url1', 'url2']
    # Shadows the real boto3 module locally with a mock.
    boto3 = MagicMock()
    boto3.client('s3').return_value = MagicMock()
    conn.get_option.return_value = 1
    ssm_action = 'get'  # NOTE(review): unused — presumably leftover
    get_command = MagicMock()
    put_command = MagicMock()
    conn.exec_command = MagicMock()
    conn.exec_command.return_value = (put_command, None, False)
    conn.download_fileobj = MagicMock()
    (returncode, stdout, stderr) = conn.exec_command(put_command, in_data=None, sudoable=False)
    returncode = 0
    (returncode, stdout, stderr) = conn.exec_command(get_command, in_data=None, sudoable=False)
def __init__(self, play_context, new_stdin, *args, **kwargs):
    """Initialize terminal/session state and a helper 'local' connection."""
    super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
    # Interactive shell and prompt-matching state, populated on connect.
    self._ssh_shell = None
    self._matched_prompt = None
    self._matched_pattern = None
    self._last_response = None
    self._history = list()

    # Local connection used for controller-side operations.
    self._local = connection_loader.get('local', play_context, '/dev/null')
    self._local.set_options()

    # Per-network_os plugins, loaded during _connect().
    self._terminal = None
    self._cliconf = None

    # Verbose paramiko logging at -vvvv and above.
    if self._play_context.verbosity > 3:
        logging.getLogger('paramiko').setLevel(logging.DEBUG)

    # reconstruct the socket_path and set instance values accordingly
    self._update_connection_state()
def test_libssh_exec_command(self, mocked_super):
    """exec_command() rejects in-band data and returns (rc, stdout, stderr)."""
    play_ctx = PlayContext()
    libssh_conn = connection_loader.get("ansible.netcommon.libssh", play_ctx, "/dev/null")

    # Piping in-band data is not supported by this transport.
    with self.assertRaises(AnsibleError):
        libssh_conn.exec_command(cmd="ls", in_data=True)

    chan_mock = MagicMock()
    chan_mock.request_shell = MagicMock()
    chan_mock.exec_command = MagicMock()
    chan_mock.exec_command.return_value = MagicMock(
        returncode=0, stdout="echo hello", stderr=""
    )
    libssh_conn.ssh = MagicMock(**{"new_channel.return_value": chan_mock})

    rc, out, err = libssh_conn.exec_command(cmd="echo hello")
    self.assertEqual((rc, out, err), (0, "echo hello", ""))
def ssh_type_conn(self):
    """Lazily create and return the ssh transport connection plugin.

    Reads the 'ssh_type' option ('paramiko' or 'libssh'), mapping 'libssh'
    to the collection-qualified plugin name; any other value raises
    AnsibleConnectionFailure.
    """
    self._ssh_type = self.get_option("ssh_type")
    if self._ssh_type_conn is None:
        if self._ssh_type not in ["paramiko", "libssh"]:
            raise AnsibleConnectionFailure(
                "Invalid value '%s' set for ssh_type option."
                " Expected value is either 'libssh' or 'paramiko'"
                % self._ssh_type
            )

        # TODO: Remove this check if/when libssh connection plugin is moved to ansible-base
        if self._ssh_type == "libssh":
            self._ssh_type = "ansible.netcommon.libssh"

        self._ssh_type_conn = connection_loader.get(
            self._ssh_type, self._play_context, "/dev/null"
        )
        self.queue_message(
            "vvvv", "ssh type is set to %s" % self.get_option("ssh_type")
        )

    return self._ssh_type_conn
def test_kinit_with_missing_executable_pexpect(self, monkeypatch):
    """A missing kinit binary raises AnsibleConnectionFailure (pexpect path)."""
    pexpect = pytest.importorskip("pexpect")

    expected_err = "The command was not found or was not " \
                   "executable: /fake/kinit"
    spawn_mock = MagicMock(side_effect=pexpect.ExceptionPexpect(expected_err))
    monkeypatch.setattr("pexpect.spawn", spawn_mock)

    winrm.HAS_PEXPECT = True  # force the pexpect code path
    play_ctx = PlayContext()
    fake_stdin = StringIO()
    winrm_conn = connection_loader.get('winrm', play_ctx, fake_stdin)
    winrm_conn.set_options(
        var_options={"_extras": {}, "ansible_winrm_kinit_cmd": "/fake/kinit"})

    with pytest.raises(AnsibleConnectionFailure) as err:
        winrm_conn._kerb_auth("user@domain", "pass")

    assert str(err.value) == "Kerberos auth failure when calling " \
        "kinit cmd '/fake/kinit': %s" % expected_err
def run_ssh_exec(command=None, hostname=None, username=None, keyfile=None):
    '''Use ansible's connection plugin to execute the command'''
    with mock.patch('ansible.plugins.connection.ssh.display', MockLogger):
        play_ctx = MockPlayContext()
        if hostname:
            play_ctx.remote_addr = hostname
        if username:
            play_ctx.remote_user = username
        if keyfile:
            play_ctx.private_key_file = keyfile

        ssh_conn = connection_loader.get('ssh', play_ctx, None)
        rc, out, err = ssh_conn.exec_command(command)

    # Decode the raw byte streams for the caller.
    return (rc, out.decode('utf-8'), err.decode('utf-8'))
def test_network_cli_send(self, mocked_connect, mocked_terminal_re):
    """send() strips prompts from responses and raises on terminal errors."""
    pc = PlayContext()
    pc.network_os = "ios"
    pc.remote_addr = "localhost"
    conn = connection_loader.get(
        "ansible.netcommon.network_cli", pc, "/dev/null"
    )
    mock__terminal = MagicMock()
    # First regex list: terminal error patterns; second: prompt patterns.
    mocked_terminal_re.side_effect = [
        [re.compile(b"^ERROR")],
        [re.compile(b"device#")],
    ]
    conn._terminal = mock__terminal
    mock__shell = MagicMock()
    conn._ssh_shell = mock__shell
    # NOTE(review): the multi-line device transcript below was reconstructed
    # from whitespace-collapsed source — confirm the exact line breaks.
    response = b"""device#command
command response

device#
"""
    mock__shell.recv.side_effect = [response, None]
    conn.send(b"command")
    mock__shell.sendall.assert_called_with(b"command\r")
    # Echoed command and trailing prompt are stripped from the response.
    self.assertEqual(to_text(conn._command_response), "command response")

    mock__shell.reset_mock()
    mock__shell.recv.side_effect = [b"ERROR: error message device#"]
    mocked_terminal_re.side_effect = [
        [re.compile(b"^ERROR")],
        [re.compile(b"device#")],
    ]
    # Output matching a terminal error pattern raises with the raw message.
    with self.assertRaises(AnsibleConnectionFailure) as exc:
        conn.send(b"command")
    self.assertEqual(str(exc.exception), "ERROR: error message device#")
def test_network_cli__connect_libssh(
    self, mocked_super, mocked_terminal_loader
):
    """_connect() with ssh_type=libssh fires on_open_shell and on_become."""
    pc = PlayContext()
    pc.network_os = "ios"
    conn = connection_loader.get("network_cli", pc, "/dev/null")
    conn.ssh = MagicMock()
    conn.receive = MagicMock()
    conn._terminal = MagicMock()
    conn.set_options(direct={"ssh_type": "libssh"})

    conn._connect()
    self.assertTrue(conn._terminal.on_open_shell.called)
    self.assertFalse(conn._terminal.on_become.called)

    # BUGFIX: become_pass was set to "******" while the assertion expected
    # "password", so on_become() could never match. Use one shared value.
    become_pass = "password"
    conn._play_context.become = True
    conn._play_context.become_method = "enable"
    conn._play_context.become_pass = become_pass
    conn._connected = False

    conn._connect()
    conn._terminal.on_become.assert_called_with(passwd=become_pass)
def mock_run_env(request, mocker):
    """Pytest fixture: build an ssh connection with all process I/O mocked.

    Exposes pc, conn, mock_popen_res, mock_popen, mock_selector and
    mock_openpty on the requesting test class.
    """
    pc = PlayContext()
    new_stdin = StringIO()

    conn = connection_loader.get('ssh', pc, new_stdin)
    conn._send_initial_data = MagicMock()
    conn._examine_output = MagicMock()
    conn._terminate_process = MagicMock()
    conn._load_name = 'ssh'
    conn.sshpass_pipe = [MagicMock(), MagicMock()]
    request.cls.pc = pc
    request.cls.conn = conn

    # Fake Popen result with distinct fd numbers for stdin/stdout/stderr.
    mock_popen_res = MagicMock()
    mock_popen_res.poll = MagicMock()
    mock_popen_res.wait = MagicMock()
    mock_popen_res.stdin = MagicMock()
    mock_popen_res.stdin.fileno.return_value = 1000
    mock_popen_res.stdout = MagicMock()
    mock_popen_res.stdout.fileno.return_value = 1001
    mock_popen_res.stderr = MagicMock()
    mock_popen_res.stderr.fileno.return_value = 1002
    mock_popen_res.returncode = 0
    request.cls.mock_popen_res = mock_popen_res

    mock_popen = mocker.patch('subprocess.Popen', return_value=mock_popen_res)
    request.cls.mock_popen = mock_popen

    request.cls.mock_selector = MockSelector()
    mocker.patch('ansible.compat.selectors.DefaultSelector', lambda: request.cls.mock_selector)

    # Neutralize pty/fcntl/os side effects during the run.
    request.cls.mock_openpty = mocker.patch('pty.openpty')
    mocker.patch('fcntl.fcntl')
    mocker.patch('os.write')
    mocker.patch('os.close')
def __init__(self, play_context, new_stdin, *args, **kwargs):
    """Initialize terminal/session state and a helper 'local' connection."""
    super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
    # Interactive shell and prompt-matching state, populated on connect.
    self._ssh_shell = None
    self._matched_prompt = None
    self._matched_pattern = None
    self._last_response = None
    self._history = list()
    self._play_context = play_context

    # Local connection used for controller-side operations.
    self._local = connection_loader.get('local', play_context, '/dev/null')
    self._local.set_options()

    # Per-network_os plugins, loaded during _connect().
    self._terminal = None
    self._cliconf = None

    self._ansible_playbook_pid = kwargs.get('ansible_playbook_pid')

    # Verbose paramiko logging at -vvvv and above.
    if self._play_context.verbosity > 3:
        logging.getLogger('paramiko').setLevel(logging.DEBUG)

    # reconstruct the socket_path and set instance values accordingly
    self._update_connection_state()
def test_plugins_connection_ssh__examine_output(self):
    """_examine_output() detects prompt/success/failure markers, strips the
    matched lines from processed output, and sets the matching _flags."""
    pc = PlayContext()
    new_stdin = StringIO()

    conn = connection_loader.get('ssh', pc, new_stdin)

    conn.check_password_prompt = MagicMock()
    conn.check_become_success = MagicMock()
    conn.check_incorrect_password = MagicMock()
    conn.check_missing_password = MagicMock()

    # Substitute trivially-matchable detectors for the real ones.
    def _check_password_prompt(line):
        if b'foo' in line:
            return True
        return False

    def _check_become_success(line):
        if b'BECOME-SUCCESS-abcdefghijklmnopqrstuvxyz' in line:
            return True
        return False

    def _check_incorrect_password(line):
        if b'incorrect password' in line:
            return True
        return False

    def _check_missing_password(line):
        if b'bad password' in line:
            return True
        return False

    conn.check_password_prompt.side_effect = _check_password_prompt
    conn.check_become_success.side_effect = _check_become_success
    conn.check_incorrect_password.side_effect = _check_incorrect_password
    conn.check_missing_password.side_effect = _check_missing_password

    # test examining output for prompt
    conn._flags = dict(
        become_prompt=False,
        become_success=False,
        become_error=False,
        become_nopasswd_error=False,
    )

    pc.prompt = True
    # Matched line ('foo') is removed; trailing partial line is returned as
    # unprocessed remainder.
    output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nline 2\nfoo\nline 3\nthis should be the remainder', False)
    self.assertEqual(output, b'line 1\nline 2\nline 3\n')
    self.assertEqual(unprocessed, b'this should be the remainder')
    self.assertTrue(conn._flags['become_prompt'])
    self.assertFalse(conn._flags['become_success'])
    self.assertFalse(conn._flags['become_error'])
    self.assertFalse(conn._flags['become_nopasswd_error'])

    # test examining output for become prompt
    conn._flags = dict(
        become_prompt=False,
        become_success=False,
        become_error=False,
        become_nopasswd_error=False,
    )

    pc.prompt = False
    pc.success_key = u'BECOME-SUCCESS-abcdefghijklmnopqrstuvxyz'
    output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nline 2\nBECOME-SUCCESS-abcdefghijklmnopqrstuvxyz\nline 3\n', False)
    self.assertEqual(output, b'line 1\nline 2\nline 3\n')
    self.assertEqual(unprocessed, b'')
    self.assertFalse(conn._flags['become_prompt'])
    self.assertTrue(conn._flags['become_success'])
    self.assertFalse(conn._flags['become_error'])
    self.assertFalse(conn._flags['become_nopasswd_error'])

    # test examining output for become failure
    conn._flags = dict(
        become_prompt=False,
        become_success=False,
        become_error=False,
        become_nopasswd_error=False,
    )

    pc.prompt = False
    pc.success_key = None
    # Error lines are flagged but NOT stripped from the output.
    output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nline 2\nincorrect password\n', True)
    self.assertEqual(output, b'line 1\nline 2\nincorrect password\n')
    self.assertEqual(unprocessed, b'')
    self.assertFalse(conn._flags['become_prompt'])
    self.assertFalse(conn._flags['become_success'])
    self.assertTrue(conn._flags['become_error'])
    self.assertFalse(conn._flags['become_nopasswd_error'])

    # test examining output for missing password
    conn._flags = dict(
        become_prompt=False,
        become_success=False,
        become_error=False,
        become_nopasswd_error=False,
    )

    pc.prompt = False
    pc.success_key = None
    output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nbad password\n', True)
    self.assertEqual(output, b'line 1\nbad password\n')
    self.assertEqual(unprocessed, b'')
    self.assertFalse(conn._flags['become_prompt'])
    self.assertFalse(conn._flags['become_success'])
    self.assertFalse(conn._flags['become_error'])
    self.assertTrue(conn._flags['become_nopasswd_error'])
def run(self, tmp=None, task_vars=None):
    '''
    Generate parameters for the ``synchronize`` (rsync wrapper) module and
    execute it, choosing which host rsync actually runs on (localhost,
    delegate_to host, or the remote) and munging src/dest/user/port/ssh
    options accordingly.

    :arg tmp: legacy temporary-path argument; no longer used (deleted
        immediately after the superclass call).
    :arg task_vars: dict of task variables (hostvars, connection vars, ...).
    :returns: the module result dict; on a pre-flight failure, a dict with
        ``failed`` and ``msg`` set instead.
    '''
    # When modifying this function be aware of the tricky convolutions
    # your thoughts have to go through:
    #
    # In normal ansible, we connect from controller to inventory_hostname
    # (playbook's hosts: field) or controller to delegate_to host and run
    # a module on one of those hosts.
    #
    # So things that are directly related to the core of ansible are in
    # terms of that sort of connection that always originate on the
    # controller.
    #
    # In synchronize we use ansible to connect to either the controller or
    # to the delegate_to host and then run rsync which makes its own
    # connection from controller to inventory_hostname or delegate_to to
    # inventory_hostname.
    #
    # That means synchronize needs to have some knowledge of the
    # controller to inventory_host/delegate host that ansible typically
    # establishes and use those to construct a command line for rsync to
    # connect from the inventory_host to the controller/delegate.  The
    # challenge for coders is remembering which leg of the trip is
    # associated with the conditions that you're checking at any one time.
    if task_vars is None:
        task_vars = dict()

    # We make a copy of the args here because we may fail and be asked to
    # retry. If that happens we don't want to pass the munged args through
    # to our next invocation. Munged args are single use only.
    _tmp_args = self._task.args.copy()

    result = super(ActionModule, self).run(tmp, task_vars)
    del tmp  # tmp no longer has any effect

    # Store remote connection type
    self._remote_transport = self._connection.transport

    # Handle docker connection options
    if self._remote_transport == 'docker':
        self._docker_cmd = self._connection.docker_cmd
        if self._play_context.docker_extra_args:
            self._docker_cmd = "%s %s" % (self._docker_cmd, self._play_context.docker_extra_args)

    # self._connection accounts for delegate_to so
    # remote_transport is the transport ansible thought it would need
    # between the controller and the delegate_to host or the controller
    # and the remote_host if delegate_to isn't set.
    remote_transport = False
    if self._connection.transport != 'local':
        remote_transport = True

    try:
        delegate_to = self._task.delegate_to
    except (AttributeError, KeyError):
        delegate_to = None

    # ssh paramiko docker buildah and local are fully supported transports.
    # Anything else only works with delegate_to
    if delegate_to is None and self._connection.transport not in \
            ('ssh', 'paramiko', 'local', 'docker', 'buildah'):
        result['failed'] = True
        result['msg'] = (
            "synchronize uses rsync to function. rsync needs to connect to the remote "
            "host via ssh, docker client or a direct filesystem "
            "copy. This remote host is being accessed via %s instead "
            "so it cannot work." % self._connection.transport)
        return result

    use_ssh_args = _tmp_args.pop('use_ssh_args', None)

    # Parameter name needed by the ansible module
    _tmp_args['_local_rsync_path'] = task_vars.get('ansible_rsync_path') or 'rsync'

    # rsync thinks that one end of the connection is localhost and the
    # other is the host we're running the task for (Note: We use
    # ansible's delegate_to mechanism to determine which host rsync is
    # running on so localhost could be a non-controller machine if
    # delegate_to is used)
    src_host = '127.0.0.1'
    inventory_hostname = task_vars.get('inventory_hostname')
    dest_host_inventory_vars = task_vars['hostvars'].get(inventory_hostname)
    try:
        dest_host = dest_host_inventory_vars['ansible_host']
    except KeyError:
        # fall back through the older connection variable name, then the
        # inventory name itself
        dest_host = dest_host_inventory_vars.get('ansible_ssh_host', inventory_hostname)

    # every identifier the dest host is known by, used below to detect a
    # delegate_to that points at the dest host itself
    dest_host_ids = [hostid for hostid in (dest_host_inventory_vars.get('inventory_hostname'),
                                           dest_host_inventory_vars.get('ansible_host'),
                                           dest_host_inventory_vars.get('ansible_ssh_host'))
                     if hostid is not None]

    # collect every port localhost might be reachable on (first matching
    # magic port variable wins per host; default otherwise)
    localhost_ports = set()
    for host in C.LOCALHOST:
        localhost_vars = task_vars['hostvars'].get(host, {})
        for port_var in C.MAGIC_VARIABLE_MAPPING['port']:
            port = localhost_vars.get(port_var, None)
            if port:
                break
        else:
            port = C.DEFAULT_REMOTE_PORT
        localhost_ports.add(port)

    # dest_is_local tells us if the host rsync runs on is the same as the
    # host rsync puts the files on.  This is about *rsync's connection*,
    # not about the ansible connection to run the module.
    dest_is_local = False
    if delegate_to is None and remote_transport is False:
        dest_is_local = True
    elif delegate_to is not None and delegate_to in dest_host_ids:
        dest_is_local = True

    # CHECK FOR NON-DEFAULT SSH PORT
    inv_port = task_vars.get('ansible_ssh_port', None) or C.DEFAULT_REMOTE_PORT
    if _tmp_args.get('dest_port', None) is None:
        if inv_port is not None:
            _tmp_args['dest_port'] = inv_port

    # Set use_delegate if we are going to run rsync on a delegated host
    # instead of localhost
    use_delegate = False
    if delegate_to is not None and delegate_to in dest_host_ids:
        # edge case: explicit delegate and dest_host are the same
        # so we run rsync on the remote machine targeting its localhost
        # (itself)
        dest_host = '127.0.0.1'
        use_delegate = True
    elif delegate_to is not None and remote_transport:
        # If we're delegating to a remote host then we need to use the
        # delegate_to settings
        use_delegate = True

    # Delegate to localhost as the source of the rsync unless we've been
    # told (via delegate_to) that a different host is the source of the
    # rsync
    if not use_delegate and remote_transport:
        # Create a connection to localhost to run rsync on
        new_stdin = self._connection._new_stdin

        # Unlike port, there can be only one shell
        localhost_shell = None
        for host in C.LOCALHOST:
            localhost_vars = task_vars['hostvars'].get(host, {})
            for shell_var in C.MAGIC_VARIABLE_MAPPING['shell']:
                localhost_shell = localhost_vars.get(shell_var, None)
                if localhost_shell:
                    break
            if localhost_shell:
                break
        else:
            # for-else: no host supplied a shell variable, use the default
            localhost_shell = os.path.basename(C.DEFAULT_EXECUTABLE)
        self._play_context.shell = localhost_shell

        # Unlike port, there can be only one executable
        localhost_executable = None
        for host in C.LOCALHOST:
            localhost_vars = task_vars['hostvars'].get(host, {})
            for executable_var in C.MAGIC_VARIABLE_MAPPING['executable']:
                localhost_executable = localhost_vars.get(executable_var, None)
                if localhost_executable:
                    break
            if localhost_executable:
                break
        else:
            # for-else: no host supplied an executable variable, use default
            localhost_executable = C.DEFAULT_EXECUTABLE
        self._play_context.executable = localhost_executable

        # swap our connection for a local one so the module runs on the
        # controller and rsync makes the remote connection itself
        new_connection = connection_loader.get('local', self._play_context, new_stdin)
        self._connection = new_connection
        self._override_module_replaced_vars(task_vars)

    # SWITCH SRC AND DEST HOST PER MODE
    if _tmp_args.get('mode', 'push') == 'pull':
        (dest_host, src_host) = (src_host, dest_host)

    # MUNGE SRC AND DEST PER REMOTE_HOST INFO
    src = _tmp_args.get('src', None)
    dest = _tmp_args.get('dest', None)
    if src is None or dest is None:
        return dict(failed=True, msg="synchronize requires both src and dest parameters are set")

    # Determine if we need a user@
    user = None
    if not dest_is_local:
        # Src and dest rsync "path" handling
        if boolean(_tmp_args.get('set_remote_user', 'yes'), strict=False):
            if use_delegate:
                user = task_vars.get('ansible_delegated_vars', dict()).get('ansible_ssh_user', None)
                if not user:
                    user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user
                if not user:
                    user = C.DEFAULT_REMOTE_USER
            else:
                user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user

        # Private key handling
        private_key = self._play_context.private_key_file
        if private_key is not None:
            _tmp_args['private_key'] = private_key

        # use the mode to define src and dest's url
        if _tmp_args.get('mode', 'push') == 'pull':
            # src is a remote path: <user>@<host>, dest is a local path
            src = self._process_remote(_tmp_args, src_host, src, user, inv_port in localhost_ports)
            dest = self._process_origin(dest_host, dest, user)
        else:
            # src is a local path, dest is a remote path: <user>@<host>
            src = self._process_origin(src_host, src, user)
            dest = self._process_remote(_tmp_args, dest_host, dest, user, inv_port in localhost_ports)
    else:
        # Still need to munge paths (to account for roles) even if we aren't
        # copying files between hosts
        if not src.startswith('/'):
            src = self._get_absolute_path(path=src)
        if not dest.startswith('/'):
            dest = self._get_absolute_path(path=dest)

    _tmp_args['src'] = src
    _tmp_args['dest'] = dest

    # Allow custom rsync path argument
    rsync_path = _tmp_args.get('rsync_path', None)

    # backup original become as we are probably about to unset it
    become = self._play_context.become

    if not dest_is_local:
        # don't escalate for docker. doing --rsync-path with docker exec fails
        # and we can switch directly to the user via docker arguments
        if self._play_context.become and not rsync_path and self._remote_transport != 'docker':
            # If no rsync_path is set, become was originally set, and dest is
            # remote then add privilege escalation here.
            if self._play_context.become_method == 'sudo':
                rsync_path = 'sudo rsync'
            # TODO: have to add in the rest of the become methods here

        # We cannot use privilege escalation on the machine running the
        # module.  Instead we run it on the machine rsync is connecting
        # to.
        self._play_context.become = False

    _tmp_args['rsync_path'] = rsync_path

    if use_ssh_args:
        ssh_args = [
            getattr(self._play_context, 'ssh_args', ''),
            getattr(self._play_context, 'ssh_common_args', ''),
            getattr(self._play_context, 'ssh_extra_args', ''),
        ]
        _tmp_args['ssh_args'] = ' '.join([a for a in ssh_args if a])

    # If launching synchronize against docker container
    # use rsync_opts to support container to override rsh options
    if self._remote_transport in ['docker', 'buildah']:
        # Replicate what we do in the module argumentspec handling for lists
        if not isinstance(_tmp_args.get('rsync_opts'), MutableSequence):
            tmp_rsync_opts = _tmp_args.get('rsync_opts', [])
            if isinstance(tmp_rsync_opts, string_types):
                tmp_rsync_opts = tmp_rsync_opts.split(',')
            elif isinstance(tmp_rsync_opts, (int, float)):
                tmp_rsync_opts = [to_text(tmp_rsync_opts)]
            _tmp_args['rsync_opts'] = tmp_rsync_opts

        if '--blocking-io' not in _tmp_args['rsync_opts']:
            _tmp_args['rsync_opts'].append('--blocking-io')

        if self._remote_transport in ['docker']:
            # note: 'become' here is the value saved *before* it was unset above
            if become and self._play_context.become_user:
                _tmp_args['rsync_opts'].append("--rsh=%s exec -u %s -i" % (self._docker_cmd, self._play_context.become_user))
            elif user is not None:
                _tmp_args['rsync_opts'].append("--rsh=%s exec -u %s -i" % (self._docker_cmd, user))
            else:
                _tmp_args['rsync_opts'].append("--rsh=%s exec -i" % self._docker_cmd)
        elif self._remote_transport in ['buildah']:
            _tmp_args['rsync_opts'].append("--rsh=buildah run --")

    # run the module and store the result
    result.update(self._execute_module('synchronize', module_args=_tmp_args, task_vars=task_vars))

    if 'SyntaxError' in result.get('exception', result.get('msg', '')):
        # Emit a warning about using python3 because synchronize is
        # somewhat unique in running on localhost
        result['exception'] = result['msg']
        result['msg'] = ('SyntaxError parsing module. Perhaps invoking "python" on your local (or delegate_to) machine invokes python3. '
                         'You can set ansible_python_interpreter for localhost (or the delegate_to machine) to the location of python2 to fix this')
    return result
def test_plugins_connection_ssh__build_command(self):
    '''Smoke test: _build_command on a freshly loaded ssh connection must not raise.'''
    play_context = PlayContext()
    stdin = StringIO()
    ssh_conn = connection_loader.get('ssh', play_context, stdin)
    ssh_conn._build_command('ssh')