class ThreadedTcp(Tcp):
    """
    TCP connection that feeds Moler's connection from a dedicated thread.

    Usable as external-IO for Moler since it owns its runner (a thread)
    working in background and pulling data from the TCP connection.
    """

    def __init__(self, moler_connection, port, host="localhost",
                 receive_buffer_size=64 * 4096, logger=None):
        """Initialize TCP-threaded connection."""
        super(ThreadedTcp, self).__init__(port=port, host=host,
                                          receive_buffer_size=receive_buffer_size,
                                          logger=logger)
        self.pulling_thread = None
        # make Moler happy (3 requirements) :-)
        self.moler_connection = moler_connection  # (1)
        self.moler_connection.how2send = self.send  # (2)

    def open(self):
        """Open TCP connection and launch the background pulling thread."""
        result = super(ThreadedTcp, self).open()
        stop_event = threading.Event()
        self.pulling_thread = TillDoneThread(target=self.pull_data,
                                             done_event=stop_event,
                                             kwargs={'pulling_done': stop_event})
        self.pulling_thread.start()
        return result

    def close(self):
        """Stop the pulling thread and close TCP connection."""
        if self.pulling_thread:
            self.pulling_thread.join()
            self.pulling_thread = None
        super(ThreadedTcp, self).close()

    def pull_data(self, pulling_done):
        """Keep pulling data from TCP connection until told to stop."""
        while not pulling_done.is_set():
            try:
                data = self.receive(timeout=0.1)
            except ConnectionTimeout:
                continue  # no data yet - just retry
            except (RemoteEndpointNotConnected, RemoteEndpointDisconnected):
                break  # remote end gone - stop pulling
            if data:
                # make Moler happy :-)
                self.moler_connection.data_received(data, datetime.datetime.now())  # (3)
        if self.socket is not None:
            self._close_ignoring_exceptions()
class ThreadedSshShell(IOConnection):
    """
    SshShell connection feeding Moler's connection inside dedicated thread.

    This is external-IO usable for Moler since it has its own runner
    (thread) that can work in background and pull data from SshShell connection.
    """

    def __init__(self, moler_connection, host, port=22, username=None, login=None,
                 password=None, receive_buffer_size=64 * 4096, name=None,
                 logger_name="", existing_client=None):
        """
        Initialization of SshShell-threaded connection.

        :param moler_connection: moler-dispatching-connection to use for data forwarding
        :param host: host of ssh server where we want to connect
        :param port: port of ssh server
        :param username: username for password based login
        :param login: alternate naming for username param (as it is used by OpenSSH) for parity with Ssh command
        :param password: password for password based login
        :param receive_buffer_size: size of receive buffer of ssh channel
        :param name: name assigned to connection
        :param logger_name: take that logger from logging
        :param existing_client: (internal use) for reusing ssh transport of existing sshshell

        Logger is retrieved by logging.getLogger(logger_name)
        If logger_name == "" - take default logger "<moler-connection-logger>.io"
        If logger_name is None - don't use logging
        """
        # NOTE: connection name must be set on moler_connection BEFORE calling
        # super().__init__ since self.name proxies to moler_connection.name
        if name:
            moler_connection.name = name
        super(ThreadedSshShell, self).__init__(moler_connection=moler_connection)
        self.logger = self._select_logger(logger_name, self.name, moler_connection)
        self.sshshell = SshShell(host=host, port=port, username=username, login=login,
                                 password=password,
                                 receive_buffer_size=receive_buffer_size,
                                 logger=self.logger, existing_client=existing_client)
        self.pulling_thread = None  # TillDoneThread created at open()
        self.pulling_timeout = 0.1  # read timeout [sec] used by pulling loop
        self._pulling_done = threading.Event()  # signals pulling thread to stop

    @classmethod
    def from_sshshell(cls, moler_connection, sshshell, name=None, logger_name=""):
        """
        Build new sshshell based on existing one - it will reuse its transport

        No need to provide host, port and login credentials - they will be reused.
        You should use this constructor if you are connecting towards same host/port
        using same credentials.

        :param moler_connection: moler-connection may not be reused; we need fresh one
        :param sshshell: existing connection to reuse it's ssh transport
        :param name: name assigned to new connection
        :param logger_name: name of logger for new connection (see __init__ semantics)
        :return: instance of new sshshell connection with reused ssh transport
        """
        if isinstance(sshshell, ThreadedSshShell):
            sshshell = sshshell.sshshell
        assert isinstance(sshshell, SshShell)
        assert issubclass(cls, ThreadedSshShell)
        new_sshshell = cls(moler_connection=moler_connection, host=sshshell.host,
                           port=sshshell.port, username=sshshell.username,
                           password=sshshell.password,
                           receive_buffer_size=sshshell.receive_buffer_size,
                           name=name, logger_name=logger_name,
                           existing_client=sshshell.ssh_client)
        return new_sshshell

    @property
    def name(self):
        """Get name of connection"""
        return self.moler_connection.name

    @name.setter
    def name(self, value):
        """
        Set name of connection

        Io and embedded Moler's connection compose "one logical connection".
        If connection is using default logger ("moler.connection.<name>.io")
        then modify logger after connection name change.
        """
        was_using_default_logger = (self.logger is not None) and (
            self.logger.name == self._default_logger_name(self.name))
        self.moler_connection.name = value
        if was_using_default_logger:
            # keep logger name in sync with the new connection name
            self.logger = logging.getLogger(
                self._default_logger_name(self.name))
            self.sshshell.logger = self.logger

    @staticmethod
    def _select_logger(logger_name, connection_name, moler_connection):
        # Pick logger according to __init__ docstring rules:
        # None -> no logging, "" -> derive from moler_connection, else use given name.
        if logger_name is None:
            return None  # don't use logging
        default_logger_name = ThreadedSshShell._default_logger_name(
            connection_name)
        if logger_name:
            name = logger_name
        else:
            # take it from moler_connection.logger and extend by ".io"
            if moler_connection.logger is None:
                name = default_logger_name
            else:
                name = "{}.io".format(moler_connection.logger.name)
        logger = logging.getLogger(name)
        if name and (name != default_logger_name):
            # warn so that non-default logger choice is visible in logs
            msg = "using '{}' logger - not default '{}'".format(
                name, default_logger_name)
            logger.log(level=logging.WARNING, msg=msg)
        return logger

    @staticmethod
    def _default_logger_name(connection_name):
        # default logger name pattern shared by io & embedded moler connection
        return "moler.connection.{}.io".format(connection_name)

    @property
    def _ssh_transport(self):
        # expose transport of embedded SshShell (may be reused by siblings)
        return self.sshshell._ssh_transport

    @property
    def _shell_channel(self):
        # shell channel of embedded SshShell; None when connection is closed
        return self.sshshell._shell_channel

    def __str__(self):
        # delegate address formatting to embedded SshShell
        address = self.sshshell.__str__()
        return address

    def open(self):
        """
        Open Ssh channel to remote shell & start thread pulling data from it.

        If SshShell was created with "reused ssh transport" then no new transport
        is created - just shell channel. (such connection establishment is quicker)
        Else - before creating channel we create ssh transport and perform full login
        with provided credentials.

        May be used as context manager: with connection.open():
        """
        was_closed = self._shell_channel is None
        self.sshshell.open()
        is_open = self._shell_channel is not None
        if was_closed and is_open:
            self._notify_on_connect()
        if self.pulling_thread is None:
            # set reading timeout in same thread where we open shell and before starting pulling thread
            self.sshshell._settimeout(timeout=self.pulling_timeout)
            self._pulling_done.clear()
            self.pulling_thread = TillDoneThread(
                target=self._pull_data,
                done_event=self._pulling_done,
                kwargs={'pulling_done': self._pulling_done})
            self.pulling_thread.start()
        return contextlib.closing(self)

    def close(self):
        """
        Close SshShell connection. Close channel of that connection & stop pulling thread.

        If SshShell was created with "reused ssh transport" then closing will close
        only ssh channel of remote shell. Ssh transport will be closed
        after it's last channel is closed.
        """
        self._pulling_done.set()
        if self.pulling_thread:
            self.pulling_thread.join(
            )  # _pull_data will do self.sshshell.close()
            self.pulling_thread = None

    def send(self, data, timeout=1):
        """
        Send data via SshShell connection.

        :param data: data
        :type data: bytes
        :param timeout: max time to spend on sending all data, default 1 sec
        :type timeout: float
        """
        self.sshshell.send(data=data, timeout=timeout)

    def receive(self):
        """
        Pull data bytes from external-IO:

            data = io_connection.receive()

        data is intended to forward into Moler's connection:

            self.moler_connection.data_received(data)
        """
        data = self.sshshell._recv()
        return data

    def _pull_data(self, pulling_done):
        """Pull data from SshShell connection."""
        already_notified = False
        while not pulling_done.is_set():
            try:
                data = self.receive()
                if data:
                    self.data_received(data, datetime.datetime.now())  # (3)
            except ConnectionTimeout:
                continue  # receive timed out - retry until pulling_done
            except RemoteEndpointNotConnected:
                break
            except RemoteEndpointDisconnected:
                self._notify_on_disconnect()
                already_notified = True
                break
            except Exception as err:
                # catch-all so that pulling thread never dies silently
                err_msg = "Unexpected {!r} during pulling for data in {}".format(
                    err, self)
                if self.sshshell.logger:
                    self.sshshell.logger.exception(err_msg)
                else:
                    print("ERROR: {}".format(err_msg))
                break
        # close channel from within pulling thread; notify only if we really
        # transitioned open -> closed and haven't notified already
        was_open = self._shell_channel is not None
        self.sshshell.close()
        is_closed = self._shell_channel is None
        if was_open and is_closed and (not already_notified):
            self._notify_on_disconnect()
class ThreadedTerminal(IOConnection):
    """
    Works on Unix (like Linux) systems only!

    ThreadedTerminal is shell working under Pty
    """

    def __init__(self, moler_connection, cmd="/bin/bash", select_timeout=0.002,
                 read_buffer_size=4096, first_prompt=r'[%$#]+',
                 target_prompt=r'moler_bash#',
                 set_prompt_cmd='export PS1="moler_bash# "\n',
                 dimensions=(100, 300)):
        """
        :param moler_connection: Moler's connection to join with
        :param cmd: command to run terminal
        :param select_timeout: timeout for reading data from terminal
        :param read_buffer_size: buffer for reading data from terminal
        :param first_prompt: default terminal prompt on host where Moler is starting
        :param target_prompt: new prompt which will be set on terminal
        :param set_prompt_cmd: command to change prompt with new line char on the end of string
        :param dimensions: dimensions of the pseudoterminal
        """
        super(ThreadedTerminal, self).__init__(moler_connection=moler_connection)
        self._terminal = None
        self._shell_operable = Event()
        self._export_sent = False
        self.pulling_thread = None
        self.read_buffer = ""
        self._select_timeout = select_timeout
        self._read_buffer_size = read_buffer_size
        self.dimensions = dimensions
        self.first_prompt = first_prompt
        self.target_prompt = target_prompt
        self._cmd = [cmd]
        self.set_prompt_cmd = set_prompt_cmd
        # regex matching set_prompt_cmd as echoed back by terminal
        # (quoted prompt text stripped so it won't match target_prompt)
        self._re_set_prompt_cmd = re.sub("['\"].*['\"]", "",
                                         self.set_prompt_cmd.strip())

    def open(self):
        """Open ThreadedTerminal connection & start thread pulling data from it."""
        ret = super(ThreadedTerminal, self).open()

        if not self._terminal:
            self._terminal = PtyProcessUnicode.spawn(self._cmd,
                                                     dimensions=self.dimensions)
            # need to not replace not unicode data instead of raise exception
            self._terminal.decoder = codecs.getincrementaldecoder('utf-8')(
                errors='replace')
            done = Event()
            self.pulling_thread = TillDoneThread(target=self.pull_data,
                                                 done_event=done,
                                                 kwargs={'pulling_done': done})
            self.pulling_thread.start()
            retry = 0
            is_operable = False

            # poke terminal with newlines until shell answers with target prompt
            while (retry < 10) and (not is_operable):
                is_operable = self._shell_operable.wait(timeout=1)
                if not is_operable:
                    self.logger.warning(
                        "Terminal open but not fully operable yet.\nREAD_BUFFER: '{}'"
                        .format(self.read_buffer.encode("UTF-8", "replace")))
                    self._terminal.write('\n')
                    retry += 1

        return ret

    def close(self):
        """Close ThreadedTerminal connection & stop pulling thread."""
        if self.pulling_thread:
            self.pulling_thread.join()
            self.pulling_thread = None
        self.moler_connection.shutdown()
        super(ThreadedTerminal, self).close()

        if self._terminal and self._terminal.isalive():
            self._terminal.close(force=True)
        self._terminal = None
        self._notify_on_disconnect()

    def send(self, data):
        """Write data into ThreadedTerminal connection."""
        if self._terminal:
            self._terminal.write(data)

    def pull_data(self, pulling_done):
        """Pull data from ThreadedTerminal connection."""
        reads = []
        while not pulling_done.is_set():
            try:
                reads, _, _ = select.select([self._terminal.fd], [], [],
                                            self._select_timeout)
            except ValueError as exc:
                self.logger.warning("'{}: {}'".format(exc.__class__, exc))
                self._notify_on_disconnect()
                pulling_done.set()
                # BUGFIX: don't fall through to read() using stale 'reads'
                # from a previous iteration after select() has failed
                continue

            if self._terminal.fd in reads:
                try:
                    data = self._terminal.read(self._read_buffer_size)
                    if self._shell_operable.is_set():
                        self.data_received(data)
                    else:
                        self._verify_shell_is_operable(data)
                except EOFError:
                    self._notify_on_disconnect()
                    pulling_done.set()

    def _verify_shell_is_operable(self, data):
        # Accumulate startup output until target prompt appears; meanwhile
        # send set_prompt_cmd once after first (default) prompt shows up.
        self.read_buffer = self.read_buffer + data
        lines = self.read_buffer.splitlines()

        for line in lines:
            line = remove_all_known_special_chars(line)
            if not re.search(self._re_set_prompt_cmd, line) and re.search(
                    self.target_prompt, line):
                self._notify_on_connect()
                self._shell_operable.set()
                # BUGFIX: re.MULTILINE was passed positionally as the 'count'
                # argument of re.sub(); it must be passed as flags=
                data = re.sub(self.target_prompt, '', self.read_buffer,
                              flags=re.MULTILINE)
                self.data_received(data)
            elif not self._export_sent and re.search(
                    self.first_prompt, self.read_buffer, re.MULTILINE):
                self.send(self.set_prompt_cmd)
                self._export_sent = True
class ThreadedTerminal(IOConnection):
    """
    Works on Unix (like Linux) systems only!

    ThreadedTerminal is shell working under Pty
    """

    def __init__(self, moler_connection, cmd=None, select_timeout=0.002,
                 read_buffer_size=4096, first_prompt=None, dimensions=(100, 300)):
        """
        :param moler_connection: Moler's connection to join with
        :param cmd: command (argv list) to run terminal; default bash with init-file
        :param select_timeout: timeout for reading data from terminal
        :param read_buffer_size: buffer for reading data from terminal
        :param first_prompt: regex of prompt to await before declaring shell operable
        :param dimensions: dimensions of the pseudoterminal
        """
        super(ThreadedTerminal, self).__init__(moler_connection=moler_connection)
        self._select_timeout = select_timeout
        self._read_buffer_size = read_buffer_size
        self.dimensions = dimensions
        self._terminal = None
        self.pulling_thread = None
        self._shell_operable = Event()
        if cmd is None:
            cmd = ['/bin/bash', '--init-file']
        self._cmd = ThreadedTerminal._build_bash_command(cmd)
        if first_prompt:
            self.prompt = first_prompt
        else:
            # default prompt set by bash_config init file
            self.prompt = r'^moler_bash#'

    def open(self):
        """Open ThreadedTerminal connection & start thread pulling data from it."""
        if not self._terminal:
            self._terminal = PtyProcessUnicode.spawn(self._cmd,
                                                     dimensions=self.dimensions)
            done = Event()
            self.pulling_thread = TillDoneThread(target=self.pull_data,
                                                 done_event=done,
                                                 kwargs={'pulling_done': done})
            self.pulling_thread.start()
            # give spawned shell a moment to become operable (prompt seen)
            self._shell_operable.wait(timeout=2)

    def close(self):
        """Close ThreadedTerminal connection & stop pulling thread."""
        if self.pulling_thread:
            self.pulling_thread.join()
            self.pulling_thread = None
        super(ThreadedTerminal, self).close()

        if self._terminal and self._terminal.isalive():
            self._terminal.close()
        self._terminal = None
        self._notify_on_disconnect()

    def send(self, data):
        """Write data into ThreadedTerminal connection."""
        self._terminal.write(data)

    def pull_data(self, pulling_done):
        """Pull data from ThreadedTerminal connection."""
        read_buffer = ""

        while not pulling_done.is_set():
            reads, _, _ = select.select([self._terminal.fd], [], [],
                                        self._select_timeout)
            if self._terminal.fd in reads:
                try:
                    data = self._terminal.read(self._read_buffer_size)

                    if self._shell_operable.is_set():
                        self.data_received(data)
                    else:
                        # buffer startup output until prompt confirms shell works
                        read_buffer = read_buffer + data
                        if re.search(self.prompt, read_buffer, re.MULTILINE):
                            self._notify_on_connect()
                            self._shell_operable.set()
                            # BUGFIX: re.MULTILINE was passed positionally as
                            # the 'count' argument of re.sub(); must be flags=
                            data = re.sub(self.prompt, '', read_buffer,
                                          flags=re.MULTILINE)
                            self.data_received(data)
                except EOFError:
                    self._notify_on_disconnect()
                    pulling_done.set()

    @staticmethod
    def _build_bash_command(bash_cmd):
        """Append path of bundled bash_config init file to given bash argv."""
        abs_path = os.path.dirname(__file__)
        init_file_path = [
            os.path.join(abs_path, "..", "..", "config", "bash_config")
        ]
        return bash_cmd + init_file_path
class ThreadedFifoBuffer(FifoBuffer):
    """
    FIFO-in-memory connection driven by a dedicated thread.

    Usable as external-IO for Moler since it owns its runner (a thread)
    working in background and pulling data from the FIFO-mem connection.
    Usable for integration tests.
    """

    def __init__(self, moler_connection, echo=True, name=None, logger_name=""):
        """Initialize FIFO-mem-threaded connection."""
        super(ThreadedFifoBuffer, self).__init__(moler_connection=moler_connection,
                                                 echo=echo,
                                                 name=name,
                                                 logger_name=logger_name)
        self.pulling_thread = None
        self.injections = Queue()  # (data, delay) pairs awaiting injection

    def open(self):
        """Start thread pulling data from FIFO buffer."""
        result = super(ThreadedFifoBuffer, self).open()
        stop_event = threading.Event()
        self.pulling_thread = TillDoneThread(target=self.pull_data,
                                             done_event=stop_event,
                                             kwargs={'pulling_done': stop_event})
        self.pulling_thread.start()
        self._log(msg="open {}".format(self), level=logging.INFO)
        self._notify_on_connect()
        return result

    def close(self):
        """Stop pulling thread."""
        if self.pulling_thread:
            self.pulling_thread.join()
            self.pulling_thread = None
        super(ThreadedFifoBuffer, self).close()
        self._log(msg="closed {}".format(self), level=logging.INFO)
        self._notify_on_disconnect()

    def inject(self, input_bytes, delay=0.0):
        """
        Add bytes to end of buffer

        :param input_bytes: iterable of bytes to inject
        :param delay: delay before each inject
        :return: None
        """
        for chunk in input_bytes:
            self.injections.put((chunk, delay))
        if not delay:
            time.sleep(0.05)  # give subsequent read() a chance to get data

    def _inject_deferred(self):
        # move injections scheduled earlier (deferred) into the live queue
        if self.deferred_injections:
            for chunk, delay in self.deferred_injections:
                self.injections.put((chunk, delay))
            self.deferred_injections = []
            time.sleep(0.05)  # give subsequent read() a chance to get data

    def pull_data(self, pulling_done):
        """Pull data from FIFO buffer."""
        while not pulling_done.is_set():
            self.read()  # internally forwards to embedded Moler connection
            try:
                injection = self.injections.get_nowait()
            except Empty:
                time.sleep(0.01)  # give FIFO chance to get data
                continue
            data, delay = injection
            if delay:
                time.sleep(delay)
            self._inject(data)
            self.injections.task_done()
class ZmqSubprocess(object):
    """
    Connection speaking with program running in subprocess.

    ZeroMQ sockets serve as a proxy so that memory usage is minimized
    (fork inside subprocess.Popen duplicates process memory).
    The cost of this setup is one extra process, 2 threads and 4 ZMQ-sockets.
    """

    def __init__(self, stdin_port, stdout_port, command='/bin/bash', args=None,
                 env=None, starting_path=None):
        """Create ZMQ-proxied connection to program spawned in subprocess."""
        self.command = command
        self.args = [command]  # command have to be arg0
        if args:
            self.args.extend(args)
        self.env = env  # if env == None spawned bash will be given with os.environ
        self.path = starting_path
        self.forward_in_sock = create_input_socket_client(port=stdin_port)
        self.forward_out_sock = create_output_socket_subsciber(port=stdout_port)
        self._done = threading.Event()
        self._out_thread = TillDoneThread(
            target=self.read_subprocess_output,
            done_event=self._done,
            name="reader",
            kwargs={'reading_done': self._done,
                    'forward_sock': self.forward_out_sock})
        self.start()

    def start(self):
        """Start thread reading subprocess output."""
        self._out_thread.start()

    def stop(self):
        """Stop thread reading subprocess output."""
        self._out_thread.join()

    def send(self, data):
        """
        Send data towards subprocess.

        :param data: data
        :type data: str
        """
        print("sending: {}".format(data))
        self.forward_in_sock.send_string("{}\n".format(data))

    def data_received(self, data):
        """Incoming-IO API: external-IO should call this method when data is received"""
        print("Received {}".format(data.strip()))

    def read_subprocess_output(self, reading_done, forward_sock):
        """Keep receiving subprocess output published over ZMQ until done."""
        print("read_subprocess_output ... STARTED")
        while not reading_done.is_set():
            try:
                topic, message = forward_sock.recv_multipart(flags=zmq.NOBLOCK)
            except zmq.Again:
                pass  # no data on nonblocking zmq socket
            else:
                # print("ZMQ Received {} output: {}".format(topic, message))
                self.data_received(message.decode("utf-8"))
        print("read_subprocess_output ... DONE")
class Popen(object):
    """
    Run command in subprocess and bridge its stdin/stdout via ZMQ sockets.

    Subprocess output is published on a ZMQ publisher socket; data received
    on a ZMQ server socket is injected into subprocess stdin.
    """

    def __init__(self, command2run, stdin_port, stdout_port):
        """
        :param command2run: command to spawn in subprocess
        :param stdin_port: ZMQ port receiving input destined for subprocess stdin
        :param stdout_port: ZMQ port where subprocess output is published
        """
        self.command2run = command2run
        self.forward_in_sock = create_input_socket_server(port=stdin_port)
        self.forward_out_sock = create_output_socket_publisher(
            port=stdout_port)
        self._done = threading.Event()
        self.__subproc = subprocess.Popen(command2run,
                                          stdin=subprocess.PIPE,
                                          stdout=subprocess.PIPE,
                                          stderr=subprocess.STDOUT)
        self._out_thread = TillDoneThread(
            target=self.forward_subprocess_output,
            done_event=self._done,
            name="puller",
            kwargs={
                'pulling_done': self._done,
                'sub_process': self.__subproc,
                'forward_sock': self.forward_out_sock
            })
        self._in_thread = TillDoneThread(target=self.forward_subprocess_input,
                                         done_event=self._done,
                                         name="injector",
                                         kwargs={
                                             'pulling_done': self._done,
                                             'sub_process': self.__subproc,
                                             'forward_sock': self.forward_in_sock
                                         })
        self.start()

    def start(self):
        """Start threads forwarding subprocess output & input."""
        self._out_thread.start()
        self._in_thread.start()

    def stop(self):
        """Ask shell in subprocess to exit & stop forwarding threads."""
        self.__subproc.stdin.write("exit\n".encode("utf-8"))
        # BUGFIX: subprocess stdin pipe is block-buffered by default; without
        # flush() the "exit" command may never reach the subprocess and the
        # blocking readline() in forward_subprocess_output would hang forever.
        self.__subproc.stdin.flush()
        self._out_thread.join()
        self._in_thread.join()

    @staticmethod
    def forward_subprocess_output(pulling_done, sub_process, forward_sock=None):
        """Publish each subprocess stdout line over ZMQ until told to stop."""
        print("forward_subprocess_output ... STARTED")
        while not pulling_done.is_set():
            # for line in iter(sub_process.stdout.readline, b''):
            line = sub_process.stdout.readline()  # BLOCKING !!!
            topic = 'process.pid:{}'.format(sub_process.pid)
            # print("Forwarding {} output: {}".format(topic, line.strip()))
            if forward_sock:
                forward_sock.send_multipart([topic.encode('utf-8'), line])
        sub_process.stdout.close()
        print("forward_subprocess_output ... DONE")

    @staticmethod
    def forward_subprocess_input(pulling_done, sub_process, forward_sock):
        """Inject data received over ZMQ into subprocess stdin until told to stop."""
        print("forward_subprocess_input ... STARTED")
        while not pulling_done.is_set():
            if sub_process.poll() is None:  # process still running
                try:
                    data = forward_sock.recv(flags=zmq.NOBLOCK)
                    print("Input forwarder received: {}".format(data))
                    if not data:  # is it same way signaling socket-closed
                        break
                    # forward data to subprocess
                    print("Forwarding {} into subprocess PID:{}".format(
                        data, sub_process.pid))
                    sub_process.stdin.write(data)
                except zmq.Again:
                    pass  # no data on nonblocking zmq socket
            else:
                print("subprocess PID:{} is gone".format(sub_process.pid))
                break
        print("forward_subprocess_input ... DONE")