def _read_stream(self, stream: asyncio.StreamReader) -> None:
    """
    :param stream: asyncio.StreamReader
    :return: None
    """
    while not stream.at_eof():
        data = yield from stream.read(1)
        if data == b'\n':
            self.__new_element()
        else:
            self.__update_last_item_text(data)
async def _handle_client(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    while not reader.at_eof():
        try:
            array = await reader.readline()
            if reader.at_eof():
                break
            assert array[:1] == b'*'
            count = parse_int(array[1:-2])
            items = [await read_bulk_string(reader) for _ in range(count)]
            command = items[0]
            params = items[1:]
            try:
                with timeout(REDIS_SERVER_TIMEOUT):
                    await self._execute(command, params, writer)
            except Exception:
                logger.exception(f"Command failed: {command}")
                writer.write(encode_error("Command failed"))
        except Exception:
            logger.exception(f"Invalid command: {array}")
            writer.write(encode_error("Invalid command"))
    writer.close()
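The handler above depends on parse_int, read_bulk_string, encode_error, and a timeout context manager that are not shown. Below is a minimal sketch of the two parsing helpers, assuming standard RESP framing where a bulk string arrives as "$<length>\r\n<payload>\r\n"; the names and the lack of error handling are illustrative assumptions, not the original implementation.

import asyncio


def parse_int(raw: bytes) -> int:
    # RESP lengths and integers are plain ASCII decimal digits.
    return int(raw)


async def read_bulk_string(reader: asyncio.StreamReader) -> bytes:
    # Expect a header like b"$5\r\n", followed by the payload and a trailing CRLF.
    header = await reader.readline()
    assert header[:1] == b'$'
    length = parse_int(header[1:-2])
    payload = await reader.readexactly(length + 2)  # payload + b"\r\n"
    return payload[:-2]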
async def _connect_streams(reader: asyncio.StreamReader, writer: asyncio.StreamWriter,
                           queue: "asyncio.Queue[int]", token: CancelToken) -> None:
    try:
        while not token.triggered:
            if reader.at_eof():
                break
            try:
                size = queue.get_nowait()
            except asyncio.QueueEmpty:
                await asyncio.sleep(0)
                continue
            data = await token.cancellable_wait(reader.readexactly(size))
            writer.write(data)
            queue.task_done()
            await token.cancellable_wait(writer.drain())
    except OperationCancelled:
        pass
    finally:
        writer.write_eof()
        if reader.at_eof():
            reader.feed_eof()
async def _capture_subprocess_output(
    stream: asyncio.StreamReader,
) -> List[bytes]:
    lines = []
    while not stream.at_eof():
        line = await stream.readline()
        if line or not stream.at_eof():
            lines.append(line.rstrip(b'\n'))
    return lines
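As a usage sketch (assuming _capture_subprocess_output above is defined in the same module), the helper pairs naturally with asyncio.create_subprocess_exec, which exposes the child's stdout as an asyncio.StreamReader.

import asyncio
import sys


async def main() -> None:
    # Spawn a child whose stdout is an asyncio.StreamReader, then drain it.
    proc = await asyncio.create_subprocess_exec(
        sys.executable, "-c", "print('hello'); print('world')",
        stdout=asyncio.subprocess.PIPE,
    )
    assert proc.stdout is not None
    lines = await _capture_subprocess_output(proc.stdout)
    await proc.wait()
    print(lines)  # e.g. [b'hello', b'world'] on POSIX


if __name__ == "__main__":
    asyncio.run(main())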
async def receive_message(reader: asyncio.StreamReader):
    """
    Get the initial command message from the agent.
    Used only when both containers are not on a shared kernel.
    """
    buf = bytearray()
    while len(buf) != 4 and not reader.at_eof():
        buf += await reader.read(4 - len(buf))
    length = struct.unpack('!I', bytes(buf))[0]
    buf = bytearray()
    while len(buf) != length and not reader.at_eof():
        buf += await reader.read(length - len(buf))
    return msgpack.unpackb(bytes(buf), use_list=False)
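The sending side is not shown. Here is a minimal sketch of a matching writer, assuming the peer frames each msgpack payload with the same 4-byte big-endian length prefix ('!I') that receive_message unpacks; send_message is a hypothetical name, not part of the original code.

import asyncio
import struct

import msgpack


def send_message(writer: asyncio.StreamWriter, message) -> None:
    # Hypothetical counterpart to receive_message: 4-byte length prefix, then payload.
    payload = msgpack.packb(message, use_bin_type=True)
    writer.write(struct.pack('!I', len(payload)) + payload)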
async def _log_subprocess_output(
    pid: int,
    stream: asyncio.StreamReader,
    logger: logging.Logger,
    level: int,
    log_processor: Optional[Callable[[str], Tuple[str, int]]] = None,
) -> List[bytes]:
    while not stream.at_eof():
        line = await stream.readline()
        if line or not stream.at_eof():
            log_line = line.rstrip(b'\n').decode()
            if log_processor is not None:
                log_line, level = log_processor(log_line)
            logger.log(level, log_line, extra={"process": pid})
    return []
async def pipe(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    """Proxy between a reader/writer pair."""
    try:
        while not reader.at_eof():
            writer.write(await reader.read(4096))
    finally:
        writer.close()
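A usage sketch of this pipe coroutine wired up as a naive TCP proxy; the upstream address, listen address, and ports are placeholders, and pipe is assumed to be in scope.

import asyncio


async def proxy_connection(local_reader: asyncio.StreamReader,
                           local_writer: asyncio.StreamWriter) -> None:
    # Connect to the (placeholder) upstream and shovel bytes in both directions.
    remote_reader, remote_writer = await asyncio.open_connection("127.0.0.1", 8080)
    await asyncio.gather(
        pipe(local_reader, remote_writer),
        pipe(remote_reader, local_writer),
    )


async def main() -> None:
    server = await asyncio.start_server(proxy_connection, "127.0.0.1", 8888)
    async with server:
        await server.serve_forever()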
async def _wait_for_event(
    self,
    event_name: str,
    event_type: str,
    reader: asyncio.StreamReader,
) -> Any:
    matching = self._match_event(
        event_name,
        event_type,
    )
    if matching:
        return matching

    while True:
        data = await reader.readline()
        line = data.decode('utf-8').rstrip()
        if not len(line) and reader.at_eof():
            raise Exception(
                'Reached end of output waiting for {0}/{1}'.format(
                    event_name,
                    event_type,
                ))
        log.info(line)
        # Parse the line once and reuse the result instead of decoding it twice.
        event = json.loads(line)
        matching = self._match_event(
            event_name,
            event_type,
            event,
        )
        if matching:
            return matching
async def _read_stream(self, reader: asyncio.StreamReader) -> None:
    errors_remaining: int = 10
    while not reader.at_eof():
        try:
            data = await reader.readuntil(b'\x03')
        except (ConnectionError, asyncio.IncompleteReadError):
            break
        except asyncio.CancelledError:
            logging.exception("Klippy Stream Read Cancelled")
            raise
        except Exception:
            logging.exception("Klippy Stream Read Error")
            errors_remaining -= 1
            if not errors_remaining or not self.is_connected():
                break
            continue
        errors_remaining = 10
        try:
            decoded_cmd = json.loads(data[:-1])
            self._process_command(decoded_cmd)
        except Exception:
            logging.exception(
                f"Error processing Klippy Host Response: {data.decode()}")
    if not self.closing:
        logging.debug("Klippy Disconnection From _read_stream()")
        await self.close()
async def _log_stream(self, output_stream, input_stream: asyncio.StreamReader):
    if output_stream not in [sys.stdout, sys.stderr]:
        raise Exception("output_stream should be stdout or stderr")
    while not input_stream.at_eof():
        message = await input_stream.readline()
        self._log_message(output_stream, message)
async def read_stream(reader: StreamReader, writer: StreamWriter,
                      log: bool = True) -> AsyncGenerator[str, None]:
    """Read lines from the stream, yielding them as decoded strings."""
    reconnect_counter = 0
    try:
        while not reader.at_eof():
            data = await asyncio.wait_for(reader.readline(), timeout=60)
            message = data.decode('utf-8').strip()
            if log:
                logger.debug(message)
            yield message
    except asyncio.TimeoutError:
        logger.exception('TimeoutError -> StreamReader timed out waiting for a message',
                         exc_info=False)
    except Exception as ex:
        logger.exception(f'OtherError -> {ex.__class__.__name__}: {ex}', exc_info=False)
    finally:
        reconnect_counter += 1
        await asyncio.sleep(60 if reconnect_counter > 60 else reconnect_counter)
async def drain_err(self, stream: asyncio.StreamReader) -> None:
    assert self.stage is not None
    while not stream.at_eof():
        part = (await stream.read(8192)).decode()
        COMPILER_LOGGER.log(const.LOG_LEVEL_TRACE, "%s %s Err:", self.request.id, part)
        await self.stage.update_streams(err=part)
async def _pipe(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    try:
        while not reader.at_eof():
            writer.write(await reader.read(self.CHUNK_SIZE))
    finally:
        writer.close()
async def forward_remote_msg(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    try:
        ip, port, *_ = writer.get_extra_info('peername')
        message_id = get_connection_id((ip, port))
        seq = 0
        while not reader.at_eof():
            data: bytes = await reader.read(4096)
            if seq == 0:
                tag = BEGIN
            else:
                tag = MID
            if tag == BEGIN or len(data) > 0:
                message = TCPMessage(id=message_id, tag=tag, data=data)
                await self.ws.send(message.to_bytes())
            seq += 1
        message = TCPMessage(id=message_id, tag=END, data=b'')
        await self.ws.send(message.to_bytes())
    except asyncio.CancelledError:
        pass
    finally:
        writer.close()
        await writer.wait_closed()
async def handler(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    nonlocal data
    header_size = struct.calcsize('!L')
    while not reader.at_eof():
        try:
            header = await reader.readexactly(header_size)
        except asyncio.IncompleteReadError:
            break
        payload_size = struct.unpack("!L", header)[0]
        try:
            payload = await reader.readexactly(payload_size)
        except asyncio.IncompleteReadError:
            break
        for metric in pickle.loads(payload):
            data.append(metric)
        if len(data) == count:
            event.set()
    writer.close()
    reader.feed_eof()
async def pipe(
    self,
    reader: asyncio.StreamReader,
    writer: asyncio.StreamWriter,
    processor: str,
    delay: Delay,
) -> None:
    try:
        while not reader.at_eof():
            chunk = await reader.read(self.chunk_size)
            if delay.timeout > 0:
                log.debug(
                    "%r sleeping %.3f seconds on %s",
                    self, delay.timeout, processor,
                )
                await delay.wait()
            writer.write(await self.__processors[processor](chunk))
            if not self.buffered:
                await writer.drain()
    finally:
        writer.close()
        await writer.wait_closed()
async def pipe(reader: asyncio.StreamReader, writer: asyncio.StreamWriter, **kwargs):
    try:
        while not reader.at_eof():
            writer.write(await reader.read(1024))
    except Exception as e:
        logging.error(e)
async def handler(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    while not reader.at_eof():
        self.data += await reader.read(1)
        if self.data:
            self.event.set()
async def handle_client(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    addr = writer.get_extra_info('peername')
    log.info("Client connected %r", addr)

    while not reader.at_eof():
        try:
            async with async_timeout.timeout(5):
                line = await reader.readline()
                if line:
                    metric = line.decode()
                    name, value, timestamp = metric.split(" ", 3)
                    timestamp = float(timestamp)
                    if value == 'nan':
                        value = None
                    else:
                        value = float(value) if '.' in value else int(value)
                    await self.storage.write_async((name, (timestamp, value)))
        except asyncio.CancelledError:
            log.info('Client connection closed after timeout')
            break
        except:  # noqa
            continue

    log.info("Client disconnected %r", addr)
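The handler above parses plain-text lines of the form "<name> <value> <timestamp>" (Graphite-style line protocol). A client-side sketch that produces such lines; the send_metric name, host, and port are placeholders, not part of the original code.

import asyncio
import time


async def send_metric(host: str, port: int, name: str, value: float) -> None:
    # One metric per line: "<name> <value> <timestamp>\n".
    reader, writer = await asyncio.open_connection(host, port)
    writer.write(f"{name} {value} {time.time()}\n".encode())
    await writer.drain()
    writer.close()
    await writer.wait_closed()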
async def _handler(reader: StreamReader, writer: StreamWriter, req_info: RequestInfo) -> None:
    length_body = 0
    # Create the queues used to hand chunks to the file-writing task.
    file_stream = FileStream()
    WEAK_FILE_STREAM.add(file_stream)
    # Start the background writer task.
    create_task(file_writer(stream=file_stream))
    while not reader.at_eof():
        chunk = await reader.read(128**2)
        length_body += len(chunk)
        if chunk == b'':
            break
        await file_stream.put_in_queue_chunk(chunk)
        if length_body == int(req_info.headers['Content-Length']):
            break
    await file_stream.put_in_queue_chunk(End())
    # Wait for the file to be processed.
    file_info = await file_stream.get_out_queue_chunk()
    if isinstance(file_info, Error):
        raise ServerError('error: write file')
    if isinstance(file_info, dict):
        # TODO: store hash_file and path somewhere
        logger.info(file_info)
        writer.write((
            'HTTP/1.1 200 OK\r\n'
            'Content-Type: application/json; charset=utf-8\r\n'
            'Connection: close\r\n\r\n'
            f'{json.dumps(file_info)}\r\n\r\n'
        ).encode('utf-8'))
async def _handshake(
        initiator: "HandshakeInitiator",
        reader: asyncio.StreamReader,
        writer: asyncio.StreamWriter,
        token: CancelToken,
) -> Tuple[bytes, bytes, keccak_256, keccak_256]:
    """See the handshake() function above.

    This code was factored out into this helper so that we can create Peers with
    directly connected readers/writers for our tests.
    """
    initiator_nonce = keccak(os.urandom(HASH_LEN))
    auth_msg = initiator.create_auth_message(initiator_nonce)
    auth_init = initiator.encrypt_auth_message(auth_msg)
    writer.write(auth_init)

    auth_ack = await token.cancellable_wait(
        reader.read(ENCRYPTED_AUTH_ACK_LEN), timeout=REPLY_TIMEOUT)

    if reader.at_eof():
        # This is what happens when Parity nodes have blacklisted us
        # (https://github.com/ethereum/py-evm/issues/901).
        raise HandshakeFailure(
            "%s disconnected before sending auth ack" % repr(initiator.remote))

    ephemeral_pubkey, responder_nonce = initiator.decode_auth_ack_message(auth_ack)
    aes_secret, mac_secret, egress_mac, ingress_mac = initiator.derive_secrets(
        initiator_nonce, responder_nonce, ephemeral_pubkey, auth_init, auth_ack)
    return aes_secret, mac_secret, egress_mac, ingress_mac
async def json_lines_with_timeout(reader: asyncio.StreamReader,
                                  timeout: 'seconds' = DEFAULT_TIMEOUT):
    while not reader.at_eof():
        line = await asyncio.wait_for(reader.readline(), timeout)
        try:
            yield json.loads(line)
        except json.JSONDecodeError:
            pass
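Since json_lines_with_timeout is an async generator, it is consumed with async for; asyncio.wait_for propagates TimeoutError out of the generator if no complete line arrives within the timeout. A minimal consumer sketch, assuming the generator above is in scope:

import asyncio


async def consume(reader: asyncio.StreamReader) -> None:
    # Each iteration yields one decoded JSON object; lines that fail to parse are skipped.
    async for obj in json_lines_with_timeout(reader, timeout=5.0):
        print(obj)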
async def handle_conn(reader: StreamReader, writer: StreamWriter):
    while True:
        try:
            while not reader.at_eof():
                data = await reader.readuntil(b"\r\n")
                name, status = data.decode().strip("\r\n").split("\n")
                print(name + " " + status)
                # lock.acquire()
                if int(status) == 0:
                    if name in connList:
                        connList.remove(name)
                else:
                    if name not in connList:
                        connList.append(name)
                print("GFD: " + str(len(connList)) + " members: " + str(connList))
                # lock.release()
        except KeyboardInterrupt:
            return
        except:
            pass
async def handle(self, reader: StreamReader, writer: StreamWriter) -> None:
    data = b''
    while True:
        try:
            if reader.at_eof():
                break
            data += await reader.read(1024)
            if not data or reader.at_eof() or data[-4:] == b'\r\n\r\n':
                break
        except ConnectionError:
            break

    if len(data) > 0:
        request = data.decode('utf-8').strip('\r\n')
        response, fileGenerator = self.executor.execute(request)
        data = ResponseSerializer.serialize(response=response)
        writer.write(data)
        await writer.drain()
        if fileGenerator is not None:
            while True:
                try:
                    chunk = next(fileGenerator)
                    writer.write(chunk)
                    await writer.drain()
                    if not chunk:
                        raise StopIteration
                except ConnectionResetError:
                    info('ConnectionResetError')
                    writer.close()
                    break
                except StopIteration:
                    writer.close()
                    break
    writer.close()
async def on_connected(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    logger.info('Connected from %s', self.remote_addr)
    self.begin_control()
    while not reader.at_eof():
        line = await reader.readline()
        line = line.decode().strip()
        if not line:
            if not reader.at_eof():
                logger.warning('Empty command received')
            continue
        self._process_command(line)
    logger.info('Disconnected')
    writer.write_eof()
    self.end_control()
    self.connection_task = None
async def _read_all(stream: StreamReader):
    output: bytes = b""
    while not stream.at_eof():
        try:
            output += await stream.read()
            return output
        except asyncio.LimitOverrunError as err:
            output += await stream.readexactly(err.consumed)
    return output
async def _pipe(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    try:
        while not reader.at_eof():
            bytes_read = await reader.read(TcpProxy.MAX_BYTES)
            writer.write(bytes_read)
            await writer.drain()
    finally:
        writer.close()
async def read_lines_until(reader: StreamReader, timeout: float):
    lines = []
    while not reader.at_eof():
        try:
            line = await shield(wait_for(reader.readuntil(b'\r'), timeout))
            lines.append(line.decode("ascii"))
        # pylint: disable=protected-access
        except concurrent.futures._base.TimeoutError:
            break
    return lines
async def handle_stdin(reader: asyncio.StreamReader, proc_input, proc):
    """
    Daemon to handle messages from the agent.
    Used only when both containers are not on a shared kernel.
    """
    try:
        while not reader.at_eof():
            message = await receive_message(reader)
            status = handle_stdin_message(message, proc_input, proc)
            if status == "pipe_closed":
                return
    except:
        # This task will raise an exception when the loop stops
        return
async def tcp_handler(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    addr = writer.get_extra_info('peername')
    while not reader.at_eof():
        try:
            line = await reader.readline()
            if line:
                print("%s:%d" % addr, line.decode().strip())
        except:
            break
async def readlines(stream: asyncio.StreamReader):
    pattern = re.compile(br'[\r\n]+')
    data = bytearray()
    while not stream.at_eof():
        lines = pattern.split(data)
        data[:] = lines.pop(-1)
        for line in lines:
            yield line
        data.extend(await stream.read(1024))
async def echo_connection_received(client_reader: StreamReader,
                                   client_writer: StreamWriter):
    print(f"Got a connection from {client_writer.get_extra_info('peername')}")
    loop.create_task(bug())
    while not client_reader.at_eof():
        data: bytes = await client_reader.readline()
        print(f"Received data: {data.decode()}")
        client_writer.write(data)
        await client_writer.drain()
    print("Closed")
class HttpBodyReader():
    _expect_sent = None
    _waiting = None

    def __init__(self, headers, parser, transport, **kw):
        self.headers = headers
        self.parser = parser
        self.reader = StreamReader(**kw)
        self.reader.set_transport(transport)
        self.feed_data = self.reader.feed_data
        self.feed_eof = self.reader.feed_eof

    def waiting_expect(self):
        '''``True`` when the client is waiting for 100 Continue.
        '''
        if self._expect_sent is None:
            if (not self.reader.at_eof() and
                    self.headers.has('expect', '100-continue')):
                return True
            self._expect_sent = ''
        return False

    def can_continue(self):
        if self.waiting_expect():
            if self.parser.get_version() < (1, 1):
                raise HttpException(status=417)
            else:
                msg = '%s 100 Continue\r\n\r\n' % http_protocol(self.parser)
                self._expect_sent = msg
                self.reader._transport.write(msg.encode(DEFAULT_CHARSET))

    def fail(self):
        if self.waiting_expect():
            raise HttpException(status=417)

    def read(self, n=-1):
        self.can_continue()
        return self.reader.read(n=n)

    def readline(self):
        self.can_continue()
        return self.reader.readline()

    def readexactly(self, n):
        self.can_continue()
        return self.reader.readexactly(n)
def handle_stdin(self, reader: asyncio.StreamReader):
    """ Handle messages from the agent """
    try:
        while not reader.at_eof():
            buf = bytearray()
            while len(buf) != 4:
                buf += yield from reader.read(4 - len(buf))
            length = struct.unpack('I', bytes(buf))[0]
            buf = bytearray()
            while len(buf) != length:
                buf += yield from reader.read(length - len(buf))
            message = msgpack.unpackb(bytes(buf), encoding="utf8", use_list=False)
            yield from self.handle_stdin_message(message)
    except asyncio.CancelledError:
        return
    except KeyboardInterrupt:
        return
    except:
        self._logger.exception("Exception occurred while reading stdin")
        os._exit(1)  # DIE!
async def read_lines_matching(reader: StreamReader, *patterns: Pattern):
    waiting_for = {}
    for (index, pattern) in enumerate(patterns):
        if isinstance(pattern, str):
            pattern = re.compile(pattern)
        waiting_for[pattern] = index

    values = [None] * len(waiting_for)
    while not reader.at_eof():
        if all(values):
            break
        line = await reader.readuntil(b'\r')
        line = line.decode("ascii").strip()
        logger.debug("got line: %s", line)
        for pattern in waiting_for:
            index = waiting_for[pattern]
            if not values[index]:
                match = pattern.fullmatch(line)
                if match:
                    values[index] = match[1]
    return values