async def dispatch(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> None:
    """Peek at the first bytes of a connection and route it to the matching
    protocol handler (SOCKS4, SOCKS5, or HTTP-by-method).

    The peeked bytes are pushed back into the reader so the chosen handler
    sees the complete stream from the start.
    """
    first_packet = await reader.read(2048)
    if not first_packet:
        # Client connected and immediately closed: nothing to dispatch.
        writer.close()
        await writer.wait_closed()
        return
    # Push the peeked bytes back in one slice assignment.
    # Bug fix: the original inserted bytes one at a time (O(n^2)).
    # NOTE(review): this relies on StreamReader's private buffer; asyncio
    # offers no public "unread" API.
    reader._buffer[:0] = first_packet
    if first_packet[0] == 4:  # SOCKS4
        handler = self.socks4
    elif first_packet[0] == 5:  # SOCKS5
        handler = self.socks5
    else:  # HTTP
        try:
            method = first_packet.split(b" ", maxsplit=1)[0].decode("ascii")
        except UnicodeDecodeError:
            return await TCPSocket(reader, writer).close()
        handler = getattr(self, "http_" + method.lower(), self.http_default)
    # Bug fix: construct the socket outside the try block, otherwise a
    # failing constructor made `await tcp.close()` in `finally` raise
    # UnboundLocalError, masking the real error.
    tcp = TCPSocket(reader, writer)
    try:
        await handler(tcp)
    except ConnectionError:
        pass
    finally:
        await tcp.close()
async def _handle_service_connection(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> None:
    """Serve a single Signing Service connection.

    Only one service connection is allowed at a time; extra connections are
    closed immediately. For an accepted connection three long-running tasks
    are started (request consumer, response producer, heartbeat producer)
    and the handler waits until the first of them finishes, then tears all
    of them down.
    """
    if self._is_signing_service_connection_active:
        # Another service connection is already being handled: reject.
        writer.close()
    else:
        tasks: list = []
        try:
            successful = await self._authenticate_signing_service(reader, writer)
            if not successful:
                writer.close()
                return
            request_consumer_task = self._loop.create_task(
                request_consumer(
                    self._request_queue,
                    self._response_queue_pool,
                    self._message_tracker,
                    writer
                )
            )
            tasks.append(request_consumer_task)
            response_producer_task = self._loop.create_task(
                response_producer(
                    self._response_queue_pool,
                    reader,
                    self._message_tracker
                )
            )
            tasks.append(response_producer_task)
            heartbeat_producer_task = self._loop.create_task(
                heartbeat_producer(
                    writer,
                )
            )
            tasks.append(heartbeat_producer_task)
            # First finished task ends the whole session; the rest are
            # cancelled below.
            done_tasks, pending_tasks = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED)
            for future in pending_tasks:
                future.cancel()
            # Re-raise whatever made the finished task(s) stop, so it is
            # reported through the except clauses below.
            for future in done_tasks:
                exception_from_task = future.exception()
                if exception_from_task is not None:
                    raise exception_from_task
        except asyncio.CancelledError:
            # CancelledError shall not be treated as crash and logged in Sentry. It is only logged as info
            logger.debug(f"CancelledError in Signing Service connection handler.")
        except Exception as exception:  # pylint: disable=broad-except
            crash_logger.error(
                f"Exception occurred in Signing Service connection handler: {exception}, Traceback: {traceback.format_exc()}"
            )
            raise
        finally:
            logger.debug(f"Canceling tasks upon exit of Signing Service connection handler. Number of tasks to cancel: {len(tasks)}.")
            # cancel all tasks - if task is already done/cancelled it makes no harm
            self._cancel_pending_tasks(tasks)
            self._is_signing_service_connection_active = False
async def handle_client(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    """Serve one HTTP/1.1 connection by driving the ASGI `app` callable.

    Builds an ASGI scope from the connection, then runs the app with local
    `receive`/`send` callables that translate ASGI messages into raw HTTP
    bytes on the writer.
    """

    async def receive():
        # Bug fix: the original returned None, but the ASGI contract
        # requires a message dict; apps awaiting receive() crashed on None.
        return {"type": "http.request", "body": b"", "more_body": False}

    async def send(data):
        if data["type"] == "http.response.start":
            protocol = "HTTP/1.1"
            status = HTTPStatus(data["status"])
            status_line = f"{protocol} {status.value} {status.phrase}"
            headers = [status_line]
            for header in data["headers"]:
                key, value = header[0].decode(), header[1].decode()
                headers.append(f"{key}: {value}")
            headers.append("")  # blank line terminating the header section
            writer.writelines([f"{line}\r\n".encode() for line in headers])
            await writer.drain()
        elif data["type"] == "http.response.body":
            writer.writelines([data["body"], "\r\n".encode()])
            await writer.drain()
        else:
            raise Exception("Not implemented")

    scope = await build_scope(reader, writer)
    await app(scope, receive, send)
    writer.close()
async def register(reader: asyncio.StreamReader, writer: asyncio.StreamWriter, chat_username: Optional[str]) -> str:
    """Register a new chat user and persist the received account hash.

    Walks the server's registration dialogue: answers the greeting with an
    empty line, sends the nickname (prompting interactively if none was
    given), then stores the returned ``account_hash`` in a ``key`` file next
    to this module.

    Returns the account hash, or '' if registration never completed.
    """
    key = ''
    try:
        async for message in read_stream(reader, writer, log=False):
            if message == HELLO_USER_MSG:
                # Empty line tells the server we want a fresh registration.
                writer.write('\n'.encode())
                await writer.drain()
                data = await reader.readline()
                message = data.decode('utf-8').strip()
                if message == NICKNAME_MSG:
                    if not chat_username:
                        chat_username = input(message + ' ').replace('\\n', '').replace('\n', '')
                    writer.write(f'{chat_username}\n\n'.encode())
                    await writer.drain()
                    data = await reader.readline()
                    message = data.decode('utf-8').strip()
                    with suppress(JSONDecodeError):
                        token = json.loads(message)
                        if token:
                            key = token['account_hash']
                            key_file = os.path.abspath(__file__ + '/../key')
                            async with aiofiles.open(key_file, mode='w', buffering=1) as aiofile:
                                await aiofile.write(key)
                            return key
    except asyncio.CancelledError:
        writer.close()
        # Bug fix: CancelledError must propagate; the original swallowed it,
        # silently defeating cancellation of this coroutine.
        raise
async def pipe(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    """Copy bytes from *reader* to *writer* until EOF, then close the writer."""
    try:
        while True:
            if reader.at_eof():
                break
            chunk = await reader.read(4096)
            writer.write(chunk)
    finally:
        # Always close the destination, even if the copy loop fails.
        writer.close()
async def handle_request(
        reader: asyncio.StreamReader, writer: asyncio.StreamWriter,
        buff=4096, cd='utf8') -> None:
    """Serve PUT/GET text commands on one connection until the peer closes it.

    Each chunk is decoded with codec *cd*, matched against the PUT/GET
    patterns, and answered with the corresponding handler's response.
    """
    while True:
        raw = await reader.read(buff)
        text = raw.decode(cd)
        addr = writer.get_extra_info('peername')
        print(f"Received {text!r} from {addr!r}")
        if not text:
            # Empty read means the client hung up.
            break
        if PUT_PATTERN.match(text):
            response = handle_put(text)
        elif GET_PATTERN.match(text):
            response = handle_get(text)
        else:
            response = raise_error()
        print(f"Send {response} at {addr}")
        writer.write(response.encode())
        await writer.drain()
    print(f"Closing connection with {addr}")
    writer.close()
async def proxy(self, client_reader: StreamReader, client_writer: StreamWriter,
                remote_reader: StreamReader, remote_writer: StreamWriter):
    # Bidirectional relay between the client and the remote endpoint.
    # One read task per direction is kept in flight; whichever completes
    # first is forwarded and replaced.
    client_read_task = create_task(client_reader.read(READ_BYTES_DEFAULT))
    remote_read_task = create_task(remote_reader.read(READ_BYTES_DEFAULT))
    # _proxy_connection appears to forward the completed read and return a
    # replacement read task, or a falsy value when that side is finished —
    # the loop condition relies on that (TODO confirm against its definition).
    while client_read_task and remote_read_task:
        done, pending = await asyncio.wait(
            {client_read_task, remote_read_task},
            return_when=FIRST_COMPLETED)
        if client_read_task in done:
            client_read_task = await self._proxy_connection(
                in_read=client_read_task, out_read=remote_read_task,
                in_reader=client_reader, out_writer=remote_writer)
        if remote_read_task in done:
            # NOTE(review): if the branch above just replaced
            # client_read_task, the *new* task is what is passed here as
            # out_read — confirm _proxy_connection expects that.
            remote_read_task = await self._proxy_connection(
                in_read=remote_read_task, out_read=client_read_task,
                in_reader=remote_reader, out_writer=client_writer)
    # One side ended: cancel the leftover read and close the remote end.
    if client_read_task:
        client_read_task.cancel()
    if remote_read_task:
        remote_read_task.cancel()
    remote_writer.close()
def __call__(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    '''Handle a request Coroutine.

    Runs one proxy session, notifying the event dispatcher at session begin
    and end; connection-level errors from the client are logged quietly,
    anything else is logged as an exception.
    '''
    _logger.debug('New proxy connection.')
    session = self._new_session(reader, writer)
    self.event_dispatcher.notify(self.Event.begin_session, session)
    is_error = False
    try:
        yield from session()
    except Exception as error:
        if not isinstance(error, StopIteration):
            # Bug fix: the original did `error = True`, clobbering the
            # caught exception — the Connection* isinstance check below
            # could then never match, and `is_error` was never set, so
            # end_session always reported error=False.
            is_error = True
            if isinstance(error, (ConnectionAbortedError, ConnectionResetError)):
                # Client using the proxy has closed the connection
                _logger.debug('Proxy error', exc_info=True)
            else:
                _logger.exception('Proxy error')
            writer.close()
        else:
            raise
    finally:
        self.event_dispatcher.notify(self.Event.end_session, session, error=is_error)
        writer.close()
        _logger.debug('Proxy connection closed.')
async def _conn_handler(self, reader: StreamReader, writer: StreamWriter):
    """Dispatch one incoming peer message to the registered event listener.

    Reads a 1-byte message type and an 8-byte big-endian length, then hands
    the unpickled payload to the matching ``on_request_*`` callback.
    """
    client_ip = writer.get_extra_info('peername')[0]
    if client_ip not in self.peers:  # only allow connection from peers
        writer.close()
        print(f"Refuse connection from {client_ip}")
        # Bug fix: without this return the handler fell through and raised
        # KeyError on self.peers[client_ip] below.
        return
    # update online status
    self.peers[client_ip].update({"is_online": True})
    # get message
    msg_type = MsgType(int.from_bytes(await reader.readexactly(1), "big"))
    msg_length = int.from_bytes(await reader.readexactly(8), "big")
    # SECURITY: pickle.loads on bytes received from the network can execute
    # arbitrary code — only safe if peers are fully trusted.
    if msg_type == MsgType.REQ_INDEX and self._event_listener[
            "on_request_index"]:
        client_index = pickle.loads(await reader.readexactly(msg_length))
        print(f"{client_ip} request index exchange")
        await self._event_listener["on_request_index"](writer, client_index)
    elif msg_type == MsgType.REQ_INDEX_UPDATE and self._event_listener[
            "on_request_index_update"]:
        client_index = pickle.loads(await reader.readexactly(msg_length))
        print(f"{client_ip} request index update")
        await self._event_listener["on_request_index_update"](writer, client_index)
    elif msg_type == MsgType.REQ_FILE and self._event_listener[
            "on_request_file"]:
        msg = pickle.loads(await reader.readexactly(msg_length))
        print(
            f"{client_ip} request file {msg['file_path']} blk:{msg['block_index']}"
        )
        await self._event_listener["on_request_file"](writer, msg["file_path"], msg["block_index"])
    else:
        writer.close()
        print(f"Invalid message from {client_ip}")
async def server_remote_serve(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    # Accept one TCP connection to be tunnelled over the websocket client.
    logger.info(f"Connection from {writer.get_extra_info('peername')}")
    # Refuse when the websocket link to the client is not up.
    if not self.ws or self.ws.closed:
        logger.error(f"Client is not started. ")
        writer.close()
        return
    # Refuse when the connection budget is exhausted.
    # NOTE(review): `>` allows one more than max_connections — confirm
    # whether `>=` was intended.
    if len(self.buffers) > self.max_connections:
        logger.error(f"Max connection {self.max_connections} exceeded. ")
        writer.close()
        return
    peer_name = writer.get_extra_info('peername')
    ip, port, *_ = peer_name
    message_id = get_connection_id((ip, port))
    # Per-connection queue that forward_buffer_msg drains into the writer.
    self.buffers[message_id] = asyncio.Queue()
    # Pump both directions together, but bail out as soon as the websocket
    # client goes down (wait_event on client_down).
    done, pending = await asyncio.wait([asyncio.wait([self.forward_buffer_msg(writer),
                                                      self.forward_remote_msg(reader, writer)]),
                                        asyncio.ensure_future(self.wait_event(self.client_down))],
                                       return_when=asyncio.FIRST_COMPLETED)
    for coro in pending:
        coro.cancel()
async def _pipe(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter): try: while not reader.at_eof(): writer.write(await reader.read(self.CHUNK_SIZE)) finally: writer.close()
async def client_handler(self, reader: StreamReader, writer: StreamWriter) -> None:
    """Serve one client: read newline-delimited requests until EOF and reply."""
    if __debug__:
        print('Client connected', flush=True)
    while True:
        line = await reader.readline()
        if not line:
            # EOF: the client hung up.
            if __debug__:
                print('Reached end of reader. Returning', flush=True)
            writer.close()
            await writer.wait_closed()
            return
        request = request_selector(line)
        if isinstance(request, RequestPing):
            response = request.response_ping()
        elif isinstance(request, RequestRun):
            run_stdout = await self.run_command(
                args_to_run=request.args_to_run,
                std_in_out_mode=PIPE if request.wait_response else None,
            )
            if run_stdout is None:
                # Fire-and-forget run: nothing to send back.
                continue
            else:
                response = request.response_run(text=run_stdout, )
        else:
            # Bug fix: unknown request types previously fell through with
            # `response` unbound, raising UnboundLocalError on write below.
            continue
        writer.write(response)
        await writer.drain()
async def socket_request_handler(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    """Handle one request: receive a symbol, compute its result, send it back.

    Enforces the global `max_tasks` concurrency limit via the `num_tasks`
    counter.
    """
    global num_tasks, max_tasks
    if num_tasks >= max_tasks:
        logging.info(
            "In request_handler, discarded request due to max_tasks limit.")
        try:
            await prefixed_socket_async_send(writer, "max_tasks_error")
        except Exception:
            # Best-effort notification; the connection is dropped anyway.
            pass
        writer.close()
        return
    num_tasks += 1
    # Bug fix: decrement in a finally block — the original leaked the
    # counter permanently if the send below raised.
    try:
        try:
            symbol = await prefixed_socket_async_recv(reader)
            logging.info("In request_handler, handle task %s." % symbol)
            csv_string = await get_result(symbol)
        except Exception as e:
            logging.error(e)
            csv_string = "ERROR"
        await prefixed_socket_async_send(writer, csv_string)
        writer.close()
    finally:
        num_tasks -= 1
async def _handle_client(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    # RESP-style command loop: each request is "*<count>\r\n" followed by
    # <count> bulk strings (command + params).
    while not reader.at_eof():
        try:
            array = await reader.readline()
            if reader.at_eof():
                break
            # First byte must be the array marker '*'.
            assert array[:1] == b'*'
            # Strip the trailing "\r\n" before parsing the element count.
            count = parse_int(array[1:-2])
            items = [await read_bulk_string(reader) for _ in range(count)]
            command = items[0]
            params = items[1:]
            try:
                # Bound the execution time of a single command.
                with timeout(REDIS_SERVER_TIMEOUT):
                    await self._execute(command, params, writer)
            except Exception:
                logger.exception(f"Command failed: {command}")
                writer.write(encode_error("Command failed"))
        except Exception:
            # Parse error (bad marker, bad count, truncated bulk string).
            logger.exception(f"Invalid command: {array}")
            writer.write(encode_error("Invalid command"))
    writer.close()
async def http_server_callback(reader: StreamReader, writer: StreamWriter):
    """Serve one HTTP request: directory listing, file (with Range support),
    or an error page for anything else."""
    raw_header = await reader.readuntil(b'\r\n\r\n')
    lines = raw_header.decode().split('\r\n')
    try:
        # Any ValueError below (bad request line, bad Range header) → 400.
        method, raw_path, _ = lines[0].split()
        if method in ('GET', 'HEAD'):
            path = base_dir / raw_path.lstrip('/')
            if path.is_dir():
                response: bytes = await get_dir_response(path, raw_path)
                code = 200
            elif path.is_file():
                byte_range = parse_range(lines[1:])
                response = await get_file_response(path, byte_range)
                code = 206 if byte_range else 200
            else:
                response = await get_error_response(404)
                code = 404
        else:
            response = await get_error_response(405)
            code = 405
        print(method, raw_path, status[code])
    except ValueError:
        response = await get_error_response(400)
    writer.write(response)
    await writer.drain()
    writer.close()
async def forward_remote_msg(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    # Pump the local TCP stream into the websocket as framed TCPMessages:
    # the first chunk is tagged BEGIN, later chunks MID, and a final empty
    # END message marks end-of-stream.
    try:
        ip, port, *_ = writer.get_extra_info('peername')
        message_id = get_connection_id((ip, port))
        seq = 0
        while not reader.at_eof():
            data: bytes = await reader.read(4096)
            if seq == 0:
                tag = BEGIN
            else:
                tag = MID
            # BEGIN is always sent (even empty) so the far side learns the
            # connection id; later empty chunks are skipped.
            if tag == BEGIN or len(data) > 0:
                message = TCPMessage(id=message_id, tag=tag, data=data)
                await self.ws.send(message.to_bytes())
                seq += 1
        message = TCPMessage(id=message_id, tag=END, data=b'')
        await self.ws.send(message.to_bytes())
    except asyncio.CancelledError:
        pass
    finally:
        writer.close()
        await writer.wait_closed()
async def _server_handler(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    """Handler for the server loop.

    Extracts one incoming request, executes it, and returns the result to
    the sender node.

    Args:
        reader (asyncio.StreamReader): Stream reader object for incoming node.
        writer (asyncio.StreamWriter): Stream writer object for incoming node.
    """
    sock = writer.get_extra_info("socket")
    # Replaces the 0.0.0.0 with the host's actual IP
    self.address.host = sock.getsockname()[0]
    self.logger.debug(f"Received connection from {sock.getpeername()[0]}")

    # One request per connection: read, execute, answer.
    raw_request = await reader.read(self.__buf)
    reply = await self.parse_request(raw_request)
    writer.write(reply.encode("utf-8"))
    await writer.drain()
    self.logger.debug(f"Sent response back to {sock.getpeername()[0]}")

    writer.close()
    await writer.wait_closed()
async def process_stream(rd: asyncio.StreamReader, wt: asyncio.StreamWriter):
    """Handle one local CONNECT-style request and tunnel it to the remote
    server over TLS, masking the target address first."""
    data = await rd.read(4096)
    parts = data.split(b' ')
    if len(parts) < 2:
        # Robustness fix: a malformed/empty request line previously raised
        # IndexError; just drop the connection.
        wt.close()
        return
    raw_addr = parts[1]
    padded_addr = padding(raw_addr, conf['padding-length'])
    addr, _ = mask(padded_addr, new_key)
    try:
        rmt_rd, rmt_wt = await asyncio.open_connection(
            host=conf['server'], port=conf['port'], ssl=ctx,
            ssl_handshake_timeout=30)
    except (OSError, asyncio.TimeoutError):
        # Bug fix: the bare `except:` also swallowed CancelledError and
        # programming errors; catch only connection/handshake failures.
        wt.close()
        return
    rmt_wt.write(addr)
    await rmt_wt.drain()
    # Tell the local client the tunnel is up, then pump both directions.
    wt.write(b'HTTP/1.1 200 Connection Established\r\n\r\n')
    await wt.drain()
    await asyncio.gather(stream_copy(rd, rmt_wt),
                         stream_copy(rmt_rd, wt),
                         return_exceptions=True)
async def HandleRPC(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> None:
    """Serve an ONC-RPC record-marking stream: read each fragment, dispatch
    it through the connection handler, and write the reply fragment."""
    conn = self.create_conn()
    while True:
        # Bug fix: reader.read(n) may legally return fewer than n bytes
        # mid-stream, which silently truncated fragments; readexactly()
        # guarantees a complete header/body or signals EOF.
        try:
            frag_hdr_data = await reader.readexactly(4)
        except asyncio.IncompleteReadError:
            break
        frag_len = struct.unpack(">I", frag_hdr_data)[0]
        # High bit marks the last fragment of a record.
        if (frag_len & 0x80000000) == 0:
            raise Exception("Partial fragments not implemented")
        frag_len = frag_len & 0x7FFFFFFF
        try:
            data = await reader.readexactly(frag_len)
        except asyncio.IncompleteReadError:
            break
        msg_up = RPCUnpacker(data)
        msg = msg_up.unpack_rpc_msg()
        reply_data = await conn.handleMsg(msg, buf=data, buf_ix=msg_up.get_position())
        if reply_data is None:
            raise Exception("cannot handle message")
        # Reply always fits a single fragment, so set the last-fragment bit.
        writer.write(struct.pack(">I", 0x80000000 | len(reply_data)))
        writer.write(reply_data)
    print(f"Closing socket")
    writer.close()
    # wait_closed() only exists on Python >= 3.7.
    if sys.hexversion > 0x03070000:
        await writer.wait_closed()
async def handle_local_connections(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    """Answer a single line request from a local client, then close."""
    raw = await reader.readline()
    reply = self.process_message(raw.strip()) + b'\n'
    writer.write(reply)
    await writer.drain()
    writer.close()
async def handle_broadcast(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> None:
    # Periodically push the latest sensor values/alarms to one client as
    # JSON, paced by the _datavalid event set by the serial-port reader.
    # log address
    addr = writer.get_extra_info("peername")
    logging.info(f"Broadcasting to {addr!r}")
    while self._broadcasting:
        # wait for data from serial port
        try:
            await asyncio.wait_for(self._datavalid.wait(),
                                   timeout=0.5)  # set timeout such that there is never pileup
        except asyncio.TimeoutError:
            continue
        # take lock of db and prepare packet
        with self._dblock:
            values: List[float] = self._values
            alarms = self._alarms if len(self._alarms) > 0 else None
        broadcast_packet = {}
        broadcast_packet["sensors"] = values
        broadcast_packet["alarms"] = alarms  # add alarms key/value pair
        logging.info(f"Send: {json.dumps(broadcast_packet,indent=4)}")
        try:
            writer.write(json.dumps(broadcast_packet).encode())
            await writer.drain()
        except (ConnectionResetError, BrokenPipeError):
            # Connection lost, stop trying to broadcast and free up socket
            logging.warning(f"Connection lost with {addr!r}")
            self._broadcasting = False
        # take control of datavalid and reset it
        with self._dvlock:
            self._datavalid.clear()
    # Re-arm broadcasting for the next client before closing this socket.
    # NOTE(review): a threading lock plus flag shared with async code —
    # confirm the intended ownership of _broadcasting across clients.
    self._broadcasting = True
    writer.close()
async def pipe(
    self,
    reader: asyncio.StreamReader,
    writer: asyncio.StreamWriter,
    processor: str,
    delay: Delay,
) -> None:
    """Pump *reader* into *writer*, passing every chunk through the named
    processor, optionally throttled by *delay*; close the writer on exit."""
    try:
        while not reader.at_eof():
            chunk = await reader.read(self.chunk_size)
            # Optional artificial latency before handling the chunk.
            if delay.timeout > 0:
                log.debug(
                    "%r sleeping %.3f seconds on %s",
                    self, delay.timeout, processor,
                )
                await delay.wait()
            transformed = await self.__processors[processor](chunk)
            writer.write(transformed)
            if not self.buffered:
                await writer.drain()
    finally:
        writer.close()
        await writer.wait_closed()
async def _server_callback(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> None:
    """Callback when a connection is made to the server.

    Reads the client's request until EOF, executes the requested command via
    the configured handler, and writes the packed reply back; the connection
    is always closed afterwards.
    """
    try:
        logger.debug("Connection made to server")
        payload = await reader.read()
        logger.debug("EOF received by server")
        request, is_json = _IPC.unpack(payload)
    except IPCError:
        logger.warning("Invalid data received, closing connection")
    else:
        reply = self.handler(request)
        packed = _IPC.pack(reply, is_json=is_json)
        logger.debug("Sending result on receive EOF")
        writer.write(packed)
        logger.debug("Closing connection on receive EOF")
        writer.write_eof()
    finally:
        writer.close()
        await writer.wait_closed()
async def __call__(self, reader: StreamReader, writer: StreamWriter) -> None:
    """Run one ManageSieve session on the stream pair, closing it afterwards."""
    session = ManageSieveConnection(self._config, reader, writer)
    try:
        await session.run(self._login)
    finally:
        writer.close()
async def _incoming(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> None:
    """
    Accept an incoming connection and signal the upper_half.

    Does the minimum necessary to accept a single incoming connection and
    signals back to the upper_half ASAP so that any errors during session
    initialization can occur naturally in the caller's stack.

    :param reader: Incoming `asyncio.StreamReader`
    :param writer: Incoming `asyncio.StreamWriter`
    """
    peer = writer.get_extra_info('peername', 'Unknown peer')
    self.logger.debug("Incoming connection from %s", peer)

    # More than one connection can be pending because of
    # https://bugs.python.org/issue46715 — keep only the first and close
    # any extras.
    if self._reader or self._writer:
        self.logger.warning("Extraneous connection inadvertently accepted")
        writer.close()
        return

    # A connection has been accepted; stop listening for new ones.
    assert self._accepted is not None
    await self._stop_server()
    self._reader = reader
    self._writer = writer
    self._accepted.set()
async def server_handler(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    """Minimal fixture HTTP server: expect exactly one GET /hello request
    and answer 204 No Content."""
    request = await reader.readuntil(b"\r\n\r\n")
    assert request == b"GET /hello HTTP/1.1\r\n\r\n"
    writer.write(b"HTTP/1.1 204 No Content\r\n\r\n")
    await writer.drain()
    writer.close()
async def close_stream_writer(writer: asyncio.StreamWriter) -> None:
    """Close an asyncio.StreamWriter and wait until it finishes closing.

    Safe to call on an already-closed stream. `ConnectionResetError` is
    swallowed because it simply means the stream writer is closed.

    Parameters
    ----------
    writer : `asyncio.StreamWriter`
        Asynchronous stream writer to close.

    Raises
    ------
    asyncio.CancelledError
        If the writer is already in the middle of being closed. I am not
        sure if this is expected behavior or a bug in Python.
    """
    try:
        writer.close()
        await writer.wait_closed()
    except ConnectionResetError:
        # Peer reset == stream already closed; nothing left to do.
        pass
async def on_connection(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    """Answer one profiler query ("memory_stats" / "time_stats") over the
    socket, replying with a pickled payload (or the pickled error)."""
    message = await read_message(reader)
    logging.debug("Message %s:", message)
    try:
        # Bug fix: json.loads() lost its `encoding` parameter in Python 3.9;
        # passing it raised TypeError on every request.
        event = json.loads(message)
        command = event["command"]
        if command not in ["memory_stats", "time_stats"]:
            raise ValueError(f"{command} is illegal!")
        handler = getattr(self.profiler, command, None)
        if not handler:
            raise ValueError(f"{message} is malformed")
        # NOTE(review): `handler` is pickled without being called — fine if
        # these attributes are properties/values, wrong if they are methods;
        # confirm against the profiler class.
        reply_message = handler
        writer.write(pickle.dumps(reply_message))
        await writer.drain()
    except (
        UnicodeDecodeError,
        json.JSONDecodeError,
        TypeError,
    ) as err:
        self.profiler.logger.error("Error occurred while transmission: %s", err)
        writer.write(pickle.dumps(err))
        await writer.drain()
    finally:
        writer.close()
async def remove_client(self, writer: asyncio.StreamWriter):
    '''Close the cient input & output streams'''
    # Signal EOF first when the transport supports it.
    if writer.can_write_eof():
        writer.write_eof()
    writer.close()
    await writer.wait_closed()
    self._logger.info("Disconnected client")
async def communicating_with_participants(self: object, reader: asyncio.StreamReader,
                                          writer: asyncio.StreamWriter, address: tuple) -> None:
    """ This coroutine communicates with all Participants. """
    self.logger.info('Awaiting connection from all participants.')
    await self.waiting_state()
    # Broadcast the coordinator's decision (2PC phase two).
    if self.commit:
        await self.broadcast(self.protocols.GLOBAL_COMMIT)
        return
    if not self.commit:
        await self.broadcast(self.protocols.GLOBAL_ABORT)
        return
    # NOTE(review): the two branches above cover all cases and both return,
    # so everything below (reading the participant acknowledgement and
    # closing the stream) is unreachable — confirm whether the `return`s
    # are intended.
    data = await reader.read(1024)
    self.logger.info('Received {} from host: {} at port: {}'.format(
        data.decode(), *address))
    if data == self.protocols.SUCCESSFUL_COMMIT or data == self.protocols.SUCCESSFUL_ABORT:
        self.logger.warning(
            'Closing stream of host: {} at port: '.format(*address))
        writer.close()
        await writer.wait_closed()
async def handle_request(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    # Parse one HTTP request, dispatch it through the router, and write the
    # serialized response. Every exit path returns -1.
    response = False
    try:
        header_string = await reader.readuntil(separator=b'\r\n\r\n')
    except asyncio.exceptions.IncompleteReadError:
        return -1
    except OSError:
        return -1
    finally:
        pass
    header = RequestHeader().parse(header_string.decode())
    if header.method == 'OPTIONS':
        # CORS preflight short-circuit.
        response = cors(header.origin['value'])
    else:
        # Router yields the handler plus path vars, query, and the name of
        # the serializer to apply to the handler's return value.
        handle, var, query, ser_func = self.router.get(header.path)
        if handle:
            response = await handle(
                Request(header, reader, writer, self.loop), **var)
            response = await response_refs.get(ser_func)(response)
        else:
            response = await json_response(
                {"message": f"Path {header.path} not found."}, code=404)
    # NOTE(review): if a handler ever returns a falsy response, the
    # connection is left open here — confirm intended.
    if response:
        writer.write(response)
        await writer.drain()
        writer.close()
    return -1
async def forward_buffer_msg(self, writer: asyncio.StreamWriter):
    """Drain this connection's buffer queue into *writer* until an empty
    (end-of-stream) message arrives, the entry is removed, or the peer
    disconnects; always tears the connection down on exit."""
    ip, port, *_ = writer.get_extra_info('peername')
    message_id = get_connection_id((ip, port))
    try:
        logger.debug(f"Start forwarding {hex(message_id)}")
        while message_id in self.buffers:
            message = await self.buffers[message_id].get()
            logger.debug(f"Receive message: {len(message)}")
            if len(message) == 0:
                # Empty message is the end-of-stream marker.
                return
            writer.write(message)
            await writer.drain()
    except asyncio.CancelledError:
        logger.debug(f"Cancelled")
    except ConnectionResetError:
        logger.debug(f"Connection lost from {(ip, port)}")
    finally:
        # Bug fix: `del self.buffers[message_id]` raised KeyError when the
        # entry had already been removed elsewhere (which is exactly how the
        # loop above can terminate); pop() with a default is safe.
        self.buffers.pop(message_id, None)
        writer.close()
        await writer.wait_closed()
        logger.debug(f"Finish forwarding {hex(message_id)}")
async def __call__(self, reader: StreamReader, writer: StreamWriter) -> None:
    """Run one IMAP connection to completion, then close the writer."""
    state = ConnectionState(self._login, self._config)
    session = IMAPConnection(self.commands, self._config, reader, writer)
    try:
        await session.run(state)
    finally:
        writer.close()
async def _client_wrapper(self, reader: StreamReader, writer: StreamWriter) -> None: try: return await self._callback( reader=reader, writer=writer, ) except asyncio.CancelledError: pass finally: writer.close()
def connected_cb(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    """
    A callback for connected clients.
    """
    client = writer.get_extra_info("peername")
    sclient = ':'.join(str(_) for _ in client)
    logger.info("Recieved connection from {}:{}".format(*client))
    # Read a subscription message.
    try:
        sub = yield from reader.read(65536)
    except ConnectionResetError:
        rlogger.info("Client {} closed connection".format(sclient))
        return
    # Empty read == the client went away before subscribing.
    if not sub:
        logger.error("Client {} terminated connection abnormally".format(sclient))
        return
    # Decode the msgpack-encoded subscription request.
    try:
        sub_data = msgpack.unpackb(sub)
    except (msgpack.UnpackException, ValueError) as e:
        logger.error("Recieved unknown subscription message from {}:{}".format(*client))
        yield from writer.drain()
        writer.close()
        return
    # Get the data from the subscription message.
    if not b'queue' in sub_data:
        logger.error("Recieved null queue from {}".format(sclient))
        yield from writer.drain()
        writer.close()
        return
    queue_to_sub = sub_data[b"queue"]
    # action 0 == push (client consumes), anything else == pull.
    action = sub_data.get(b"action", 0)
    queue_created = False
    # Create the queue on first subscription; value is [refcount, queue].
    if queue_to_sub not in queues:
        queues[queue_to_sub] = [0, asyncio.Queue()]
        logger.debug("Created queue {}".format(queue_to_sub))
        queue_created = True
    logger.debug("Client {} subscribed to queue {} in mode {} ({})".format(
        sclient, queue_to_sub, action, "push" if not action else "pull"))
    # Hand the connection over to the long-lived pump task for its mode.
    if action == 0:
        loop.create_task(QueueWaiter(reader, writer, queue_to_sub))
    else:
        loop.create_task(QueueSender(reader, writer, queue_to_sub))
    # Acknowledge the subscription (StreamWriter acts as the pack target).
    msgpack.pack({"created": queue_created}, writer)
def accept_fdms_client(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    """Spawn an FDMS session task for this client; close the writer when
    the session finishes (normally or with an error)."""
    session_task = asyncio.Task(fdms.fdms_session(reader, writer), loop=loop)
    session_task.add_done_callback(lambda _fut: writer.close())