async def _handshake(initiator: 'HandshakeInitiator', reader: asyncio.StreamReader,
                     writer: asyncio.StreamWriter) -> Tuple[bytes, bytes, PreImage, PreImage]:
    """See the handshake() function above.

    Factored into this helper so tests can create Peers with directly
    connected readers/writers.
    """
    # Fresh random nonce for this side of the handshake.
    our_nonce = keccak(os.urandom(HASH_LEN))
    # Build, encrypt and send the auth-init message.
    plaintext_auth = initiator.create_auth_message(our_nonce)
    auth_init_ciphertext = initiator.encrypt_auth_message(plaintext_auth)
    writer.write(auth_init_ciphertext)
    # Read and decode the responder's auth-ack.
    auth_ack_ciphertext = await reader.read(ENCRYPTED_AUTH_ACK_LEN)
    remote_ephemeral_pubkey, their_nonce = initiator.decode_auth_ack_message(auth_ack_ciphertext)
    # Derive the session secrets from both nonces and both ciphertexts.
    return initiator.derive_secrets(
        our_nonce, their_nonce, remote_ephemeral_pubkey,
        auth_init_ciphertext, auth_ack_ciphertext)
async def send(self, writer: asyncio.StreamWriter):
    """
    Serialize this message as a single unmasked WebSocket frame and send it.

    :param writer: StreamWriter used to send the message
    :return:
    """
    payload = self.data
    payload_length = len(payload) if payload else 0
    # First byte: FIN bit set, no RSV bits, opcode in the low nibble.
    frame = bytearray([0b10000000 | self.opcode])
    # Second byte encodes the payload length directly (<126), or selects
    # a 2-byte (126) or 8-byte (127) extended length field.
    if payload_length < 126:
        frame.append(payload_length)
    elif payload_length <= 65535:
        frame.append(126)
        frame.extend(struct.pack("!H", payload_length))
    else:
        frame.append(127)
        frame.extend(struct.pack("!Q", payload_length))
    if payload:
        frame.extend(payload)
    writer.write(frame)
    await writer.drain()
async def negotiate_socks4_userid(
        reader: asyncio.StreamReader,
        writer: asyncio.StreamWriter,
        host: Union[str, ipaddress.IPv4Address, ipaddress.IPv6Address],
        port: int,
        args: Optional[Dict[str, str]],
) -> None:
    """Perform a SOCKS4 CONNECT with the PT args encoded in the USERID field.

    Raises ValueError for non-IPv4 hosts and PTSOCKS4ConnectError when the
    proxy refuses the connection.
    """
    try:
        host = ipaddress.IPv4Address(host)
    except ValueError:
        raise ValueError('SOCKS4 only supports IPv4 address')
    args_bytes = encode_args(args) if args else b''
    # CONNECT request: VER, CMD, DSTPORT, DSTIP, USERID, NUL terminator.
    request = b''.join((
        b'\x04',  # ver
        enums.SOCKS4Command.CONNECT,
        port.to_bytes(2, 'big'),
        host.packed,
        args_bytes,
        b'\0',
    ))
    writer.write(request)
    reply_raw = await reader.readexactly(8)
    assert reply_raw[0] == 0, 'Invalid SOCKS4 reply version'
    reply = enums.SOCKS4Reply(reply_raw[1:2])
    if reply is not enums.SOCKS4Reply.GRANTED:
        raise exceptions.PTSOCKS4ConnectError(reply)
async def _write_ext_msg( writer: asyncio.StreamWriter, command: bytes, body: bytes, ) -> None: assert len(command) == 2 body_len = len(body).to_bytes(2, 'big') writer.write(command + body_len + body) await writer.drain()
async def _relay_data_side(
        reader: asyncio.StreamReader,
        writer: asyncio.StreamWriter,
) -> None:
    """Pass data and EOF from reader to writer."""
    while True:
        chunk = await reader.read(BUF_SIZE)
        if not chunk:
            # Empty read means the sending side hit EOF.
            break
        writer.write(chunk)
        await writer.drain()
    # Propagate the EOF to the other side and flush it out.
    writer.write_eof()
    await writer.drain()
def test_put(self, loop):
    """Unit test: UnixSocket.put() should encode the value and write it,
    followed by a newline, through the attached StreamWriter."""
    result = 39
    # Queues collect what the patched writer methods observe.
    result_queue = Queue()
    called_write = Queue()
    called_write_eof = Queue()
    writer = StreamWriter(None, None, None, None)
    unix_socket = UnixSocket(None, loop)
    unix_socket.writer = writer

    def write(data):
        # Record every write; decode everything except the newline terminator.
        called_write.put(True)
        if data != b'\n':
            result_queue.put(unix_socket.decode(data))

    def write_eof():
        called_write_eof.put(True)

    # Monkey-patch the writer so no real I/O happens.
    writer.write = write
    writer.write_eof = write_eof

    async def run():
        # put() waits on the ready event before writing.
        unix_socket.ready.set()
        await unix_socket.put(result)

    loop.run_until_complete(run())
    # Two writes expected: the payload and the trailing newline.
    check_queue_multi(called_write, [True] * 2)
    check_queue(result_queue, result)
async def authenticate(
        self,
        reader: asyncio.StreamReader,
        writer: asyncio.StreamWriter,
) -> bool:
    """(async) Authenticate a connecting client using the SAFE_COOKIE scheme.

    Returns:
        True if authentication is successful and False otherwise.
        The caller is responsible for closing the connection in case of
        failure.
    """
    # Advertise the auth methods we support, terminated by END_AUTH_TYPES.
    writer.write(enums.ExtOrPortAuthTypes.SAFE_COOKIE
                 + enums.ExtOrPortAuthTypes.END_AUTH_TYPES)
    client_auth_type = await reader.readexactly(1)
    if client_auth_type != enums.ExtOrPortAuthTypes.SAFE_COOKIE:
        return False
    # Nonce exchange: client sends its nonce, we answer with our hash + nonce.
    client_nonce = await reader.readexactly(self.nonce_len)
    server_nonce = secrets.token_bytes(self.nonce_len)
    server_hash = self.hash(b''.join((
        self.server_hash_header, client_nonce, server_nonce)))
    writer.write(server_hash + server_nonce)
    client_hash = await reader.readexactly(self.hash_len)
    # Constant-time comparison of the client's proof against our own computation.
    result = hmac.compare_digest(client_hash,
                                 self.hash(b''.join((
                                     self.client_hash_header, client_nonce,
                                     server_nonce))))
    # Final status byte: 1 on success, 0 on failure.
    writer.write(int(result).to_bytes(1, 'big'))
    return result
async def handle_client(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter): data: bytes = await reader.read(2) # Read ID. if data != b"\x30\x05": writer.write(ERROR_ID) return logger.info(f"Got ID packets: {data}.") auth: bytes = await reader.read(DIGEST_SIZE) logger.info(f"Got digest: {auth}.") length: int = struct.unpack("!I", await reader.read(4))[0] data = b"" while len(data) < length: newdata = await reader.read(length - len(data)) if len(newdata) == 0: break data += newdata logger.info(f"Got message ength: {length}, data: {data}.") try: logger.info(f"Decoded: {data.decode('UTF-8')}") message: Dict[str, Any] = json.loads(data.decode("UTF-8")) logger.info(f"Loaded: {message}") # Any of these will throw a KeyError with broken packets. message["type"], message["meta"], message["cont"] except: logger.exception("hrrm") writer.write(ERROR_PACK) return stomach: hmac.HMAC = hmac.new(AUTHKEY, data, sha512) if not hmac.compare_digest(stomach.digest(), auth): writer.write(ERROR_HMAC) return logger.info(message) writer.write(ERROR_OK) for event in events: try: await event(message) except: logger.exception("Caught exception inside commloop event handler.")
async def send_message(message: str, writer: StreamWriter):
    """Send *message* newline-terminated over *writer* and flush."""
    payload = f'{message}\n'.encode()
    writer.write(payload)
    await writer.drain()
async def _proxy_peer(
    self,
    multiplexer: Multiplexer,
    client_hello: bytes,
    reader: asyncio.StreamReader,
    writer: asyncio.StreamWriter,
):
    """Proxy data between end points.

    Forwards bytes from the TCP client into a multiplexer channel and
    channel data back to the client, until either side closes or the
    session times out.
    """
    transport = writer.transport
    try:
        ip_address = ipaddress.ip_address(
            writer.get_extra_info("peername")[0])
    except TypeError:
        # peername can be None if the socket closed before we got here.
        _LOGGER.error("Can't read source IP")
        return

    # Open multiplexer channel
    try:
        channel = await multiplexer.create_channel(ip_address)
    except MultiplexerTransportError:
        _LOGGER.error("New transport channel to peer fails")
        return

    from_proxy = None
    from_peer = None
    try:
        # Replay the bytes already consumed while sniffing the connection.
        await channel.write(client_hello)

        # Process stream into multiplexer
        while not transport.is_closing():
            # Keep one pending read task per direction; only recreate a task
            # after its predecessor completed and was consumed.
            if not from_proxy:
                from_proxy = self._loop.create_task(reader.read(4096))
            if not from_peer:
                from_peer = self._loop.create_task(channel.read())

            # Wait until data need to be processed
            async with async_timeout.timeout(TCP_SESSION_TIMEOUT):
                await asyncio.wait([from_proxy, from_peer],
                                   return_when=asyncio.FIRST_COMPLETED)

            # From proxy
            if from_proxy.done():
                if from_proxy.exception():
                    raise from_proxy.exception()
                await channel.write(from_proxy.result())
                from_proxy = None

            # From peer
            if from_peer.done():
                if from_peer.exception():
                    raise from_peer.exception()
                writer.write(from_peer.result())
                from_peer = None

                # Flush buffer
                await writer.drain()
    except (MultiplexerTransportError, OSError, RuntimeError):
        _LOGGER.debug("Transport closed by Proxy for %s", channel.uuid)
        with suppress(MultiplexerTransportError):
            await multiplexer.delete_channel(channel)
    except asyncio.TimeoutError:
        _LOGGER.debug("Close TCP session after timeout for %s", channel.uuid)
        with suppress(MultiplexerTransportError):
            await multiplexer.delete_channel(channel)
    except MultiplexerTransportClose:
        _LOGGER.debug("Peer close connection for %s", channel.uuid)
    finally:
        # Cleanup peer reader
        if from_peer:
            if not from_peer.done():
                from_peer.cancel()
            else:
                # Avoid exception was never retrieved
                from_peer.exception()

        # Cleanup proxy reader
        if from_proxy and not from_proxy.done():
            from_proxy.cancel()
def handler(self, reader: StreamReader, writer: StreamWriter) -> Generator[Any, None, None]:
    """Serve one NBD (Network Block Device) client connection.

    Generator-based coroutine: runs the newstyle handshake, the option
    negotiation phase (export selection / listing / abort), then the
    transmission phase (READ/WRITE/FLUSH/DISC) backed by self.store.
    """
    data: Optional[bytes]
    try:
        host, port = writer.get_extra_info("peername")
        version: Optional[Version] = None
        cow_version: Optional[Version] = None
        self.log.info("Incoming connection from %s:%s." % (host, port))

        # Initial handshake
        writer.write(
            struct.pack(">QQH", self.INIT_PASSWD, self.CLISERV_MAGIC,
                        self.NBD_HANDSHAKE_FLAGS))
        yield from writer.drain()

        data = yield from reader.readexactly(4)
        try:
            client_flags = struct.unpack(">L", data)[0]
        except struct.error:
            raise IOError("Handshake failed, disconnecting.")

        # We support both fixed and unfixed new-style negotiation.
        # The specification actually allows a client supporting "fixed" to not set this bit in its reply ("SHOULD").
        fixed = (client_flags & self.NBD_FLAG_FIXED_NEWSTYLE) != 0
        if not fixed:
            self.log.warning(
                "Client did not signal fixed new-style handshake.")
        # NOTE(review): XOR *sets* the bit for clients that did not send it, so
        # non-fixed clients always fail the flags check below despite the
        # warning above — confirm whether `&= ~NBD_FLAG_FIXED_NEWSTYLE` was
        # intended.
        client_flags ^= self.NBD_FLAG_FIXED_NEWSTYLE
        if client_flags > 0:
            raise IOError(
                "Handshake failed, unknown client flags %s, disconnecting." %
                (client_flags))

        # Negotiation phase
        while True:
            header = yield from reader.readexactly(16)
            try:
                (magic, opt, length) = struct.unpack(">QLL", header)
            except struct.error:
                raise IOError(
                    "Negotiation failed: Invalid request, disconnecting.")

            if magic != self.CLISERV_MAGIC:
                raise IOError("Negotiation failed: Bad magic number: %s." %
                              magic)

            if length:
                data = yield from reader.readexactly(length)
                if len(data) != length:
                    raise IOError(
                        "Negotiation failed: %s bytes expected." % length)
            else:
                data = None

            self.log.debug("[%s:%s]: opt=%s, length=%s, data=%s" %
                           (host, port, opt, length, data))

            if opt == self.NBD_OPT_EXPORTNAME:
                if not data:
                    raise IOError(
                        "Negotiation failed: No export name was provided.")

                # Export names map to version UIDs in the store.
                version_uid = VersionUid(data.decode("ascii"))
                if version_uid not in [
                        v.uid for v in self.store.get_versions()
                ]:
                    if not fixed:
                        raise IOError(
                            "Negotiation failed: Unknown export name.")
                    writer.write(
                        struct.pack(">QLLL", self.NBD_OPT_REPLY_MAGIC, opt,
                                    self.NBD_REP_ERR_UNSUP, 0))
                    yield from writer.drain()
                    continue

                self.log.info("[%s:%s] Negotiated export: %s." %
                              (host, port, version_uid.v_string))

                # We have negotiated a version and it will be used until the client disconnects
                version = self.store.get_versions(
                    version_uid=version_uid)[0]
                self.store.open(version)
                self.log.info("[%s:%s] Version %s has been opened." %
                              (host, port, version.uid))

                export_flags = self.NBD_EXPORT_FLAGS
                if self.read_only:
                    export_flags |= self.NBD_FLAG_READ_ONLY
                    self.log.info("[%s:%s] Export is read only." %
                                  (host, port))
                else:
                    self.log.info("[%s:%s] Export is read/write." %
                                  (host, port))

                # In case size is not a multiple of 4096 we extend it to the the maximum support block
                # size of 4096
                size = math.ceil(version.size / 4096) * 4096
                writer.write(struct.pack('>QH', size, export_flags))
                # 124 zero bytes of padding, as required by the protocol.
                writer.write(b"\x00" * 124)
                yield from writer.drain()

                # Transition to transmission phase
                break

            elif opt == self.NBD_OPT_LIST:
                # Don't use version as a loop variable so we don't conflict with the outer scope usage
                for list_version in self.store.get_versions():
                    list_version_encoded = list_version.uid.v_string.encode(
                        "ascii")
                    writer.write(
                        struct.pack(">QLLL", self.NBD_OPT_REPLY_MAGIC, opt,
                                    self.NBD_REP_SERVER,
                                    len(list_version_encoded) + 4))
                    writer.write(
                        struct.pack(">L", len(list_version_encoded)))
                    writer.write(list_version_encoded)
                    yield from writer.drain()

                writer.write(
                    struct.pack(">QLLL", self.NBD_OPT_REPLY_MAGIC, opt,
                                self.NBD_REP_ACK, 0))
                yield from writer.drain()

            elif opt == self.NBD_OPT_ABORT:
                writer.write(
                    struct.pack(">QLLL", self.NBD_OPT_REPLY_MAGIC, opt,
                                self.NBD_REP_ACK, 0))
                yield from writer.drain()

                raise _NbdServerAbortedNegotiationError()
            else:
                # We don't support any other option
                if not fixed:
                    raise IOError("Unsupported option: %s." % (opt))

                writer.write(
                    struct.pack(">QLLL", self.NBD_OPT_REPLY_MAGIC, opt,
                                self.NBD_REP_ERR_UNSUP, 0))
                yield from writer.drain()

        # Transmission phase
        while True:
            header = yield from reader.readexactly(28)
            try:
                (magic, cmd, handle, offset,
                 length) = struct.unpack(">LLQQL", header)
            except struct.error:
                raise IOError("Invalid request, disconnecting.")

            if magic != self.NBD_REQUEST_MAGIC:
                raise IOError("Bad magic number, disconnecting.")

            # Flags travel in the high bits of the command word.
            cmd_flags = cmd & self.NBD_CMD_MASK_FLAGS
            cmd = cmd & self.NBD_CMD_MASK_COMMAND

            self.log.debug(
                "[%s:%s]: cmd=%s, cmd_flags=%s, handle=%s, offset=%s, len=%s"
                % (host, port, cmd, cmd_flags, handle, offset, length))

            # We don't support any command flags
            if cmd_flags != 0:
                yield from self.nbd_response(writer,
                                             handle,
                                             error=self.EINVAL)
                continue

            if cmd == self.NBD_CMD_DISC:
                self.log.info("[%s:%s] disconnecting" % (host, port))
                break

            elif cmd == self.NBD_CMD_WRITE:
                data = yield from reader.readexactly(length)
                if len(data) != length:
                    raise IOError("%s bytes expected, disconnecting." %
                                  length)

                if self.read_only:
                    yield from self.nbd_response(writer,
                                                 handle,
                                                 error=self.EPERM)
                    continue

                # Writes go to a copy-on-write version, created lazily.
                if not cow_version:
                    cow_version = self.store.get_cow_version(version)
                try:
                    self.store.write(cow_version, offset, data)
                except Exception as exception:
                    self.log.error("[%s:%s] NBD_CMD_WRITE: %s\n%s." %
                                   (host, port, exception,
                                    traceback.format_exc()))
                    yield from self.nbd_response(writer,
                                                 handle,
                                                 error=self.EIO)
                    continue

                yield from self.nbd_response(writer, handle)

            elif cmd == self.NBD_CMD_READ:
                try:
                    data = self.store.read(version, cow_version, offset,
                                           length)
                except Exception as exception:
                    self.log.error("[%s:%s] NBD_CMD_READ: %s\n%s." %
                                   (host, port, exception,
                                    traceback.format_exc()))
                    yield from self.nbd_response(writer,
                                                 handle,
                                                 error=self.EIO)
                    continue

                yield from self.nbd_response(writer, handle, data=data)

            elif cmd == self.NBD_CMD_FLUSH:
                # Return success right away when we're read only or when we haven't written anything yet.
                if self.read_only or not cow_version:
                    yield from self.nbd_response(writer, handle)
                    continue

                try:
                    self.store.flush(cow_version)
                except Exception as exception:
                    self.log.error("[%s:%s] NBD_CMD_FLUSH: %s\n%s." %
                                   (host, port, exception,
                                    traceback.format_exc()))
                    yield from self.nbd_response(writer,
                                                 handle,
                                                 error=self.EIO)
                    continue

                yield from self.nbd_response(writer, handle)

            else:
                self.log.warning("[%s:%s] Unknown cmd %s, ignoring." %
                                 (host, port, cmd))
                yield from self.nbd_response(writer,
                                             handle,
                                             error=self.EINVAL)
                continue

    except _NbdServerAbortedNegotiationError:
        self.log.info("[%s:%s] Client aborted negotiation." % (host, port))

    except (asyncio.IncompleteReadError, IOError) as exception:
        self.log.error("[%s:%s] %s" % (host, port, exception))

    finally:
        # Persist any copy-on-write data and release the opened version.
        if cow_version:
            self.store.fixate(cow_version)
        if version:
            self.store.close(version)
        writer.close()
def _write_response(writer: asyncio.StreamWriter,
                    data: Union[bytes, str, int, Error, list, tuple, None]):
    """Serialize *data* onto the stream in the server's RESP-like wire format.

    bytes -> $<len>\\r\\n<data>\\r\\n, str -> +<len>\\r\\n<utf8>\\r\\n,
    int -> :<n>\\r\\n, Error -> -<msg>\\r\\n, list/tuple -> *<count> followed by
    each element recursively, None -> $-1\\r\\n.

    NOTE(review): the str reply carries a length prefix, which deviates from
    standard RESP simple strings ('+<payload>\\r\\n') — presumably the paired
    client expects this; confirm before changing.
    """
    if isinstance(data, bytes):
        writer.write(b"$%d\r\n%s\r\n" % (len(data), data))
    elif isinstance(data, str):
        encoded = data.encode()
        writer.write(b"+%d\r\n%s\r\n" % (len(data), encoded))
    elif isinstance(data, int):
        writer.write(b":%d\r\n" % data)
    elif isinstance(data, Error):
        writer.write(b"-%s\r\n" % data.message.encode())
    elif isinstance(data, (list, tuple)):
        # Arrays announce their element count, then serialize each element.
        writer.write(b"*%d\r\n" % len(data))
        for element in data:
            _write_response(writer, element)
    elif data is None:
        writer.write(b"$-1\r\n")
    else:
        raise RadishProtocolError("Unrecognized type: %s" % type(data))
async def handle_connection(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    """Dispatch one incoming connection: either the custom 'DKG ' protocol
    (signature-authenticated channel) or a minimal JSON-RPC-over-HTTP request,
    distinguished by the first four bytes."""
    try:
        cliipaddr = writer.get_extra_info('peername')
        ownipaddr = writer.get_extra_info('sockname')
        logging.debug('{} <-- {}'.format(ownipaddr, cliipaddr))
        protocol_indicator = await asyncio.wait_for(reader.read(4), timeout)
        if protocol_indicator == b'DKG ':
            # DKG peer: prove our identity, then verify the client's address
            # via a signed nonce before opening a channel.
            await respond_to_nonce_with_signature(reader, writer, node.private_key, timeout)
            cliethaddr = await determine_address_via_nonce(reader, writer, timeout)
            if cliethaddr is None:
                logging.debug('(s) could not verify client signature; closing connection')
                return
            if cliethaddr not in accepted_addresses:
                logging.debug('(s) client address {:40x} not accepted'.format(cliethaddr))
                return
            await establish_channel(cliethaddr, reader, writer, node)
        elif len(protocol_indicator) > 0:
            # Anything else is treated as an HTTP request line; the 4 sniffed
            # bytes are prepended back onto the rest of the line.
            req = HTTPRequest(protocol_indicator +
                              await asyncio.wait_for(reader.readline(), timeout),
                              reader)
            contentlen = req.headers.get('Content-Length')
            if contentlen is not None:
                contentlen = int(contentlen)
                req.body = await reader.read(contentlen)
            res = JSONRPCResponseManager.handle(req.body, default_dispatcher)
            res_data = await get_response_data(res, timeout)
            db.Session.remove()
            if res_data is None:
                writer.write(b'HTTP/1.1 204 No Content\r\n\r\n')
            else:
                res_str = json.dumps(res_data, indent=2, sort_keys=True).encode('UTF-8')
                # Content-Length is len(res_str) + 1 for the trailing b'\n'.
                writer.write(b'HTTP/1.1 200 OK\r\n'
                             b'Content-Type: application/json; charset=UTF-8\r\n'
                             b'Content-Length: ')
                writer.write(str(len(res_str) + 1).encode('UTF-8'))
                writer.write(b'\r\n\r\n')
                writer.write(res_str)
                writer.write(b'\n')
    finally:
        writer.close()
async def handle_relay(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    """Serve one relay request.

    A plaintext 'RSAREQUEST' line returns this relay's RSA public key.
    Any other line is '<rsa-wrapped-fernet-key>§<fernet-encrypted payload>';
    the decrypted payload is '<COMMAND>:<json-args>' where COMMAND is PING
    (peer registration / poll), CMND or RESP (message relaying). The reply
    is encrypted with the client-supplied Fernet key.
    """
    data = await reader.readline()
    message = data.decode('utf-8').strip('\n')
    if message == 'RSAREQUEST':
        # Unauthenticated request for our public key.
        logging.debug(f'RECV RSAREQUEST {writer.get_extra_info("peername")}')
        writer.write(
            base64.urlsafe_b64encode(relay.public.save_pkcs1()) + b'\n')
        await writer.drain()
        writer.close()
        return
    else:
        try:
            # '§' separates the RSA-encrypted Fernet key from the payload.
            key, data = message.split('§')
            fernet = Fernet(
                rsa.decrypt(base64.urlsafe_b64decode(key.encode('utf-8')),
                            relay.private))
            data = fernet.decrypt(
                base64.urlsafe_b64decode(data.encode('utf-8'))).decode('utf-8')
        except rsa.DecryptionError:
            logging.exception('Error decrypting message: ')
            writer.write(b'error\n')
            await writer.drain()
            writer.close()
            return
        command, arguments = data.split(':', maxsplit=1)
        arguments = json.loads(arguments)
        if command == 'PING':
            # Make sure the network bucket exists before touching it.
            if not arguments['network'] in relay.networks:
                relay.networks[arguments['network']] = {}
            if arguments['advertise']:
                # Advertising peers are registered, or have last_ping refreshed.
                if arguments['peer_id'] in relay.networks[
                        arguments['network']].keys():
                    relay.networks[arguments['network']][
                        arguments['peer_id']]['last_ping'] = time.time()
                else:
                    relay.networks[arguments['network']][
                        arguments['peer_id']] = {
                            'id': arguments['peer_id'],
                            'name': arguments['peer_name'],
                            'network': arguments['network'],
                            'public_key': rsa.PublicKey.load_pkcs1(
                                base64.urlsafe_b64decode(
                                    arguments['public_key'].encode('utf-8'))),
                            'last_ping': time.time(),
                            'buffer': []
                        }
                # Merge and deduplicate the relay list gossiped by the peer.
                relay.relays.extend(arguments['relays'])
                relay.relays = list(set(relay.relays))
                return_packet = json.dumps({
                    'peers': {
                        i: [
                            relay.networks[arguments['network']][i]['name'],
                            base64.urlsafe_b64encode(
                                relay.networks[arguments['network']][i]
                                ['public_key'].save_pkcs1()).decode('utf-8')
                        ]
                        for i in relay.networks[arguments['network']].keys()
                    },
                    'relays': relay.relays,
                    'public_key': base64.urlsafe_b64encode(
                        relay.public.save_pkcs1()).decode('utf-8'),
                    # Hand over (a copy of) this peer's queued messages...
                    'buffer': copy.deepcopy(relay.networks[arguments['network']][
                        arguments['peer_id']]['buffer'])
                })
                # ...and clear the queue now that it has been delivered.
                relay.networks[arguments['network']][
                    arguments['peer_id']]['buffer'] = []
            else:
                # Non-advertising peers only poll; they get no buffer.
                relay.relays.extend(arguments['relays'])
                relay.relays = list(set(relay.relays))
                return_packet = json.dumps({
                    'peers': {
                        i: [
                            relay.networks[arguments['network']][i]['name'],
                            base64.urlsafe_b64encode(
                                relay.networks[arguments['network']][i]
                                ['public_key'].save_pkcs1()).decode('utf-8')
                        ]
                        for i in relay.networks[arguments['network']].keys()
                    },
                    'relays': relay.relays,
                    'public_key': base64.urlsafe_b64encode(
                        relay.public.save_pkcs1()).decode('utf-8'),
                    'buffer': []
                })
        elif command == 'CMND':
            # Queue a command for the target peer to pick up on its next PING.
            logging.debug(
                f'DATA: {arguments["network"]}.{arguments["originator"]} -> {arguments["network"]}.{arguments["target"]}'
            )
            relay.networks[arguments['network']][
                arguments['target']]['buffer'].insert(
                    0, {
                        'id': arguments['id'],
                        'originator': arguments['originator'],
                        'data': arguments['data'],
                        'type': 'cmd'
                    })
            return_packet = '{}'
        elif command == 'RESP':
            # Queue a response for the target peer, same mechanism as CMND.
            logging.debug(
                f'DATA: {arguments["network"]}.{arguments["originator"]} -> {arguments["network"]}.{arguments["target"]}'
            )
            relay.networks[arguments['network']][
                arguments['target']]['buffer'].insert(
                    0, {
                        'id': arguments['id'],
                        'originator': arguments['originator'],
                        'data': arguments['data'],
                        'type': 'resp'
                    })
            return_packet = '{}'
        # NOTE(review): an unknown command leaves return_packet unbound and
        # raises NameError below — confirm whether that is intended.
        dat = base64.urlsafe_b64encode(
            fernet.encrypt(return_packet.encode('utf-8'))) + b'\n'
        writer.write(dat)
        await writer.drain()
        writer.close()
        return
def to_stream(self, writer: asyncio.StreamWriter):
    """Write this object's serialized bytes to *writer*, flush, and record
    the time the message went out in protocol_ts."""
    outgoing = self.to_bytes()
    writer.write(outgoing)
    # Old-style (generator-based) coroutine: delegate to drain() to flush.
    yield from writer.drain()
    self.protocol_ts = datetime.now()
async def to_stream(self, writer: asyncio.StreamWriter):
    """Serialize this object and write it to the stream, waiting until the
    transport buffer is flushed."""
    encoded = self.to_bytes()
    writer.write(encoded)
    await writer.drain()
async def _send_request(writer: asyncio.StreamWriter, block: Block) -> None:
    """Send a BitTorrent `request` message for the given block and flush."""
    payload = messages.Request.encode(block.index, block.offset, block.length)
    writer.write(payload)
    await writer.drain()
async def _send_interested(writer: asyncio.StreamWriter) -> None:
    """Send a BitTorrent `interested` message and flush."""
    payload = messages.Interested.encode()
    writer.write(payload)
    await writer.drain()
async def _send_handshake(writer: asyncio.StreamWriter, info_hash: bytes, peer_id: bytes) -> None:
    """Send the BitTorrent handshake for *info_hash* as *peer_id* and flush."""
    payload = messages.Handshake.encode(info_hash, peer_id)
    writer.write(payload)
    await writer.drain()
async def negotiate_socks5_userpass(
        reader: asyncio.StreamReader,
        writer: asyncio.StreamWriter,
        host: Union[str, ipaddress.IPv4Address, ipaddress.IPv6Address],
        port: int,
        args: Optional[Dict[str, str]],
) -> None:
    """Run a full SOCKS5 negotiation against a pluggable transport.

    PT per-connection args, when present, are smuggled through the RFC 1929
    username/password fields (up to 255 bytes each); otherwise NO_AUTH is
    used. Then issues a CONNECT for host:port and consumes the bound-address
    part of the reply. Raises ValueError on oversized args/hostname,
    RuntimeError on rejected auth, PTSOCKS5ConnectError on CONNECT failure.
    """
    if args:
        args_bytes = encode_args(args)
        # username + password can carry at most 255 bytes each.
        if len(args_bytes) > 255 * 2:
            raise ValueError('Encoded args too long')
        username = args_bytes[:255]
        password = args_bytes[255:]
        if not password:
            # RFC 1929 requires a non-empty password field.
            password = b'\0'
        writer.write(b'\x05\x01'  # SOCKS5, 1 auth method
                     + enums.SOCKS5AuthType.USERNAME_PASSWORD)
        buf = await reader.readexactly(2)
        assert buf[0] == 5, 'Invalid server SOCKS version'
        if buf[1:2] != enums.SOCKS5AuthType.USERNAME_PASSWORD:
            raise RuntimeError(
                f'PT rejected userpass auth method, returned {buf[1:2]!r}')
        writer.write(b''.join((
            b'\x01',  # userpass sub-negotiation version 1
            len(username).to_bytes(1, 'big'),
            username,
            len(password).to_bytes(1, 'big'),
            password,
        )))
        buf = await reader.readexactly(2)
        assert buf[0] == 1, 'Invalid server USERPASS sub-negotiation version'
        if buf[1] != 0:
            raise RuntimeError(
                f'PT rejected username/password, returned {buf[1:2]!r}')
    else:
        writer.write(b'\x05\x01'  # SOCKS5, 1 auth method
                     + enums.SOCKS5AuthType.NO_AUTH)
        buf = await reader.readexactly(2)
        assert buf[0] == 5, 'Invalid server SOCKS version'
        if buf[1:2] != enums.SOCKS5AuthType.NO_AUTH:
            raise RuntimeError(
                f'PT rejected noauth auth method, returned {buf[1:2]!r}')
    try:
        host = ipaddress.ip_address(host)
    except ValueError:
        # Not a literal IP: send as an (IDNA-encoded) domain name.
        host_type = enums.SOCKS5AddressType.DOMAIN_NAME
        host_bytes = host.encode('idna')
        host_len = len(host_bytes)
        if host_len > 255:
            raise ValueError('Hostname too long')
        host_bytes = host_len.to_bytes(1, 'big') + host_bytes
    else:
        if host.version == 6:
            host_type = enums.SOCKS5AddressType.IPV6_ADDRESS
        else:
            host_type = enums.SOCKS5AddressType.IPV4_ADDRESS
        host_bytes = host.packed
    writer.write(b''.join((
        b'\x05',  # SOCKS5
        enums.SOCKS5Command.CONNECT,
        b'\0',  # reserved
        host_type,
        host_bytes,
        port.to_bytes(2, 'big'),
    )))
    # buf = version, reply, reserved, addr_type, 1st byte of address
    buf = await reader.readexactly(5)
    assert buf[0] == 5, 'Invalid server SOCKS version'
    reply = enums.SOCKS5Reply(buf[1:2])
    if reply is not enums.SOCKS5Reply.SUCCESS:
        raise exceptions.PTSOCKS5ConnectError(reply)
    assert buf[2] == 0, 'Invalid RSV field'
    bind_addr_type = enums.SOCKS5AddressType(buf[3:4])
    if bind_addr_type is enums.SOCKS5AddressType.IPV4_ADDRESS:
        # consume remaining address and port in one call to readexactly()
        await reader.readexactly(-1 + 4 + 2)
    elif bind_addr_type is enums.SOCKS5AddressType.IPV6_ADDRESS:
        await reader.readexactly(-1 + 16 + 2)
    else:
        # Domain name: buf[4] already holds its length byte.
        await reader.readexactly(buf[4] + 2)
async def telnet_server(reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> None:
    """Line-oriented control console for managing sessions.

    Commands: 'Q' dumps live/active session state as JSON; 'SEL <sid>'
    activates a session ('SEL NUL' deactivates the live one); anything
    else gets 'WHAT'. Mutates the module-level session registry.
    """
    global active_sessions, live_session
    while True:
        data = await reader.readline()
        if not data:
            # EOF: client went away.
            break
        try:
            data_str = data.decode("utf-8")
        except UnicodeDecodeError as e:
            print(e)
            continue
        parts = data_str.rstrip().split(" ")
        print(parts)
        if parts[0] == "Q":
            # Query: report the live session plus every active session.
            result: Dict[str, Dict[str, str]] = {}
            for sid, sess in active_sessions.items():
                result[sid] = sess.to_dict()
            writer.write((json.dumps({
                "live": live_session.to_dict() if live_session is not None else None,
                "active": result,
            }) + "\r\n").encode("utf-8"))
        elif parts[0] == "SEL":
            sid = parts[1]
            if sid == "NUL":
                # Explicit deselect: deactivate whatever is live.
                if live_session is not None:
                    await live_session.deactivate()
                    live_session = None
                    print("OKAY")
                    writer.write("OKAY\r\n".encode("utf-8"))
                else:
                    print("WONT no_live_session")
                    writer.write("WONT no_live_session\r\n".encode("utf-8"))
            else:
                if sid not in active_sessions:
                    print("WONT no_such_session")
                    writer.write("WONT no_such_session\r\n".encode("utf-8"))
                else:
                    session = active_sessions[sid]
                    if session is None:
                        print("OOPS no_such_session")
                        writer.write(
                            "OOPS no_such_session\r\n".encode("utf-8"))
                    elif live_session is not None and live_session.connection_id == sid:
                        print("WONT already_live")
                        writer.write("WONT already_live\r\n".encode("utf-8"))
                    else:
                        # Swap liveness: deactivate the old, activate the new.
                        if live_session is not None:
                            await live_session.deactivate()
                        await session.activate()
                        live_session = session
                        print("OKAY")
                        writer.write("OKAY\r\n".encode("utf-8"))
        else:
            writer.write("WHAT\r\n".encode("utf-8"))
        await writer.drain()
    writer.close()
async def _write_data(stream: StreamWriter, data: str): stream.write(data.encode("UTF-8")) stream.close()
async def to_stream(self, writer: asyncio.StreamWriter):
    """Serialize this object, send it, flush, and record the send time
    in protocol_ts."""
    outgoing = self.to_bytes()
    writer.write(outgoing)
    await writer.drain()
    self.protocol_ts = datetime.now()
async def execute(writer: asyncio.StreamWriter, key, value):
    """Store *value* under *key* in the global cache, then acknowledge."""
    MARCONA_CACHE[key] = value
    # NOTE(review): b'OCK' looks like a typo for b'OK' — confirm against the
    # client before changing.
    ack = simple_string_response(b'OCK')
    writer.write(ack)
async def write_error(writer: asyncio.StreamWriter, message: str) -> None:
    """Send a newline-terminated JSON object {'error': message} and flush."""
    payload = json.dumps({'error': message}) + '\n'
    writer.write(payload.encode())
    await writer.drain()
async def handle_request(self, reader: asyncio.StreamReader,
                         writer: asyncio.StreamWriter) -> None:
    """Answer one JSON request from the GUI socket.

    Request types: "cmd" (translated to a CommandFormat and forwarded to the
    low-level interface), "broadcast" (ignored), "alarm" (acknowledges and
    removes an alarm). Replies {"type": "ack"} on success and
    {"type": "nack"} on any invalid packet, then closes the connection.

    Bug fix: the alarm branch caught NameError, but list.remove() raises
    ValueError when the item is absent — so an already-removed alarm
    escaped the handler with no reply at all. It now raises HEVPacketError
    (handled below as a nack) as originally intended.
    """
    # listen for queries on the request socket
    data = await reader.read(300)
    request = json.loads(data.decode("utf-8"))

    # logging
    addr = writer.get_extra_info("peername")
    logging.info(f"Answering request from {addr}")

    try:
        reqtype = request["type"]
        if reqtype == "cmd":
            reqcmd = request["cmd"]
            if reqcmd == "CMD_START" or reqcmd == "CMD_STOP":
                # temporary, since CMD_START and CMD_STOP are now deprecated
                reqcmdtype = "GENERAL"  # fake a general command
                logging.warning(
                    "CMD_START AND CMD_STOP are deprecated and will be removed in a future release."
                )
                reqcmd = reqcmd.split("_")[1]
            else:
                reqcmdtype = request["cmdtype"]
            reqparam = request["param"] if request["param"] is not None else 0

            command = CommandFormat(
                cmdType=CMD_TYPE[reqcmdtype].value,
                cmdCode=CMD_MAP[reqcmdtype].value[reqcmd].value,
                param=reqparam)
            self._lli.writePayload(command)
            # processed and sent to controller, send ack to GUI since it's in enum
            payload = {"type": "ack"}
        elif reqtype == "broadcast":
            # ignore for the minute
            # (leaves `payload` unbound; json.dumps below raises NameError,
            # which the outer handler turns into a nack)
            pass
        elif reqtype == "alarm":
            # acknowledgement of alarm from gui
            try:
                # delete alarm if it exists
                with self._dblock:
                    self._alarms.remove(request["ack"])
                payload = {"type": "ack"}
            except ValueError as e:
                # list.remove() raises ValueError for a missing item.
                raise HEVPacketError(
                    f"Alarm could not be removed. May have been removed already. {e}"
                )
        else:
            raise HEVPacketError(f"Invalid request type")

        packet = json.dumps(payload).encode()

        # send reply and close connection
        writer.write(packet)
        await writer.drain()
        writer.close()
    except (NameError, KeyError, HEVPacketError) as e:
        # invalid request: reject immediately
        logging.warning(f"Invalid packet: {e}")
        payload = {"type": "nack"}
        packet = json.dumps(payload).encode()
        writer.write(packet)
        await writer.drain()
        writer.close()
async def connection_loop(execute_rpc: Callable[[Any], Any],
                          reader: asyncio.StreamReader,
                          writer: asyncio.StreamWriter,
                          logger: logging.Logger,
                          cancel_token: CancelToken) -> None:
    """Read newline-less JSON requests off *reader*, execute them via
    *execute_rpc*, and write results (or JSON error objects) to *writer*,
    until the cancel token fires or the client disconnects."""
    # TODO: we should look into using an io.StringIO here for more efficient
    # writing to the end of the string.
    raw_request = ''
    while True:
        request_bytes = b''
        try:
            # Requests are framed loosely by reading up to the next '}'.
            request_bytes = await wait_with_token(reader.readuntil(b'}'),
                                                  token=cancel_token)
        except asyncio.LimitOverrunError as e:
            logger.info(
                "Client request was too long. Erasing buffer and restarting..."
            )
            # Drain the oversized data so the stream can recover.
            request_bytes = await wait_with_token(reader.read(e.consumed),
                                                  token=cancel_token)
            await wait_with_token(write_error(
                writer,
                "reached limit: %d bytes, starting with '%s'" % (
                    e.consumed,
                    request_bytes[:20],
                ),
            ),
                                  token=cancel_token)
            continue

        raw_request += request_bytes.decode()

        bad_prefix, raw_request = strip_non_json_prefix(raw_request)
        if bad_prefix:
            logger.info("Client started request with non json data: %r",
                        bad_prefix)
            await wait_with_token(
                write_error(writer, 'Cannot parse json: ' + bad_prefix),
                token=cancel_token,
            )

        try:
            request = json.loads(raw_request)
        except json.JSONDecodeError:
            # invalid json request, keep reading data until a valid json is formed
            logger.debug("Invalid JSON, waiting for rest of message: %r",
                         raw_request)
            continue

        # reset the buffer for the next message
        raw_request = ''

        if not request:
            logger.debug("Client sent empty request")
            await wait_with_token(
                write_error(writer, 'Invalid Request: empty'),
                token=cancel_token,
            )
            continue

        try:
            result = execute_rpc(request)
        except Exception as e:
            logger.exception("Unrecognized exception while executing RPC")
            await wait_with_token(
                write_error(writer, "unknown failure: " + str(e)),
                token=cancel_token,
            )
        else:
            writer.write(result.encode())

        await wait_with_token(writer.drain(), token=cancel_token)
def send_line_to_writer(writer: asyncio.StreamWriter, line):
    """Echo *line* to stdout, then send it CRLF-terminated (UTF-8)."""
    print('->', line)
    encoded = line.encode('utf-8')
    writer.write(encoded + b'\r\n')
async def write(writer: asyncio.StreamWriter, payload: Any):
    """
    Send a regular message or an exception through the stream
    """
    packed = Message(payload).pack()
    writer.write(packed)
    await writer.drain()
async def _receive_handshake(self, reader: asyncio.StreamReader,
                             writer: asyncio.StreamWriter) -> None:
    """Perform the responder side of the RLPx encryption handshake.

    Reads the initiator's auth message (plain or EIP-8 framed), replies
    with an auth-ack, derives the session secrets, and hands the resulting
    Peer to do_p2p_handshake().
    """
    self.logger.debug("Receiving handshake...")
    # Use reader to read the auth_init msg until EOF
    msg = await wait_with_token(
        reader.read(ENCRYPTED_AUTH_MSG_LEN),
        token=self.cancel_token,
    )

    # Use decode_authentication(auth_init_message) on auth init msg
    try:
        ephem_pubkey, initiator_nonce, initiator_pubkey = decode_authentication(
            msg, self.privkey)
    # Try to decode as EIP8
    except DecryptionError:
        # EIP-8 messages are longer; the first 2 bytes carry the real size.
        msg_size = big_endian_to_int(msg[:2])
        remaining_bytes = msg_size - ENCRYPTED_AUTH_MSG_LEN + 2
        msg += await wait_with_token(
            reader.read(remaining_bytes),
            token=self.cancel_token,
        )
        ephem_pubkey, initiator_nonce, initiator_pubkey = decode_authentication(
            msg, self.privkey)

    # Get remote's address: IPv4 or IPv6
    ip, socket, *_ = writer.get_extra_info("peername")
    remote_address = Address(ip, socket)

    # Create `HandshakeResponder(remote: kademlia.Node, privkey: datatypes.PrivateKey)` instance
    initiator_remote = Node(initiator_pubkey, remote_address)
    responder = HandshakeResponder(initiator_remote, self.privkey)

    # Call `HandshakeResponder.create_auth_ack_message(nonce: bytes)` to create the reply
    responder_nonce = secrets.token_bytes(HASH_LEN)
    auth_ack_msg = responder.create_auth_ack_message(nonce=responder_nonce)
    auth_ack_ciphertext = responder.encrypt_auth_ack_message(auth_ack_msg)

    # Use the `writer` to send the reply to the remote
    writer.write(auth_ack_ciphertext)
    await writer.drain()

    # Call `HandshakeResponder.derive_shared_secrets()` and use return values to create `Peer`
    aes_secret, mac_secret, egress_mac, ingress_mac = responder.derive_secrets(
        initiator_nonce=initiator_nonce,
        responder_nonce=responder_nonce,
        remote_ephemeral_pubkey=ephem_pubkey,
        auth_init_ciphertext=msg,
        auth_ack_ciphertext=auth_ack_ciphertext)

    # Create and register peer in peer_pool
    peer = self.peer_class(remote=initiator_remote,
                           privkey=self.privkey,
                           reader=reader,
                           writer=writer,
                           aes_secret=aes_secret,
                           mac_secret=mac_secret,
                           egress_mac=egress_mac,
                           ingress_mac=ingress_mac,
                           chaindb=self.chaindb,
                           network_id=self.network_id)
    await self.do_p2p_handshake(peer)
def write_long(writer: asyncio.StreamWriter, v: int):
    """Write *v* as a big-endian signed 64-bit integer."""
    encoded = struct.pack('>q', v)
    writer.write(encoded)
async def login(reader: StreamReader, writer: StreamWriter, name: str) -> bool:
    """(async) Challenge-response login for *name*.

    Exchanges 256-bit ephemerals with the client, expects the client to sign
    hex(server_eph * client_eph) with the private key matching the PEM
    certificate stored for *name* in MySQL, and verifies that signature.
    Returns True on success, False on any failure (status is reported to
    the client as a text line first).

    NOTE(review): signing the *product* of the two ephemerals is not a
    standard key-agreement/challenge construction — confirm the client
    implements the same scheme and that this meets the security goals.
    """
    global mysql_pool
    if mysql_pool is None:
        writer.write(b"database error\n")
        return False
    bitsize = 256
    # Expected hex string length: one hex digit per 4 bits, plus '0x'.
    str_len = bitsize // 4 + 2
    client_eph: int
    server_eph: int
    writer.write(b"exchange\n")
    client_eph_str = await reader.readline()
    client_eph_str = client_eph_str.strip()
    if len(client_eph_str) != str_len:
        writer.write(b"too short\n")
        return False
    try:
        client_eph = int(client_eph_str, 16)
    except ValueError:
        writer.write(b"not a number\n")
        return False
    server_eph = randbits(bitsize)
    server_eph_str = hex(server_eph).encode()
    writer.write(server_eph_str + b"\n")
    # The challenge the client must have signed.
    data = hex(server_eph * client_eph).encode()
    signature = b64decode(await reader.readline())
    async with mysql_pool.acquire() as conn:
        async with conn.cursor() as cur:
            # Look up the user's PEM certificate to verify against.
            await cur.execute("select certificate from users where name = %s",
                              (name, ))
            (public_key, ) = await cur.fetchone()
            try:
                key = crypto.load_certificate(crypto.FILETYPE_PEM,
                                              public_key.encode())
                crypto.verify(key, signature, data, "sha256")
            except crypto.Error as e:
                print(f"Verification failed: {e}")
                writer.write(b"fail\n")
                writer.write_eof()
                return False
    writer.write(b"ok\n")
    return True
def write_bytes(writer: asyncio.StreamWriter, b: bytes): n = len(b) write_int(writer, n) writer.write(b)
async def server_handler(reader: asyncio.StreamReader, writer: asyncio.StreamWriter): assert await reader.readuntil(b"\r\n\r\n") == b"GET /hello HTTP/1.1\r\n\r\n" writer.write(b"HTTP/1.1 204 No Content\r\n\r\n") await writer.drain() writer.close()
def fdms_session(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    """Drive one FDMS terminal session (pre-async/await ``yield from`` coroutine).

    Outer loop: solicit packets with ENQ, collect STX-framed requests
    (verifying the longitudinal checksum, ACKing good frames and NAKing bad
    ones with up to 4 retries), then process the buffered offline/online
    transactions and deliver the response with its own ACK/NAK retry loop.
    The session ends with EOT and, when supported, an EOF on the writer.
    """
    online = None
    ''':type: (FdmsHeader, FdmsTransaction)'''
    add_on = None
    ''':type: (FdmsHeader, FdmsTransaction)'''
    offline = list()
    writer.write(bytes((ENQ,)))
    yield from writer.drain()
    while True:
        # Get Request
        attempt = 0
        while True:
            try:
                # Give up after 4 failed receive attempts.
                if attempt > 4:
                    return
                request = yield from asyncio.wait_for(read_fdms_packet(reader), timeout=15.0)
                if len(request) == 0:
                    return
                control_byte = request[0]
                if control_byte == STX:
                    # XOR checksum over the frame body; a mismatch triggers the
                    # NAK path via the generic exception handler below.
                    lrs = functools.reduce(lambda x, y: x ^ int(y), request[2:-1], int(request[1]))
                    if lrs != request[-1]:
                        raise ValueError('LRS sum')
                    pos, header = parse_header(request)
                    txn = header.create_txn()
                    txn.parse(request[pos:-2])
                    if header.txn_type == FdmsTransactionType.Online.value:
                        # First online txn is "the" transaction; a second one is an add-on.
                        if online is None:
                            online = (header, txn)
                        else:
                            add_on = (header, txn)
                    else:
                        offline.append((header, txn))
                    if header.protocol_type == '2':
                        break
                    # Respond with ACK
                    attempt = 0
                    writer.write(bytes((ACK,)))
                elif control_byte == EOT:
                    break  # Close session
            except asyncio.TimeoutError:
                return
            # Respond with NAK
            except Exception as e:
                logging.getLogger(LOG_NAME).debug('Request error: %s', str(e))
                attempt += 1
                writer.write(bytes((NAK,)))
            yield from writer.drain()
        if online is None:
            return
        # Process Transactions & Send Response
        # NOTE(review): responses for offline transactions are discarded —
        # `rs` is overwritten on every iteration and again below; confirm
        # offline txns really need no individual response.
        for txn in offline:
            rs = process_txn(txn)
        offline.clear()
        if add_on is not None:
            process_add_on_txn(online, add_on)
            add_on = None
        rs = process_txn(online)
        # Send Response
        rs_bytes = rs.response()
        if rs.action_code == FdmsActionCode.HostSpecificPoll or rs.action_code == FdmsActionCode.RevisionInquiry:
            # Poll/inquiry responses are fire-and-forget: no ACK expected.
            writer.write(rs_bytes)
            yield from writer.drain()
        else:
            # Retry the response until the terminal ACKs it (max 4 attempts).
            attempt = 0
            while True:
                if attempt >= 4:
                    return
                writer.write(rs_bytes)
                yield from writer.drain()
                control_byte = 0
                try:
                    while True:
                        rs_head = yield from asyncio.wait_for(reader.read(1), timeout=4.0)
                        if len(rs_head) == 0:
                            return
                        # Mask the high bit before comparing control codes.
                        control_byte = rs_head[0] & 0x7f
                        if control_byte == ACK:
                            break
                        elif control_byte == NAK:
                            break  # Close session
                except asyncio.TimeoutError as e:
                    return
                if control_byte == ACK:
                    break
                else:
                    attempt += 1
        if online[0].wcc in {'B', 'C'}:
            # Send ENQ
            writer.write(bytes((ENQ,)))
            yield from writer.drain()
            continue
        else:
            break
    writer.write(bytes((EOT,)))
    yield from writer.drain()
    if writer.can_write_eof():
        writer.write_eof()
async def receive_connection(cls,
                             reader: asyncio.StreamReader,
                             writer: asyncio.StreamWriter,
                             private_key: datatypes.PrivateKey,
                             token: CancelToken) -> TransportAPI:
    """Complete the responder side of the RLPx handshake and build a transport.

    Reads the initiator's auth message (falling back to the EIP-8
    length-prefixed format when plain decryption fails), answers with an
    encrypted auth-ack, derives the session secrets and constructs the
    transport instance via ``cls(...)``.

    :param reader: stream carrying the initiator's auth data
    :param writer: stream used to send the auth-ack reply
    :param private_key: our node's static private key
    :param token: cancellation token bounding all awaits
    :raises HandshakeFailure: on truncated reads, undecryptable auth data
        (both formats), a peername-less socket, or a closing connection.
    """
    try:
        msg = await token.cancellable_wait(
            reader.readexactly(ENCRYPTED_AUTH_MSG_LEN),
            timeout=REPLY_TIMEOUT,
        )
    except asyncio.IncompleteReadError as err:
        raise HandshakeFailure from err
    try:
        ephem_pubkey, initiator_nonce, initiator_pubkey = decode_authentication(
            msg,
            private_key,
        )
    except DecryptionError as non_eip8_err:
        # Try to decode as EIP8
        # EIP-8 auth messages carry their total size in the first two bytes.
        msg_size = big_endian_to_int(msg[:2])
        remaining_bytes = msg_size - ENCRYPTED_AUTH_MSG_LEN + 2
        try:
            msg += await token.cancellable_wait(
                reader.readexactly(remaining_bytes),
                timeout=REPLY_TIMEOUT,
            )
        except asyncio.IncompleteReadError as err:
            raise HandshakeFailure from err
        try:
            ephem_pubkey, initiator_nonce, initiator_pubkey = decode_authentication(
                msg,
                private_key,
            )
        except DecryptionError as eip8_err:
            raise HandshakeFailure(
                f"Failed to decrypt both EIP8 handshake: {eip8_err} and "
                f"non-EIP8 handshake: {non_eip8_err}")
        else:
            got_eip8 = True
    else:
        got_eip8 = False
    # Resolve the remote's network address; a missing peername means the
    # connection is not usable.
    peername = writer.get_extra_info("peername")
    if peername is None:
        socket = writer.get_extra_info("socket")
        sockname = writer.get_extra_info("sockname")
        raise HandshakeFailure(
            "Received incoming connection with no remote information:"
            f"socket={repr(socket)} sockname={sockname}")
    ip, socket, *_ = peername
    remote_address = Address(ip, socket)
    cls.logger.debug("Receiving handshake from %s", remote_address)
    initiator_remote = Node(initiator_pubkey, remote_address)
    responder = HandshakeResponder(initiator_remote, private_key, got_eip8, token)
    responder_nonce = secrets.token_bytes(HASH_LEN)
    auth_ack_msg = responder.create_auth_ack_message(responder_nonce)
    auth_ack_ciphertext = responder.encrypt_auth_ack_message(auth_ack_msg)
    # Bail out early if the remote went away while we were computing the ack.
    if writer.transport.is_closing() or reader.at_eof():
        raise HandshakeFailure("Connection is closing")
    # Use the `writer` to send the reply to the remote
    writer.write(auth_ack_ciphertext)
    await token.cancellable_wait(writer.drain())
    # Call `HandshakeResponder.derive_shared_secrets()` and use return values to create `Peer`
    aes_secret, mac_secret, egress_mac, ingress_mac = responder.derive_secrets(
        initiator_nonce=initiator_nonce,
        responder_nonce=responder_nonce,
        remote_ephemeral_pubkey=ephem_pubkey,
        auth_init_ciphertext=msg,
        auth_ack_ciphertext=auth_ack_ciphertext)
    transport = cls(
        remote=initiator_remote,
        private_key=private_key,
        reader=reader,
        writer=writer,
        aes_secret=aes_secret,
        mac_secret=mac_secret,
        egress_mac=egress_mac,
        ingress_mac=ingress_mac,
    )
    return transport
async def send_to_peer(connection: StreamWriter, message: Message) -> NoReturn: msg_serialized = message.serialize() connection.write(msg_serialized) await connection.drain()
async def _receive_handshake(
        self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> None:
    """Respond to an incoming RLPx handshake and admit or reject the peer.

    Decodes the initiator's auth message (with EIP-8 fallback), replies with
    an auth-ack, derives the session secrets, builds an inbound ``Peer`` and
    either disconnects it (pool full, invalid candidate, too many inbound
    connections) or runs the protocol handshake.
    """
    msg = await self.wait(
        reader.read(ENCRYPTED_AUTH_MSG_LEN),
        timeout=REPLY_TIMEOUT)
    ip, socket, *_ = writer.get_extra_info("peername")
    remote_address = Address(ip, socket)
    self.logger.debug("Receiving handshake from %s", remote_address)
    got_eip8 = False
    try:
        ephem_pubkey, initiator_nonce, initiator_pubkey = decode_authentication(
            msg, self.privkey)
    except DecryptionError:
        # Try to decode as EIP8
        # EIP-8 auth messages carry their total size in the first two bytes.
        got_eip8 = True
        msg_size = big_endian_to_int(msg[:2])
        remaining_bytes = msg_size - ENCRYPTED_AUTH_MSG_LEN + 2
        msg += await self.wait(
            reader.read(remaining_bytes),
            timeout=REPLY_TIMEOUT)
        try:
            ephem_pubkey, initiator_nonce, initiator_pubkey = decode_authentication(
                msg, self.privkey)
        except DecryptionError as e:
            # Undecryptable handshake: drop the connection silently.
            self.logger.debug("Failed to decrypt handshake: %s", e)
            return
    initiator_remote = Node(initiator_pubkey, remote_address)
    responder = HandshakeResponder(initiator_remote, self.privkey, got_eip8, self.cancel_token)
    responder_nonce = secrets.token_bytes(HASH_LEN)
    auth_ack_msg = responder.create_auth_ack_message(responder_nonce)
    auth_ack_ciphertext = responder.encrypt_auth_ack_message(auth_ack_msg)
    # Use the `writer` to send the reply to the remote
    writer.write(auth_ack_ciphertext)
    await self.wait(writer.drain())
    # Call `HandshakeResponder.derive_shared_secrets()` and use return values to create `Peer`
    aes_secret, mac_secret, egress_mac, ingress_mac = responder.derive_secrets(
        initiator_nonce=initiator_nonce,
        responder_nonce=responder_nonce,
        remote_ephemeral_pubkey=ephem_pubkey,
        auth_init_ciphertext=msg,
        auth_ack_ciphertext=auth_ack_ciphertext
    )
    # Create and register peer in peer_pool
    peer = self.peer_class(
        remote=initiator_remote,
        privkey=self.privkey,
        reader=reader,
        writer=writer,
        aes_secret=aes_secret,
        mac_secret=mac_secret,
        egress_mac=egress_mac,
        ingress_mac=ingress_mac,
        headerdb=self.headerdb,
        network_id=self.network_id,
        inbound=True,
    )
    # Admission control for the new inbound peer.
    if self.peer_pool.is_full:
        peer.disconnect(DisconnectReason.too_many_peers)
    elif not self.peer_pool.is_valid_connection_candidate(peer.remote):
        peer.disconnect(DisconnectReason.useless_peer)
    total_peers = len(self.peer_pool.connected_nodes)
    inbound_peer_count = len([
        peer
        for peer in self.peer_pool.connected_nodes.values()
        if peer.inbound
    ])
    if total_peers > 1 and inbound_peer_count / total_peers > DIAL_IN_OUT_RATIO:
        # make sure to have at least 1/4 outbound connections
        peer.disconnect(DisconnectReason.too_many_peers)
    else:
        # We use self.wait() here as a workaround for
        # https://github.com/ethereum/py-evm/issues/670.
        await self.wait(self.do_handshake(peer))
async def _receive_handshake(
    self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter
) -> None:
    """Respond to an incoming RLPx handshake and admit or reject the peer.

    Decodes the initiator's auth message (with EIP-8 fallback), replies with
    an auth-ack, derives the session secrets, wraps them in a
    ``PeerConnection`` and asks the pool's peer factory for an inbound peer.
    The peer is disconnected when already connected/dialed, the pool is full,
    the candidate is invalid, or the inbound ratio is exceeded; otherwise the
    protocol handshake runs.
    """
    msg = await self.wait(
        reader.read(ENCRYPTED_AUTH_MSG_LEN), timeout=REPLY_TIMEOUT
    )
    ip, socket, *_ = writer.get_extra_info("peername")
    remote_address = Address(ip, socket)
    self.logger.debug("Receiving handshake from %s", remote_address)
    got_eip8 = False
    try:
        ephem_pubkey, initiator_nonce, initiator_pubkey = decode_authentication(
            msg, self.privkey
        )
    except DecryptionError:
        # Try to decode as EIP8
        # EIP-8 auth messages carry their total size in the first two bytes.
        got_eip8 = True
        msg_size = big_endian_to_int(msg[:2])
        remaining_bytes = msg_size - ENCRYPTED_AUTH_MSG_LEN + 2
        msg += await self.wait(reader.read(remaining_bytes), timeout=REPLY_TIMEOUT)
        try:
            ephem_pubkey, initiator_nonce, initiator_pubkey = decode_authentication(
                msg, self.privkey
            )
        except DecryptionError as e:
            # Undecryptable handshake: drop the connection silently.
            self.logger.debug("Failed to decrypt handshake: %s", e)
            return
    initiator_remote = Node(initiator_pubkey, remote_address)
    responder = HandshakeResponder(
        initiator_remote, self.privkey, got_eip8, self.cancel_token
    )
    responder_nonce = os.urandom(HASH_LEN)
    auth_ack_msg = responder.create_auth_ack_message(responder_nonce)
    auth_ack_ciphertext = responder.encrypt_auth_ack_message(auth_ack_msg)
    # Use the `writer` to send the reply to the remote
    writer.write(auth_ack_ciphertext)
    await self.wait(writer.drain())
    # Call `HandshakeResponder.derive_shared_secrets()` and use return values to create `Peer`
    aes_secret, mac_secret, egress_mac, ingress_mac = responder.derive_secrets(
        initiator_nonce=initiator_nonce,
        responder_nonce=responder_nonce,
        remote_ephemeral_pubkey=ephem_pubkey,
        auth_init_ciphertext=msg,
        auth_ack_ciphertext=auth_ack_ciphertext,
    )
    connection = PeerConnection(
        reader=reader,
        writer=writer,
        aes_secret=aes_secret,
        mac_secret=mac_secret,
        egress_mac=egress_mac,
        ingress_mac=ingress_mac,
    )
    # Create and register peer in peer_pool
    peer = self.peer_pool.get_peer_factory().create_peer(
        remote=initiator_remote, connection=connection, inbound=True
    )
    # Reject duplicates: already-connected nodes or ones we dialed ourselves.
    if (
        peer.remote in self.peer_pool.connected_nodes
        or peer.remote.pubkey in self.peer_pool.dialedout_pubkeys
    ):
        self.logger.debug("already connected or dialed, disconnecting...")
        await peer.disconnect(DisconnectReason.already_connected)
        return
    if self.peer_pool.is_full:
        await peer.disconnect(DisconnectReason.too_many_peers)
        return
    elif NO_SAME_IP and not self.peer_pool.is_valid_connection_candidate(
        peer.remote
    ):
        await peer.disconnect(DisconnectReason.useless_peer)
        return
    total_peers = len(self.peer_pool)
    inbound_peer_count = len(
        [peer for peer in self.peer_pool.connected_nodes.values() if peer.inbound]
    )
    if (
        total_peers > 1
        and inbound_peer_count / total_peers > self.allow_dial_in_ratio
    ):
        # make sure to have at least (1-allow_dial_in_ratio) outbound connections out of total connections
        await peer.disconnect(DisconnectReason.too_many_peers)
    else:
        # We use self.wait() here as a workaround for
        # https://github.com/ethereum/py-evm/issues/670.
        await self.wait(self.do_handshake(peer))
def to_stream(self, writer: asyncio.StreamWriter): writer.write(self.to_bytes()) yield from writer.drain()
async def send_data(writer: StreamWriter, msg: str): writer.write(f"{msg}\n".encode()) await writer.drain() logging.debug(f":sent:{msg}")
def write_int(writer: asyncio.StreamWriter, v: int): writer.write(struct.pack('>i', v))
async def sendMessage(writer: asyncio.StreamWriter, msg: Dict[str, Any]) -> None: ''' Encode and send a message to the server. ''' data = json.dumps(msg) print('\n<{!s} sending {!r}>\n'.format(datetime.datetime.now(), data)) writer.write(data.encode('utf-8') + b'\n') await writer.drain()