def _read_stream(self, stream: asyncio.StreamReader) -> None:
    """Consume *stream* byte-by-byte, splitting it into elements on newlines.

    Old-style (``yield from``) coroutine.  Each ``b'\\n'`` starts a new
    element via ``self.__new_element()``; every other byte is appended to
    the current element via ``self.__update_last_item_text()``.

    :param stream: asyncio.StreamReader drained until EOF
    :return: None
    """
    while not stream.at_eof():
        # NOTE(review): 1-byte reads are slow for high-volume streams;
        # presumably acceptable for line-oriented output — confirm callers.
        data = yield from stream.read(1)
        if data == b'\n':
            self.__new_element()
        else:
            self.__update_last_item_text(data)
async def proxy(self, client_reader: StreamReader, client_writer: StreamWriter,
                remote_reader: StreamReader, remote_writer: StreamWriter):
    """Bidirectionally shuttle bytes between a client and a remote peer.

    Keeps exactly one pending read task per direction.  Whichever read
    finishes first is forwarded by ``_proxy_connection``, which returns the
    replacement read task — or ``None`` once that direction reached EOF,
    which ends the loop.
    """
    client_read_task = create_task(client_reader.read(READ_BYTES_DEFAULT))
    remote_read_task = create_task(remote_reader.read(READ_BYTES_DEFAULT))
    while client_read_task and remote_read_task:
        done, pending = await asyncio.wait(
            {client_read_task, remote_read_task},
            return_when=FIRST_COMPLETED)
        if client_read_task in done:
            # Forward client -> remote and schedule the next client read.
            client_read_task = await self._proxy_connection(
                in_read=client_read_task, out_read=remote_read_task,
                in_reader=client_reader, out_writer=remote_writer)
        if remote_read_task in done:
            # Forward remote -> client and schedule the next remote read.
            # NOTE(review): if the client branch just returned None (EOF),
            # out_read is None here and an empty remote read would call
            # None.cancel() inside _proxy_connection — confirm both-EOF case.
            remote_read_task = await self._proxy_connection(
                in_read=remote_read_task, out_read=client_read_task,
                in_reader=remote_reader, out_writer=client_writer)
    # One direction closed: cancel the survivor and tear down the remote.
    # NOTE(review): client_writer is not closed here — presumably the server
    # framework closes it; confirm.
    if client_read_task:
        client_read_task.cancel()
    if remote_read_task:
        remote_read_task.cancel()
    remote_writer.close()
async def _handshake(
        initiator: "HandshakeInitiator",
        reader: asyncio.StreamReader,
        writer: asyncio.StreamWriter,
        token: CancelToken,
) -> Tuple[bytes, bytes, keccak_256, keccak_256]:
    """See the handshake() function above.

    This code was factored out into this helper so that we can create Peers with
    directly connected readers/writers for our tests.

    Returns the ``(aes_secret, mac_secret, egress_mac, ingress_mac)`` tuple
    derived from the RLPx auth / auth-ack exchange.

    :raises HandshakeFailure: if the remote disconnects before replying.
    """
    initiator_nonce = keccak(os.urandom(HASH_LEN))
    auth_msg = initiator.create_auth_message(initiator_nonce)
    auth_init = initiator.encrypt_auth_message(auth_msg)
    writer.write(auth_init)
    auth_ack = await token.cancellable_wait(
        reader.read(ENCRYPTED_AUTH_ACK_LEN), timeout=REPLY_TIMEOUT)
    if reader.at_eof():
        # This is what happens when Parity nodes have blacklisted us
        # (https://github.com/ethereum/py-evm/issues/901).
        # Bug fix: the message used to be passed printf-style
        # ("%s ...", repr(...)) which plain exceptions never interpolate.
        raise HandshakeFailure(
            f"{initiator.remote!r} disconnected before sending auth ack")
    ephemeral_pubkey, responder_nonce = initiator.decode_auth_ack_message(auth_ack)
    aes_secret, mac_secret, egress_mac, ingress_mac = initiator.derive_secrets(
        initiator_nonce,
        responder_nonce,
        ephemeral_pubkey,
        auth_init,
        auth_ack)
    return aes_secret, mac_secret, egress_mac, ingress_mac
async def client_loop(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    """Serve one broadcast client: register its writer, then poll for disconnect.

    The server only pushes data to clients; incoming bytes are read and
    discarded.  The periodic read exists solely to detect disconnects.
    """
    client_addr = writer.get_extra_info('peername')
    _LOGGER.debug(f'client {client_addr} connected')
    # Register broadcast writer
    self.clients[client_addr] = writer
    try:
        while True:
            try:
                # Just read and forget, we only send data anyway.
                # We need to do a read once in a while to catch client
                # disconnects (read() with no size waits for EOF, so it
                # only returns quickly when the client hangs up).
                await asyncio.wait_for(reader.read(), timeout=0.1)
            except asyncio.exceptions.TimeoutError:
                pass
            # writer.write(b'BOO\n')
            await asyncio.sleep(1)
    except BrokenPipeError:
        # Client disconnected, abort task
        pass
    except Exception as E:
        _LOGGER.error(E)
        _LOGGER.error(traceback.format_exc())
    finally:
        # Always unregister so broadcasts stop targeting a dead writer.
        _LOGGER.info('Client disconnected')
        del self.clients[client_addr]
async def handle_connection(reader: StreamReader, writer: StreamWriter) -> None:
    """Read one HTTP request, dispatch on its path, write the response, close.

    Bug fix: ``asyncio.wait_for`` raises ``asyncio.TimeoutError``, which on
    Python 3.8-3.10 is *not* ``concurrent.futures.TimeoutError`` — the old
    handler let the timeout escape and killed the connection task.  Both are
    now caught.  Also guards against an empty request, which used to crash
    the header split with ValueError.
    """
    data = ""
    while True:
        try:
            chunk = await asyncio.wait_for(reader.read(256), timeout=2)
            if chunk == b'':
                break
            data += chunk.decode('UTF-8')
        except (asyncio.TimeoutError, concurrent.futures.TimeoutError):
            break
    print(data)
    if '\r\n\r\n' not in data:
        # Empty or malformed request: nothing to parse, just close.
        writer.close()
        return
    data, body = data.split('\r\n\r\n', 1)
    request, headers = data.split('\r\n', 1)
    headers = parse_headers(headers)
    headers['method'], headers['path'], headers['protocol'] = request.split()
    headers['body'] = body
    print(headers)
    response = ""
    if headers['path'] == "/sendMessage":
        response = send_message(headers['body'])
    elif headers['path'] == "/getMessage":
        response = get_message()
    elif headers['path'] == "/findMessages":
        response = find_messages(headers['body'])
    writer.write(response.encode('UTF-8'))
    await writer.drain()
    writer.close()
async def multi_read(reader: asyncio.StreamReader, target: Target) -> Tuple[bool, Any]:
    """Read up to ``target.max_size`` bytes from *reader* in several chunks.

    Each chunk read waits at most 0.5s; reading stops at EOF, on timeout,
    or once the size budget is exhausted.

    :return: ``(True, data)`` on success; ``(False, error_template)`` when
        nothing was read or an unexpected error occurred.
    """
    count_size = target.max_size
    try:
        data = b''
        while True:
            try:
                future_reader = reader.read(count_size)
                _data = await asyncio.wait_for(future_reader, timeout=0.5)
                if _data:
                    data += _data
                    # Bug fix: subtract only the *new* chunk's length.
                    # Subtracting len(data) (the running total) shrank the
                    # budget too fast and truncated multi-chunk reads.
                    count_size = count_size - len(_data)
                else:
                    break
                if count_size <= 0:
                    break
            except Exception:
                break
        if len(data) == 0:
            return False, create_error_template(target, 'empty')
        else:
            return True, data
    except Exception as e:
        return False, create_error_template(target, str(e))
async def _handshake(
        initiator: 'HandshakeInitiator',
        reader: asyncio.StreamReader,
        writer: asyncio.StreamWriter,
        token: CancelToken,
) -> Tuple[bytes, bytes, sha3.keccak_256, sha3.keccak_256]:
    """See the handshake() function above.

    Factored into a helper so tests can build Peers from directly connected
    reader/writer pairs.
    """
    # Build and send our encrypted auth message around a fresh random nonce.
    nonce = keccak(os.urandom(HASH_LEN))
    auth_init = initiator.encrypt_auth_message(
        initiator.create_auth_message(nonce))
    writer.write(auth_init)
    # Wait (cancellably, with a deadline) for the remote's auth ack.
    auth_ack = await wait_with_token(
        reader.read(ENCRYPTED_AUTH_ACK_LEN),
        token=token,
        timeout=REPLY_TIMEOUT)
    ephemeral_pubkey, responder_nonce = initiator.decode_auth_ack_message(auth_ack)
    # Derive and return (aes_secret, mac_secret, egress_mac, ingress_mac).
    return initiator.derive_secrets(
        nonce,
        responder_nonce,
        ephemeral_pubkey,
        auth_init,
        auth_ack)
async def reader_cycle(reader_: StreamReader, client_name: str):
    """
    The function listens to incoming messages from the concrete client.
    Then the message is put to the Queue of receiver in address_storage

    Bug fix: ``asyncio.wait_for`` raises ``asyncio.TimeoutError``, which on
    Python < 3.11 is *not* the builtin ``TimeoutError`` this coroutine used
    to catch — so every idle period killed the task instead of yielding
    control back to the writer Task.

    :param reader_: stream receives incoming messages from the client
    :param client_name: the name of the client we receive message in the stream from
    :return: None
    """
    while True:
        try:
            # wait 0.5 seconds for incoming message
            message = await asyncio.wait_for(reader_.read(1024), timeout=0.5)
        except asyncio.TimeoutError:
            # otherwise raise error to return control to writer Task
            # it is the desired behaviour so just pass
            pass
        else:
            if message:
                message = message.decode('utf8')
                result = message.split("##")
                # check if incoming message has the correct "to##text" format
                if len(result) == 2:
                    to_whom, words = result
                    # build message to send to Queue of receiver
                    # '|' is used to separate messages from each other, since
                    # several senders can send messages to one client
                    # simultaneously and the client needs some mechanism to
                    # separate these messages from one another.
                    mes = f"{client_name}##{words}|"  # client_name is the sender
                    # now put the message in the right place in our pseudo db storage
                    if to_whom in address_storage.keys():
                        address_storage[to_whom].put(mes)
async def connection_loop(execute_rpc: Callable[[Any], Any],
                          reader: asyncio.StreamReader,
                          writer: asyncio.StreamWriter,
                          logger: logging.Logger,
                          cancel_token: CancelToken) -> None:
    """Serve JSON-RPC requests on one client connection until cancelled.

    Bytes are accumulated up to each ``}`` delimiter and parsed once the
    buffer forms valid JSON; malformed input is reported back to the client
    instead of closing the connection.
    """
    # TODO: we should look into using an io.StringIO here for more efficient
    # writing to the end of the string.
    raw_request = ''
    while True:
        request_bytes = b''
        try:
            request_bytes = await cancel_token.cancellable_wait(reader.readuntil(b'}'))
        except asyncio.LimitOverrunError as e:
            logger.info("Client request was too long. Erasing buffer and restarting...")
            # Drain the oversized bytes so the stream stays parseable.
            request_bytes = await cancel_token.cancellable_wait(reader.read(e.consumed))
            await cancel_token.cancellable_wait(write_error(
                writer,
                f"reached limit: {e.consumed} bytes, starting with '{request_bytes[:20]!r}'",
            ))
            continue
        raw_request += request_bytes.decode()
        bad_prefix, raw_request = strip_non_json_prefix(raw_request)
        if bad_prefix:
            logger.info("Client started request with non json data: %r", bad_prefix)
            await cancel_token.cancellable_wait(
                write_error(writer, 'Cannot parse json: ' + bad_prefix),
            )
        try:
            request = json.loads(raw_request)
        except json.JSONDecodeError:
            # invalid json request, keep reading data until a valid json is formed
            logger.debug("Invalid JSON, waiting for rest of message: %r", raw_request)
            continue
        # reset the buffer for the next message
        raw_request = ''
        if not request:
            logger.debug("Client sent empty request")
            await cancel_token.cancellable_wait(
                write_error(writer, 'Invalid Request: empty'),
            )
            continue
        try:
            result = await execute_rpc(request)
        except Exception as e:
            logger.exception("Unrecognized exception while executing RPC")
            await cancel_token.cancellable_wait(
                write_error(writer, "unknown failure: " + str(e)),
            )
        else:
            writer.write(result.encode())
            await cancel_token.cancellable_wait(writer.drain())
def read_next_packet(reader: asyncio.StreamReader, ensure_ack=False) -> pb.Packet:
    """Read one length-prefixed canal packet from *reader* (old-style coroutine).

    Wire format: a big-endian uint32 length, then that many payload bytes.

    Bug fix: ``StreamReader.read(n)`` may return *fewer* than ``n`` bytes;
    framed protocols need ``readexactly``, otherwise a short read corrupts
    both this packet and the framing of every packet after it.

    :param ensure_ack: when True, require an ACK packet and raise
        ``pb.CanalException`` on a different type or a non-zero error code.
    :raises asyncio.IncompleteReadError: if the stream ends mid-packet.
    """
    tpl = struct.Struct('>I')
    data = yield from reader.readexactly(tpl.size)
    data_len = tpl.unpack(data)[0]
    data = yield from reader.readexactly(data_len)
    pack = pb.Packet.create_from_bytes(data)
    if ensure_ack:
        if pack.type != pb.PacketType.ACK:
            raise pb.CanalException(
                'unexpected packet type when ack is expected')
        ack = pb.Ack.create_from_bytes(pack.body)
        if ack.error_code > 0:
            raise pb.CanalException(ack.error_message)
    return pack
def from_stream(cls, reader: asyncio.StreamReader, fixed_header: MQTTFixedHeader,
                variable_header: MQTTVariableHeader):
    """Build the payload object by reading the rest of the MQTT packet.

    The payload length is the fixed header's remaining-length minus the
    variable header's size.  Old-style (``yield from``) coroutine.
    """
    data = bytearray()
    data_length = fixed_header.remaining_length - variable_header.bytes_length
    length_read = 0
    while length_read < data_length:
        # NOTE(review): read() returns b'' at EOF, which would make this loop
        # spin forever on a truncated stream — confirm the transport layer
        # guarantees the connection stays open until the packet completes.
        buffer = yield from reader.read(data_length - length_read)
        data.extend(buffer)
        length_read = len(data)
    return cls(data)
async def wrapped_recv(
        reader: asyncio.StreamReader) -> Optional[bytes]:
    """Read up to 1024 bytes; with a timeout configured, return None on expiry.

    Closure over ``timeout`` from the enclosing scope.
    """
    if timeout is None:
        # No deadline configured: block until data (or EOF) arrives.
        return await reader.read(1024)
    # Clamp to a small positive value so wait_for never receives zero.
    deadline = max(timeout, 0.0001)
    try:
        return await asyncio.wait_for(reader.read(1024), deadline)
    except asyncio.TimeoutError:
        return None
async def single_read(reader: asyncio.StreamReader, target: Target,
                      custom_max_size: int = 0,
                      operation_description: str = '') -> Tuple[bool, Any]:
    """Perform one bounded read from *reader*.

    Reads at most ``custom_max_size`` bytes (or ``target.max_size`` when no
    override is given), waiting no longer than ``target.read_timeout``.

    :return: ``(True, data)`` on success; ``(False, error_template)`` when
        the read times out or fails for any reason.
    """
    limit = custom_max_size if custom_max_size else target.max_size
    pending_read = reader.read(limit)
    try:
        # asyncio.wait_for bounds how long we wait on the connection.
        payload = await asyncio.wait_for(pending_read,
                                         timeout=target.read_timeout)
    except Exception as err:
        return False, create_error_template(target, str(err),
                                            description=operation_description)
    return True, payload
async def single_read(reader: asyncio.StreamReader, target: Target) -> Tuple[bool, Any]:
    """Read up to ``target.max_size`` bytes within ``target.read_timeout`` seconds.

    :return: ``(True, data)`` on success; ``(False, error_template)`` on
        timeout or any other read failure.
    """
    try:
        # Bound the wait on the connection with asyncio.wait_for.
        payload = await asyncio.wait_for(reader.read(target.max_size),
                                         timeout=target.read_timeout)
        return True, payload
    except Exception as err:
        return False, create_error_template(target, str(err))
async def wrapped_recv(
        reader: asyncio.StreamReader) -> Optional[bytes]:
    """Read up to 1024 bytes; None on timeout or a transient TLS read error.

    Closure over ``timeout`` from the enclosing scope.
    """
    pending = reader.read(1024)
    if timeout is None:
        # No deadline configured — wait indefinitely.
        return await pending
    try:
        return await asyncio.wait_for(pending, timeout)
    except (asyncio.TimeoutError, ssl.SSLError, ssl.SSLWantReadError):
        return None
async def respond_to_nonce_with_signature(reader: asyncio.StreamReader,
                                          writer: asyncio.StreamWriter,
                                          private_key: int,
                                          timeout: 'seconds' = DEFAULT_TIMEOUT):
    """Prove key ownership: read a 32-byte nonce, reply with its signature.

    :param private_key: integer private key used to sign the raw nonce bytes
    :param timeout: seconds to wait for the nonce to arrive
    """
    # NOTE(review): read(32) may return fewer than 32 bytes if the peer's
    # send is fragmented; readexactly would be stricter — confirm peers
    # always send the nonce in one segment.
    noncebytes = await asyncio.wait_for(reader.read(32), timeout)
    nonce = int.from_bytes(noncebytes, byteorder='big')
    logging.debug('got nonce: {:064x}'.format(nonce))
    # hash=None — presumably the nonce bytes are signed as-is; verify
    # against util.sign_with_key's contract.
    signature = util.sign_with_key(noncebytes, private_key, hash=None)
    logging.debug('sending nonce signature rsv ({:064x}, {:064x}, {:02x})'.format(*signature))
    writer.write(util.signature_to_bytes(signature))
def handle_stdin(self, reader: asyncio.StreamReader):
    """ Handle messages from the agent.

    Old-style coroutine: reads length-prefixed (struct ``'I'``) msgpack
    frames from *reader* until EOF and dispatches each to
    ``handle_stdin_message``.

    Bug fixes: the final handler used a bare ``except:``, which also caught
    ``GeneratorExit``/``SystemExit`` and hard-killed the process on normal
    coroutine shutdown — it now catches only ``Exception``.  An EOF arriving
    mid-frame used to make the inner accumulation loops spin forever on
    empty reads; it now ends the coroutine.
    """
    try:
        while not reader.at_eof():
            # Accumulate exactly 4 bytes of frame length.
            buf = bytearray()
            while len(buf) != 4:
                chunk = yield from reader.read(4 - len(buf))
                if not chunk:
                    return  # EOF mid-frame
                buf += chunk
            length = struct.unpack('I', bytes(buf))[0]
            # Accumulate exactly `length` bytes of msgpack payload.
            buf = bytearray()
            while len(buf) != length:
                chunk = yield from reader.read(length - len(buf))
                if not chunk:
                    return  # EOF mid-frame
                buf += chunk
            message = msgpack.unpackb(bytes(buf), encoding="utf8", use_list=False)
            yield from self.handle_stdin_message(message)
    except asyncio.CancelledError:
        return
    except KeyboardInterrupt:
        return
    except Exception:
        self._logger.exception("Exception occured while reading stdin")
        os._exit(1)  # DIE!
async def __process_request(self, reader: StreamReader, writer: StreamWriter):
    """Sniff the first byte of a new connection and route it by protocol.

    ``0x16`` (a TLS handshake record) goes to the HTTPS path, a leading
    ASCII letter to the HTTP path, anything else to the unknown-protocol
    fallback.  Every failure mode aborts the transport.
    """
    try:
        dst_address = self.__get_dst_address(writer)
    except InvalidClientAddress as error:
        log.debug('Error %r occurred getting client address.', str(error))
        writer.transport.abort()
        return
    except Exception as error:
        log.exception(
            'Unexpected error %r occurred getting client information.',
            str(error))
        return
    log.debug('Got destination address %s.', dst_address)
    try:
        # One byte is enough to distinguish TLS from plaintext protocols.
        first_chunk = await wait_for(reader.read(n=1), IO_TIMEOUT)
        log.debug('First chunk %r.', first_chunk)
        if first_chunk == b'\x16':
            await self.__process_https(dst_address, first_chunk, reader, writer)
        elif first_chunk.isalpha():
            await self.__process_http(first_chunk, reader, writer)
        else:
            log.debug('Unknown protocol, first chunk %r.', first_chunk)
            await self.__process_unknown(dst_address, first_chunk, reader, writer)
    except CancelledError:
        log.debug('Task has been cancelled.')
        writer.transport.abort()
        # Re-raise so cancellation propagates to the task machinery.
        raise
    except (AsyncTimeoutError, TimeoutError):
        log.debug('Timeout for request is over.')
        writer.transport.abort()
    except (BrokenPipeError, ConnectionResetError, IncompleteReadError):
        log.debug('Connection has been closed unexpectedly.')
        writer.transport.abort()
    except (OSError, gaierror) as error:
        if error.errno in (EAFNOSUPPORT, EPFNOSUPPORT):
            log.debug('Unsupported protocol family.')
        elif error.errno in (EAI_AGAIN, EHOSTDOWN, EHOSTUNREACH, ENETUNREACH):
            log.debug('Network or host is temporary unavailable.')
        else:
            log.exception(
                'Unexpected OS error occurred processing request.')
        writer.transport.abort()
    except Exception:
        log.exception('Unexpected error occurred processing request.')
        writer.transport.abort()
    finally:
        writer.close()
def read_fdms_packet(reader: asyncio.StreamReader) -> bytes:
    """Read one FDMS packet (old-style coroutine).

    Strips the high (parity) bit from every byte.  A packet is either a
    single control byte (ACK/NAK/EOT/…) or, when it starts with STX,
    everything through ETX plus the LRC check byte that follows it.
    """
    buffer = bytearray()
    ba = yield from reader.read(1)
    # NOTE(review): read(1) returns b'' at EOF, which would raise IndexError
    # here — confirm callers treat that as a dropped connection.
    b = ba[0]
    if b > 0x7f:
        b &= 0x7f
    buffer.append(b)
    if b == STX:
        got_etx = False
        while True:
            ba = yield from reader.read(1)
            b = ba[0]
            if b > 0x7f:
                b &= 0x7f
            buffer.append(b)
            # Stop one byte *after* ETX: that trailing byte is the LRC.
            if got_etx:
                break
            else:
                got_etx = (b == ETX)
    return buffer
async def _proxy_connection( self, in_read: Task, out_read: Task, in_reader: StreamReader, out_writer: StreamWriter) -> Optional[asyncio.Task]: data: bytes = in_read.result() if not data: out_read.cancel() return out_writer.write(data) await out_writer.drain() return asyncio.create_task(in_reader.read(512))
async def forward_stream(reader: StreamReader, writer: StreamWriter, event: asyncio.Event):
    """Copy bytes from *reader* to *writer* until EOF or until *event* is set."""
    buffer_size, timeout = 1024, 1
    while not event.is_set():
        try:
            chunk = await asyncio.wait_for(reader.read(buffer_size), timeout)
        except asyncio.TimeoutError:
            # Idle: re-check the stop event and keep polling.
            continue
        if not chunk:
            # EOF: signal the peer task and stop forwarding.
            event.set()
            break
        writer.write(chunk)
        # TODO: The case of writer is the closed stream
        await writer.drain()
async def pipe(self, reader: StreamReader):
    """Stream *reader* into ``_write_buffer`` one byte at a time.

    The accumulated bytes are flushed when 1000 have gathered, whenever the
    reader goes quiet for 0.1s, and finally at EOF.
    """
    pending = []
    while True:
        try:
            byte = await asyncio.wait_for(reader.read(1), 0.1)
        except asyncio.TimeoutError:
            # Quiet period: flush whatever has accumulated so far.
            self._write_buffer(pending)
            pending.clear()
            continue
        if not byte:
            # Stream ended: write out anything still buffered and stop.
            self._write_buffer(pending)
            break
        pending.append(byte)
        if len(pending) >= 1000:
            self._write_buffer(pending)
            pending.clear()
async def handle_connection(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    """Serve one inbound connection: DKG mutual-auth protocol or JSON-RPC over HTTP.

    The first 4 bytes select the protocol: the literal ``b'DKG '`` starts
    the nonce/signature mutual authentication; anything else is treated as
    the start of an HTTP request line.  (Closure over ``timeout``, ``node``,
    ``accepted_addresses`` etc. from the enclosing scope.)
    """
    try:
        cliipaddr = writer.get_extra_info('peername')
        ownipaddr = writer.get_extra_info('sockname')
        logging.debug('{} <-- {}'.format(ownipaddr, cliipaddr))
        protocol_indicator = await asyncio.wait_for(reader.read(4), timeout)
        if protocol_indicator == b'DKG ':
            # Mutual auth: sign their nonce, then challenge and verify theirs.
            await respond_to_nonce_with_signature(reader, writer, node.private_key, timeout)
            cliethaddr = await determine_address_via_nonce(reader, writer, timeout)
            if cliethaddr is None:
                logging.debug('(s) could not verify client signature; closing connection')
                return
            if cliethaddr not in accepted_addresses:
                logging.debug('(s) client address {:40x} not accepted'.format(cliethaddr))
                return
            await establish_channel(cliethaddr, reader, writer, node)
        elif len(protocol_indicator) > 0:
            # HTTP: rejoin the 4 sniffed bytes with the rest of the request line.
            req = HTTPRequest(protocol_indicator + await asyncio.wait_for(reader.readline(), timeout), reader)
            contentlen = req.headers.get('Content-Length')
            if contentlen is not None:
                contentlen = int(contentlen)
                req.body = await reader.read(contentlen)
            res = JSONRPCResponseManager.handle(req.body, default_dispatcher)
            res_data = await get_response_data(res, timeout)
            db.Session.remove()
            if res_data is None:
                writer.write(b'HTTP/1.1 204 No Content\r\n\r\n')
            else:
                res_str = json.dumps(res_data, indent=2, sort_keys=True).encode('UTF-8')
                writer.write(b'HTTP/1.1 200 OK\r\n'
                             b'Content-Type: application/json; charset=UTF-8\r\n'
                             b'Content-Length: ')
                # +1 accounts for the trailing b'\n' written after the body.
                writer.write(str(len(res_str) + 1).encode('UTF-8'))
                writer.write(b'\r\n\r\n')
                writer.write(res_str)
                writer.write(b'\n')
    finally:
        writer.close()
class HttpBodyReader():
    """Asynchronous reader for an HTTP request body.

    Wraps an asyncio ``StreamReader`` fed by the connection transport and
    transparently handles the HTTP/1.1 ``Expect: 100-continue`` dance: the
    first body read sends the interim 100 response when the client asked
    for one.
    """
    # The interim "100 Continue" response already sent (str) or not yet (None).
    _expect_sent = None
    _waiting = None

    def __init__(self, headers, parser, transport, **kw):
        self.headers = headers
        self.parser = parser
        self.reader = StreamReader(**kw)
        self.reader.set_transport(transport)
        # Expose the underlying reader's feed methods directly.
        self.feed_data = self.reader.feed_data
        self.feed_eof = self.reader.feed_eof

    def waiting_expect(self):
        '''``True`` when the client is waiting for 100 Continue.
        '''
        if self._expect_sent is None:
            if (not self.reader.at_eof() and
                    self.headers.has('expect', '100-continue')):
                return True
            # Mark as "nothing to send" so the check short-circuits next time.
            self._expect_sent = ''
        return False

    def can_continue(self):
        """Send the interim 100 response the first time the body is read."""
        if self.waiting_expect():
            if self.parser.get_version() < (1, 1):
                # Expect/100-continue is only defined for HTTP/1.1 clients.
                raise HttpException(status=417)
            else:
                msg = '%s 100 Continue\r\n\r\n' % http_protocol(self.parser)
                self._expect_sent = msg
                self.reader._transport.write(msg.encode(DEFAULT_CHARSET))

    def fail(self):
        """Refuse the request if the client is still waiting for 100 Continue."""
        if self.waiting_expect():
            raise HttpException(status=417)

    def read(self, n=-1):
        self.can_continue()
        return self.reader.read(n=n)

    def readline(self):
        self.can_continue()
        return self.reader.readline()

    def readexactly(self, n):
        self.can_continue()
        return self.reader.readexactly(n)
async def __process_https(self, dst_address: DstAddress, first_chunk: bytes,
                          down_reader: StreamReader, down_writer: StreamWriter):
    """Proxy a TLS connection, routing by SNI host when it can be parsed.

    Reads the rest of the ClientHello and extracts the SNI.  Hosts matching
    the redirect rules go through the SOCKS server, other hosts connect
    directly.  When the ClientHello cannot be parsed, falls back to blindly
    pumping bytes to the original destination address.
    """
    request = await wait_for(down_reader.read(n=CHUNK_SIZE), IO_TIMEOUT)

    async def fallback():
        # Blind tunnel to the original destination (no SNI routing).
        up_reader, up_writer = await wait_for(
            open_connection(**dst_address.as_dict()), CONNECT_TIMEOUT)
        await self.__pump_traffic(up_reader, up_writer, down_reader,
                                  down_writer, first_chunk, request)

    try:
        host = await self.__get_server_name_indication(first_chunk + request)
    except CancelledError:
        raise
    except TlsProtocolError as error:
        log.debug(
            'TLS error %r occurred connecting to %s, falling back to direct pumping traffic.',
            str(error), dst_address)
        await fallback()
        return
    except Exception:
        log.exception(
            'Unexpected error occurred in TLS request to %s, falling back to pumping traffic.',
            dst_address)
        await fallback()
        return
    log.debug('Got HTTPS host %r.', host)
    if self.__rules.redirect_to_socks(host):
        log.debug('Redirecting HTTPS host %r@%d to SOCKS server.',
                  host, dst_address.port)
        up_reader, up_writer = await self.__connect_socks_server(
            host, dst_address.port)
    else:
        log.debug('Processing HTTPS host %r as is (connecting to %s:%d).',
                  host, host, dst_address.port)
        up_reader, up_writer = await wait_for(
            open_connection(host=host, port=dst_address.port), CONNECT_TIMEOUT)
    await self.__pump_traffic(up_reader, up_writer, down_reader, down_writer,
                              first_chunk, request)
async def process_reader(reader: asyncio.StreamReader):
    """Dispatch one RESP-style message by its leading type byte.

    :raises RadishConnectionError: on read timeout or an empty request.
    :raises RadishBadRequest: when the first byte is not a known type marker.
    """
    try:
        marker = await asyncio.wait_for(reader.read(1), CLIENT_CONNECTION_TIMEOUT)
    except asyncio.TimeoutError:
        raise RadishConnectionError("Timeout error")
    if not marker:
        raise RadishConnectionError("Empty request")
    handlers = {
        b"-": _process_error,
        b":": _process_integer,
        b"$": _process_byte_string,
        b"+": _process_utf_string,
        b"*": _process_array,
    }
    try:
        return await handlers[marker](reader)
    except KeyError:
        raise RadishBadRequest("Bad first byte")
def QueueWaiter(reader: asyncio.StreamReader, writer: asyncio.StreamWriter,
                queue_name: str):
    """
    A coroutine for waiting upon new items to be placed into the Queue.

    Reads msgpack "push" frames from the client, validates them, assigns a
    monotonically increasing message number, puts ``[msgnum, data]`` on the
    named queue and acknowledges each push back to the client.
    """
    client = writer.get_extra_info("peername")
    sclient = ':'.join(str(_) for _ in client)
    while True:
        try:
            data = yield from reader.read(65536)
        except ConnectionResetError:
            rlogger.info("Client {} closed connection".format(sclient))
            return
        if not data:
            rlogger.info("Client {} closed connection".format(sclient))
            return
        # Unpack
        try:
            sub_data = msgpack.unpackb(data, encoding="utf-8")
        except (msgpack.UnpackException, ValueError) as e:
            rlogger.error("Recieved non-msgpack push from {}".format(sclient))
            continue
        rlogger.debug("Recieved data from client {}: {}".format(sclient, sub_data))
        assert isinstance(sub_data, dict)
        # Only action 0 ("push") is valid on this channel.
        action = sub_data.get("action", -1)
        if not action == 0:
            rlogger.error("Recieved non-push action on push channel from client {} (action: {})"
                          .format(sclient, action))
            continue
        # Get data to place
        data = sub_data.get("data", None)
        if not data:
            rlogger.error("Recieved no data on push channel from client {}".format(sclient))
            continue
        # Increment and get message number
        queues[queue_name][0] += 1
        msgnum = queues[queue_name][0]
        queue = queues[queue_name][1]
        # Put it on the queue
        assert isinstance(queue, asyncio.Queue)
        yield from queue.put([msgnum, data])
        # Respond to the client
        response = {"msgnum": msgnum, "status": 0}
        rlogger.debug("Sending response with message number {}".format(msgnum))
        msgpack.pack(response, writer)
def connected_cb(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    """
    A callback for connected clients.

    Reads the initial msgpack subscription message, creates the requested
    queue on first use, then spawns a QueueWaiter (push) or QueueSender
    (pull) task for the rest of the session.
    """
    client = writer.get_extra_info("peername")
    sclient = ':'.join(str(_) for _ in client)
    logger.info("Recieved connection from {}:{}".format(*client))
    # Read a subscription message.
    try:
        sub = yield from reader.read(65536)
    except ConnectionResetError:
        rlogger.info("Client {} closed connection".format(sclient))
        return
    if not sub:
        logger.error("Client {} terminated connection abnormally".format(sclient))
        return
    try:
        sub_data = msgpack.unpackb(sub)
    except (msgpack.UnpackException, ValueError) as e:
        logger.error("Recieved unknown subscription message from {}:{}".format(*client))
        yield from writer.drain()
        writer.close()
        return
    # Get the data from the subscription message.
    # Keys are raw bytes because unpackb() is called without an encoding.
    if not b'queue' in sub_data:
        logger.error("Recieved null queue from {}".format(sclient))
        yield from writer.drain()
        writer.close()
        return
    queue_to_sub = sub_data[b"queue"]
    action = sub_data.get(b"action", 0)
    queue_created = False
    if queue_to_sub not in queues:
        # Each queue record is [message counter, asyncio.Queue].
        queues[queue_to_sub] = [0, asyncio.Queue()]
        logger.debug("Created queue {}".format(queue_to_sub))
        queue_created = True
    logger.debug("Client {} subscribed to queue {} in mode {} ({})".format(
        sclient, queue_to_sub, action, "push" if not action else "pull"))
    if action == 0:
        loop.create_task(QueueWaiter(reader, writer, queue_to_sub))
    else:
        loop.create_task(QueueSender(reader, writer, queue_to_sub))
    msgpack.pack({"created": queue_created}, writer)
async def proxy(r: asyncio.StreamReader, w: asyncio.StreamWriter):
    """Copy bytes from *r* to *w* until EOF or the shared ``broken`` event fires.

    Closure over ``self.loop`` and the ``broken`` event from the enclosing
    scope.
    """
    while True:
        # Race the next read against the shared "broken" signal.
        reader = self.loop.create_task(r.read(65536))
        waiter = self.loop.create_task(broken.wait())
        await asyncio.wait(
            [reader, waiter],
            return_when=asyncio.FIRST_COMPLETED,
        )
        if waiter.done():
            # Link declared broken elsewhere: stop reading and close.
            reader.cancel()
            w.close()
            break
        else:
            waiter.cancel()
            data = await reader
            if not data:
                # EOF from the peer.
                w.close()
                break
            w.write(data)
async def __process_output(_out: asyncio.StreamReader, _output_callback: Callable):
    """Forward subprocess output to *_output_callback* line by line.

    Bytes are read one at a time; a "line" is emitted on newline, on EOF,
    or when output pauses for 0.1s (so an incomplete prompt line is still
    delivered).  Completed lines are handed back to ``parent_loop`` via
    ``output_callback_queue``; a ``None`` marker signals end of stream.
    """
    # Runs within proc_loop
    try:
        while True:
            buf = b''
            line = None
            while line is None:
                try:
                    # Handle an incomplete line output such as when
                    # a command prompt leaves the input cursor at the end.
                    c = await asyncio.wait_for(_out.read(1), 0.1)
                except asyncio.futures.TimeoutError:
                    # Quiet period: flush whatever partial line we have.
                    if buf:
                        line = buf
                # except Exception as ex:
                #     print("Exception", type(ex), ex, file=sys.stderr, flush=True)
                #     pass
                else:
                    buf += c
                    if c == b'\n':
                        line = buf
                    # Handle EOF
                    elif c == b'':
                        line = buf
                        if line:
                            # First send whatever line we have left
                            part = partial(_output_callback, line)
                            asyncio.run_coroutine_threadsafe(
                                output_callback_queue.put(part), parent_loop)
                        # Then send a marker saying we're done
                        part = partial(_output_callback, None)
                        asyncio.run_coroutine_threadsafe(
                            output_callback_queue.put(part), parent_loop)
                        return
            if line:
                part = partial(_output_callback, line)
                asyncio.run_coroutine_threadsafe(
                    output_callback_queue.put(part), parent_loop)
            else:
                break
    except Exception as ex:
        print("Error in __process_output:", ex.__class__.__name__, ex,
              file=sys.stderr, flush=True)
        traceback.print_tb(sys.exc_info()[2])
async def _handle_client(self, client_reader: StreamReader, client_writer: StreamWriter):
    """Serve one sensor client: parse its header, then stream measurement frames.

    Bug fix: the read loop used a bare ``except:`` labelled "Timeout", which
    swallowed *every* exception — including task cancellation — and logged
    it as a timeout.  It now catches only ``asyncio.TimeoutError``; other
    errors propagate to the outer handler, which logs them and ends the
    session.
    """
    peer = client_writer.get_extra_info('peername')
    rospy.loginfo(f"Client at {peer}")
    # 34-byte fixed header carrying the public key and the sensor model.
    data = await asyncio.wait_for(client_reader.readexactly(34), timeout=30)
    public_key, model = _parse_header(data)
    self.sessions[peer] = {
        "public": public_key,
        "model": model,
        "buffer": bytearray(),
        "measurement": Measurement()
    }
    rospy.loginfo(f"Welcome to the party: ({public_key},{model})")
    try:
        while True:
            try:
                # Timeout is kinda big but it covers the maximum timeout for
                # SDS011 sensor and gets rid of dropped connections.
                data = await asyncio.wait_for(client_reader.read(128), timeout=2000.0)
                self.sessions[peer]["buffer"].extend(data)
            except asyncio.TimeoutError:
                rospy.logwarn("Timeout")
                return
            if data:
                status, measurement = self._parse_frame(peer)
                if status:
                    self.sessions[peer]["measurement"] = measurement
                    rospy.logdebug(measurement)
            else:
                rospy.logwarn("Received no data")
                # exit echo loop and disconnect
                return
    except Exception as e:
        rospy.logwarn(e)
async def determine_address_via_nonce(reader: asyncio.StreamReader,
                                      writer: asyncio.StreamWriter,
                                      timeout: 'seconds' = DEFAULT_TIMEOUT) -> int:
    """Challenge the peer with a random nonce and recover its address.

    Sends 32 random bytes, reads back a 65-byte (r, s, v) signature and
    returns the address recovered from it, or ``None`` when recovery fails.
    """
    # TODO: Make a nonce registry for extra security
    nonce = util.random.randrange(2**256)
    noncebytes = nonce.to_bytes(32, byteorder='big')
    logging.debug('sending nonce {:064x}'.format(nonce))
    writer.write(noncebytes)
    # Expect the 65-byte r||s||v signature over our nonce.
    rsv_bytes = await asyncio.wait_for(reader.read(65), timeout)
    signature = util.bytes_to_signature(rsv_bytes)
    logging.debug('received signature rsv ({:064x}, {:064x}, {:02x})'.format(*signature))
    try:
        address = util.address_from_message_and_signature(noncebytes, signature, hash=None)
    except ValueError as err:
        logging.debug('could not recover address: {}'.format(err))
        return None
    logging.debug('got address: {:040x}'.format(address))
    return address
async def _handshake_shared_secret(reader: asyncio.StreamReader,
                                   writer: asyncio.StreamWriter,
                                   endpoint: EndPoint, keys, token):
    """Run the outgoing side of the shared-secret handshake.

    Builds the signed, encrypted handshake payload, sends it, then waits
    for the remote's 169-byte reply.

    Bug fix: ``token.cancellable_wait(...)`` produced a coroutine that was
    never awaited — the reply was never actually read, and the ``at_eof()``
    check below raced the network.  It is now awaited.

    :raises ConnectionError: if the connection is closed before or after
        the exchange.
    """
    ephemeral_keys = ecies.generate_random()
    shared_secret = ecies.make_shared_secret(keys.private_bytes, endpoint.pubkey)
    random = os.urandom(16)
    random_secret = sha3_256(random).digest()
    sequence = pad32(int_to_big32(0))
    exchange_secret = sha3_256(shared_secret + random_secret + sequence).digest()
    sig = ephemeral_keys.sign(exchange_secret)
    encode_sig = encode_signature(sig)
    # --- temporary
    version = pad16(int_to_big16(1))
    handshake_suffix = pad16(int_to_big16(0))
    # ---------------------------------------------
    payload = encode_payload(sequence, encode_sig, keys.public_bytes,
                             random_secret, version, handshake_suffix)
    cipher = ecies.encrypt(ephemeral_keys.public_bytes, payload)
    if writer.is_closing():
        raise ConnectionError(
            'during open_connection handling, connection closed')
    writer.write(cipher)
    await writer.drain()
    await token.cancellable_wait(reader.read(169), timeout=5)
    if reader.at_eof():
        raise ConnectionError('disconnected')
def fdms_session(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    """Drive one FDMS terminal session (old-style coroutine).

    Protocol flow: send ENQ, collect request packets (one "online"
    transaction, optional add-ons, any number of "offline" ones), process
    them, send the response and wait for ACK/NAK, optionally polling again
    (wcc 'B'/'C') before closing the session with EOT.
    """
    online = None
    ''':type: (FdmsHeader, FdmsTransaction)'''
    add_on = None
    ''':type: (FdmsHeader, FdmsTransaction)'''
    offline = list()
    writer.write(bytes((ENQ,)))
    yield from writer.drain()
    while True:
        # Get Request
        attempt = 0
        while True:
            try:
                if attempt > 4:
                    return
                request = yield from asyncio.wait_for(read_fdms_packet(reader), timeout=15.0)
                if len(request) == 0:
                    return
                control_byte = request[0]
                if control_byte == STX:
                    # Verify the longitudinal redundancy check byte.
                    lrs = functools.reduce(lambda x, y: x ^ int(y), request[2:-1], int(request[1]))
                    if lrs != request[-1]:
                        raise ValueError('LRS sum')
                    pos, header = parse_header(request)
                    txn = header.create_txn()
                    txn.parse(request[pos:-2])
                    if header.txn_type == FdmsTransactionType.Online.value:
                        if online is None:
                            online = (header, txn)
                        else:
                            add_on = (header, txn)
                    else:
                        offline.append((header, txn))
                    if header.protocol_type == '2':
                        break
                    # Respond with ACK
                    attempt = 0
                    writer.write(bytes((ACK,)))
                elif control_byte == EOT:
                    break  # Close session
            except asyncio.TimeoutError:
                return
            # Respond with NAK
            except Exception as e:
                logging.getLogger(LOG_NAME).debug('Request error: %s', str(e))
                attempt += 1
                writer.write(bytes((NAK,)))
            yield from writer.drain()
        if online is None:
            return
        # Process Transactions & Send Response
        for txn in offline:
            rs = process_txn(txn)
        offline.clear()
        if add_on is not None:
            process_add_on_txn(online, add_on)
            add_on = None
        rs = process_txn(online)
        # Send Response
        rs_bytes = rs.response()
        if rs.action_code == FdmsActionCode.HostSpecificPoll or rs.action_code == FdmsActionCode.RevisionInquiry:
            writer.write(rs_bytes)
            yield from writer.drain()
        else:
            # Retry the response until the terminal ACKs it (max 4 attempts).
            attempt = 0
            while True:
                if attempt >= 4:
                    return
                writer.write(rs_bytes)
                yield from writer.drain()
                control_byte = 0
                try:
                    while True:
                        rs_head = yield from asyncio.wait_for(reader.read(1), timeout=4.0)
                        if len(rs_head) == 0:
                            return
                        # Strip the parity bit before comparing control bytes.
                        control_byte = rs_head[0] & 0x7f
                        if control_byte == ACK:
                            break
                        elif control_byte == NAK:
                            break  # Close session
                except asyncio.TimeoutError as e:
                    return
                if control_byte == ACK:
                    break
                else:
                    attempt += 1
            if online[0].wcc in {'B', 'C'}:
                # Send ENQ
                writer.write(bytes((ENQ,)))
                yield from writer.drain()
                continue
            else:
                break
    writer.write(bytes((EOT,)))
    yield from writer.drain()
    if writer.can_write_eof():
        writer.write_eof()
async def _receive_handshake(self, reader: asyncio.StreamReader,
                             writer: asyncio.StreamWriter) -> None:
    """Handle an incoming RLPx connection: decode auth (plain or EIP-8),
    reply with an auth ack, derive session secrets and register the Peer.
    """
    msg = await self.wait(reader.read(ENCRYPTED_AUTH_MSG_LEN),
                          timeout=REPLY_TIMEOUT)
    ip, socket, *_ = writer.get_extra_info("peername")
    remote_address = Address(ip, socket)
    self.logger.debug("Receiving handshake from %s", remote_address)
    got_eip8 = False
    try:
        ephem_pubkey, initiator_nonce, initiator_pubkey = decode_authentication(
            msg, self.privkey)
    except DecryptionError:
        # Try to decode as EIP8
        got_eip8 = True
        # EIP-8 messages carry a 2-byte size prefix; fetch the rest.
        msg_size = big_endian_to_int(msg[:2])
        remaining_bytes = msg_size - ENCRYPTED_AUTH_MSG_LEN + 2
        msg += await self.wait(reader.read(remaining_bytes),
                               timeout=REPLY_TIMEOUT)
        try:
            ephem_pubkey, initiator_nonce, initiator_pubkey = decode_authentication(
                msg, self.privkey)
        except DecryptionError as e:
            # Undecryptable either way: drop the connection silently.
            self.logger.debug("Failed to decrypt handshake: %s", e)
            return
    initiator_remote = Node(initiator_pubkey, remote_address)
    responder = HandshakeResponder(initiator_remote, self.privkey, got_eip8,
                                   self.cancel_token)
    responder_nonce = secrets.token_bytes(HASH_LEN)
    auth_ack_msg = responder.create_auth_ack_message(responder_nonce)
    auth_ack_ciphertext = responder.encrypt_auth_ack_message(auth_ack_msg)
    # Use the `writer` to send the reply to the remote
    writer.write(auth_ack_ciphertext)
    await self.wait(writer.drain())
    # Call `HandshakeResponder.derive_shared_secrets()` and use return values to create `Peer`
    aes_secret, mac_secret, egress_mac, ingress_mac = responder.derive_secrets(
        initiator_nonce=initiator_nonce,
        responder_nonce=responder_nonce,
        remote_ephemeral_pubkey=ephem_pubkey,
        auth_init_ciphertext=msg,
        auth_ack_ciphertext=auth_ack_ciphertext)
    # Create and register peer in peer_pool
    peer = self.peer_class(
        remote=initiator_remote,
        privkey=self.privkey,
        reader=reader,
        writer=writer,
        aes_secret=aes_secret,
        mac_secret=mac_secret,
        egress_mac=egress_mac,
        ingress_mac=ingress_mac,
        headerdb=self.headerdb,
        network_id=self.network_id,
        inbound=True,
    )
    if self.peer_pool.is_full:
        peer.disconnect(DisconnectReason.too_many_peers)
    else:
        # We use self.wait() here as a workaround for
        # https://github.com/ethereum/py-evm/issues/670.
        await self.wait(self.do_handshake(peer))
async def _receive_handshake(
        self, reader: asyncio.StreamReader,
        writer: asyncio.StreamWriter) -> None:
    """Handle an incoming RLPx connection: decode auth (plain or EIP-8),
    reply with an auth ack, derive session secrets and start the Peer
    handshake.
    """
    msg = await self.wait_first(
        reader.read(ENCRYPTED_AUTH_MSG_LEN), timeout=REPLY_TIMEOUT)
    ip, socket, *_ = writer.get_extra_info("peername")
    remote_address = Address(ip, socket)
    self.logger.debug("Receiving handshake from %s", remote_address)
    try:
        ephem_pubkey, initiator_nonce, initiator_pubkey = decode_authentication(
            msg, self.privkey)
    except DecryptionError:
        # Try to decode as EIP8
        # EIP-8 messages carry a 2-byte size prefix; fetch the rest.
        msg_size = big_endian_to_int(msg[:2])
        remaining_bytes = msg_size - ENCRYPTED_AUTH_MSG_LEN + 2
        msg += await self.wait_first(
            reader.read(remaining_bytes), timeout=REPLY_TIMEOUT)
        try:
            ephem_pubkey, initiator_nonce, initiator_pubkey = decode_authentication(
                msg, self.privkey)
        except DecryptionError as e:
            # Undecryptable either way: drop the connection silently.
            self.logger.debug("Failed to decrypt handshake: %s", e)
            return
    # Create `HandshakeResponder(remote: kademlia.Node, privkey: datatypes.PrivateKey)` instance
    initiator_remote = Node(initiator_pubkey, remote_address)
    responder = HandshakeResponder(initiator_remote, self.privkey, self.cancel_token)
    # Call `HandshakeResponder.create_auth_ack_message(nonce: bytes)` to create the reply
    responder_nonce = secrets.token_bytes(HASH_LEN)
    auth_ack_msg = responder.create_auth_ack_message(nonce=responder_nonce)
    auth_ack_ciphertext = responder.encrypt_auth_ack_message(auth_ack_msg)
    # Use the `writer` to send the reply to the remote
    writer.write(auth_ack_ciphertext)
    await writer.drain()
    # Call `HandshakeResponder.derive_shared_secrets()` and use return values to create `Peer`
    aes_secret, mac_secret, egress_mac, ingress_mac = responder.derive_secrets(
        initiator_nonce=initiator_nonce,
        responder_nonce=responder_nonce,
        remote_ephemeral_pubkey=ephem_pubkey,
        auth_init_ciphertext=msg,
        auth_ack_ciphertext=auth_ack_ciphertext
    )
    # Create and register peer in peer_pool
    peer = self.peer_class(
        remote=initiator_remote,
        privkey=self.privkey,
        reader=reader,
        writer=writer,
        aes_secret=aes_secret,
        mac_secret=mac_secret,
        egress_mac=egress_mac,
        ingress_mac=ingress_mac,
        headerdb=self.headerdb,
        network_id=self.network_id,
        inbound=True,
    )
    await self.do_handshake(peer)