async def close_con(stream):  # Close a connection to a client
    await stream.drain()

    stream.close()
    await stream.wait_closed()

    try:
        del states[stream.remote]
    except KeyError:  # a state may never have been recorded for this client
        pass

    logger.debug(f"Disconnected nicely from {stream.remote[0]}:{stream.remote[1]}.")

    return False, stream
def update_repo(git_dir, git_url, root, plugin_name, do_clone=False):
    if do_clone:
        try:
            # Move the old plugin directory out of the way before cloning fresh
            os.rename(root, f"{root}_backup_{int(time.time())}")
            logger.debug(f"Renamed {root} for clone.")
        except FileNotFoundError:
            pass

        logger.debug(f"Cloning from {git_url}...")
        git_dir.clone(git_url)
        logger.info(f"Updated {plugin_name}!")
        return

    # No .git directory means the plugin was never cloned, so clone it now
    if not os.path.isdir(os.path.join(root, ".git")):
        return update_repo(git_dir, git_url, root, plugin_name, True)

    try:
        logger.debug(f"Pulling from {git_url}...")
        res = git.Git(root).pull()  # pull latest from remote
    except BaseException:
        logger.debug(f"Failed to pull from {git_url}, attempting to clone...")
        return update_repo(git_dir, git_url, root, plugin_name, True)

    if res == "Already up to date.":
        logger.info(f"No updates found for {plugin_name}.")
    else:
        logger.info(f"Updated {plugin_name}!")
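# Hedged usage sketch: the names, paths, and URL below are illustrative assumptions,
# not the project's actual plugin layout. update_repo() appears to expect a git.Git
# instance bound to the directory clones should land in, plus the plugin's own root
# directory and display name.
def _example_update_one_plugin():
    plugins_dir = "plugins"  # assumed location of plugin checkouts
    plugin_root = os.path.join(plugins_dir, "ExamplePlugin")
    update_repo(git.Git(plugins_dir), "https://github.com/example/ExamplePlugin.git", plugin_root, "ExamplePlugin")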
async def handle_con(reader, writer):  # Handle a connection from a client
    stream = Stream(reader, writer)

    logger.debug(f"Connection received from {stream.remote[0]}:{stream.remote[1]}.")

    continue_ = True

    while continue_:
        try:
            continue_, stream = await handle_packet(stream)
        except BaseException as e:
            logger.error(logger.f_traceback(e))
            break

    await close_con(stream)
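# Hedged sketch: one way handle_con() could be wired to an asyncio TCP server. The
# host, port, and function name here are assumptions for illustration; the project's
# actual server startup code may differ.
async def _example_serve(host: str = "0.0.0.0", port: int = 25565):
    server = await asyncio.start_server(handle_con, host, port)

    async with server:
        await server.serve_forever()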
def pack_packet(cls, packet: Packet, comp_thresh: int = -1) -> bytes:
    """Packs a Packet object into bytes."""

    logger.debug(  # We log this here for consistency and to avoid implementing it everywhere
        f"OUT: state:unknown | id:0x{packet.id:02X} | packet:{type(packet).__name__}"
    )

    data = cls.pack_varint(packet.id) + packet.encode()

    if comp_thresh >= 1:
        if len(data) >= comp_thresh:
            data = cls.pack_varint(len(data)) + zlib.compress(data)
        else:
            data = cls.pack_varint(0) + data

    return cls.pack_varint(len(data)) + data
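# Hedged sketch of VarInt packing, included because pack_packet() relies on
# cls.pack_varint() without showing it. This standalone version mirrors the
# 7-bit-groups-with-continuation-bit format that handle_packet() decodes below;
# the project's real Buffer.pack_varint() may differ in signature and validation.
def _example_pack_varint(num: int) -> bytes:
    if num < 0:
        num += 1 << 32  # encode as unsigned 32-bit two's complement

    out = b""

    while True:
        byte = num & 0x7F  # low 7 bits of the remaining value
        num >>= 7
        out += struct.pack("B", byte | (0x80 if num > 0 else 0))  # high bit flags "more bytes follow"

        if num == 0:
            return out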
async def handle_packet(stream: Stream):
    packet_length = 0

    # Basically an implementation of Buffer.unpack_varint(),
    # except designed to read directly from a StreamReader
    # and also to handle legacy server list ping packets
    for i in range(5):
        try:
            read = await asyncio.wait_for(stream.read(1), 5)
        except asyncio.TimeoutError:
            logger.debug("Closing due to timeout on read...")
            return False, stream

        if read == b"":
            logger.debug("Closing due to invalid read...")
            return False, stream

        if i == 0 and read == b"\xFE":
            logger.warn("Legacy ping attempted, legacy ping is not supported.")
            return False, stream

        b = struct.unpack("B", read)[0]
        packet_length |= (b & 0x7F) << 7 * i

        if not b & 0x80:
            break

    if packet_length & (1 << 31):
        packet_length -= 1 << 32

    buf = Buffer(await stream.read(packet_length))

    state = STATES.encode(states.get(stream.remote, 0))
    packet = buf.unpack_packet(state, PACKET_MAP)

    logger.debug(f"IN : state:{state:<11} | id:0x{packet.id:02X} | packet:{type(packet).__name__}")

    continue_ = True  # default to keeping the connection open if no handler says otherwise

    for handler in pymine_api.packet.PACKET_HANDLERS[state][packet.id]:
        resp_value = await handler(stream, packet)

        try:
            continue_, stream = resp_value
        except (ValueError, TypeError):
            logger.warn(f"Invalid return from packet handler: {handler.__module__}.{handler.__qualname__}")
            continue

        if not continue_:
            return False, stream

    return continue_, stream
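# Hedged sketch of the handler contract that handle_packet() relies on: every entry in
# pymine_api.packet.PACKET_HANDLERS[state][packet.id] is awaited with (stream, packet)
# and is expected to return a (continue_, stream) tuple. How handlers get registered is
# not shown here, so this function name and body are purely illustrative.
async def _example_packet_handler(stream: Stream, packet: Packet):
    # ... inspect the packet and optionally write a response to the stream here ...
    return True, stream  # True keeps the connection alive; False tells the loop to close it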