async def sendWorldData(self, world: World):
    """Stream a world's map to this client: LevelInitialize, gzip chunks, then LevelFinalize."""
    # Kick off the transfer with a Level Initialize packet
    Logger.debug(f"{self.ip} | Sending Level Initialize Packet", module="network")
    await self.dispacher.sendPacket(Packets.Response.LevelInitialize)

    # Compress the map (with size header) before transmission
    Logger.debug(f"{self.ip} | Preparing To Send Map", module="network")
    compressedMap = world.gzipMap(includeSizeHeader=True)

    # The protocol caps level-data payloads at 1024 bytes per packet
    chunkList = []
    for start in range(0, len(compressedMap), 1024):
        chunkList.append(compressedMap[start:start + 1024])

    # Ship each chunk along with a rough progress percentage
    Logger.debug(f"{self.ip} | Sending Chunk Data", module="network")
    totalChunks = len(chunkList)
    for index, data in enumerate(chunkList):
        Logger.verbose(f"{self.ip} | Sending Chunk Data {index + 1} of {totalChunks}", module="network")
        await self.dispacher.sendPacket(
            Packets.Response.LevelDataChunk,
            data,
            percentComplete=int((100 / totalChunks) * index))

    # Close out the transfer with the world dimensions
    Logger.debug(f"{self.ip} | Sending Level Finalize Packet", module="network")
    await self.dispacher.sendPacket(Packets.Response.LevelFinalize, world.sizeX, world.sizeY, world.sizeZ)
async def listenForPackets(self, packetDict: dict = {}, headerSize: int = 1, ignoreUnknownPackets: bool = False, timeout: int = NET_TIMEOUT):
    """Read one packet from the client stream and deserialize it.

    Returns:
        (packetHeader, deserializedData) on success, or
        (packetHeader, packet.onError(e)) when a non-critical deserialize error occurs.

    Raises:
        ClientError: on read timeout, or on an unknown packet id when
            ignoreUnknownPackets is False.
    """
    # NOTE(review): mutable default `packetDict={}` is only ever read here, so the
    # shared-default pitfall does not bite — but a None default would be safer.
    try:
        # Reading First Byte For Packet Header
        rawData = await asyncio.wait_for(
            self.handler.reader.readexactly(
                headerSize  # Size of packet header
            ), timeout)
        Logger.verbose(
            f"CLIENT -> SERVER | CLIENT: {self.handler.ip} | Incoming Player Loop Packet Id {rawData}",
            module="network")
        # Convert Packet Header to Int
        packetHeader = int.from_bytes(rawData, byteorder="big")
        # Check if packet is to be expected
        if packetHeader not in packetDict.keys():
            # Ignore if ignoreUnknownPackets flag is set
            if not ignoreUnknownPackets:
                Logger.debug(
                    f"Player Sent Unknown Packet Header {rawData} ({packetHeader})",
                    module="network")
                raise ClientError(f"Unknown Packet {packetHeader}")
        # NOTE(review): when ignoreUnknownPackets is set, execution still falls through
        # to getPacketById below with the unknown id — confirm that lookup tolerates it.
        # Get packet using packetId
        packet = PacketManager.Request.getPacketById(packetHeader)
        # Reading and Appending Rest Of Packet Data (Packet Body)
        rawData += await asyncio.wait_for(
            self.handler.reader.readexactly(
                packet.SIZE - headerSize  # Size of packet body (packet minus header size)
            ), timeout)
        Logger.verbose(
            f"CLIENT -> SERVER | CLIENT: {self.handler.ip} | DATA: {rawData}",
            module="network")
        # Attempting to Deserialize Packets
        try:
            # Deserialize Packet
            serializedData = await packet.deserialize(self.handler.player, rawData)
            return packetHeader, serializedData
        except Exception as e:
            # Critical packets (or critical error types) must propagate to the caller
            if packet.CRITICAL or type(e) in CRITICAL_REQUEST_ERRORS:
                raise e  # Pass Down Exception To Lower Layer
            else:
                # TODO: Remove Hacky Type Ignore
                return packetHeader, packet.onError(e)  # type: ignore
    except asyncio.TimeoutError:
        raise ClientError("Did Not Receive Packet In Time!")
    except Exception as e:
        raise e  # Pass Down Exception To Lower Layer
async def sendPacket(self, packet: Type[AbstractResponsePacket], *args, timeout: int = NET_TIMEOUT, **kwargs):
    """Serialize and transmit a response packet to this client.

    Non-critical errors are routed to packet.onError; critical ones propagate.
    """
    try:
        # Build the raw byte payload for this packet
        rawData = await packet.serialize(*args, **kwargs)
        Logger.verbose(
            f"SERVER -> CLIENT | CLIENT: {self.handler.ip} | ID: {packet.ID} {packet.NAME} | SIZE: {packet.SIZE} | DATA: {rawData}",
            module="network")
        # Skip the write entirely when the connection has already closed
        if not self.handler.isConnected:
            Logger.debug(
                f"Packet {packet.NAME} Skipped Due To Closed Connection!",
                module="network")
            return
        self.handler.writer.write(bytes(rawData))
        await self.handler.writer.drain()
    except Exception as e:
        # Critical packets / critical error types always propagate (onError is bypassed)
        if packet.CRITICAL or type(e) in CRITICAL_RESPONSE_ERRORS:
            raise e  # Pass Down Exception To Lower Layer
        # TODO: Remove Hacky Type Ignore
        return packet.onError(e)  # type: ignore
async def handlePlayerMovement(self, posX: int, posY: int, posZ: int, posYaw: int, posPitch: int):
    """Process an incoming player movement request and broadcast the new position.

    Args:
        posX, posY, posZ: New fixed-point world coordinates.
        posYaw, posPitch: New view angles.
    """
    Logger.verbose(f"Handling Player Movement From Player {self.name}", module="player")
    # Checking If Player Is Joined To A World
    if self.worldPlayerManager is None:
        # FIX: message previously said "handleBlockUpdate" (copy-paste from the
        # block-update handler); it now names this handler correctly.
        Logger.error(f"Player {self.name} Trying To handlePlayerMovement When No World Is Joined", module="player")
        return None  # Skip Rest
    # Updating Current Player Position
    self.posX = posX
    self.posY = posY
    self.posZ = posZ
    self.posYaw = posYaw
    self.posPitch = posPitch
    # Sending Player Position Update Packet To All Players
    await self.worldPlayerManager.sendWorldPacket(
        Packets.Response.PlayerPositionUpdate,
        self.playerId,
        posX, posY, posZ,
        posYaw, posPitch,
        ignoreList=[self]  # not sending to self as that may cause some de-sync issues
    )
def register(self, commandClass: Type[AbstractCommand], module: AbstractModule):
    """Instantiate commandClass, resolve OVERRIDE and activator conflicts, and register it.

    Raises:
        InitRegisterError: on a duplicate command name (without OVERRIDE) or a
            duplicate activator.
    """
    Logger.debug(
        f"Registering Command {commandClass.NAME} From Module {module.NAME}",
        module=f"{module.NAME}-submodule-init")
    command: AbstractCommand = super()._initSubmodule(commandClass, module)
    # Handling Special Cases if OVERRIDE is Set
    if command.OVERRIDE:
        # Check If Override Is Going To Do Anything
        # If Not, Warn
        if command.NAME not in self._command_list.keys():
            Logger.warn(
                f"Command {command.NAME} From Module {command.MODULE.NAME} Is Trying To Override A Command That Does Not Exist! If This Is An Accident, Remove The 'override' Flag.",
                module=f"{module.NAME}-submodule-init")
        else:
            Logger.debug(
                f"Command {command.NAME} Is Overriding Command {self._command_list[command.NAME].NAME}",
                module=f"{module.NAME}-submodule-init")
            # Un-registering All Activators for the Command Being Overwritten. Prevents Issues!
            Logger.debug(
                f"Un-registering Activators for Command {self._command_list[command.NAME].NAME}",
                module=f"{module.NAME}-submodule-init")
            # NOTE(review): ACTIVATORS is treated as a mapping here (.keys()) but is
            # assigned a plain list further down — confirm its declared type.
            for activator in list(
                    self._command_list[command.NAME].ACTIVATORS.keys()):
                # Deleting from Cache
                del self._activators[activator]
    # Checking If Command Name Is Already In Commands List
    # Ignoring if OVERRIDE is set
    if command.NAME in self._command_list.keys() and not command.OVERRIDE:
        raise InitRegisterError(
            f"Command {command.NAME} Has Already Been Registered! If This Is Intentional, Set the 'override' Flag to True"
        )
    # Setting Activators To Default If None
    if command.ACTIVATORS is None:
        command.ACTIVATORS = []
    if len(command.ACTIVATORS) == 0:
        Logger.warn(
            f"Command {command.NAME} Was Registered Without Any Activators. Using Name As Command Activator.",
            module=f"{module.NAME}-submodule-init")
        # Fall back to the lowercased command name as the sole activator
        command.ACTIVATORS = [command.NAME.lower()]
    # Add Activators To Command Cache
    Logger.debug(
        f"Adding Activators {command.ACTIVATORS} To Activator Cache",
        module=f"{module.NAME}-submodule-init")
    for activator in command.ACTIVATORS:
        Logger.verbose(f"Adding Activator {activator}", module=f"{module.NAME}-submodule-init")
        # If Activator Already Exists, Error
        if activator not in self._activators:
            self._activators[activator] = command
        else:
            raise InitRegisterError(
                f"Another Command Has Already Registered Command Activator {activator}"
            )
    # Add Command to Commands List
    self._command_list[command.NAME] = command
async def spawnCurrentPlayers(self, playerSelf: Player):
    """Send a SpawnPlayer packet to the joining player for each player already in this world.

    Args:
        playerSelf: The newly joining player (skipped when iterating).
    """
    # Loop Through All Players
    for player in self.players:
        # Checking if Player Exists
        if player is None:
            continue
        # Checking if player is not self
        if player is playerSelf:
            continue
        # Attempting to Send Packet
        try:
            await playerSelf.networkHandler.dispacher.sendPacket(
                Packets.Response.SpawnPlayer,
                player.playerId,
                player.name,
                player.posX,
                player.posY,
                player.posZ,
                player.posYaw,
                player.posPitch,
            )
        except Exception as e:
            # FIX: membership must be tested on the exception's *type* — `e` is an
            # instance and was never a member of CRITICAL_RESPONSE_ERRORS, so the
            # "ignore" branch below was unreachable. Matches the type(e) checks used
            # by the packet dispatcher elsewhere in this file.
            if type(e) not in CRITICAL_RESPONSE_ERRORS:
                # Something Broke!
                Logger.error(f"An Error Occurred While Sending World Packet {Packets.Response.SpawnPlayer.NAME} To {player.networkHandler.ip} - {type(e).__name__}: {e}", module="world-packet-dispatcher")
            else:
                # Bad Timing with Connection Closure. Ignoring
                Logger.verbose(f"Ignoring Error While Sending World Packet {Packets.Response.SpawnPlayer.NAME} To {player.networkHandler.ip}", module="world-packet-dispatcher")
def getBlock(self, blockX: int, blockY: int, blockZ: int):
    """Return the block object at the given map coordinates.

    Raises:
        BlockError: when any coordinate lies outside the world bounds.
    """
    # Gets Block Obj Of Requested Block
    Logger.verbose(f"Getting World Block {blockX}, {blockY}, {blockZ}", module="world")
    # Check If Block Is Out Of Range
    # FIX: also reject negative coordinates — Python's negative indexing would
    # otherwise silently wrap around and return the wrong block.
    if not (0 <= blockX < self.sizeX and 0 <= blockY < self.sizeY and 0 <= blockZ < self.sizeZ):
        raise BlockError(
            f"Requested Block Is Out Of Range ({blockX}, {blockY}, {blockZ})"
        )
    # Flat array indexed x + sizeX * (z + sizeZ * y) — y (height) is the slowest axis
    return BlockManager.getBlockById(
        self.mapArray[blockX + self.sizeX * (blockZ + self.sizeZ * blockY)])
def _convertArgs(name: str, param: inspect.Parameter, arg: str):
    """Convert a raw string argument using the parameter's annotation as a constructor.

    Args:
        name: Parameter name (used in error messages).
        param: The inspect.Parameter whose annotation drives the conversion.
        arg: Raw string value supplied by the user.

    Raises:
        CommandError: when the annotation rejects the value.
    """
    Logger.verbose(f"Transforming Argument Data For Argument {name}", module="command")
    # If There Is No Type To Convert, Ignore
    # (Parameter.empty is the public spelling of the former inspect._empty check)
    if param.annotation is inspect.Parameter.empty:
        return arg
    # Try to parse, if error, cancel
    try:
        return param.annotation(arg)
    except ValueError:
        # FIX: report the offending *value* — the old message interpolated
        # type(arg).__name__, which is always 'str' and therefore useless.
        raise CommandError(
            f"Argument '{name}' Expected {param.annotation.__name__} But Got '{arg}'"
        )
def internal(cls): Logger.verbose(f"Registered {submodule.NAME} {name} version {version}", module="submodule-import") # Set Class Variables cls.NAME = name cls.DESCRIPTION = description cls.VERSION = version cls.OVERRIDE = override cls.MANAGER = submodule # Set Obsidian Submodule to True -> Notifies Init that This Class IS a Submodule cls.obsidian_submodule = True # Return cls Obj for Decorator return cls
def _ensureNoCycles(current: Type[AbstractModule], previous: List[str]):
    """Walk the dependency tree depth-first and raise if a module depends on itself transitively."""
    Logger.verbose(
        f"Travelling Down Dependency Tree. CUR: {current} PREV: {previous}",
        module="cycle-check")
    # A module re-appearing on its own ancestry path means an infinite cycle
    trail = [*previous, current.NAME]
    if current.NAME in previous:
        raise DependencyError(
            f"Circular dependency Detected: {' -> '.join(trail)}"
        )
    Logger.verbose(
        f"Current Modules Has Dependencies {current.DEPENDENCIES}",
        module="cycle-check")
    # Recurse into every dependency, extending the ancestry trail
    for dep in current.DEPENDENCIES:
        _ensureNoCycles(dep.MODULE, trail)
async def sendGlobalPacket(self, packet: AbstractResponsePacket, *args, ignoreList: List[Player] = [], **kwargs):
    """Broadcast a packet to every player connected to the server (all worlds).

    Args:
        packet: Response packet to send.
        *args: Positional arguments forwarded to the packet serializer.
        ignoreList: Players to skip (e.g. the originator). The default list is
            never mutated, so the shared-default pitfall does not apply.
        **kwargs: Keyword arguments forwarded to the packet serializer.
    """
    Logger.verbose(f"Sending Packet {packet.NAME} To All Connected Players", module="global-packet-dispatcher")
    # Loop Through All Players
    for player in self.players:
        # Checking if player is not in ignoreList
        if player not in ignoreList:
            try:
                # Sending Packet To Player
                await player.networkHandler.dispacher.sendPacket(packet, *args, **kwargs)
            except Exception as e:
                # FIX: test the exception's *type* — `e` (an instance) was never a
                # member of CRITICAL_RESPONSE_ERRORS, so the "ignore" branch was
                # unreachable before.
                if type(e) not in CRITICAL_RESPONSE_ERRORS:
                    # Something Broke!
                    Logger.error(f"An Error Occurred While Sending Global Packet {packet.NAME} To {player.networkHandler.ip} - {type(e).__name__}: {e}", module="global-packet-dispatcher")
                else:
                    # Bad Timing with Connection Closure. Ignoring
                    Logger.verbose(f"Ignoring Error While Sending Global Packet {packet.NAME} To {player.networkHandler.ip}", module="global-packet-dispatcher")
def register(self, packetClass: Type[AbstractPacket], module: AbstractModule):
    """Instantiate packetClass and register it, enforcing unique NAME and ID unless OVERRIDE is set.

    Raises:
        InitRegisterError: on a duplicate packet name or id without the
            'override' flag.
    """
    Logger.debug(
        f"Registering Packet {packetClass.NAME} From Module {module.NAME}",
        module=f"{module.NAME}-submodule-init")
    packet: AbstractPacket = super()._initSubmodule(packetClass, module)
    # Handling Special Cases if OVERRIDE is Set
    if packet.OVERRIDE:
        # FIX: the two membership tests were swapped — _packet_list is keyed by NAME
        # and getAllPacketIds() returns IDs, so the old code compared ID against names
        # and NAME against ids, making this warning fire on legitimate overrides.
        if (packet.NAME not in self._packet_list.keys()) and (
                packet.ID not in self.getAllPacketIds()):
            Logger.warn(
                f"Packet {packet.NAME} (ID: {packet.ID}) From Module {packet.MODULE.NAME} Is Trying To Override A Packet That Does Not Exist! If This Is An Accident, Remove The 'override' Flag.",
                module=f"{module.NAME}-submodule-init")
        elif packet.NAME in self._packet_list.keys():
            # Only log the override target when the NAME actually exists —
            # guards the _packet_list[packet.NAME] lookup against a KeyError
            # when overriding by ID alone.
            Logger.debug(
                f"Packet {packet.NAME} Is Overriding Packet {self._packet_list[packet.NAME].NAME} (ID: {packet.ID})",
                module=f"{module.NAME}-submodule-init")
    # Checking If Packet And PacketId Is Already In Packets List
    # Ignoring if OVERRIDE is set
    if packet.NAME in self._packet_list.keys() and not packet.OVERRIDE:
        raise InitRegisterError(
            f"Packet {packet.NAME} Has Already Been Registered! If This Is Intentional, Set the 'override' Flag to True"
        )
    if packet.ID in self.getAllPacketIds() and not packet.OVERRIDE:
        raise InitRegisterError(
            f"Packet Id {packet.ID} Has Already Been Registered! If This Is Intentional, Set the 'override' Flag to True"
        )
    # Only Used If Request
    if self.direction is PacketDirections.REQUEST:
        # Cast Packet into Request Packet Type
        requestPacket: AbstractRequestPacket = packet  # type: ignore
        # Add To Packet Cache If Packet Is Used In Main Player Loop
        if requestPacket.PLAYERLOOP:
            Logger.verbose(
                f"Adding Packet {requestPacket.ID} To Main Player Loop Request Packet Cache",
                module=f"{module.NAME}-submodule-init")
            self.loopPackets[requestPacket.ID] = requestPacket
    # Add Packet to Packets List
    self._packet_list[packet.NAME] = packet
def _initSubmodules(self):
    """Register every flagged submodule of each loaded module, in dependency order.

    Modules whose submodule registration fails are (after user confirmation)
    removed from the loader entirely.
    """
    # Loop through all the submodules in the order of the sorted graph
    for module in self._sorted_module_graph:
        try:
            # Loop Through All Items within Module
            Logger.debug(f"Checking All Items in {module.NAME}", module=f"{module.NAME}-submodule-init")
            # Loop Through All Items In Class
            for _, item in module.__dict__.items():
                # Check If Item Has "obsidian_submodule" Flag
                # (set by the submodule decorator — see internal())
                if hasattr(item, "obsidian_submodule"):
                    Logger.verbose(
                        f"{item} Is A Submodule! Adding As {module.NAME} Submodule.",
                        module=f"{module.NAME}-submodule-init")
                    # Register Submodule Using information Provided by Submodule Class
                    item.MANAGER.register(item, module)
        except FatalError as e:
            # Pass Down Fatal Error To Base Server
            raise e
        except Exception as e:
            # Handle Exception if Error Occurs
            self._error_list.append(
                (module.NAME, "Init-Submodule"))  # Module Loaded WITH Errors
            # If the Error is an Init Error (raised on purpose), Don't print out TB
            if type(e) is InitError:
                printTb = False
            else:
                printTb = True
            Logger.error(
                f"Error While Initializing Submodules For {module.NAME} - {type(e).__name__}: {e}\n",
                module="submodule-init",
                printTb=printTb)
            Logger.warn(
                "!!! Module Errors May Cause Compatibility Issues And/Or Data Corruption !!!\n",
                module="submodule-init")
            Logger.warn(f"Skipping Module {module.NAME}?", module="submodule-init")
            Logger.askConfirmation()
            # Remove Module
            Logger.warn(f"Removing Module {module.NAME} From Loader!", module="submodule-init")
            del self._module_list[module.NAME]
def _buildDependencyGraph(self):
    """Topologically sort loaded modules into self._sorted_module_graph (dependencies first)."""
    Logger.debug("Generating Dependency Graph", module="module-prep")
    # Names already placed in (or being placed into) the graph
    visited = set()
    # Start from a clean graph
    self._sorted_module_graph = []

    # Depth-first post-order walk: a module is appended only after all its dependencies
    def _visit(module):
        Logger.verbose(f"Running Topological Sort on {module.NAME}", module="topological-sort")
        # Mark before descending so repeated/cyclic references are skipped
        visited.add(module.NAME)
        Logger.verbose(
            f"Attempting Topological Sort on {module.NAME}'s Dependencies {module.DEPENDENCIES}",
            module="topological-sort")
        for dependency in module.DEPENDENCIES:
            if dependency.NAME not in visited:
                _visit(dependency.MODULE)
        # All dependencies placed — this module may now join the graph
        self._sorted_module_graph.append(module)
        Logger.verbose(
            f"Added {module.NAME} To Dependency Graph. DG Is Now {self._sorted_module_graph}",
            module="topological-sort")

    # Launch a DFS from every module that has not yet been placed
    for moduleName in list(self._module_list.keys()):
        Logger.verbose(f"Attempting Topological Sort on {moduleName}", module="module-prep")
        if moduleName not in visited:
            _visit(self._module_list[moduleName])

    # Print Out Status
    Logger.debug(
        f"Finished Generating Dependency Graph. Result: {self._sorted_module_graph}",
        module="module-prep")
async def sendWorldPacket(self, packet: Type[AbstractResponsePacket], *args, ignoreList: List[Player] = [], **kwargs):
    """Broadcast a packet to every player currently in this world.

    Args:
        packet: Response packet class to send.
        *args: Positional arguments forwarded to the packet serializer.
        ignoreList: Players to skip (e.g. the originator). The default list is
            never mutated, so the shared-default pitfall does not apply.
        **kwargs: Keyword arguments forwarded to the packet serializer.
    """
    Logger.verbose(f"Sending Packet {packet.NAME} To All Players On {self.world.name}", module="world-packet-dispatcher")
    # Loop Through All Players
    for player in self.players:
        # Checking if Player Exists
        if player is None:
            continue
        # Checking if player is not in ignoreList
        if player in ignoreList:
            continue
        # Attempting to Send Packet
        try:
            await player.networkHandler.dispacher.sendPacket(packet, *args, **kwargs)
        except Exception as e:
            # FIX: test the exception's *type* — `e` (an instance) was never a
            # member of CRITICAL_RESPONSE_ERRORS, so the "ignore" branch was
            # unreachable before.
            if type(e) not in CRITICAL_RESPONSE_ERRORS:
                # Something Broke!
                Logger.error(f"An Error Occurred While Sending World Packet {packet.NAME} To {player.networkHandler.ip} - {type(e).__name__}: {e}", module="world-packet-dispatcher")
            else:
                # Bad Timing with Connection Closure. Ignoring
                Logger.verbose(f"Ignoring Error While Sending World Packet {packet.NAME} To {player.networkHandler.ip}", module="world-packet-dispatcher")
def _save(self, fileIO: io.TextIOWrapper):
    """Serialize this config dataclass to fileIO as JSON, dropping override-managed keys."""
    Logger.debug(f"Saving Config With FileIO {fileIO}", "config-save")
    # Snapshot the dataclass as a plain dict we can prune freely
    serialized = copy.copy(asdict(self))
    Logger.debug("Applying Config Overrides", "config-save")
    # Iterate over a copy of the override list, since `serialized` is mutated below
    overrideKeys = copy.copy(serialized["configOverrides"])
    for key in overrideKeys:
        Logger.debug(f"Applying Config Override {key}", "config-save")
        if key in serialized:
            del serialized[key]
        else:
            Logger.warn(
                f"Trying To Apply Non-Existant Config Override Key {key}",
                "config-save")
    # Write the pruned dict as pretty-printed JSON
    Logger.debug("Writing Formatted Config To File", "config-save")
    Logger.verbose(f"Formatted Config Is {serialized}", "config-save")
    json.dump(serialized, fileIO, indent=4)
def _importModules(self):
    """Discover and import every module package from the modules folder, honoring the blacklist.

    Raises:
        FatalError: when a module raises one during import, or when _ensure_core
            is set and the 'core' module failed to load.
    """
    # Initialize Temporary List of Files Imported
    _module_files = []
    # Walk Through All Packages And Import Library
    for _, module_name, _ in pkgutil.walk_packages(
            [os.path.join(SERVERPATH, MODULESFOLDER)]):
        # Load Modules
        Logger.debug(f"Detected Module {module_name}", module="module-import")
        if module_name not in self._module_blacklist:
            try:
                Logger.verbose(
                    f"Module {module_name} Not In Blacklist. Adding!",
                    module="module-import")
                # Import Module
                _module = importlib.import_module(MODULESIMPORT + module_name)
                # Appending To A List of Module Files to be Used Later
                _module_files.append(module_name)
                # Set the Imported Module into the Global Scope
                globals()[module_name] = _module
            except FatalError as e:
                # Pass Down Fatal Error To Base Server
                raise e
            except Exception as e:
                # Handle Exception if Error Occurs
                self._error_list.append(
                    (module_name, "PreInit-Import"))  # Module Loaded WITH Errors
                # If the Error is a Register Error (raised on purpose), Don't print out TB
                if type(e) is InitRegisterError:
                    printTb = False
                else:
                    printTb = True
                Logger.error(
                    f"Error While Importing Module {module_name} - {type(e).__name__}: {e}\n",
                    module="module-import",
                    printTb=printTb)
                Logger.warn(
                    "!!! Fatal Module Errors May Cause Compatibility Issues And/Or Data Corruption !!!\n",
                    module="module-import")
                Logger.askConfirmation()
        else:
            Logger.verbose(
                f"Skipping Module {module_name} Due To Blacklist",
                module="module-import")
    Logger.verbose(f"Detected and Imported Module Files {_module_files}",
                   module="module-import")
    # Check If Core Was Loaded
    # (registration happens during import; _module_list is filled as a side effect)
    if self._ensure_core:
        if "core" not in self._module_list.keys():
            self._error_list.append(
                ("core", "PreInit-EnsureCore"))  # Module Loaded WITH Errors
            raise FatalError(
                "Error While Loading Module core - Critical Module Not Found"
            )
def _load(self, fileIO: io.TextIOWrapper):
    """Populate this config object from the JSON document in fileIO, ignoring unknown keys."""
    Logger.debug(f"Loading Config With FileIO {fileIO}", "config-load")
    # Parse the whole file up front
    loaded = json.load(fileIO)
    Logger.verbose(f"Config data Is {loaded}", "config-load")
    for key, value in loaded.items():
        Logger.verbose(f"Checking Config Attribute {key}", "config-load")
        # Accept only keys that exist on the object and are not override-managed
        if not hasattr(self, key) or key in self.configOverrides:
            Logger.warn(f"Ignoring Unknown Config Attribute {key}", "config-load")
            continue
        Logger.verbose(
            f"Setting Config Attribute {key} to {value}",
            "config-load")
        setattr(self, key, value)
def _topologicalSort(module): Logger.verbose(f"Running Topological Sort on {module.NAME}", module="topological-sort") # Adding Module to Visited Set to Prevent Looping visited.add(module.NAME) # DFS Going Bottom First Logger.verbose( f"Attempting Topological Sort on {module.NAME}'s Dependencies {module.DEPENDENCIES}", module="topological-sort") for dependency in module.DEPENDENCIES: if dependency.NAME not in visited: _topologicalSort(dependency.MODULE) # Current Module has No Further Dependencies. Adding To Graph! self._sorted_module_graph.append(module) Logger.verbose( f"Added {module.NAME} To Dependency Graph. DG Is Now {self._sorted_module_graph}", module="topological-sort")
async def readPacket(self, packet: Type[AbstractRequestPacket], timeout: int = NET_TIMEOUT, checkId=True):
    """Read one fixed-size request packet off the wire, optionally validate its id, and deserialize it."""
    try:
        Logger.verbose(
            f"Expected Packet {packet.ID} Size {packet.SIZE} from {self.handler.ip}",
            module="network")
        # Pull exactly SIZE bytes from the stream, bounded by the timeout
        data = await asyncio.wait_for(
            self.handler.reader.readexactly(packet.SIZE),  # type: ignore
            timeout)
        Logger.verbose(
            f"CLIENT -> SERVER | CLIENT: {self.handler.ip} | DATA: {data}",
            module="network")
        # The first byte carries the packet id; reject mismatches when asked to
        packetId = data[0]  # type: ignore
        if checkId and packetId != packet.ID:
            Logger.verbose(f"{self.handler.ip} | Packet Invalid!", module="network")
            raise ClientError(f"Invalid Packet {packetId}")
        # Deserialize Packet
        # TODO: Fix type complaint!
        return await packet.deserialize(self.handler.player, data)  # type: ignore
    except asyncio.TimeoutError:
        raise ClientError(f"Did Not Receive Packet {packet.ID} In Time!")
    except Exception as e:
        # Critical packets / critical error types always propagate
        if packet.CRITICAL or type(e) in CRITICAL_REQUEST_ERRORS:
            raise e  # Pass Down Exception To Lower Layer
        # TODO: Remove Hacky Type Ignore
        return packet.onError(e)  # type: ignore
def _parseArgs(command: Type[AbstractCommand], data: list):
    """Map raw string arguments onto command.execute's signature.

    Returns:
        (args, kwargs) ready to be splatted into command.execute (after ctx).

    Raises:
        InitRegisterError: when execute() lacks the leading 'ctx' parameter.
        CommandError: on missing required fields or leftover arguments.
    """
    # This entire section is inspired by Discord.py 's Aprroach To Message Parsing and Handing
    # TODO: IGNORE_EXTRA, REST_IS_RAW, REQUIRE_VAR_POSITIONAL
    Logger.debug(
        f"Parsing Command Arguments {data} For Command {command.NAME}",
        module="command")
    # Define Important Vars
    args = []
    kwargs = {}
    # Extract Parameters From Execute Function
    params = inspect.signature(command.execute).parameters
    # Create Iterators To Parse Through Params and Data
    paramsIter = iter(params.items())
    dataIter = iter(data)
    # Parse Out The 'ctx' parameter
    try:
        next(paramsIter)
    except StopIteration:
        # InitRegisterError Because Command Was Improperly Formed + We Want To Skip The Player Error
        raise InitRegisterError(
            f"Command {command.NAME} Is Missing Parameter 'ctx'")
    # Loop Through Rest Of Iterators To Parse Data
    for name, param in paramsIter:
        Logger.verbose(f"Parsing Parameter {name}", module="command")
        # Check Parameter Type To Determing Parsing Method
        # -> Positional Methods (AKA POSITIONAL_OR_KEYWORD)
        # -> Keyword Only Methods (AKA KEYWORD_ONLY)
        # -> Positional "Rest" Methods (AKA VAR_POSITIONAL)
        if param.kind == param.POSITIONAL_OR_KEYWORD:
            # Parse as Normal Keyword
            try:
                # Convert Type
                transformed = _convertArgs(name, param, next(dataIter))
                args.append(transformed)
            except StopIteration:
                # Not Enough Data, Check If Error Or Use Default Value
                if param.default == inspect._empty:  # type: ignore
                    raise CommandError(
                        f"Command {command.NAME} Expected Field '{name}' But Got Nothing"
                    )
                else:
                    args.append(param.default)
        elif param.kind == param.KEYWORD_ONLY:
            # KWarg Only Params Mean "Consume Rest"
            rest = []
            for value in dataIter:
                rest.append(value)
            # If Empty, Check If Default Value Was Requested
            if rest == []:
                if param.default == inspect._empty:  # type: ignore
                    raise CommandError(
                        f"Command {command.NAME} Expected Field '{name}' But Got Nothing"
                    )
                else:
                    kwargs[name] = param.default
            else:
                # Join and Convert
                joinedRest = " ".join(rest)
                kwargs[name] = _convertArgs(name, param, joinedRest)
            # End of loop. Ignore rest
            break
        elif param.kind == param.VAR_POSITIONAL:
            # Var Positional means to just append all extra values to the end of the function
            for value in dataIter:
                transformed = _convertArgs(name, param, value)
                args.append(transformed)
    # At the end, if there were extra values, give error
    # (next() raising StopIteration is the expected success path)
    try:
        next(dataIter)
        raise CommandError(
            f"Too Many Arguments Passed Into Command '{command.NAME}'")
    except StopIteration:
        return args, kwargs
def unpackageString(data, encoding: str = "ascii"):
    """Decode a padded network string into a clean Python string."""
    Logger.verbose(f"Unpacking String {data}", module="packet")
    # Decode the raw bytes, then trim the padding added by packageString
    decoded = data.decode(encoding)
    return decoded.strip()
def packageString(data: str, maxSize: int = 64, encoding: str = "ascii"):
    """Encode a string into a fixed-width, space-padded bytearray for the wire."""
    Logger.verbose(f"Packing String {data}", module="packet")
    # Clamp to the field width, then right-pad the remainder with spaces
    padded = data[:maxSize].ljust(maxSize)
    return bytearray(padded, encoding)
async def _init(self):
    """Initialize the server: config, file structure, modules, worlds, players, and the socket server."""
    Logger.info(f"=== Initializing Server '{self.name}' ===", module="init")
    # Testing If Debug Is Enabled
    Logger.debug("Debug Is Enabled", module="init")
    Logger.verbose("Verbose Is Enabled", module="init")
    Logger.info("Use '-d' and/or '-v' To Enable Debug Mode Or Verbose Mode", module="init")
    # Initializing Config
    Logger.info("Initializing Server Config", module="init")
    # Ensuring Config Path
    self._ensureFileStructure(os.path.dirname(self.config.configPath))
    # Initing Config
    self.config.init()
    # Setting Up File Structure
    Logger.info("Setting Up File Structure", module="init")
    if self.config.worldSaveLocation is not None:
        self.ensureFiles.append(MODULESFOLDER)
        self.ensureFiles.append(self.config.worldSaveLocation)
    # NOTE(review): placement relative to the `if` above reconstructed from
    # collapsed formatting — confirm the ensure call is meant to run unconditionally.
    self._ensureFileStructure(self.ensureFiles)
    # Load and Log Modules
    Logger.info("Starting Module Initialization", module="init")
    ModuleManager.initModules(blacklist=self.config.moduleBlacklist, ensureCore=True)
    Logger.info("All Modules Initialized!!!", module="init")
    Logger.info(f"{ModuleManager.numModules} Modules Initialized", module="init")
    Logger.info(f"{PacketManager.numPackets} Packets Initialized", module="init")
    Logger.info(f"{BlockManager.numBlocks} Blocks Initialized", module="init")
    Logger.info(f"{CommandManager.numCommands} Commands Initialized", module="init")
    Logger.info(f"{MapGeneratorManager.numMapGenerators} Map Generators Initialized", module="init")
    # Print Pretty List of All Modules
    Logger.info(f"Module List:\n{ModuleManager.generateTable()}", module="init")
    # Only Print Packet And World Generators List If Debug Enabled
    if Logger.DEBUG:
        Logger.debug(f"Packets List:\n{PacketManager.generateTable()}", module="init")
        Logger.debug(f"World Formats List:\n{WorldFormatManager.generateTable()}", module="init")
        Logger.debug(f"Map Generators List:\n{MapGeneratorManager.generateTable()}", module="init")
        Logger.debug(f"Commands List:\n{CommandManager.generateTable()}", module="init")
    # Only Print Block List If Verbose Enabled (Very Big)
    if Logger.VERBOSE:
        Logger.verbose(f"Blocks List:\n{BlockManager.generateTable()}", module="init")
    # Printing Error If Error Occurs During Init
    if len(ModuleManager._error_list) != 0:
        Logger.warn("Some Modules Files Failed To Load!\n", module="init")
        Logger.warn("!!! Failed Modules May Cause Compatibility Issues And/Or Data Corruption !!!\n", module="init-module")
        Logger.warn(f"Failed: {ModuleManager._error_list}\n", module="init")
        Logger.askConfirmation()
    # Initialize WorldManager
    Logger.info("Initializing World Manager", module="init")
    self.worldManager = WorldManager(self, blacklist=self.config.worldBlacklist)
    Logger.info("Loading Worlds", module="init")
    self.worldManager.loadWorlds()
    # Initialize PlayerManager
    Logger.info("Initializing Player Manager", module="init")
    self.playerManager = PlayerManager(self, maxSize=self.config.serverMaxPlayers)
    # Create Asyncio Socket Server
    # When new connection occurs, run callback _getConnHandler
    Logger.info(f"Setting Up Server {self.name}", module="init")
    self.server = await asyncio.start_server(self._getConnHandler(), self.address, self.port)
    self.initialized = True
def __init__(self, worldManager: WorldManager, name: str, sizeX: int, sizeY: int, sizeZ: int, mapArray: bytearray, spawnX: Optional[int] = None, spawnY: Optional[int] = None, spawnZ: Optional[int] = None, spawnYaw: Optional[int] = None, spawnPitch: Optional[int] = None, generator: Optional[AbstractMapGenerator] = None, persistant: bool = False, fileIO: Optional[io.BufferedRandom] = None, canEdit: bool = True, maxPlayers: int = 250, displayName: Optional[str] = None, uuid: Optional[str] = None):
    """Construct a world. Spawn coordinates default to the map center when not provided.

    Note: annotations for generator/displayName/uuid now reflect their None
    defaults (Optional[...]); no runtime behavior change.
    """
    # Y is the height
    self.worldManager = worldManager
    self.name = name
    self.generator = generator
    self.sizeX = sizeX
    self.sizeY = sizeY
    self.sizeZ = sizeZ
    self.spawnX = spawnX
    self.spawnZ = spawnZ
    self.spawnY = spawnY
    self.spawnYaw = spawnYaw
    self.spawnPitch = spawnPitch
    self.mapArray = mapArray
    self.persistant = persistant
    self.fileIO = fileIO
    self.canEdit = canEdit
    self.maxPlayers = maxPlayers
    self.displayName = displayName  # Displayname for CW Capability
    self.uuid = uuid  # UUID for CW Capability

    # Check if file IO was given if persistant
    if self.persistant:
        if self.fileIO is None:
            # Setting persistance to false because fileIO was not given
            self.persistant = False
            Logger.error(
                f"World Format {self.worldManager.worldFormat.NAME} Created Persistant World Without Providing FileIO! Please Report To Author! Setting World As Non-Persistant.",
                "world-load")
            Logger.askConfirmation()
        else:
            Logger.debug(f"Persistant World Has FileIO {self.fileIO}", "world-load")

    # Generate/Set Spawn Coords
    # Set spawnX
    if self.spawnX is None:
        # Generate SpawnX (Set to middle of map)
        self.spawnX = (self.sizeX // 2) * 32 + 51
        Logger.verbose(
            f"spawnX was not provided. Generated to {self.spawnX}",
            "world-load")
    # Set spawnZ
    if self.spawnZ is None:
        # Generate SpawnZ (Set to middle of map)
        self.spawnZ = (self.sizeZ // 2) * 32 + 51
        Logger.verbose(
            f"spawnZ was not provided. Generated to {self.spawnZ}",
            "world-load")
    # Set spawnY
    if self.spawnY is None:
        # Kinda hacky to get the block coords form the in-game coords
        self.spawnY = (self.getHighestBlock(round(
            (self.spawnX - 51) / 32), round(
                (self.spawnZ - 51) / 32)) + 1) * 32 + 51
        Logger.verbose(
            f"spawnY was not provided. Generated to {self.spawnY}",
            "world-load")
    # Set spawnYaw
    if spawnYaw is None:
        # Generate SpawnYaw (0)
        self.spawnYaw = 0
        Logger.verbose(
            f"spawnYaw was not provided. Generated to {self.spawnYaw}",
            "world-load")
    # Set spawnPitch
    if spawnPitch is None:
        # Generate SpawnPitch (0)
        self.spawnPitch = 0
        # FIX: message previously said "spawnYaw ... {self.spawnYaw}" (copy-paste
        # from the branch above); it now reports spawnPitch.
        Logger.verbose(
            f"spawnPitch was not provided. Generated to {self.spawnPitch}",
            "world-load")

    # Initialize WorldPlayerManager
    Logger.info("Initializing World Player Manager", module="init-world")
    self.playerManager = WorldPlayerManager(self)
def loadWorlds(self):
    """Scan the configured world-save directory and load every recognized world file.

    Skips files with unknown extensions, blacklisted world names, and names
    that collide with an already-loaded world. If the configured default
    world is not loaded afterwards (or the manager is non-persistant), a new
    default world is generated instead.
    """
    Logger.debug("Starting Attempt to Load All Worlds", module="world-load")
    if self.persistant and (self.server.config.worldSaveLocation is not None):
        Logger.debug(
            f"Beginning To Scan Through {self.server.config.worldSaveLocation} Dir",
            module="world-load")
        # Loop Through All Files Given In World Folder
        for filename in os.listdir(
                os.path.join(SERVERPATH, self.server.config.worldSaveLocation)):
            # Get Pure File Name (No Extentions)
            saveName = os.path.splitext(os.path.basename(filename))[0]
            # FIX: log messages previously printed the literal "(unknown)"
            # instead of interpolating the filename being processed.
            Logger.verbose(
                f"Checking Extention and Status of World File {filename}",
                module="world-load")
            # Check If File Type Matches With The Extentions Provided By worldFormat
            if not any(filename.endswith(ext) for ext in self.worldFormat.EXTENTIONS):
                Logger.debug(
                    f"Ignoring World File {filename}. File Extention Not Known!",
                    module="world-load")
            # Also Check If World Is Blacklisted
            elif saveName in self.server.config.worldBlacklist:
                Logger.info(
                    f"Ignoring World File {filename}. World Name Is Blacklisted!",
                    module="world-load")
            # Also Check If World Name Is Already Loaded (Same File Names with Different Extentions)
            elif saveName in self.worlds.keys():
                Logger.warn(
                    f"Ignoring World File {filename}. World With Similar Name Has Already Been Registered!",
                    module="world-load")
                Logger.warn(
                    f"World File {os.path.basename(self.worlds[saveName].fileIO.name)} Conflicts With World File {filename}!",
                    module="world-load")
                Logger.askConfirmation()
            else:
                Logger.debug(
                    f"Detected World File {filename}. Attempting To Load World",
                    module="world-load")
                # (Attempt) To Load Up World
                try:
                    Logger.info(f"Loading World {saveName}", module="world-load")
                    fileIO = open(
                        os.path.join(SERVERPATH,
                                     self.server.config.worldSaveLocation,
                                     filename), "rb+")
                    self.worlds[saveName] = self.worldFormat.loadWorld(
                        fileIO, self, persistant=self.persistant)
                except Exception as e:
                    Logger.error(
                        f"Error While Loading World {filename} - {type(e).__name__}: {e}",
                        module="world-load")
                    Logger.askConfirmation()
        # Check If Default World Is Loaded
        if self.server.config.defaultWorld not in self.worlds.keys():
            # Check if other worlds were loaded as well
            if len(self.worlds.keys()) > 0:
                Logger.warn(
                    f"Default World {self.server.config.defaultWorld} Not Loaded.",
                    module="world-load")
                Logger.warn(
                    "Consider Checking If World Exists. Consider Changing The Default World and/or File Format In Config.",
                    module="world-load")
                # Ask User If They Want To Continue With World Generation
                Logger.warn(
                    f"Other Worlds Were Detected. Generate New World With Name {self.server.config.defaultWorld}?",
                    module="world-load")
                Logger.askConfirmation(message="Generate New World?")
            else:
                Logger.warn("No Existing Worlds Were Detected. Generating New World!",
                            module="world-load")
            # Generate New World
            defaultWorldName = self.server.config.defaultWorld
            defaultGenerator = MapGenerators[self.server.config.defaultGenerator]
            Logger.debug(f"Creating World {defaultWorldName}", module="world-load")
            self.createWorld(
                defaultWorldName,
                self.server.config.defaultWorldSizeX,
                self.server.config.defaultWorldSizeY,
                self.server.config.defaultWorldSizeZ,
                defaultGenerator,
                persistant=self.persistant,
                grassHeight=self.server.config.defaultWorldSizeY // 2)
    else:
        Logger.debug("World Manager Is Non Persistant!", module="world-load")
        # Create Non-Persistant Temporary World
        defaultWorldName = self.server.config.defaultWorld
        defaultGenerator = MapGenerators[self.server.config.defaultGenerator]
        Logger.debug(f"Creating Temporary World {defaultWorldName}",
                     module="world-load")
        self.createWorld(
            defaultWorldName,
            self.server.config.defaultWorldSizeX,
            self.server.config.defaultWorldSizeY,
            self.server.config.defaultWorldSizeZ,
            defaultGenerator,
            persistant=False,
            maxPlayers=self.server.config.worldMaxPlayers,
            grassHeight=self.server.config.defaultWorldSizeY // 2)
def _initDependencies(self):
    """Resolve and link declared dependencies for every loaded module.

    For each module in self._module_list, verifies that each entry in its
    DEPENDENCIES list names a loaded module (and, when a version is pinned,
    that the versions match), then links the dependency's MODULE attribute
    to the loaded module class. Modules with unresolved dependencies are
    recorded in self._error_list and removed from the loader after user
    confirmation. FatalError is re-raised to the base server.
    """
    # Iterate over a snapshot since failing modules are deleted mid-loop
    for module_name, module_obj in list(self._module_list.items()):
        try:
            Logger.debug(f"Checking Dependencies for Module {module_name}",
                         module="module-resolve")
            # Loop through all dependencies, check type, then check if exists
            for dependency in module_obj.DEPENDENCIES:
                # Get Variables
                dep_name = dependency.NAME
                dep_ver = dependency.VERSION
                Logger.verbose(f"Checking if Dependency {dep_name} Exists",
                               module="module-resolve")
                # Check if Dependency is "Loaded"
                if dep_name not in self._module_list.keys():
                    raise DependencyError(f"Dependency '{dependency}' Not Found!")
                # Check if Version should be checked
                if dep_ver is None:
                    # No Version Check Needed
                    Logger.verbose(
                        f"Skipping Version Check For Dependency {dependency}",
                        module="module-resolve")
                elif dep_ver == self._module_list[dep_name].VERSION:
                    Logger.verbose(f"Dependencies {dependency} Statisfied!",
                                   module="module-resolve")
                else:
                    raise DependencyError(
                        f"Dependency '{dependency}' Has Unmatched Version! (Requirement: {dep_ver} | Has: {self._module_list[dep_name].VERSION})"
                    )
                # If All Passes, Link Module Class
                dependency.MODULE = self._module_list[dep_name]
        except FatalError as e:
            # Pass Down Fatal Error To Base Server
            raise e
        except Exception as e:
            # Handle Exception if Error Occurs
            self._error_list.append((module_name, "PreInit-Dependency"))  # Module Loaded WITH Errors
            # If the Error is a Dependency Error (raised on purpose), Don't print out TB
            printTb = not isinstance(e, DependencyError)
            Logger.error(
                f"Error While Initializing Dependencies For {module_name} - {type(e).__name__}: {e}\n",
                module="module-resolve",
                printTb=printTb)
            Logger.warn(
                "!!! Module Errors May Cause Compatibility Issues And/Or Data Corruption !!!\n",
                module="module-resolve")
            Logger.warn(f"Skipping Module {module_name}?", module="module-resolve")
            Logger.askConfirmation()
            # Remove Module
            Logger.warn(f"Removing Module {module_name} From Loader!",
                        module="module-resolve")
            del self._module_list[module_name]