def __init__(self, parser_header, parsed_data, log_parsing=True):
    """
    Parse the given pieces and structs into manageable objects.

    Args:
        parser_header: The parsed header pieces (stored for later use).
        parsed_data: The parsed data pieces the objects are constructed from.
        log_parsing: Whether parsing progress is printed to stdout.
    """
    lgr = SimpleLogger(should_log=log_parsing)
    lgr.print("\nParsing pieces and structs to objects...")
    self.parser_header = parser_header
    self.parsed_data = parsed_data
    self._objects = {}
    # Object types whose parsing has been migrated to the new structure.
    self._finished_new_structure = {
        "MapObject": MapObject,
        "UnitsObject": UnitsObject,
        "TriggersObject": TriggersObject,
    }
    # Iterate items directly instead of keys() + repeated lookup.
    for key, object_class in self._finished_new_structure.items():
        lgr.print("\tParsing " + key + "...")
        self._objects[key] = object_class._parse_object(self.parsed_data)
        lgr.print("\tParsing " + key + " finished successfully.")
    lgr.print(
        "Parsing pieces and structs to objects finished successfully.")
def reconstruct(self, log_reconstructing=False):
    """
    Write the state of all managed objects back into their pieces/structs.

    Args:
        log_reconstructing: Whether reconstruction progress is printed to stdout.
    """
    logger = SimpleLogger(should_log=log_reconstructing)
    logger.print("\nReconstructing pieces and structs from objects...")
    for name in self._finished_new_structure:
        logger.print("\tReconstructing " + name + "...")
        managed_object = self._objects[name]
        managed_object._reconstruct_object(self.parsed_data, self._objects)
        logger.print("\tReconstructing " + name + " finished successfully.")
    logger.print("Reconstruction finished successfully.")
def create_default(cls, log_creating=True, log_parsing=False):
    """
    Create a scenario where every piece is filled with its default values.

    Args:
        log_creating: Whether piece-creation progress is printed to stdout.
        log_parsing: Forwarded to AoE2ObjectManager; whether object parsing is logged.

    Returns:
        The newly created scenario instance.
    """
    scenario = cls()
    lgr = SimpleLogger(log_creating)
    lgr.print("\nFile creation started...")
    scenario.parser = parser.Parser()
    scenario._parsed_header = collections.OrderedDict()
    scenario._parsed_data = collections.OrderedDict()
    # NOTE(review): for the header loop this dict is built ONCE (empty at this
    # point), while the data loop below rebuilds it per piece. Presumably header
    # piece defaults never depend on earlier pieces — TODO confirm.
    pieces = OrderedDict(**scenario._parsed_header, **scenario._parsed_data)
    for piece in _header_structure:
        piece_name = piece.__name__
        lgr.print("\tCreating " + piece_name + "...", replace_line=True)
        # Each piece is instantiated from its own defaults, given the pieces seen so far.
        scenario._parsed_header[piece_name] = piece(
            scenario.parser,
            data=list(piece.defaults(pieces).values()),
            pieces=pieces)
        lgr.print("\tCreating " + piece_name + " finished successfully.",
                  replace_line=True)
    lgr.print()
    for piece in _file_structure:
        # Rebuilt each iteration: data piece defaults may depend on prior pieces.
        pieces = OrderedDict(**scenario._parsed_header, **scenario._parsed_data)
        piece_name = piece.__name__
        lgr.print("\tCreating " + piece_name + "...", replace_line=True)
        scenario._parsed_data[piece_name] = piece(
            scenario.parser,
            data=list(piece.defaults(pieces).values()),
            pieces=pieces)
        lgr.print("\tCreating " + piece_name + " finished successfully.",
                  replace_line=True)
    lgr.print()
    lgr.print("File creation finished successfully")
    # Build the object layer on top of the freshly created pieces.
    scenario._object_manager = AoE2ObjectManager(scenario._parsed_header,
                                                 scenario._parsed_data,
                                                 log_parsing=log_parsing)
    return scenario
def _debug_byte_structure_to_file(self, filename, log_debug_write=True):
    """
    Used for debugging - Writes structure from read file to the filesystem
    in an easily readable manner.

    Args:
        filename: Path of the file to write the structure dump to.
        log_debug_write: Whether write progress is printed to stdout.
    """
    lgr = SimpleLogger(log_debug_write)
    lgr.print("\nWriting structure to file...")
    # Collect parts in a list and join once — avoids quadratic `str +=`.
    parts = []
    for key in self._parsed_header:
        lgr.print("\tWriting " + key + "...", replace_line=True)
        parts.append(self._parsed_header[key].get_byte_structure_as_string())
        lgr.print("\tWriting " + key + " finished successfully.",
                  replace_line=True)
    lgr.print()
    for key in self._parsed_data:
        lgr.print("\tWriting " + key + "...", replace_line=True)
        parts.append(self._parsed_data[key].get_byte_structure_as_string())
        lgr.print("\tWriting " + key + " finished successfully.",
                  replace_line=True)
    lgr.print()
    # Open only once the content is ready; `with` closes the file even on error.
    with open(filename, 'w') as output_file:
        output_file.write(''.join(parts))
    lgr.print("Writing structure to file finished successfully.")
def _write_from_structure(self,
                          filename,
                          write_in_bytes=True,
                          compress=True,
                          log_writing=True,
                          log_reconstructing=False):
    """
    Serialize all parsed pieces back to a scenario file.

    Args:
        filename: Path of the file to write.
        write_in_bytes: If True write raw bytes, otherwise a textual hex dump.
        compress: If True the data section is raw-DEFLATE compressed.
        log_writing: Whether writing progress is printed to stdout.
        log_reconstructing: Forwarded to the object manager's reconstruct().

    Raises:
        AttributeError: Re-raised when a retriever cannot be serialized.
    """
    # Flush object-level changes back into the pieces before serializing.
    if hasattr(self, '_object_manager'):
        self._object_manager.reconstruct(
            log_reconstructing=log_reconstructing)
    lgr = SimpleLogger(log_writing)
    lgr.print("\nFile writing from structure started...")
    byte_header_list = []
    byte_data_list = []
    for key in self._parsed_header:
        lgr.print("\twriting " + key + "...", replace_line=True)
        for retriever in self._parsed_header[key].retrievers:
            byte_header_list.append(parser.retriever_to_bytes(retriever))
        lgr.print("\twriting " + key + " finished successfully.",
                  replace_line=True)
    lgr.print()
    for key in self._parsed_data:
        lgr.print("\twriting " + key + "...", replace_line=True)
        for retriever in self._parsed_data[key].retrievers:
            try:
                byte_data_list.append(parser.retriever_to_bytes(retriever))
            except AttributeError as e:
                print("AttributeError occurred while writing '" + key +
                      "' > '" + retriever.name + "'")
                print("\n\n\nAn error occurred. Writing failed.")
                raise e
        lgr.print("\twriting " + key + " finished successfully.",
                  replace_line=True)
    lgr.print()
    byte_header = b''.join(byte_header_list)
    byte_data = b''.join(byte_data_list)
    # `with` guarantees the handle is closed even if a write fails.
    with open(filename, "wb" if write_in_bytes else "w") as file:
        file.write(byte_header if write_in_bytes else create_textual_hex(
            byte_header.hex()))
        if compress:
            lgr.print("\tCompressing...", replace_line=True)
            # Negative wbits -> raw DEFLATE stream without zlib header/checksum.
            # https://stackoverflow.com/questions/3122145/zlib-error-error-3-while-decompressing-incorrect-header-check/22310760#22310760
            deflate_obj = zlib.compressobj(9, zlib.DEFLATED, -zlib.MAX_WBITS)
            # Reuse the already-joined byte_data instead of joining the list twice.
            compressed = deflate_obj.compress(byte_data) + deflate_obj.flush()
            file.write(compressed if write_in_bytes else create_textual_hex(
                compressed.hex()))
            lgr.print("\tCompressing finished successfully.",
                      replace_line=True)
            lgr.print()
        else:
            file.write(byte_data if write_in_bytes else create_textual_hex(
                byte_data.hex()))
    lgr.print("File writing finished successfully.")
def _read_file(self, log_reading):
    """
    Read and parse the scenario file into header and data pieces.

    Args:
        log_reading: Whether reading progress is printed to stdout.

    Raises:
        StopIteration: Wraps any error hit while parsing a piece; an
            ErrorFile dump is written first for debugging.
    """
    lgr = SimpleLogger(log_reading)
    lgr.print("\nFile reading started...")
    self._parsed_header = collections.OrderedDict()
    self._parsed_data = collections.OrderedDict()
    header_generator = self._create_header_generator(1)
    data_generator = self._create_data_generator(1)
    current_piece = ""
    try:
        # Header and data sections are read identically — drive both from one loop
        # instead of two duplicated blocks.
        for structure, target, gen in (
                (_header_structure, self._parsed_header, header_generator),
                (_file_structure, self._parsed_data, data_generator)):
            for piece_object in structure:
                # Rerender pieces dict each time - changes constantly
                pieces = collections.OrderedDict(**self._parsed_header,
                                                 **self._parsed_data)
                piece = piece_object(self.parser)
                piece_name = type(piece).__name__
                target[piece_name] = piece
                current_piece = piece_name
                lgr.print("\tReading " + piece_name + "...",
                          replace_line=True)
                piece.set_data_from_generator(gen, pieces)
                lgr.print("\tReading " + piece_name +
                          " finished successfully.",
                          replace_line=True)
            lgr.print()
    except Exception as e:
        print(
            f"\n[{e.__class__.__name__}] [EXIT] AoE2Scenario._read_file: \n\tPiece: {current_piece}\n"
        )
        print("Writing ErrorFile...")
        self._debug_byte_structure_to_file("../ErrorFile",
                                           log_debug_write=False)
        time.sleep(1)
        print(
            "ErrorFile written. \n\n\n ------------------------ STACK TRACE ------------------------\n\n"
        )
        time.sleep(1)
        # NOTE(review): callers apparently catch StopIteration — kept for
        # backward compatibility even though it is an unusual wrapper type.
        raise StopIteration(e)
    lgr.print("File reading finished successfully.")
def reconstruct(self, log_reconstructing=False):
    """
    Commit every managed object back into the underlying pieces.

    Args:
        log_reconstructing: Whether reconstruction progress is printed to stdout.
    """
    logger = SimpleLogger(log_reconstructing)
    logger.print("\nReconstructing pieces and structs from objects...")
    for constructable in self.constructables:
        name = constructable.__name__
        logger.print("\tReconstructing " + name + "...", replace_line=True)
        logger.print("\tReconstructing " + name + " finished successfully.",
                     replace_line=True) if False else None
        self.objects[name].commit(pieces=self.pieces)
        logger.print("\tReconstructing " + name + " finished successfully.",
                     replace_line=True)
    logger.print()
    logger.print("Reconstruction finished successfully.")
def __init__(self, parsed_header, parsed_data, log_parsing=True):
    """
    Construct the manageable objects from the parsed pieces.

    Args:
        parsed_header: The parsed header pieces.
        parsed_data: The parsed data pieces.
        log_parsing: Whether parsing progress is printed to stdout.
    """
    # Todo: Create a piece holder object or something to simplify this process
    self.pieces: OrderedDict[str, AoE2Piece] = OrderedDict(
        **parsed_header, **parsed_data)
    logger = SimpleLogger(log_parsing)
    logger.print("\nParsing pieces and structs to objects...")
    self.parsed_header = parsed_header
    self.parsed_data = parsed_data
    # Order matters: objects are constructed (and later reconstructed) in this order.
    self.constructables = [MapObject, TriggersObject, UnitsObject]
    self.objects = {}
    for constructable in self.constructables:
        name = constructable.__name__
        logger.print("\tParsing " + name + "...", replace_line=True)
        self.objects[name] = constructable._construct(self.pieces)
        logger.print("\tParsing " + name + " finished successfully.",
                     replace_line=True)
    logger.print()
    logger.print(
        "Parsing pieces and structs to objects finished successfully.")
def _debug_byte_structure_to_file(self,
                                  filename,
                                  generator_for_trail=None,
                                  log_debug_write=True,
                                  commit=False):
    """
    Used for debugging - Writes structure from read file to the filesystem
    in an easily readable manner.

    Args:
        filename: Path of the file the structure dump is written to.
        generator_for_trail: Optional byte generator; any bytes it still
            yields are appended to the dump as a hex "TRAIL" section.
        log_debug_write: Whether write progress is printed to stdout.
        commit: If True (and an object manager exists), write the structure
            to disk first via _write_from_structure.
    """
    if commit and hasattr(self, '_object_manager'):
        # self._object_manager.reconstruct(log_debug_write)
        self._write_from_structure(filename,
                                   log_writing=log_debug_write,
                                   log_reconstructing=log_debug_write)
    lgr = SimpleLogger(log_debug_write)
    pieces = collections.OrderedDict(**self._parsed_header,
                                     **self._parsed_data)
    lgr.print("\nWriting structure to file...")
    with open(filename, 'w', encoding="utf-8") as output_file:
        result = []
        for key in self._parsed_header:
            lgr.print("\tWriting " + key + "...", replace_line=True)
            result.append(
                self._parsed_header[key].get_byte_structure_as_string(
                    pieces))
            lgr.print("\tWriting " + key + " finished successfully.",
                      replace_line=True)
        lgr.print()
        for key in self._parsed_data:
            lgr.print("\tWriting " + key + "...", replace_line=True)
            result.append(
                self._parsed_data[key].get_byte_structure_as_string(
                    pieces))
            lgr.print("\tWriting " + key + " finished successfully.",
                      replace_line=True)
        lgr.print()
        if generator_for_trail is not None:
            lgr.print("\tWriting trail...", replace_line=True)
            trail_length = -1  # -1 == inf
            try:
                trail = b''
                i = 0
                # With trail_length == -1 this loop only ends when the
                # generator is exhausted (StopIteration below).
                while i != trail_length:
                    trail += generator.repeat_generator(
                        generator=generator_for_trail,
                        run_times=1,
                        intended_stop_iteration=True,
                        return_bytes=True)
                    i += 1
            except StopIteration:
                pass  # Expected, if trail is not present or shorter than {trail_length} bytes
            # NOTE(review): the adjustments below look like display tweaks for
            # the "(i/trail_length)" label; with trail_length hardcoded to -1
            # the `i == trail_length` branch appears unreachable — verify.
            if i != 0:
                i += 1
            if i == trail_length:
                i = str(i) + '+'
            if trail_length == -1:
                trail_length = i
            result.append(f"\n\n{'#' * 27} TRAIL ({i}/{trail_length})\n\n")
            result.append(
                helper.create_textual_hex(trail.hex(),
                                          space_distance=2,
                                          enter_distance=24))
            lgr.print("\tWriting trail finished successfully.",
                      replace_line=True)
            lgr.print()
        output_file.write(''.join(result))
        # NOTE(review): redundant — the `with` statement already closes the file.
        output_file.close()
    lgr.print("Writing structure to file finished successfully.")
def _read_file(self, log_reading):
    """
    Read and parse the scenario file into header and data pieces, then
    capture any remaining bytes as the file suffix.

    Args:
        log_reading: Whether reading progress is printed to stdout.

    Raises:
        StopIteration: Wraps a StopIteration hit while parsing a piece; an
            ErrorFile dump is written first for debugging.
    """
    lgr = SimpleLogger(should_log=log_reading)
    lgr.print("\nFile reading started...")
    self._parsed_header = collections.OrderedDict()
    self._parsed_data = collections.OrderedDict()
    header_generator = self._create_header_generator(1)
    data_generator = self._create_data_generator(1)
    current_piece = ""
    try:
        for piece_object in _header_structure:
            piece = piece_object(self.parser)
            piece_name = type(piece).__name__
            self._parsed_header[piece_name] = piece
            current_piece = piece_name
            lgr.print("\tReading " + piece_name + "...")
            piece.set_data_from_generator(header_generator)
            lgr.print("\tReading " + piece_name + " finished successfully.")
        for piece_object in _file_structure:
            piece = piece_object(self.parser)
            piece_name = type(piece).__name__
            self._parsed_data[piece_name] = piece
            current_piece = piece_name
            lgr.print("\tReading " + piece_name + "...")
            piece.set_data_from_generator(data_generator)
            lgr.print("\tReading " + piece_name + " finished successfully.")
    except StopIteration as e:
        print(
            f"\n[StopIteration] [EXIT] AoE2Scenario._read_file: \n\tPiece: {current_piece}\n"
        )
        print("Writing ErrorFile...")
        self._debug_byte_structure_to_file("../ErrorFile",
                                           log_debug_write=False)
        time.sleep(1)
        print(
            "ErrorFile written. \n\n\n ------------------------ STACK TRACE ------------------------\n\n"
        )
        time.sleep(1)
        raise StopIteration(e)
    # Drain whatever the data generator still holds into the file suffix.
    # Collect chunks in a list and join once — `bytes +=` in a loop is quadratic.
    suffix_parts = []
    try:
        while True:
            suffix_parts.append(next(data_generator))
    except StopIteration:
        pass  # End of file reached
    self._suffix = b''.join(suffix_parts)
    lgr.print("File reading finished successfully.")
def _read_file(self, log_reading):
    """
    Read and parse the scenario file into header and data pieces, then
    capture any remaining bytes as the file suffix.

    Args:
        log_reading: Whether reading progress is printed to stdout.
    """
    lgr = SimpleLogger(should_log=log_reading)
    lgr.print("\nFile reading started...")
    self._parsed_header = collections.OrderedDict()
    self._parsed_data = collections.OrderedDict()
    header_generator = self._create_header_generator(1)
    data_generator = self._create_data_generator(1)
    for piece_object in _header_structure:
        piece = piece_object(self.parser)
        piece_name = type(piece).__name__
        lgr.print("\tReading " + piece_name + "...")
        piece.set_data_from_generator(header_generator)
        self._parsed_header[piece_name] = piece
        lgr.print("\tReading " + piece_name + " finished successfully.")
    for piece_object in _file_structure:
        piece = piece_object(self.parser)
        piece_name = type(piece).__name__
        lgr.print("\tReading " + piece_name + "...")
        piece.set_data_from_generator(data_generator)
        self._parsed_data[piece_name] = piece
        lgr.print("\tReading " + piece_name + " finished successfully.")
    # Drain whatever the data generator still holds into the file suffix.
    # Collect chunks in a list and join once — `bytes +=` in a loop is quadratic.
    suffix_parts = []
    try:
        while True:
            suffix_parts.append(next(data_generator))
    except StopIteration:
        pass  # End of file reached
    self._suffix = b''.join(suffix_parts)
    lgr.print("File reading finished successfully.")