def dummy_handler(self, reader: ByteIO, header: ChunkHeader, output_directory: Path):
    """Fallback handler: dump an unrecognized chunk's raw bytes to disk.

    The chunk payload is written to
    ``<output_directory>/chunks/<source-stem>/<chunk-name>/<offset:08x>.chunk``
    so each chunk is uniquely identified by its byte offset in the stream.

    :param reader: binary reader positioned at the start of the chunk payload.
    :param header: parsed chunk header supplying ``size`` and ``name``.
    :param output_directory: root directory for extracted output.
    """
    chunk_start = reader.tell()
    data = reader.read(header.size)
    tmp_path = output_directory / 'chunks' / self.filepath.stem / header.name
    tmp_path.mkdir(parents=True, exist_ok=True)
    output_file = tmp_path / f'{chunk_start:08x}.chunk'
    # Rewrite only when the dump is missing or truncated. Short-circuiting
    # guards the stat() call, so the original's redundant second
    # `output_file.exists()` test is dropped.
    if not output_file.exists() or output_file.stat().st_size < header.size:
        with output_file.open('wb') as f:
            f.write(data)
def handle_ASTS(self, reader: ByteIO, header: ChunkHeader, output_directory: Path):
    """Extract every file referenced by an ASTS chunk into *output_directory*.

    Files that already exist on disk at their full recorded size are skipped;
    missing or truncated files are (re)written. The reader is always left
    positioned just past the chunk, regardless of how much ``ASTS`` consumed.

    :param reader: binary reader positioned at the start of the chunk payload.
    :param header: parsed chunk header supplying ``size``.
    :param output_directory: root directory for extracted files.
    """
    start = reader.tell()
    resource = ASTS(reader)
    # Materialize once: the original did len(list(resource.file_ids)) and then
    # iterated resource.file_ids a second time — if file_ids is a one-shot
    # iterator, the counting pass would exhaust it and the loop would be empty.
    file_ids = list(resource.file_ids)
    total = len(file_ids)
    for file_id in file_ids:
        print(f'Dumping {file_id + 1}/{total}')
        file_size, filename = resource.file_info(file_id)
        output_file = output_directory / filename
        output_file.parent.mkdir(parents=True, exist_ok=True)
        # Rewrite only missing or truncated dumps (short-circuit guards stat()).
        if not output_file.exists() or output_file.stat().st_size < file_size:
            print(f'Writing file {output_file}')
            file_data = resource.file_data(file_id)
            with output_file.open('wb') as f:
                f.write(file_data)
    # Skip to the end of the chunk from its recorded size, not from wherever
    # ASTS parsing happened to stop.
    reader.seek(start + header.size)
def __init__(self, reader: ByteIO):
    """Parse one entry record from *reader* and skip past its payload.

    Reads, in order: a ``ChunkHeader``, three uint32 fields (content type, an
    unused/dummy value, payload size) and a padded ASCII filename. The payload
    itself is not read — its offset is remembered in ``_data_offset`` and the
    reader is advanced past it so the next record can be parsed sequentially.

    :param reader: binary reader positioned at the start of the record.
    """
    self._reader = reader
    self.header = ChunkHeader(reader)
    self.ctype = reader.read_uint32()
    self.dummy = reader.read_uint32()  # purpose unknown from here; kept as read
    self.size = reader.read_uint32()
    filename = reader.read_ascii_padded()
    # Strip a leading backslash. startswith() replaces the original
    # filename[0] index, which raised IndexError on an empty name.
    if filename.startswith('\\'):
        filename = filename[1:]
    self.filename = Path(filename)
    # Normalize the extension from the recorded content type.
    if self.ctype == ContentType.Model:
        self.filename = self.filename.with_suffix('.model')
    elif self.ctype == ContentType.Texture:
        self.filename = self.filename.with_suffix('.dds')
    # Remember where the payload starts, then jump over it.
    self._data_offset = reader.tell()
    reader.skip(self.size)