Example #1
	def save(self, filepath):
		names_writer = BinaryStream()
		data_writer = BinaryStream()
		# shader name is at 0
		names_writer.write_zstring(self.shader_name)
		# attribs are written first
		for attrib in self.attributes:
			attrib.offset = names_writer.tell()
			names_writer.write_zstring(attrib.name)
			attrib.first_value_offset = data_writer.tell()
			fmt = dtypes[attrib.dtype]
			b = struct.pack("<" + fmt, *attrib.value)
			data_writer.write(b)
		for texture in self.textures:
			if texture.textured:
				for i in range(len(texture.indices)):
					# the indices are stored as uint hashes, so clamp to the unsigned range
					texture.indices[i] = max(0, texture.value[i])
				# register this texture's name under its first index
				tex_ind = texture.indices[0]
				self.texture_names[tex_ind] = texture.name
			texture.offset = names_writer.tell()
			names_writer.write_zstring(texture.type)

		# write the output stream
		with self.writer(filepath) as stream:
			self.write(stream)
			stream.write(b"\x00" * self.zeros_size)
			stream.write(data_writer.getvalue())
			stream.write(names_writer.getvalue())
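Both variants rely on a BinaryStream helper, a module-level dtypes map and `import struct`, none of which are shown. A minimal sketch of what they might look like, inferred purely from how they are called here (the concrete dtype ids and format strings are assumptions):

import io
import struct

# hypothetical mapping of an attrib's dtype id to a struct format string,
# e.g. a float3 attribute would pack as "<3f"
dtypes = {0: "f", 1: "2f", 2: "3f", 3: "4f", 4: "i", 5: "i", 6: "4B"}

class BinaryStream(io.BytesIO):
    """Minimal in-memory writer covering the calls used in the examples;
    write(), tell() and getvalue() are inherited from BytesIO."""

    def write_zstring(self, string):
        # zero-terminated UTF-8 string
        self.write(string.encode("utf-8") + b"\x00")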
Example #2
 def save(self, filepath):
     names_writer = BinaryStream()
     data_writer = BinaryStream()
     # shader name is at 0
     names_writer.write_zstring(self.shader_name)
     # extra null byte after the shader name's terminator
     names_writer.write(b"\x00")
     # attribs are written first
     for attrib in self.attributes:
         attrib.offset = names_writer.tell()
         names_writer.write_zstring(attrib.name)
         attrib.value_offset = data_writer.tell()
         b = struct.pack(f"<{dtypes[attrib.dtype]}", *attrib.value)
         data_writer.write(b)
     self.texture_files.clear()
     for texture in self.textures:
         # if the texture has a file, store its index
         if texture.textured:
             texture.indices[0] = len(self.texture_files)
             self.texture_files.append(texture.file)
         texture.offset = names_writer.tell()
         names_writer.write_zstring(texture.name)
     # update counts
     data_bytes = data_writer.getvalue()
     self.data_lib_size = len(data_bytes)
     self.dependency_count = len(self.texture_files)
     self.fgm_info.texture_count = len(self.textures)
     self.fgm_info.attribute_count = len(self.attributes)
     # write the output stream
     with self.writer(filepath) as stream:
         self.write(stream)
         stream.write(data_bytes)
         stream.write(names_writer.getvalue())
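The attribute packing in both variants is plain little-endian struct packing; a standalone illustration of that single step (the format string and the values are made up):

import struct

fmt = "3f"  # what dtypes[attrib.dtype] might yield for a float3 attribute
packed = struct.pack(f"<{fmt}", 0.25, 0.5, 1.0)
assert len(packed) == 12  # three little-endian 4-byte floats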
Example #3
    def flush_pointers(self, ignore_unaccounted_bytes=False):
        """Pre-writing step to convert all edits that were done on individual
        pointers back into the consolidated header data io block"""
        logging.debug("Flushing pointers")
        # sort the pools by offset, then collect all ptrs that have data to write
        sorted_ptrs_map = sorted(self.pointer_map.items())

        stack = []
        last_offset = -1
        for i, (offset, pointers) in enumerate(sorted_ptrs_map):
            for ptr in pointers:
                if ptr._data is not None:
                    if last_offset == offset:
                        logging.warning(f"last offset is same as offset {offset}, skipping ptr for update")
                        continue
                    stack.append((ptr, i, offset))
                    last_offset = offset

        # check if rewriting is needed
        if not stack:
            return
        # create new data writer
        data = BinaryStream()
        last_offset = 0
        logging.debug(f"Stack size = {len(stack)}")
        # now go sequentially over all ptrs in the stack
        for ptr, i, offset in stack:
            # copy the untouched data between the end of the last chunk and the start of this one
            from_end_of_last_to_start_of_this = self.get_at(last_offset, size=offset - last_offset)
            data.write(from_end_of_last_to_start_of_this)
            logging.debug(f"Flushing stack member {i} at original offset {offset} to {data.tell()}")
            # write the new data for this ptr
            data.write(ptr._data)
            # update offset to end of the original ptr
            last_offset = offset + ptr.data_size
            # check delta
            # todo - padding
            ptr._padding_size = ptr.padding_size
            delta = (len(ptr._data) + ptr._padding_size) - (ptr.data_size + ptr.padding_size)
            # update new data size on ptr
            ptr.data_size = len(ptr._data)
            if delta:
                # shift the data offsets of all ptrs that come after this one in the pool;
                # the slice is empty for the last entry, so no bounds check is needed
                for offset_later, pointers in sorted_ptrs_map[i + 1:]:
                    for p in pointers:
                        p.data_offset += delta
            # remove from ptr map, so pool can be deleted if it's empty
            if not ptr._data:
                if offset in self.pointer_map:
                    logging.debug(f"Removed offset {offset} from pool")
                    self.pointer_map.pop(offset)
        # write the rest of the data
        data.write(self.get_at(last_offset))
        # clear the staged data on each ptr now that it has been flushed
        for ptr, _, _ in stack:
            ptr._data = None
        # overwrite internal data
        self.data = data
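The heart of flush_pointers is splicing resized chunks back into a flat buffer while shifting every later offset by the accumulated size delta. A self-contained sketch of just that splice-and-shift step, leaving out padding and the pointer map (the function and its names are hypothetical):

def splice(buffer, edits):
    """Apply (offset, old_size, new_bytes) edits to a flat buffer.
    Returns the new buffer and a map of old offset -> new offset."""
    out = bytearray()
    remap = {}
    delta = 0
    last = 0
    for offset, old_size, new_bytes in sorted(edits):
        out += buffer[last:offset]      # untouched data before this chunk
        remap[offset] = offset + delta  # where the chunk starts after earlier resizes
        out += new_bytes                # the edited chunk
        delta += len(new_bytes) - old_size
        last = offset + old_size        # skip over the old chunk
    out += buffer[last:]                # trailing data after the last edit
    return bytes(out), remap

For example, splice(b"abcdef", [(2, 2, b"XYZ"), (5, 1, b"Q")]) returns (b"abXYZeQ", {2: 2, 5: 6}): the second chunk's offset is shifted by the +1 delta from the first edit.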