def apply_skin(self, keep_skin=False):
    """Bake the current skin deformation into the geometry data.

    Accumulates the weighted bind-pose transform of every bone over the
    vertices and normals of ``self.data`` and writes the results back in
    place. Unless ``keep_skin`` is True, the skin reference is dropped,
    leaving a plain (un-skinned) mesh.
    """
    data = self.data
    skin = self.skin
    if not keep_skin:
        self.skin = None

    # weighted-sum accumulators, same shapes as the source arrays
    deformed_verts = zeros(*data.vertices.shape)
    deformed_norms = zeros(*data.normals.shape)

    root_to_skin = skin.data.matrix
    for bone, bone_data in zip(skin.bones, skin.data.bone_data):
        skin_to_bone = bone_data.matrix
        bone_matrix = bone.matrix_relative_to(skin.root)
        # full bind transform for this bone
        bind_matrix = root_to_skin @ bone_matrix @ skin_to_bone
        # decompose_uniform: presumably requires uniform scale — TODO confirm
        location, rotation, scale = decompose_uniform(bind_matrix)
        # indices and weights
        i = bone_data.vertex_weights["f0"]
        w = bone_data.vertex_weights["f1"][:, None]
        if len(deformed_verts):
            deformed_verts[i] += w * (data.vertices[i] @ rotation.T * scale + location.T)
        if len(deformed_norms):
            # normals are only rotated; translation/scale do not apply
            deformed_norms[i] += w * (data.normals[i] @ rotation.T)

    data.vertices = deformed_verts
    data.normals = deformed_norms  # TODO: normalize?
class NiPixelData(NiObject):
    """Raw texture pixel bytes with format, optional palette and mipmaps."""

    pixel_format: NiPixelFormat = NiPixelFormat()
    palette: NiPalette | None = None
    pixel_stride: int32 = 0
    mipmaps: ndarray = zeros(0, dtype="<I")
    pixel_data: ndarray = zeros(0, dtype="<B")

    _refs = (*NiObject._refs, "palette")

    def load(self, stream):
        """Read format, palette link, mipmap table and pixel bytes."""
        self.pixel_format = stream.read_type(NiPixelFormat)
        self.palette = stream.read_link()
        num_mipmaps = stream.read_uint()
        self.pixel_stride = stream.read_int()
        if num_mipmaps:
            # 3 uints per mipmap level (presumably width/height/offset — verify)
            self.mipmaps = stream.read_uints(num_mipmaps, 3)
        num_bytes = stream.read_uint()
        if num_bytes:
            self.pixel_data = stream.read_ubytes(num_bytes)

    def save(self, stream):
        """Write fields in exactly the order load() reads them."""
        self.pixel_format.save(stream)
        stream.write_link(self.palette)
        stream.write_uint(len(self.mipmaps))
        stream.write_int(self.pixel_stride)
        stream.write_uints(self.mipmaps)
        stream.write_uint(len(self.pixel_data))
        stream.write_ubytes(self.pixel_data)
class NiScreenPolygon(NiObject):
    """Screen-space polygon with optional UVs, colors and property states."""

    vertices: ndarray = zeros(0, 3)
    uv_coords: ndarray = zeros(0, 2)
    vertex_colors: ndarray = zeros(0, 4)
    property_states: ndarray = zeros(0, dtype="<i")  # TODO NiPropertyState

    def load(self, stream):
        count = stream.read_ushort()
        self.vertices = stream.read_floats(count, 3)
        if stream.read_bool():
            self.uv_coords = stream.read_floats(count, 2)
        if stream.read_bool():
            self.vertex_colors = stream.read_floats(count, 4)
        num_states = stream.read_uint()
        if num_states:
            self.property_states = stream.read_ints(num_states)

    def save(self, stream):
        stream.write_ushort(len(self.vertices))
        stream.write_floats(self.vertices)
        uv_count = len(self.uv_coords)
        stream.write_bool(uv_count)
        if uv_count:
            stream.write_floats(self.uv_coords)
        color_count = len(self.vertex_colors)
        stream.write_bool(color_count)
        if color_count:
            stream.write_floats(self.vertex_colors)
        state_count = len(self.property_states)
        stream.write_uint(state_count)
        if state_count:
            stream.write_ints(self.property_states)
def load(self, stream):
    """Read (time, text) key pairs after the base-class payload."""
    super().load(stream)
    count = stream.read_uint()
    if not count:
        return
    self.keys = zeros(count, dtype=_dtype)
    for index in range(count):
        self.keys[index] = stream.read_float(), stream.read_str()
class NiTriShapeData(NiTriBasedGeomData):
    """Triangle mesh data: index triples plus shared-normal index groups."""

    triangles: ndarray = zeros(0, 3, dtype="<H")
    shared_normals: List[ndarray] = []

    def load(self, stream):
        super().load(stream)
        triangle_count = stream.read_ushort()
        point_count = stream.read_uint()
        if triangle_count and point_count:
            self.triangles = stream.read_ushorts(triangle_count, 3)
        group_count = stream.read_ushort()
        if group_count:
            groups = []
            for _ in range(group_count):
                group_size = stream.read_ushort()
                groups.append(stream.read_ushorts(group_size))
            self.shared_normals = groups

    def save(self, stream):
        super().save(stream)
        stream.write_ushort(len(self.triangles))
        stream.write_uint(self.triangles.size)  # total index count
        stream.write_ushorts(self.triangles)
        stream.write_ushort(len(self.shared_normals))
        for group in self.shared_normals:
            stream.write_ushort(len(group))
            stream.write_ushorts(group)
class NiVisData(NiObject):
    """Visibility animation keys stored as a structured (time, value) array."""

    keys: ndarray = zeros(0, dtype=_dtype)

    def load(self, stream):
        count = stream.read_uint()
        if count:
            self.keys = stream.read_array(count, _dtype)

    def save(self, stream):
        stream.write_uint(len(self.keys))
        stream.write_array(self.keys, _dtype)

    @property
    def times(self) -> ndarray:
        # first structured field holds the key times
        return self.keys["f0"]

    @times.setter
    def times(self, array: ndarray):
        self.keys["f0"] = array

    @property
    def values(self) -> ndarray:
        # second structured field holds the visibility values
        return self.keys["f1"]

    @values.setter
    def values(self, array: ndarray):
        self.keys["f1"] = array

    def get_start_stop_times(self) -> tuple[int, int]:
        """Return (first, last) key time, or (0, 0) when there are no keys."""
        if len(self.keys):
            return (self.times[0], self.times[-1])
        return (0, 0)
def vertex_morphs(self):
    """Return one absolute vertex array per morph target."""
    targets = self.morph_targets
    base_vertices = self.data.vertices
    morphs = zeros(len(targets), len(base_vertices), 3)
    for index, target in enumerate(targets):
        morphs[index] = base_vertices + target.vertices  # TODO not always relative!
    return morphs
class NiPixelFormat(NiObject):
    """Describes the in-memory layout of texture pixel data."""

    pixel_format: uint32 = 0  # TODO enum
    color_masks: ndarray = zeros(4, dtype="<I")
    bits_per_pixel: uint32 = 0
    # Fixed default dtype: the stream I/O below uses read_ubytes/write_ubytes,
    # so the placeholder must be byte-sized ("<B"); "<H" (uint16) disagreed
    # with both the I/O and the sibling NiPixelFormat definition.
    old_fast_compare: ndarray = zeros(8, dtype="<B")

    def load(self, stream):
        self.pixel_format = stream.read_uint()
        self.color_masks = stream.read_uints(4)
        self.bits_per_pixel = stream.read_uint()
        self.old_fast_compare = stream.read_ubytes(8)

    def save(self, stream):
        # mirror of load(): identical field order
        stream.write_uint(self.pixel_format)
        stream.write_uints(self.color_masks)
        stream.write_uint(self.bits_per_pixel)
        stream.write_ubytes(self.old_fast_compare)
def convert_to_quaternions(self):
    """Convert per-axis XYZ euler keys into a single quaternion key track.

    Builds one (time, w, x, y, z) key per source euler key, then merges
    keys sharing the same time by quaternion multiplication. Clears the
    euler-specific attributes.
    """
    if self.euler_data == ():
        return  # already using quaternions

    # TODO: support alternative axis orders
    assert self.euler_axis_order == AxisOrder.XYZ

    # extract keys and clear euler settings
    e_keys = [e.keys for e in self.euler_data]
    del self.interpolation, self.euler_data, self.euler_axis_order

    x, y, z = map(len, e_keys)
    if x == y == z == 0:
        return  # no keys exist on any axis

    # one quaternion row per source key; rows grouped per axis via slices
    q_keys = zeros(x + y + z, 5)
    slices = np.s_[:x, x:x + y, x + y:x + y + z]
    for axis, keys in enumerate(e_keys):
        if len(keys) == 0:
            continue
        q = q_keys[slices[axis]]
        q[:, 0] = keys[:, 0]  # times
        # single-axis euler angle -> quaternion
        quaternion_from_euler_angle(angle=keys[:, 1], euler_axis=axis, out=q[:, 1:5])

    # sort and combine keys of same timings
    u, i, v = np.unique(q_keys[:, 0], return_index=True, return_inverse=True)
    if len(u) == len(q_keys):
        # all times unique; np.unique's index array also sorts the rows
        self.keys = q_keys[i]
    else:
        self.keys = zeros(len(u), 5)
        self.keys[:, 0] = u
        self.keys[:, 1] = 1  # start from the identity quaternion (w=1)
        # Fixed: iterate positions in the unique array. The previous loop
        # iterated np.unique's first-occurrence indices ("i"), which index
        # into q_keys rather than self.keys — pairing the wrong rows and
        # indexing out of range once duplicates shrank the array.
        for index in range(len(u)):
            q = self.keys[index, 1:]
            for other in q_keys[index == v, 1:]:
                quaternion_mul(other, q, out=q)
def vertex_weights(self):
    """Return a dense (num_bones, num_vertices) weight matrix."""
    influences = self.bone_influences
    matrix = zeros(len(influences), len(self.data.vertices))
    for row, (_, bone_data) in enumerate(influences):
        packed = bone_data.vertex_weights
        # scatter the sparse (index, weight) pairs into this bone's row
        matrix[row, packed["f0"]] = packed["f1"]
    return matrix
class NiVisData(NiObject):
    """Visibility keys serialized as a counted structured array."""

    keys: ndarray = zeros(0, dtype=_dtype)

    def load(self, stream):
        count = stream.read_uint()
        if count:
            self.keys = stream.read_array(count, _dtype)

    def save(self, stream):
        stream.write_uint(len(self.keys))
        stream.write_array(self.keys, _dtype)
class NiPixelFormat(NiObject):
    """Pixel layout descriptor keyed by the PixelFormat enum."""

    pixel_format: int32 = PixelFormat.RGB
    color_masks: ndarray = zeros(4, dtype="<I")
    bits_per_pixel: uint32 = 0
    old_fast_compare: ndarray = zeros(8, dtype="<B")

    # provide access to related enums
    PixelFormat = PixelFormat

    def load(self, stream):
        raw_format = stream.read_int()
        self.pixel_format = PixelFormat(raw_format)
        self.color_masks = stream.read_uints(4)
        self.bits_per_pixel = stream.read_uint()
        self.old_fast_compare = stream.read_ubytes(8)

    def save(self, stream):
        # mirror of load(): identical field order
        stream.write_int(self.pixel_format)
        stream.write_uints(self.color_masks)
        stream.write_uint(self.bits_per_pixel)
        stream.write_ubytes(self.old_fast_compare)
class NiSkinPartitionData(NiObject):  # TODO Not NiObject
    """One skin partition: bones, vertex map, weights and triangle strips."""

    vertices: ndarray = zeros(0, dtype="<H")
    triangles: ndarray = zeros(0, dtype="<H")
    bones: ndarray = zeros(0, dtype="<H")
    strips: ndarray = zeros(0, dtype="<H")
    bones_per_vertex: ndarray = zeros(0, dtype="<H")
    vertex_map: ndarray = zeros(0, dtype="<H")
    weights: ndarray = zeros(0)
    strip_lengths: ndarray = zeros(0, dtype="<H")
    bone_palette: ndarray = zeros(0, dtype="<H")

    def load(self, stream):
        num_vertices = stream.read_ushort()
        num_triangles = stream.read_ushort()
        num_bones = stream.read_ushort()
        num_strips = stream.read_ushort()
        num_bones_per_vertex = stream.read_ushort()
        # NOTE(review): the counts above are consumed locally but never stored
        # on self.vertices / self.strips / self.bones_per_vertex, while save()
        # derives them from len() of those arrays — confirm round-tripping.
        self.bones = stream.read_ushorts(num_bones)
        self.vertex_map = stream.read_ushorts(num_vertices)
        self.weights = stream.read_floats(num_bones_per_vertex, num_vertices)
        self.strip_lengths = stream.read_ushorts(num_strips)
        # stripped geometry stores strip indices; otherwise a flat triangle list
        self.triangles = stream.read_ushorts(self.strip_lengths.sum() or (num_triangles * 3))
        has_palette = stream.read_ubyte()
        if has_palette:
            self.bone_palette = stream.read_ubytes(num_bones_per_vertex, num_vertices)

    def save(self, stream):
        stream.write_ushort(len(self.vertices))
        stream.write_ushort(len(self.triangles))
        stream.write_ushort(len(self.bones))
        stream.write_ushort(len(self.strips))
        stream.write_ushort(len(self.bones_per_vertex))
        stream.write_ushorts(self.bones)
        stream.write_ushorts(self.vertex_map)
        # Fixed: weights are floats (load() uses read_floats), so they must be
        # written with write_floats — write_ushorts would corrupt the data.
        stream.write_floats(self.weights)
        stream.write_ushorts(self.strip_lengths)
        stream.write_ushorts(self.triangles)
        # load() only tests this byte for truthiness; writing the length keeps
        # the original on-disk behavior — TODO confirm against format spec
        stream.write_ubyte(len(self.bone_palette))
        if len(self.bone_palette):
            stream.write_ubytes(self.bone_palette)
class NiPalette(NiObject):
    """Color palette: N entries of 4 bytes each."""

    has_alpha: uint8 = 0
    palettes: ndarray = zeros(0, 4, dtype="<B")

    def load(self, stream):
        self.has_alpha = stream.read_ubyte()
        num_palettes = stream.read_uint()
        if num_palettes:
            self.palettes = stream.read_ubytes(num_palettes, 4)

    def save(self, stream):
        stream.write_ubyte(self.has_alpha)
        # Fixed: the entry count must be written before the palette bytes —
        # load() reads it, but save() previously never emitted it, producing
        # files load() could not parse back.
        stream.write_uint(len(self.palettes))
        stream.write_ubytes(self.palettes)
class NiDynamicEffect(NiAVObject):
    """Base for effects that apply to a list of scene nodes."""

    affected_nodes: ndarray = zeros(0, dtype="<i")  # TODO links?

    def load(self, stream):
        super().load(stream)
        count = stream.read_uint()
        if count:
            self.affected_nodes = stream.read_ints(count)

    def save(self, stream):
        super().save(stream)
        stream.write_uint(len(self.affected_nodes))
        stream.write_ints(self.affected_nodes)
class NiVertWeightsExtraData(NiExtraData):
    """Extra data block carrying one float weight per vertex."""

    weights: ndarray = zeros(0)

    def load(self, stream):
        super().load(stream)
        count = stream.read_ushort()
        if count:
            self.weights = stream.read_floats(count)

    def save(self, stream):
        super().save(stream)
        stream.write_ushort(len(self.weights))
        stream.write_floats(self.weights)
class NiSkinDataBoneData(NiObject):  # TODO Not NiObject
    """Per-bone skinning record: bind transform, bound sphere, vertex weights."""

    rotation: NiMatrix3 = ID33
    translation: NiPoint3 = ZERO3
    scale: float32 = 1.0
    center: NiPoint3 = ZERO3
    radius: float32 = 0.0
    vertex_weights: ndarray = zeros(0, dtype=_dtype)

    def load(self, stream):
        self.rotation = stream.read_floats(3, 3)
        self.translation = stream.read_floats(3)
        self.scale = stream.read_float()
        self.center = stream.read_floats(3)
        self.radius = stream.read_float()
        num_weights = stream.read_ushort()
        if num_weights:
            # structured array of (vertex index, weight) pairs
            self.vertex_weights = stream.read_array(num_weights, _dtype)

    def save(self, stream):
        # mirror of load(): identical field order
        stream.write_floats(self.rotation)
        stream.write_floats(self.translation)
        stream.write_float(self.scale)
        stream.write_floats(self.center)
        stream.write_float(self.radius)
        stream.write_ushort(len(self.vertex_weights))
        stream.write_array(self.vertex_weights, _dtype)

    def apply_scale(self, scale):
        """Scale the translational quantities in place (rotation untouched)."""
        self.translation *= scale
        self.center *= scale
        self.radius *= scale

    # (numpy-stubs 0.0.1 bug)
    # noinspection PyArgumentList
    def update_center_radius(self, vertices):
        """Recompute the bound from ``vertices`` and map the center through
        this record's (scale, rotation, translation) transform."""
        if len(vertices) == 0:
            self.center[:] = self.radius = 0
        else:
            # midpoint of the axis-aligned bounding box
            center = (vertices.min(axis=0) + vertices.max(axis=0)) * 0.5
            # max distance from that midpoint to any vertex
            radius = float(la.norm(center - vertices, axis=1).max())
            self.center = self.scale * (
                self.rotation @ center) + self.translation
            self.radius = self.scale * radius

    @property
    def matrix(self):
        # transform composed from (translation, rotation, scale)
        return compose(self.translation, self.rotation, self.scale)

    @matrix.setter
    def matrix(self, value):
        # decompose_uniform: presumably assumes uniform scale — TODO confirm
        self.translation, self.rotation, self.scale = decompose_uniform(value)
class NiLinesData(NiGeometryData):
    """Geometry data with one uint per vertex describing line connectivity."""

    lines: ndarray = zeros(0, dtype="<I")

    def load(self, stream):
        super().load(stream)
        count = len(self.vertices)
        if count:
            self.lines = stream.read_uints(count)

    def save(self, stream):
        super().save(stream)
        if len(self.vertices):
            stream.write_uints(self.lines)
class NiLinesData(NiGeometryData):
    """Geometry data with one connectivity flag byte per vertex."""

    vertex_connectivity_flags: ndarray = zeros(0, dtype="<B")

    def load(self, stream):
        super().load(stream)
        count = len(self.vertices)
        if count:
            self.vertex_connectivity_flags = stream.read_ubytes(count)

    def save(self, stream):
        super().save(stream)
        if len(self.vertices):
            stream.write_ubytes(self.vertex_connectivity_flags)
def load(self, stream):
    """Read sequence name, keyframe source and (name, controller) pairs."""
    self.sequence_name = stream.read_str()
    external = stream.read_ubyte()
    if external:
        self.keyframe_file = stream.read_str()
    else:
        self.unknown_int = stream.read_int()
        self.unknown_object = stream.read_link()
    count = stream.read_uint()
    if count:
        self.name_controller_pairs = zeros(count, dtype=_dtype)
        for index in range(count):
            self.name_controller_pairs[index] = stream.read_str(), stream.read_int()
class NiLODNode(NiSwitchNode):
    """Switch node whose active child is selected by distance ranges."""

    lod_center: NiPoint3 = ZERO3
    lod_levels: ndarray = zeros(0, 2)

    def load(self, stream):
        super().load(stream)
        self.lod_center = stream.read_floats(3)
        count = stream.read_uint()
        if count:
            # one (near, far) float pair per LOD level
            self.lod_levels = stream.read_floats(count, 2)

    def save(self, stream):
        super().save(stream)
        stream.write_floats(self.lod_center)
        stream.write_uint(len(self.lod_levels))
        stream.write_floats(self.lod_levels)
class NiMorphDataMorphTarget(NiFloatData):  # TODO Not NiObject
    """A single morph target: weight keys plus per-vertex positions."""

    vertices: ndarray = zeros(0, 3)

    def load(self, stream, num_vertices=0):
        """Read keys; ``num_vertices`` (from the owner) sizes the vertex array."""
        key_count = stream.read_uint()
        self.key_type = KeyType(stream.read_int())
        if key_count:
            self.keys = stream.read_floats(key_count, self.key_size)
        if num_vertices:
            self.vertices = stream.read_floats(num_vertices, 3)

    def save(self, stream):
        key_count = len(self.keys)
        stream.write_uint(key_count)
        stream.write_int(self.key_type)
        if key_count:
            stream.write_floats(self.keys)
        if len(self.vertices):
            stream.write_floats(self.vertices)
class NiSequence(NiObject):
    """Named animation sequence mapping target names to controllers."""

    sequence_name: str = ""
    keyframe_file: str = ""
    unknown_int: int32 = 0
    unknown_object: NiObject | None = None
    name_controller_pairs: ndarray = zeros(0, dtype=_dtype)

    def load(self, stream):
        self.sequence_name = stream.read_str()
        external = stream.read_ubyte()
        if external:
            self.keyframe_file = stream.read_str()
        else:
            self.unknown_int = stream.read_int()
            self.unknown_object = stream.read_link()
        count = stream.read_uint()
        if count:
            self.name_controller_pairs = zeros(count, dtype=_dtype)
            for index in range(count):
                self.name_controller_pairs[index] = stream.read_str(), stream.read_int()

    def save(self, stream):
        stream.write_str(self.sequence_name)
        external = bool(self.keyframe_file)
        stream.write_ubyte(external)
        if external:
            stream.write_str(self.keyframe_file)
        else:
            stream.write_int(self.unknown_int)
            stream.write_link(self.unknown_object)
        stream.write_uint(len(self.name_controller_pairs))
        for name, controller in self.name_controller_pairs.tolist():
            stream.write_str(name)
            stream.write_int(controller)
class NiParticlesData(NiTriBasedGeomData):
    """Particle system geometry with optional per-particle sizes."""

    num_particles: uint16 = 0
    particle_radius: float32 = 0.0
    num_active: uint16 = 0
    sizes: ndarray = zeros(0)

    def load(self, stream):
        super().load(stream)
        self.num_particles = stream.read_ushort()
        self.particle_radius = stream.read_float()
        self.num_active = stream.read_ushort()
        if stream.read_bool():
            # one size per vertex loaded by the base class
            self.sizes = stream.read_floats(len(self.vertices))

    def save(self, stream):
        super().save(stream)
        stream.write_ushort(self.num_particles)
        stream.write_float(self.particle_radius)
        stream.write_ushort(self.num_active)
        size_count = len(self.sizes)
        stream.write_bool(size_count)
        if size_count:
            stream.write_floats(self.sizes)
class NiFloatData(NiObject):
    """Scalar animation keys with linear, Bezier or TCB interpolation."""

    key_type: int32 = KeyType.LIN_KEY
    keys: ndarray = zeros(0, 2)

    # provide access to related enums
    KeyType = KeyType

    def load(self, stream):
        num_keys = stream.read_uint()
        if num_keys:
            self.key_type = KeyType(stream.read_int())
            self.keys = stream.read_floats(num_keys, self.key_size)

    def save(self, stream):
        num_keys = len(self.keys)
        stream.write_uint(num_keys)
        if num_keys:
            stream.write_int(self.key_type)
            stream.write_floats(self.keys)

    @property
    def times(self) -> ndarray:
        return self.keys[:, 0]

    @property
    def values(self) -> ndarray:
        return self.keys[:, 1]

    @property
    def in_tans(self) -> ndarray:
        return self.keys[:, 2]

    @property
    def out_tans(self) -> ndarray:
        return self.keys[:, 3]

    @property
    def tcb(self) -> ndarray:
        # last three columns: (tension, continuity, bias)
        return self.keys[:, -3:]

    @property
    def key_size(self) -> int:
        """Number of floats per key for the current key type."""
        if self.key_type == KeyType.LIN_KEY:
            return 2  # (time, value)
        if self.key_type == KeyType.BEZ_KEY:
            return 4  # (time, value, inTan, outTan)
        if self.key_type == KeyType.TCB_KEY:
            return 5  # (time, value, tension, continuity, bias)
        raise Exception(f"{self.type} does not support '{self.key_type}'")

    def get_start_stop_times(self) -> tuple[int, int]:
        """Return (first, last) key time, or (0, 0) when there are no keys."""
        if len(self.keys) == 0:
            return (0, 0)
        else:
            return (self.keys[0, 0], self.keys[-1, 0])

    def get_tangent_handles(self):
        """Dispatch to the handle calculator for the current key type."""
        if self.key_type == KeyType.BEZ_KEY:
            return self.get_bez_tangent_handles()
        if self.key_type == KeyType.TCB_KEY:
            return self.get_tcb_tangent_handles()

    def get_bez_tangent_handles(self):
        """Return a (2, ..., 2) array of Bezier in/out handle (x, y) pairs."""
        times, values = self.times, self.values
        # control point handles
        shape = (2, *values.shape[::-1], 2)
        handles = np.empty(shape, values.dtype)
        # Fixed guard: len(handles) is always 2 (first axis), so the old
        # check never skipped the zero-key case and crashed on dt indexing;
        # test the element count instead.
        if handles.size:
            dt = np.diff(times / 3.0, prepend=0, append=0)
            dt[0], dt[-1] = dt[1], dt[-2]  # correct edges
            # relative horizontal coordinates
            in_dx = dt[:-1]
            out_dx = dt[1:]
            # relative vertical coordinates
            in_dy = self.in_tans.T / 3.0
            out_dy = self.out_tans.T / 3.0
            # incoming handles
            handles[0, ..., 0] = times - in_dx
            handles[0, ..., 1] = values.T - in_dy
            # outgoing handles
            handles[1, ..., 0] = times + out_dx
            # Fixed sign: the outgoing handle extends forward along outTan
            # (value + outTan/3), mirroring the incoming "- in_dy" side;
            # it previously subtracted out_dy.
            handles[1, ..., 1] = values.T + out_dy
        return handles

    def get_tcb_tangent_handles(self):
        """Return TCB handle pairs (currently a Catmull–Rom approximation)."""
        times, values = self.times, self.values
        # control point handles
        shape = (2, *values.shape[::-1], 2)
        handles = np.empty(shape, values.dtype)
        # Fixed guard: use element count, not len (always 2), so zero keys
        # no longer index into empty delta arrays.
        if handles.size:
            # calculate deltas
            dx = (np.roll(times, 1, axis=0) - np.roll(times, -1, axis=0)) / 6.0
            dy = (np.roll(values, 1, axis=0) - np.roll(values, -1, axis=0)) / 6.0
            # fix up start/end
            dy[0] = dy[-1] = 0
            # TODO: tcb params
            # removed for now as they aren't supported by blender
            # instead this currently returns a Catmull–Rom Spline
            # incoming handles
            handles[0, ..., 0] = times - dx
            handles[0, ..., 1] = values.T - dy.T
            # outgoing handles
            handles[1, ..., 0] = times + dx
            handles[1, ..., 1] = values.T + dy.T
        return handles
class NiFloatData(NiObject):
    """Scalar animation keys; ``interpolation`` selects layout and handles."""

    interpolation: int32 = KeyType.LIN_KEY
    keys: ndarray = zeros(0, 2)

    # provide access to related enums
    KeyType = KeyType

    def load(self, stream):
        num_keys = stream.read_uint()
        if num_keys:
            self.interpolation = KeyType(stream.read_int())
            self.keys = stream.read_floats(num_keys, self.key_size)

    def save(self, stream):
        num_keys = len(self.keys)
        stream.write_uint(num_keys)
        if num_keys:
            stream.write_int(self.interpolation)
            stream.write_floats(self.keys)

    @property
    def times(self):
        return self.keys[:, 0]

    @property
    def values(self):
        return self.keys[:, 1]

    @property
    def in_tans(self):
        return self.keys[:, 2]

    @property
    def out_tans(self):
        return self.keys[:, 3]

    @property
    def tcb(self):
        # Fixed slice: TCB keys are (time, value, tension, continuity, bias),
        # so the parameters are the LAST three columns. The previous
        # keys[:, 1:4] wrongly included the value column and dropped bias,
        # which corrupted the tangent weighting below.
        return self.keys[:, -3:]

    @property
    def key_size(self):
        """Number of floats per key for the current interpolation."""
        if self.interpolation == KeyType.LIN_KEY:
            return 2  # (time, value)
        if self.interpolation == KeyType.BEZ_KEY:
            return 4  # (time, value, inTan, outTan)
        if self.interpolation == KeyType.TCB_KEY:
            return 5  # (time, value, tension, continuity, bias)
        raise Exception(f"{self.type} does not support '{self.interpolation}'")

    def get_start_stop_times(self):
        """Return (first, last) key time, or (0, 0) when there are no keys."""
        if len(self.keys) == 0:
            return (0, 0)
        else:
            return (self.keys[0, 0], self.keys[-1, 0])

    def get_tangent_handles(self):
        """Dispatch to the handle calculator for the current interpolation."""
        if self.interpolation == KeyType.BEZ_KEY:
            return self.get_bez_tangent_handles()
        if self.interpolation == KeyType.TCB_KEY:
            return self.get_tcb_tangent_handles()

    def get_bez_tangent_handles(self):
        """Return a (2, ..., 2) array of Bezier in/out handle (x, y) pairs."""
        times, values = self.times, self.values
        # calculate time deltas
        dt = times / 3.0
        if len(dt) >= 2:
            dt[:-1] = dt[1:] - dt[:-1]  # faster than np.diff(dt, append=0)
            dt[-1] = dt[-2]
        # control point handles
        shape = (2, *values.shape[::-1], 2)
        handles = np.empty(shape, dt.dtype)
        if handles.size:
            # incoming handles
            handles[0, ..., 0] = (times - dt)
            handles[0, ..., 1] = (values - self.in_tans / 3.0).T
            # outgoing handles
            handles[1, ..., 0] = (times + dt)
            handles[1, ..., 1] = (values + self.out_tans / 3.0).T
        return handles

    def get_tcb_tangent_handles(self):
        """Return TCB in/out handles using Kochanek–Bartels-style weighting."""
        times, values = self.times, self.values
        # calculate deltas over the (time, value) columns
        k = self.keys[:, :-3] / 3.0
        p = k - np.roll(k, +1, axis=0)
        n = np.roll(k, -1, axis=0) - k
        if len(k) >= 2:
            # fix up ends
            p[0], n[-1] = p[1], n[-2]
        # calculate tangents from (tension, continuity, bias)
        mt, mc, mb = 1.0 - self.tcb.T
        pt, pc, pb = 1.0 + self.tcb.T
        in_tans = 0.5 * ((mt * mc * pb)[:, None] * p + (mt * pc * mb)[:, None] * n)
        out_tans = 0.5 * ((mt * pc * pb)[:, None] * p + (mt * mc * mb)[:, None] * n)
        # control point handles
        shape = (2, *values.shape[::-1], 2)
        handles = np.empty(shape, values.dtype)
        if handles.size:
            # incoming handles
            handles[0, ..., 0] = (times - in_tans[:, 0])
            handles[0, ..., 1] = (values - in_tans[:, 1:]).T
            # outgoing handles
            handles[1, ..., 0] = (times + out_tans[:, 0])
            handles[1, ..., 1] = (values + out_tans[:, 1:]).T
        return handles
class NiTextKeyExtraData(NiExtraData):
    """Timed text keys, plus helpers to group/split animation marker lines."""

    keys: ndarray = zeros(0, dtype=_dtype)

    def load(self, stream):
        super().load(stream)
        num_text_keys = stream.read_uint()
        if num_text_keys:
            self.keys = zeros(num_text_keys, dtype=_dtype)
            for i in range(num_text_keys):
                self.keys[i] = stream.read_float(), stream.read_str()

    def save(self, stream):
        super().save(stream)
        stream.write_uint(len(self.keys))
        for time, value in self.keys.tolist():
            stream.write_float(time)
            stream.write_str(value)

    @staticmethod
    def _get_stop_text(start_text):
        """Return the marker line that closes the given "... start" line."""
        group_name = start_text[:-6]  # trim the 6-char " start" suffix
        if group_name.endswith(("chop", "slash", "thrust")):
            return f"{group_name} hit"
        elif group_name.endswith("shoot"):
            return f"{group_name} release"
        else:
            return f"{group_name} stop"

    def get_action_groups(self):
        """Yield (start_key_index, stop_key_index) pairs of marker groups."""
        start_index = 0
        end_text = None
        for i, text in enumerate(self.keys["f1"]):
            # a single key's text may hold several newline-separated markers
            for line in text.lower().splitlines():
                if (end_text is None) and line.endswith(" start"):
                    start_index = i
                    end_text = self._get_stop_text(line)
                    continue
                if line == end_text:
                    yield (start_index, i)
                    end_text = None

    def expand_groups(self):
        """Split multi-line keys into one key per line, dropping duplicates
        (duplicate lines containing "sound" are deliberately kept)."""
        temp = []
        seen = {}
        for time, text in self.keys:
            for line in filter(None, text.lower().splitlines()):
                if (line in seen) and ("sound" not in line):
                    print(f"Skipped duplicate text key '{line}' at {time:.3f}. Previous at {seen[line]:.3f}.")
                    continue
                seen[line] = time
                temp.append((time, line))
        self.keys = np.array(temp, dtype=_dtype)

    def collapse_groups(self):
        """Merge keys that share a time into one multi-line key per time."""
        uniques, inverse = np.unique(self.keys["f0"], return_inverse=True)
        if len(uniques) == len(self.keys):
            return  # all times already unique; nothing to merge
        new_keys = self.keys.copy()
        new_keys.resize(len(uniques))
        for i, time in enumerate(uniques):
            # list of all the strings for this timing
            strings = self.keys[inverse == i]["f1"].tolist()  # TODO: use hash map here for performance!
            # split strings to clean up extraneous newlines
            sanitized = [s for s in strings for s in s.splitlines() if s]
            # re-join the strings and update the keys array
            new_keys[i] = time, "\r\n".join(sanitized)
        self.keys = new_keys
class NiGeometryData(NiObject):
    """Base mesh data: positions, normals, colors, UV sets and a bound."""

    vertices: ndarray = zeros(0, 3)
    normals: ndarray = zeros(0, 3)
    center: NiPoint3 = ZERO3
    radius: float32 = 0.0
    vertex_colors: ndarray = zeros(0, 4)
    uv_sets: ndarray = zeros(0, 0, 2)

    def load(self, stream):
        vertex_count = stream.read_ushort()
        if stream.read_bool():
            self.vertices = stream.read_floats(vertex_count, 3)
        if stream.read_bool():
            self.normals = stream.read_floats(vertex_count, 3)
        self.center = stream.read_floats(3)
        self.radius = stream.read_float()
        if stream.read_bool():
            self.vertex_colors = stream.read_floats(vertex_count, 4)
        uv_set_count = stream.read_ushort()
        if stream.read_bool():
            self.uv_sets = stream.read_floats(uv_set_count, vertex_count, 2)

    def save(self, stream):
        vertex_count = len(self.vertices)
        stream.write_ushort(vertex_count)
        stream.write_bool(vertex_count)
        if vertex_count:
            stream.write_floats(self.vertices)
        normal_count = len(self.normals)
        stream.write_bool(normal_count)
        if normal_count:
            stream.write_floats(self.normals)
        stream.write_floats(self.center)
        stream.write_float(self.radius)
        color_count = len(self.vertex_colors)
        stream.write_bool(color_count)
        if color_count:
            stream.write_floats(self.vertex_colors)
        uv_set_count = len(self.uv_sets)
        stream.write_ushort(uv_set_count)
        stream.write_bool(uv_set_count)
        if uv_set_count:
            stream.write_floats(self.uv_sets)

    def apply_scale(self, scale):
        """Scale vertex positions and the bounding sphere in place."""
        self.vertices *= scale
        self.center *= scale
        self.radius *= scale

    def update_center_radius(self):
        """Recompute the bounding sphere from the current vertices."""
        verts = self.vertices
        if len(verts):
            # midpoint of the axis-aligned bounding box
            self.center = 0.5 * (verts.min(axis=0) + verts.max(axis=0))
            self.radius = float(la.norm(self.center - verts, axis=1).max())
        else:
            self.center[:] = self.radius = 0