def decode(self, tex0, dest_file, overwrite=None, num_mips=0):
    """Decode a tex0 into a png file using the external converter.

    :param tex0: texture node to decode
    :param dest_file: output path; may be falsy, a directory, or missing the .png suffix
    :param overwrite: replace an existing file (defaults to OVERWRITE_IMAGES)
    :param num_mips: 0 = no mipmaps, -1 = auto, otherwise explicit count
    :return: destination path, or None if the file exists and overwrite is off
    :raises DecodeError: when the converter subprocess fails
    """
    if overwrite is None:
        overwrite = self.OVERWRITE_IMAGES
    # Normalize the destination to a concrete .png path.
    if not dest_file:
        dest_file = tex0.name + '.png'
    elif os.path.isdir(dest_file):
        dest_file = os.path.join(dest_file, tex0.name + '.png')
    elif os.path.splitext(os.path.basename(dest_file))[1].lower() != '.png':
        dest_file += '.png'
    if not overwrite and os.path.exists(dest_file):
        AutoFix.warn('File {} already exists!'.format(dest_file))
        return None
    # Serialize the tex0 to a temporary file the converter can read.
    temp_path = self.get_temp_dest()
    bin_out = BinFile(temp_path, 'w')
    tex0.pack(bin_out)
    bin_out.commitWrite()
    if num_mips == 0:
        mip_arg = '--no-mipmaps'
    elif num_mips == -1:
        mip_arg = '--n-mm=auto'
    else:
        mip_arg = '--n-mm=' + str(num_mips)
    err = subprocess.call(
        [self.converter, 'decode', temp_path, '-d', dest_file, mip_arg, '-qo'],
        startupinfo=self.si)
    if temp_path != dest_file:
        os.remove(temp_path)
    if err:
        raise DecodeError('Failed to decode {}'.format(tex0.name))
    return dest_file
def remove_tex0(self, name):
    """Remove the texture with the given name, warning when it is absent."""
    try:
        removed = self.texture_map.pop(name)
    except KeyError:
        AutoFix.warn('No texture {} in {}'.format(name, self.name))
    else:
        self.textures.remove(removed)
        self.mark_modified()
def pack(self, fog, binfile):
    """Pack a scn0 fog entry into binfile (explicitly flagged as unsupported)."""
    AutoFix.warn('packing scn0 fog is not supported.')
    pack_header(binfile, fog.name, fog.node_id, fog.real_id)
    # flags byte + three zero padding bytes, then fog type and the start/end range
    binfile.write('4BI2f', fog.flags, 0, 0, 0, fog.type, fog.start, fog.end)
    # fog.color presumably expands to the 4 unsigned bytes of '4B' -- TODO confirm
    # binfile.write accepts a sequence here
    binfile.write('4B', fog.color)
    binfile.end()
def set_lightset_str(self, str):
    """Set the lightset from its string form, warning on unusual values.

    Note: the parameter name `str` shadows the builtin but is kept for
    interface compatibility with existing callers.
    """
    val = int(str)
    if val > 0:
        AutoFix.warn("Unusual lightset " + str + ", expected -1")
    if self.lightset == val:
        return
    self.lightset = val
    self.mark_modified()
def __init__(self, filename, read_file=True):
    """Initialize an obj container, optionally parsing filename.

    :param filename: path to the wavefront obj file
    :param read_file: when True, parse the file and normalize its geometries;
        when False, only derive the default mtllib name
    """
    self.geometries = []
    self.vertices = []
    self.normals = []
    self.texcoords = []
    self.materials = {}
    self.images = set()
    self.filename = filename
    if not read_file:
        # No parsing: just derive the companion material library name.
        folder, base = os.path.split(filename)
        self.mtllib = os.path.splitext(base)[0] + '.mtl'
        return
    self.mtllib = None
    self.parse_file(filename)
    # Normalize each geometry; drop the ones that have no usable data.
    failed = []
    for geometry in self.geometries:
        try:
            geometry.normalize(self.vertices, self.normals, self.texcoords)
        except ValueError:
            failed.append(geometry)
            AutoFix.warn('No geometry found for {}'.format(geometry.name))
    if failed:
        self.geometries = [g for g in self.geometries if g not in failed]
def post_unpacking(self, brres):
    """Finish unpacking: index textures by name and hook up animations."""
    texture_map = brres.texture_map
    for tex in brres.textures:
        texture_map[tex.name] = tex
    brres.unused_pat0 = self.post_unpack_anim(self.pat0, 'materials', 'pat0')
    brres.unused_srt0 = self.post_unpack_anim(self.srt0, 'materials', 'srt0')
    if brres.unused_pat0 or brres.unused_srt0:
        AutoFix.warn('Unused animations detected')
def set_light_ref_str(self, value):
    """Set the scn0 light ref from its string form, warning on unusual values."""
    parsed = int(value)
    if parsed > 0:
        AutoFix.warn('{} set unusual light ref {}, expected -1'.format(
            self.name, value))
    if self.scn0_light_ref == parsed:
        return
    self.scn0_light_ref = parsed
    self.mark_modified()
def unpack_unknown(self):
    """Unpack any unknown (loosely supported) files and return their nodes."""
    if self.uk_offsets:
        AutoFix.warn('Unknown files {}, may be loosely supported'.format(
            [x[0] for x in self.uk_offsets]))
    nodes = []
    for name, offset in self.uk_offsets:
        # Seek to each stored offset before handing the binfile to the unpacker.
        self.binfile.offset = offset
        nodes.append(UnknownUnpacker(self.binfile, self.section_offsets, name).node)
    return nodes
def hook_textures(self, textures):
    """Replace each frame's texture id with the corresponding texture object.

    Out-of-range ids fall back to the first texture with a warning.
    """
    count = len(textures)
    for frame in self.node.frames:
        if frame.tex < count:
            frame.tex = textures[frame.tex]
        else:
            frame.tex = textures[0]
            AutoFix.warn(
                'Unpacked Pat0 {} tex_id out of range'.format(self.node.name), 1)
def _try_import_textures(brres, image_paths):
    """Best-effort batch encode of images into brres; returns image_paths."""
    if not len(image_paths):
        return image_paths
    try:
        converter = ImgConverter()
        converter.batch_encode(image_paths.values(), brres,
                               overwrite=converter.OVERWRITE_IMAGES)
    except EncodeError:
        # Deliberately non-fatal: importing continues without the textures.
        AutoFix.warn('Failed to encode images')
    return image_paths
def decode_indices(polygon, fmt_str): """Given a polygon and decoder string, decode the facepoint indices :return (face_point_indices, weight_groups) weight_groups is a map of the face_point index to a list of weight indices (matrices loaded) """ # now decode the indices face_point_indices = [] stride = get_stride(fmt_str) data = polygon.data total_face_points = i = 0 weight_groups = {} new_weight_group = True face_point_count = polygon.facepoint_count while total_face_points < face_point_count: [cmd] = unpack_from('>B', data, i) i += 1 if cmd in (0x98, 0x90): [num_facepoints] = unpack_from('>H', data, i) i += 2 if cmd == 0x98: i = decode_tri_strip(fmt_str, stride, data, i, num_facepoints, face_point_indices) elif cmd == 0x90: i = decode_tris(fmt_str, stride, data, i, num_facepoints, face_point_indices) total_face_points += num_facepoints new_weight_group = True elif cmd in (0x20, 0x28, 0x30): # load matrix if new_weight_group: weight_groups[len(face_point_indices)] = weights = [] new_weight_group = False bone_index, len_and_xf_address = unpack_from('>2H', data, i) xf_address = 0xfff & len_and_xf_address # length = (len_and_xf_address >> 12) + 1 # 12 (matrix len) i += 4 if cmd == 0x20: # pos matrix index = xf_address // 12 if index == len(weights): weights.append(bone_index) elif index < len(weights): weights[index] = bone_index elif cmd == 0x28: pass # normals no parsing? elif cmd == 0x30: # tex matrix pass else: raise error.ConvertError('Texture matrices not supported') elif cmd == 0x00: AutoFix.warn( 'Finished parsing {} indices early, possible bug?'.format( polygon.name)) break else: raise ValueError('Unsupported draw cmd {}'.format(cmd)) return face_point_indices, weight_groups
def on_brres_update(self):
    """Rebuild the material library from the current brres and re-register observers."""
    mats = {}
    self.materials = mats
    self.brres.register_observer(self)
    for mdl in self.brres.models:
        mdl.register_observer(self)
        for mat in mdl.materials:
            mat.register_observer(self)
            if mat.name in mats:
                AutoFix.warn('Multiple materials named {} in material library'.format(mat.name))
            else:
                mats[mat.name] = mat
def set_resample(sample):
    """Set the image resampling filter from its configured name.

    Unknown names leave the current setting untouched and emit a warning.
    """
    filters = ['nearest', 'lanczos', 'bilinear', 'bicubic', 'box', 'hamming']
    try:
        ImgConverterI.RESAMPLE = filters.index(sample)
    except (ValueError, IndexError):
        AutoFix.warn(
            'Invalid config value {} for "img_resample", using {}'.format(
                sample, filters[ImgConverterI.RESAMPLE]))
def batch_encode(self, files, brres, tex_format=None, num_mips=-1, check=False,
                 overwrite=None):
    """Batch encode, faster than single encode when doing multiple files

    :param files: image file paths to encode
    :param brres: destination brres archive
    :param tex_format: texture format flag (defaults to IMG_FORMAT)
    :param num_mips: mipmap count; negative means auto
    :param check: when True, validate image dimensions before encoding
    :param overwrite: replace textures already in brres (defaults to OVERWRITE_IMAGES)
    :return: list of created Tex0 nodes, or None when nothing needed encoding
    :raises EncodeError: when the converter subprocess fails
    """
    if overwrite is None:
        overwrite = self.OVERWRITE_IMAGES
    mips = '--n-mm=' + str(num_mips) if num_mips >= 0 else '--n-mm=auto'
    if not tex_format:
        tex_format = self.IMG_FORMAT
    # Resolve each input file; missing files are skipped with a warning.
    t_files = []
    for x in files:
        try:
            t_files.append(self.find_file(x))
        except EncodeError:
            AutoFix.warn('Failed to find image {}'.format(x))
    # tmp = 'abmatt-tmp'
    # create a new dir to work in
    tmp = self._move_to_temp_dir(t_files)
    t_files = [os.path.basename(x) for x in t_files]
    # Convert everything to png; drop files whose texture already exists
    # unless overwriting.
    path_set = set()
    textures = brres.get_texture_map()
    for x in t_files:
        path, name = self.convert_png(os.path.join(tmp, x), remove_old=True)
        if overwrite or name not in textures:
            path_set.add(path)
        else:
            os.remove(path)
    if not len(path_set):
        self._move_out_of_temp_dir(tmp)
        return None
    if check:
        for x in path_set:
            self.check_image_dimensions(x)
    # Encode all remaining pngs in a single converter invocation.
    args = [self.converter, '-x', tex_format, mips, '-qo', 'encode']
    file_names = [os.path.basename(x) for x in path_set]
    args.extend(file_names)
    result = subprocess.call(args, cwd=tmp, startupinfo=self.si)
    if result:
        self._move_out_of_temp_dir(tmp)
        raise EncodeError('Failed to encode images {}'.format(files))
    # Anything newly created in the temp dir is an encoded texture.
    tex0s = []
    new_files = [x for x in os.listdir(tmp) if x not in file_names]
    for x in new_files:
        t = Tex0(x, brres, BinFile(os.path.join(tmp, x)))
        tex0s.append(t)
        brres.add_tex0(t)
    self._move_out_of_temp_dir(tmp)  # cleanup
    return tex0s
def unpack_frames(self, pat0, binfile):
    """Read the pat0 frame list from binfile into pat0.frames.

    Stops early (with a warning) if a frame index exceeds the frame count.
    """
    # frame header: count, padding, scale factor
    count, _, scale_factor = binfile.read('2Hf', 8)
    frames = pat0.frames
    for _ in range(count):
        frame_id, tex_id, plt_id = binfile.read('f2H', 8)
        if frame_id > pat0.framecount:
            AutoFix.warn(
                'Unpacked Pat0 {} frame index out of range'.format(pat0.name), 1)
            break
        frames.append(pat0.Frame(frame_id, tex_id, plt_id))
def add_files(self, files):
    """Dispatch each dropped/added file to the appropriate handler by extension."""
    for fname in files:
        extension = os.path.splitext(fname)[1].lower()
        if extension in ('.dae', '.obj'):
            self.handler.import_file(fname)
        elif extension == '.brres':
            self.handler.open(fname)
        else:
            # Image imports are intentionally disabled here.
            AutoFix.warn(f'{fname} has unknown extension')
def check(self, verts, norms, uvs, colors, materials):
    """Check this polygon, accumulating the reference names it uses.

    :param verts/norms/uvs/colors/materials: sets the caller is gathering;
        the names used by this polygon are added to them
    :return: True when the check modified the material (vertex colors disabled)
    """
    # as we go along, gather verts norms uvs colors materials
    modified = False
    vertices = self.get_vertex_group()
    if vertices:
        verts.add(vertices.name)
        if self.linked_bone:
            vertices.check_vertices(self.linked_bone)
    normals = self.get_normal_group()
    if normals:
        norms.add(normals.name)
    material = self.get_material()
    if material:
        materials.add(material.name)
    # Colors
    my_colors = self.get_color_group()
    # NOTE(review): material is dereferenced here even when get_material()
    # returned None -- presumably a material is always present; verify.
    uses_vertex_colors = material.is_vertex_color_enabled()
    if my_colors:
        colors.add(my_colors.name)
        if not uses_vertex_colors:
            AutoFix.info(f'{self.name} has unused vertex colors', 4)
    elif uses_vertex_colors:
        # Material expects vertex colors but this polygon has none: auto-fix
        # by disabling them on the material.
        b = Bug(2, 2,
                f'{material.name} uses vertex colors but {self.name} has no colors!',
                'Disable vertex colors')
        material.enable_vertex_color(False)
        b.resolve()
        modified = True
    # UVs
    uvs_used = material.get_uv_channels()
    uv_count = 0
    for i in range(8):
        tex = self.get_uv_group(i)
        if tex:
            uv_count += 1
            uvs.add(tex.name)
            if i in uvs_used:
                uvs_used.remove(i)
            else:
                AutoFix.info(
                    f'{self.name} UV Channel {i} is not used by material.', 3)
        else:
            # UV groups are assumed contiguous: stop at the first gap.
            break
    if uvs_used:
        AutoFix.warn(
            f'{self.name} does not have UV channel(s) {uvs_used} but the material uses them!'
        )
    self.uv_count = uv_count
    return modified
def parse_cp_vertex_format(self, polygon, hi, lo):
    """Parse the CP vertex descriptor registers into polygon index settings.

    :param polygon: polygon whose *_index fields are filled in
    :param hi: high register -- 2-bit format fields for the 8 UV groups
    :param lo: low register -- weight bit, 8 UV-matrix bits, then 2-bit
        format fields for vertex, normal, color0 and color1

    Each parsed field is cross-checked against the expectation recorded on
    self via __check_helper, and the byte-decode string is grown as single
    'B' entries for each direct index present.
    """
    # Bit 0: weight (position matrix) index present as one byte.
    if lo & 0x1:
        polygon.weight_index = self.i_pp()
        self.encode_string += 'B'
    else:
        polygon.weight_index = -1
    self.__check_helper(lo & 0x1, self.has_weights, 'weight')
    lo >>= 1
    # Next 8 bits: one texture-matrix index flag per UV channel.
    tex_matrix = []
    for i in range(8):
        if lo & 1:
            tex_matrix.append(self.i_pp())
            self.encode_string += 'B'
        else:
            tex_matrix.append(-1)
        self.__check_helper(lo & 1, self.has_uv_matrix[i], 'uv_matrix' + str(i))
        lo >>= 1
    polygon.uv_mtx_indices = tex_matrix
    # Remaining low bits: 2-bit format codes, translated by get_index_from_format.
    polygon.vertex_index = self.get_index_from_format(lo & 0x3)
    self.__check_helper(polygon.vertex_index >= 0, self.has_vertex, 'vertex')
    polygon.normal_index = self.get_index_from_format(lo >> 2 & 0x3)
    self.__check_helper(polygon.normal_index >= 0, self.has_normals, 'normal')
    polygon.color0_index = self.get_index_from_format(lo >> 4 & 0x3)
    self.__check_helper(polygon.color0_index >= 0, self.has_color0, 'color0')
    polygon.color1_index = self.get_index_from_format(lo >> 6 & 0x3)
    self.__check_helper(polygon.color1_index >= 0, self.has_color1, 'color1')
    polygon.color_count = (polygon.color0_index >= 0) + (polygon.color1_index >= 0)
    self.__check_helper(polygon.color_count, self.color_count, 'color count')
    # High register: a 2-bit format code per UV channel.
    tex = []
    total = 0
    for i in range(8):
        index = self.get_index_from_format(hi & 3)
        tex.append(index)
        self.__check_helper(index >= 0, self.has_uv_group[i], 'uv' + str(i))
        total += (index >= 0)
        hi >>= 2
    polygon.uv_count = total
    if polygon.uv_count != self.uv_count:
        AutoFix.warn('{} mismatch in {} definition (assuming {})'.format(
            self.node.name, 'UV count', polygon.uv_count))
    self.__check_helper(polygon.uv_count, self.uv_count, 'uv count')
    polygon.uv_indices = tex
def normalize(self, vertices, normals, tex_coords): width = 1 + self.has_normals + self.has_texcoords try: triangles = np.array(self.triangles).reshape((-1, 3, width)) except ValueError as e: AutoFix.warn( 'Please triangulate your model before importing it!'.format( self.name)) raise ValueError('Normalize triangles failed') triangles = triangles - 1 self.triangles = triangles self.vertices = self.normalize_indices_group(triangles[:, :, 0], vertices) self.normals = self.normalize_indices_group( triangles[:, :, -1], normals) if self.has_normals else None self.texcoords = self.normalize_indices_group( triangles[:, :, 1], tex_coords) if self.has_texcoords else None
def check_shader(self, direct_count, ind_count, matrices_used):
    """Cross-check stage counts and indirect matrices against the shader.

    Updates shaderStages/indirectStages to match the shader and warns when
    an indirect matrix's enabled state disagrees with its shader usage.
    """
    for i in range(2):
        matrix = self.indirect_matrices[i]
        used = matrices_used[i]
        if matrix.enabled and not used:
            AutoFix.warn(
                '{} indirect matrix {} enabled but unused in shader'.format(
                    self.name, i), 3)
        elif not matrix.enabled and used:
            AutoFix.warn(
                '{} indirect matrix {} disabled but used in shader'.format(
                    self.name, i), 3)
    if self.shaderStages != direct_count:
        self.shaderStages = direct_count
        self.mark_modified()
    if self.indirectStages != ind_count:
        self.indirectStages = ind_count
        self.mark_modified()
def __decode_material(self, material):
    """Convert a brres material into an ObjMaterial, caching its textures."""
    mat = ObjMaterial(material.name)
    if material.xlu:
        mat.dissolve = 0.5  # translucent materials export half-dissolved
    for i, layer in enumerate(material.layers):
        tex_name = layer.name
        if tex_name not in self.tex0_map:
            found = self.texture_library.get(tex_name)
            if found:
                self.tex0_map[tex_name] = found
            else:
                AutoFix.warn('No texture found matching {}'.format(tex_name))
        if i == 0:
            # The first layer supplies both diffuse and ambient maps.
            image_path = os.path.join(self.image_dir, tex_name + '.png')
            mat.diffuse_map = image_path
            mat.ambient_map = image_path
    return mat
def __init_threads(self):
    """Start the worker threads: conversion manager, image manager, log pipe."""
    AutoFix.info('Starting threads...', 5)
    self.threadpool = QThreadPool()  # for multi-threading
    self.threadpool.setMaxThreadCount(5)
    # Conversion manager: notifies this window when a conversion finishes.
    self.converter = converter = ConvertManager.get()
    converter.signals.on_conversion_finish.connect(
        self.on_conversion_finish)
    # Image manager runs only when its external tooling is available.
    self.image_manager = image_manager = ImageManager.get()
    if image_manager.enabled:
        image_manager.signals.on_image_update.connect(self.on_image_update)
        self.threadpool.start(image_manager)
    else:
        AutoFix.warn(
            'Image Manager disabled, do you have Wiimms SZS Tools installed?'
        )
    self.threadpool.start(converter)
    # Route logger output through Qt signals to the UI handlers.
    log_pipe = LoggerPipe()
    log_pipe.info_sig.connect(self.info)
    log_pipe.warn_sig.connect(self.warn)
    log_pipe.error_sig.connect(self.error)
def check(self, expected_name=None):
    """ Checks model (somewhat) for validity

    :param expected_name: name the model is expected to have (e.g. from the
        containing file); triggers an auto-rename when DETECT_MODEL_NAME is set
    """
    super(Mdl0, self).check()
    if self.rebuild_head:
        self.rebuild_header()
    if expected_name:
        if expected_name != self.name:
            b = Bug(2, 2, 'Model name does not match file',
                    'Rename to {}'.format(expected_name))
            if self.DETECT_MODEL_NAME:
                if self.rename(expected_name):
                    b.resolve()
                    self.mark_modified()
    # Map models require the posLD/posRU bones.
    if self.is_map_model:
        names = [x.name for x in self.bones]
        if 'posLD' not in names or 'posRU' not in names:
            b = Bug(2, 2, 'Missing map model bones', 'Added map bones')
            self.add_map_bones()
            b.resolve()
            self.mark_modified()
    # as we go along, keep track of those references used
    uvs = set()
    normals = set()
    vertices = set()
    colors = set()
    materials = set()
    for x in self.objects:
        if x.check(vertices, normals, uvs, colors, materials):
            self.mark_modified()
    # Prune/validate each group against the references actually used.
    texture_map = self.getTextureMap()
    self.check_group(self.materials, materials, extras=texture_map)
    self.check_group(self.vertices, vertices)
    self.check_group(self.normals, normals)
    self.check_group(self.uvs, uvs)
    self.check_group(self.colors, colors)
    if not len(self.bones):  # no bones???
        name = expected_name if expected_name else 'default'
        AutoFix.warn('No bones in model, adding bone {}'.format(name))
        self.add_bone(name)
def __decode_material(self, material, mesh):
    """Convert a brres material to a converter Material, registering its images.

    The first three layers map to diffuse, ambient and specular respectively.
    """
    diffuse_map = ambient_map = specular_map = None
    for i, layer in enumerate(material.layers):
        layer_name = layer.name
        if i == 0:
            diffuse_map = layer_name
        elif i == 1:
            ambient_map = layer_name
        elif i == 2:
            specular_map = layer_name
        if layer_name not in self.tex0_map:
            tex0 = self.texture_library.get(layer_name)
            if tex0 is None:
                AutoFix.warn('No texture found matching {}'.format(layer_name))
            else:
                mesh.add_image(layer_name,
                               os.path.join(self.image_dir, layer_name + '.png'))
                self.tex0_map[layer_name] = tex0
    # Translucent materials export as half-dissolved.
    return Material(material.name, diffuse_map, ambient_map, specular_map,
                    material.xlu * 0.5)
def save_model(self, mdl0=None):
    """Export the mdl0 model as a wavefront obj file."""
    base_name, mdl0 = self._start_saving(mdl0)
    obj = Obj(self.mdl_file, False)
    # Decode materials first so geometries can reference them by name.
    for mat in self.materials:
        decoded = self.__decode_material(mat)
        obj.materials[decoded.name] = decoded
    has_colors = False
    for poly in self.polygons:
        geometry = super()._decode_geometry(poly)
        if geometry:
            obj.geometries.append(
                self.__decode_geometry(geometry, geometry.material_name))
        if poly.get_color_group():
            has_colors = True
    if has_colors:
        # obj has no vertex color support.
        AutoFix.warn('Loss of color data exporting obj')
    self._end_saving(obj)
def __decode_geometry(geometry, material_name):
    """Convert decoded geometry into an ObjGeometry for export.

    Only the first UV channel survives; extra channels trigger a warning.
    :return: the populated ObjGeometry
    """
    geo = ObjGeometry(geometry.name)
    # NOTE(review): this assignment is immediately overwritten below --
    # presumably apply_linked_bone_bindings() is called for its side effect
    # on `geometry`; verify before removing either line.
    geo.vertices = geometry.apply_linked_bone_bindings()
    geo.material_name = material_name
    geo.vertices = geometry.vertices
    geo.normals = geometry.normals
    geo.has_normals = bool(geo.normals)
    texcoords = geometry.texcoords
    if len(texcoords) > 1:
        # obj supports a single UV channel per facepoint.
        AutoFix.warn('Loss of UV data for {}.'.format(geo.name))
    stack = [geo.vertices.face_indices]
    if len(texcoords):
        geo.texcoords = texcoords[0]
        stack.append(geo.texcoords.face_indices)
        geo.has_texcoords = True
    else:
        geo.texcoords = None
        geo.has_texcoords = False
    if geo.normals:
        stack.append(geo.normals.face_indices)
    # Combine the index columns into one (faces, points, channels) array.
    geo.triangles = np.stack(stack, -1)
    return geo
def post_unpack(self):
    """Finish polygon unpacking: parse register flags and hook up group references."""
    poly = self.node
    mdl0 = poly.parent
    self.parse_xf_arry_flags(self.xf_arry_flags)
    self.parse_xf_vertex_specs(self.xf_vert)
    self.parse_cp_vertex_format(poly, self.cp_vert_hi, self.cp_vert_lo)
    # hook up references; negative indices mean the group is absent
    poly.vertices = mdl0.vertices[self.vertex_group_index] \
        if self.vertex_group_index >= 0 else None
    poly.normals = mdl0.normals[self.normal_group_index] \
        if self.normal_group_index >= 0 else None
    poly.colors = [mdl0.colors[i] if i >= 0 else None
                   for i in self.color_group_indices]
    poly.uvs = [mdl0.uvs[i] if i >= 0 else None
                for i in self.tex_coord_group_indices]
    poly.encode_str = self.encode_string
    if self.bone_id >= 0:
        poly.linked_bone = mdl0.bones[mdl0.bone_table[self.bone_id]]
    elif poly.weight_index < 0:
        # No bone and no weights: fall back to the root bone.
        poly.linked_bone = mdl0.bones[0]
        AutoFix.warn('{} has incorrect bone reference, using {}'.format(
            poly.name, poly.linked_bone))
    else:
        poly.linked_bone = None
def encode(self, img_file, brres, tex_format=None, num_mips=-1, check=False,
           overwrite=None):
    """Encode a single image file into a Tex0.

    :param img_file: path to the image to encode
    :param brres: destination archive (may be None to skip adding)
    :param tex_format: texture format flag (defaults to IMG_FORMAT)
    :param num_mips: mipmap count; negative means auto
    :param check: when True, validate image dimensions before encoding
    :param overwrite: replace an existing texture (defaults to OVERWRITE_IMAGES)
    :return: the new Tex0, or None when the texture exists and overwrite is off
    :raises EncodeError: when the converter subprocess fails
    """
    if overwrite is None:
        overwrite = self.OVERWRITE_IMAGES
    img_file, name = self.convert_png(self.find_file(img_file))
    if not overwrite and brres is not None \
            and name in brres.get_texture_map():
        AutoFix.warn(f'Tex0 {name} already exists!')
        return None
    if check:
        self.check_image_dimensions(img_file)
    # encode
    mips = '--n-mm=' + str(num_mips) if num_mips >= 0 else '--n-mm=auto'
    if not tex_format:
        tex_format = self.IMG_FORMAT
    dest = self.get_temp_dest()
    result = subprocess.call([
        self.converter, 'encode', img_file, '-d', dest, '-x', tex_format,
        mips, '-qo'
    ], startupinfo=self.si)
    if result:
        raise EncodeError('Failed to encode {}'.format(img_file))
    t = Tex0(name, brres, BinFile(dest))
    if brres is not None:
        brres.add_tex0(t)
    os.remove(dest)
    # FIX: the name was redundantly assigned twice (before and after
    # add_tex0); a single final assignment preserves the original result.
    t.name = name
    return t
def unpack(self, polygon, binfile):
    """Unpack a polygon section from binfile.

    Reads the header fields, cross-checks the CP/XF register copies stored
    in the vertex-declaration block against the header values, and finally
    captures the raw draw-command data into polygon.data.
    """
    binfile.start()
    binfile.readLen()
    mdl0_offset, self.bone_id, cp_vert_lo, cp_vert_hi, xf_vert = binfile.read(
        '2i3I', 20)
    # Vertex declaration block: size, actual size, offset (relative).
    offset = binfile.offset
    vt_dec_size, vt_dec_actual, vt_dec_offset = binfile.read('3I', 12)
    vt_dec_offset += offset
    # Vertex (draw) data block: size, actual size, offset (relative).
    offset = binfile.offset
    vt_size, vt_actual, vt_offset = binfile.read('3I', 12)
    vt_offset += offset
    self.xf_arry_flags, polygon.flags = binfile.read('2I', 8)
    binfile.advance(4)
    polygon.index, polygon.facepoint_count, polygon.face_count, \
        self.vertex_group_index, self.normal_group_index = binfile.read('3I2h', 16)
    self.color_group_indices = binfile.read('2h', 4)
    self.tex_coord_group_indices = binfile.read('8h', 16)
    # Fur fields only exist from version 10 onward.
    if polygon.parent.version >= 10:
        self.fur_vector_id, self.fur_coord_id = binfile.read('2h', 4)
    else:
        self.fur_vector_id = self.fur_coord_id = -1
        # binfile.advance(4)  # ignore
    binfile.store()  # bt offset
    binfile.recall()  # bt
    polygon.bone_table = unpack_bonetable(binfile, 'H')
    # Re-read the CP registers from the vertex declaration block and warn
    # when they disagree with the header copies (the block values win).
    binfile.offset = vt_dec_offset + 10
    _, self.cp_vert_lo, _, self.cp_vert_hi = binfile.read('HIHI', 12)
    if self.cp_vert_lo != cp_vert_lo:
        AutoFix.warn('{} CP_VERTEX_LO does not match (using {})'.format(
            polygon.name, self.cp_vert_lo))
    if self.cp_vert_hi != cp_vert_hi:
        AutoFix.warn('{} CP_VERTEX_HI does not match (using {})'.format(
            polygon.name, self.cp_vert_hi))
    binfile.advance(5)
    [self.xf_vert] = binfile.read('I', 4)
    if self.xf_vert != xf_vert:
        AutoFix.warn('{} XF_VERT_SPEC does not match (using {})'.format(
            polygon.name, xf_vert))
    # UVAT registers: three (header, value) pairs; only the values are used.
    binfile.offset = vt_dec_offset + 32
    uvat = binfile.read('HIHIHI', 18)
    # self.uvat = uvat
    self.parse_uvat(polygon, uvat[1], uvat[3], uvat[5])
    # Everything from the data offset onward is raw draw-command data.
    binfile.offset = vt_offset
    polygon.data = binfile.readRemaining()
    # print('\n\n{}\tfacecount:{} data length:{} '.format(self.name, self.face_count, len(self.vt_data)))
    # if self.face_count < 30:
    #     printCollectionHex(self.vt_data)
    binfile.end()
def check_vertices(self, linked_bone):
    """Warn about suspicious vertex extents for this group.

    Only runs when the linked bone is (approximately) unscaled and
    unrotated, since the raw extents would otherwise be misleading.
    """
    if min(linked_bone.scale) >= 0.9999 and np.allclose(linked_bone.rotation, 0.0):
        # only checks if not scaled down and not rotated
        # Check Y value
        if self.parent.name == 'course' and self.minimum[1] + linked_bone.translation[1] < 0:
            AutoFix.warn('Vertex {} minimum y below axis {}'.format(self.name, self.minimum[1]))
        # vrcorn (skybox) models are exempt from the coordinate limit.
        if self.parent.name != 'vrcorn':
            for x in self.minimum:
                if abs(x) > self.__MAX_COORD:
                    AutoFix.warn(
                        'Vertex {} extreme coordinate {}, (ignore this warning for non-drivable surfaces)'.format(
                            self.name, x))
            # NOTE(review): the maximum check uses x > MAX while the minimum
            # check uses abs(x) -- possibly intentional asymmetry; verify.
            for x in self.maximum:
                if x > self.__MAX_COORD:
                    AutoFix.warn(
                        'Vertex {} extreme coordinate {}, (ignore this warning for non-drivable surfaces)'.format(
                            self.name, x))