# Example 1
def split_skinned_blender_mesh_object(context: bpy.types.Context, object: bpy.types.Object, materials: List[GMDAttributeSet], bone_name_map: Dict[str, GMDBone], bone_limit: int,
                                      error: ErrorReporter) -> List[GMDSkinnedMesh]:
    """Split a skinned Blender object into GMDSkinnedMesh chunks.

    The object's evaluated mesh is split once per material, then each
    submesh is further split so no chunk references more than bone_limit
    bones. Vertex groups whose names don't appear in bone_name_map are
    ignored.
    """
    # Evaluate the dependency graph so modifiers are applied before we read mesh data
    # https://blender.stackexchange.com/a/146911
    evaluated_mesh = prepare_mesh(context, object)
    # TODO: evaluated_mesh.transform(object.matrix_world)

    # Map vertex-group index -> GMDBone, keeping only groups that name a known bone
    relevant_bone_groups = {}
    for group_idx, vertex_group in enumerate(object.vertex_groups):
        mapped_bone = bone_name_map.get(vertex_group.name)
        if mapped_bone is not None:
            relevant_bone_groups[group_idx] = mapped_bone

    builders = split_mesh_by_material(object.name, evaluated_mesh, Matrix.Identity(4), materials, True,
                                      relevant_bone_groups, error=error)

    print(f"Exporting skinned meshes for {object.name}")
    skinned_meshes: List[GMDSkinnedMesh] = []
    for submesh_builder in builders:
        if not isinstance(submesh_builder, SkinnedSubmeshBuilder):
            error.fatal(f"split_mesh_by_material gave a {type(submesh_builder).__name__} when a SkinnedSubmeshBuilder was expected")
        for bone_split_builder in split_submesh_builder_by_bones(submesh_builder, bone_limit, object.name, error):
            print(f"Adding skinned mesh of vert count {len(bone_split_builder.vertices)}")
            skinned_meshes.append(bone_split_builder.build_to_gmd(materials))

    return skinned_meshes
# Example 2
def _get_file_data(data: Union[Path, str, bytes],
                   error_reporter: ErrorReporter) -> bytes:
    """Return raw file bytes.

    If *data* is already ``bytes`` it is returned unchanged; if it is a
    path (``Path`` or ``str``) the file at that path is read in binary
    mode. Any OS-level read failure is routed through
    ``error_reporter.fatal`` (which is expected not to return).
    """
    if isinstance(data, (Path, str)):
        try:
            with open(data, "rb") as in_file:
                return in_file.read()
        # OSError covers FileNotFoundError as well as PermissionError,
        # IsADirectoryError, etc. - previously only a missing file was
        # reported through the error reporter; other read failures leaked.
        except OSError as e:
            error_reporter.fatal(str(e))
    else:
        return data
# Example 3
    def __init__(self,  # bm_vertices: List[BMVert],
                 mesh_name: str,
                 vertex_layout: GMDVertexBufferLayout,
                 transformation_position: Matrix,
                 transformation_direction: Matrix,
                 mesh: bpy.types.Mesh,
                 vertex_group_bone_index_map: Dict[int, int],
                 # deform_layer: Optional[BMLayerItem],
                 col0_layer: Optional[bpy.types.MeshLoopColorLayer],
                 col1_layer: Optional[bpy.types.MeshLoopColorLayer],
                 tangent_w_layer: Optional[bpy.types.MeshLoopColorLayer],
                 uv_primary: Optional[bpy.types.MeshUVLoopLayer],
                 uv_numbered: Dict[int, bpy.types.MeshLoopColorLayer],
                 error: ErrorReporter
                 ):
        """Store the mesh layers that the vertex layout needs, warning (via
        `error.info`) about layers that are expected-but-missing or
        present-but-unused, and build `self.uv_layers` - one
        (component_count, layer-or-None) entry per UV storage slot in the
        layout.
        """
        self.vertex_layout = vertex_layout
        self.transformation_position = transformation_position
        self.transformation_direction = transformation_direction
        self.mesh = mesh
        self.vertex_group_bone_index_map = vertex_group_bone_index_map

        def verify_layer(storage, layer, name):
            # Cross-check "does the layout want this data" (storage truthy)
            # against "does the mesh provide it" (layer truthy).
            # Returns the layer when wanted (even if None), and implicitly
            # returns None when the layout doesn't use it.
            if storage:
                if not layer:
                    error.info(f"Expected mesh {mesh_name} to have a {name} layer but got None")
                return layer
            elif layer:
                error.info(f"Mesh {mesh_name} has an unused {name} layer")

        self.col0_layer = verify_layer(vertex_layout.col0_storage, col0_layer, "Color0")
        self.col1_layer = verify_layer(vertex_layout.col1_storage, col1_layer, "Color1")
        # Tangent W is only needed when the tangent is stored as a 4-vector.
        self.tangent_w_layer = verify_layer(
            vertex_layout.tangent_storage in [VecStorage.Vec4Full, VecStorage.Vec4Fixed, VecStorage.Vec4Half],
            tangent_w_layer,
            "Tangent W Component"
        )
        # TODO - This "primary_uv_index" thing is icky
        self.primary_uv_i = vertex_layout.get_primary_uv_index()
        if self.primary_uv_i != -1 and self.primary_uv_i in uv_numbered:
            error.recoverable(
                f"VertexFetcher for mesh {mesh_name} given a primary uv index that refers to a numbered UV layer UV{self.primary_uv_i}. The primary UV will take precedence.")
        self.uv_layers = []
        for i, storage in enumerate(vertex_layout.uv_storages):
            if self.primary_uv_i == i:
                # NOTE(review): component count is hard-coded to 2 here
                # (Blender UV layers are 2D), whereas other slots use
                # VecStorage.component_count(storage) - confirm the primary
                # UV slot's storage is always 2-component.
                self.uv_layers.append((2, uv_primary))
            elif i in uv_numbered:
                layer = uv_numbered[i]
                self.uv_layers.append((VecStorage.component_count(storage), layer))
            else:
                error.info(f"VertexFetcher for mesh {mesh_name} didn't have a UV for layer {i}, values will be all-0")
                # None layer -> fetcher is expected to emit zeros for this slot
                self.uv_layers.append((VecStorage.component_count(storage), None))

        self.error = error
# Example 4
def check_version_writeable(version_props: VersionProperties,
                            error_reporter: ErrorReporter):
    """Report a fatal error if this GMD version cannot be written back out."""
    writeable_versions = (GMDVersion.Kiwami1, GMDVersion.Dragon,
                          GMDVersion.Kenzan)
    if version_props.major_version not in writeable_versions:
        error_reporter.fatal(
            f"File format version {version_props.version_str} is not writeable"
        )
# Example 5
def write_abstract_scene_out(version_props: VersionProperties,
                             file_is_big_endian: bool,
                             vertices_are_big_endian: bool, scene: GMDScene,
                             path: Union[Path,
                                         str], error_reporter: ErrorReporter):
    """Pack an abstract GMDScene and write the resulting bytes to *path*.

    Packing errors are reported by the callees; write failures are routed
    through error_reporter.fatal.
    """
    abstract_file_data = pack_abstract_scene(version_props, file_is_big_endian,
                                             vertices_are_big_endian, scene,
                                             error_reporter)
    packed_bytes = pack_file_data(version_props, abstract_file_data,
                                  error_reporter)
    try:
        with open(path, "wb") as out_file:
            out_file.write(packed_bytes)
    except IOError as e:
        error_reporter.fatal(str(e))
# Example 6
def pack_file_data(version_props: VersionProperties,
                   file_data: FileData_Common,
                   error_reporter: ErrorReporter) -> bytearray:
    """Pack *file_data* into on-disk bytes for its GMD version.

    Raises InvalidGMDFormatError for versions with no packer; packing
    validation failures are routed through error_reporter.fatal.
    """
    # One packer per supported major version - the three original branches
    # were byte-identical apart from the packer class.
    packers = {
        GMDVersion.Kiwami1: FilePacker_YK1,
        GMDVersion.Dragon: FilePacker_Dragon,
        GMDVersion.Kenzan: FilePacker_Kenzan,
    }
    packer = packers.get(version_props.major_version)
    if packer is None:
        raise InvalidGMDFormatError(
            f"File format version {version_props.version_str} is not packable")

    data_bytearray = bytearray()
    try:
        packer.pack(file_data.file_is_big_endian(), file_data,
                    data_bytearray)
    except PackingValidationError as e:
        error_reporter.fatal(str(e))
    return data_bytearray
# Example 7
def read_gmd_structures(
    data: Union[Path, str, bytes], error_reporter: ErrorReporter
) -> Tuple[VersionProperties, GMDHeaderStruct, Union[FileData_Kenzan,
                                                     FileData_YK1]]:
    """Read a GMD file (path or bytes) into (version, header, file contents).

    Raises InvalidGMDFormatError for unsupported versions; unpack failures
    are routed through error_reporter.fatal.
    """
    data = _get_file_data(data, error_reporter)
    big_endian, base_header = _extract_base_header(data)

    version_props = base_header.get_version_properties()
    # (header unpacker, whole-file unpacker) per supported major version -
    # the three original branches were identical apart from these classes.
    unpackers = {
        GMDVersion.Kiwami1: (GMDHeader_YK1_Unpack, FilePacker_YK1),
        GMDVersion.Kenzan: (GMDHeader_Kenzan_Unpack, FilePacker_Kenzan),
        GMDVersion.Dragon: (GMDHeader_Dragon_Unpack, FilePacker_Dragon),
    }
    version_unpackers = unpackers.get(version_props.major_version)
    if version_unpackers is None:
        raise InvalidGMDFormatError(
            f"File format version {version_props.version_str} is not readable")
    header_unpacker, file_unpacker = version_unpackers

    try:
        # Both unpack from offset 0: the header is a prefix of the file data.
        header = header_unpacker.unpack(big_endian, data=data, offset=0)
        contents, _ = file_unpacker.unpack(big_endian, data=data, offset=0)
        return version_props, header, contents
    except FileUnpackError as e:
        error_reporter.fatal(str(e))
# Example 8
def pack_abstract_contents_Dragon(version_properties: VersionProperties, file_big_endian: bool, vertices_big_endian: bool,
                               scene: GMDScene, error: ErrorReporter, base_flags=(0, 0, 0, 0, 0, 0)) -> FileData_Dragon:
    """Pack an abstract GMDScene into a Dragon-engine FileData_Dragon.

    Rearranges the scene (nodes, matrices, meshes, attribute sets) into
    export order, then builds every on-disk array: node structs, vertex
    buffers, index buffers, mesh structs, object drawlists, materials,
    attribute sets (with Dragon's in-order texture declaration), and the
    header flags.

    NOTE(review): several structs reused here are the *_YK1 variants
    (VertexBufferLayoutStruct_YK1, MeshStruct_YK1, ObjectStruct_YK1) -
    presumably these layouts are shared between YK1 and Dragon; confirm.
    """
    rearranged_data: RearrangedData = arrange_data_for_export(scene, error)

    # Set >255 bones flag
    bones_count = len([x for x, stackop in rearranged_data.ordered_nodes if isinstance(x, GMDBone)])
    int16_bone_indices = bones_count > 255
    print(bones_count, int16_bone_indices)

    # Bone-matrix index lists are packed into one shared string blob;
    # the index maps each matrixlist tuple to its offset in that blob.
    packed_mesh_matrixlists, packed_mesh_matrix_strings_index = pack_mesh_matrix_strings(
        rearranged_data.mesh_matrixlist, int16_bone_indices, big_endian=file_big_endian)

    # Build the node array: each node records its first child (parent_of),
    # next sibling (sibling_of), and optional object/matrix indices.
    node_arr = []
    for i, (gmd_node, stack_op) in enumerate(rearranged_data.ordered_nodes):
        # "parent_of" is actually the index of this node's FIRST child, -1 if leaf
        parent_of = -1 if not gmd_node.children else rearranged_data.node_id_to_node_index[id(gmd_node.children[0])]
        sibling_of = -1
        if gmd_node.parent:
            # Point at the next sibling in the parent's child list, if any
            this_node_child_index = gmd_node.parent.children.index(gmd_node)
            if this_node_child_index != len(gmd_node.parent.children) - 1:
                sibling_of = rearranged_data.node_id_to_node_index[
                    id(gmd_node.parent.children[this_node_child_index + 1])]

        # Pure transforms have no object; everything else maps to an object index
        if gmd_node.node_type == NodeType.MatrixTransform:
            object_index = -1
        else:
            object_index = rearranged_data.node_id_to_object_index[id(gmd_node)]

        # Only bones and unskinned objects carry a matrix (see arrange_data_for_export)
        if isinstance(gmd_node, (GMDBone, GMDUnskinnedObject)):
            matrix_index = rearranged_data.object_id_to_matrix_index[id(gmd_node)]
        else:
            matrix_index = -1

        if isinstance(gmd_node, GMDBone):
            bone_pos = gmd_node.bone_pos
            bone_axis = gmd_node.bone_axis
        else:
            # TODO - UnskinnedObjects should also export a bone_pos equal to their world-space position
            bone_pos = Vector((gmd_node.pos.x, gmd_node.pos.y, gmd_node.pos.z, 1))
            bone_axis = Quaternion((0, 0, 0, 0))
            pass

        node_arr.append(NodeStruct(
            index=i,
            parent_of=parent_of,
            sibling_of=sibling_of,
            object_index=object_index,
            matrix_index=matrix_index,
            stack_op=stack_op,
            name_index=rearranged_data.node_names_index[gmd_node.name],
            node_type=gmd_node.node_type,

            pos=vec3_to_vec4(gmd_node.pos),
            rot=gmd_node.rot,
            scale=vec3_to_vec4(gmd_node.scale),

            bone_pos=vec3_to_vec4(bone_pos, 1),
            bone_axis=bone_axis,
            # TODO: GMD Node Flags
            flags=[0, 0, 0, 0],
        ))

    # Pack vertex data: one buffer per (layout, packing flags) group, with
    # each mesh's vertices appended contiguously into vertex_data_bytearray.
    vertex_buffer_arr = []
    vertex_data_bytearray = bytearray()
    index_buffer = []
    # Dict of GMDMesh id -> (buffer_id, vertex_offset, vertex_count)
    mesh_buffer_stats = {}
    for buffer_idx, (gmd_buffer_layout, packing_flags, meshes_for_buffer) in enumerate(
            rearranged_data.vertex_layout_groups):
        buffer_vertex_count = sum(m.vertices_data.vertex_count() for m in meshes_for_buffer)

        vertex_buffer_arr.append(VertexBufferLayoutStruct_YK1(
            index=buffer_idx,

            vertex_count=buffer_vertex_count,

            vertex_packing_flags=packing_flags,
            bytes_per_vertex=gmd_buffer_layout.bytes_per_vertex(),

            # Offset into the single shared vertex-data blob
            vertex_data_offset=len(vertex_data_bytearray),
            vertex_data_length=buffer_vertex_count * gmd_buffer_layout.bytes_per_vertex(),
        ))

        vertex_buffer_length = 0

        for gmd_mesh in meshes_for_buffer:
            object_index = rearranged_data.mesh_id_to_object_index[id(gmd_mesh)]
            node = rearranged_data.ordered_objects[object_index]

            # Vertex offset is relative to the start of THIS buffer, in vertices
            vertex_offset = vertex_buffer_length
            vertex_count = len(gmd_mesh.vertices_data)
            try:
                gmd_mesh.vertices_data.layout.pack_into(vertices_big_endian, gmd_mesh.vertices_data, vertex_data_bytearray)
            except PackingValidationError as e:
                error.fatal(f"Error while packing a mesh for {node.name}: {e}")
            vertex_buffer_length += vertex_count

            mesh_buffer_stats[id(gmd_mesh)] = (buffer_idx, vertex_offset, vertex_count)

        pass

    # Build the mesh array and the three index lists (triangle list, no-reset
    # strips, reset strips) for every mesh.
    mesh_arr = []
    for gmd_mesh in rearranged_data.ordered_meshes:
        object_index = rearranged_data.mesh_id_to_object_index[id(gmd_mesh)]
        node = rearranged_data.ordered_objects[object_index]
        node_index = rearranged_data.node_id_to_node_index[id(node)]
        (buffer_idx, vertex_offset, vertex_count) = mesh_buffer_stats[id(gmd_mesh)]

        if isinstance(gmd_mesh, GMDSkinnedMesh):
            matrix_list = rearranged_data.mesh_id_to_matrixlist[id(gmd_mesh)]
        else:
            matrix_list = []

        # Relative-index formats store indices as-is; absolute formats offset
        # each index by the mesh's position in its vertex buffer, preserving
        # 0xFFFF as the strip-restart sentinel.
        if version_properties.relative_indices_used:
            pack_index = lambda x: x
        else:
            pack_index = lambda x: 0xFFFF if x == 0xFFFF else (x + vertex_offset)

        # Set up the pointer for the next set of indices
        triangle_indices = IndicesStruct(
            index_offset=len(index_buffer),
            index_count=len(gmd_mesh.triangle_indices)
        )
        # then add them to the data
        index_buffer += [pack_index(x) for x in gmd_mesh.triangle_indices]

        # Set up the pointer for the next set of indices
        triangle_strip_noreset_indices = IndicesStruct(
            index_offset=len(index_buffer),
            index_count=len(gmd_mesh.triangle_strip_noreset_indices)
        )
        # then add them to the data
        index_buffer += [pack_index(x) for x in gmd_mesh.triangle_strip_noreset_indices]

        # Set up the pointer for the next set of indices
        triangle_strip_reset_indices = IndicesStruct(
            index_offset=len(index_buffer),
            index_count=len(gmd_mesh.triangle_strip_reset_indices)
        )
        # then add them to the data
        index_buffer += [pack_index(x) for x in gmd_mesh.triangle_strip_reset_indices]

        mesh_arr.append(MeshStruct_YK1(
            index=len(mesh_arr),
            attribute_index=rearranged_data.attribute_set_id_to_index[id(gmd_mesh.attribute_set)],
            vertex_buffer_index=buffer_idx,
            object_index=object_index,
            node_index=node_index,

            matrixlist_offset=packed_mesh_matrix_strings_index[tuple(matrix_list)] if matrix_list else 0,
            matrixlist_length=len(matrix_list),

            vertex_offset=vertex_offset,
            vertex_count=vertex_count,

            triangle_list_indices=triangle_indices,
            noreset_strip_indices=triangle_strip_noreset_indices,
            reset_strip_indices=triangle_strip_reset_indices,
        ))

    # Build per-object structs and their drawlists: each drawlist is
    # (mesh count, 0) then (attribute index, mesh index) pairs.
    obj_arr = []
    # This isn't going to have duplicates -> don't bother with the packing
    drawlist_bytearray = bytearray()
    touched_meshes = set()
    print(rearranged_data.ordered_objects)
    for i, obj in enumerate(rearranged_data.ordered_objects):

        mesh_bounds = combine_bounds([bounds_of(gmd_mesh) for gmd_mesh in obj.mesh_list])
        node_index = rearranged_data.node_id_to_node_index[id(obj)]

        drawlist_rel_ptr = len(drawlist_bytearray)
        c_uint16.pack(file_big_endian, len(obj.mesh_list), drawlist_bytearray)
        c_uint16.pack(file_big_endian, 0, drawlist_bytearray)
        for mesh in obj.mesh_list:
            c_uint16.pack(file_big_endian, rearranged_data.attribute_set_id_to_index[id(mesh.attribute_set)],
                          drawlist_bytearray)
            c_uint16.pack(file_big_endian, rearranged_data.mesh_id_to_index[id(mesh)], drawlist_bytearray)
            touched_meshes.add(id(mesh))

        print(f"object struct {i}")
        obj_arr.append(ObjectStruct_YK1(
            index=i,
            node_index_1=node_index,
            node_index_2=node_index,  # TODO: This could be a matrix index - I'm pretty sure those are interchangeable
            drawlist_rel_ptr=drawlist_rel_ptr,

            bbox=mesh_bounds,
        ))
    # Sanity check: every exported mesh must appear in exactly one drawlist
    if len(touched_meshes) != len(mesh_arr):
        error.fatal(f"Didn't export drawlists for all meshes")
    overall_bounds = combine_bounds(obj.bbox for obj in obj_arr)

    material_arr = []
    for gmd_material in rearranged_data.ordered_materials:
        material_arr.append(gmd_material.port_to_version(version_properties.major_version).origin_data)
    unk12_arr = []
    unk14_arr = []
    attribute_arr = []
    # DRAGON ENGINE DIFFERENCE - ordered textures
    # duplicates are allowed, each attribute set *must* use contiguous texture indices.
    ordered_texture_arr = []
    def make_texture_index(name: str):
        # Append a new texture entry per use (duplicates allowed) so each
        # attribute set's texture indices are contiguous; -1 means "no texture".
        if name:
            idx = len(ordered_texture_arr)
            ordered_texture_arr.append(ChecksumStrStruct.make_from_str(name))
            return TextureIndexStruct_Dragon(idx)
        return TextureIndexStruct_Dragon(-1)
    #make_texture_index = lambda s: TextureIndexStruct_Dragon(rearranged_data.texture_names_index[s] if s else -1)
    for i, gmd_attribute_set in enumerate(rearranged_data.ordered_attribute_sets):
        unk12_arr.append(Unk12Struct(
            data=gmd_attribute_set.unk12.float_data#.port_to_version(version_properties.major_version).float_data
            if gmd_attribute_set.unk12 else GMDUnk12.get_default()
        ))
        unk14_arr.append(Unk14Struct(
            # NOTE(review): default comes from GMDUnk12, not GMDUnk14 - looks
            # like a copy/paste slip; confirm which default Unk14Struct expects.
            data=gmd_attribute_set.unk14.int_data#port_to_version(version_properties.major_version).int_data
            if gmd_attribute_set.unk14 else GMDUnk12.get_default()
        ))

        mesh_range = rearranged_data.attribute_set_id_to_mesh_index_range[id(gmd_attribute_set)]
        texture_index = AttributeStruct_Dragon.calculate_texture_count(
            texture_diffuse=(gmd_attribute_set.texture_diffuse),
            texture_refl=(gmd_attribute_set.texture_refl),
            texture_multi=(gmd_attribute_set.texture_multi),
            texture_unk1=(gmd_attribute_set.texture_unk1),
            texture_ts=(gmd_attribute_set.texture_rs),
            texture_normal=(gmd_attribute_set.texture_normal),
            texture_rt=(gmd_attribute_set.texture_rt),
            texture_rd=(gmd_attribute_set.texture_rd),
        )
        attribute_arr.append(AttributeStruct_Dragon(
            index=i,
            material_index=rearranged_data.material_id_to_index[id(gmd_attribute_set.material)],
            shader_index=rearranged_data.shader_names_index[gmd_attribute_set.shader.name],

            # Which meshes use this material - offsets in the Mesh_YK1 array
            mesh_indices_start=mesh_range[0],
            mesh_indices_count=mesh_range[1] - mesh_range[0],

            texture_init_count=texture_index,  # TODO: Set this properly?
            flags=gmd_attribute_set.attr_flags,
            extra_properties=gmd_attribute_set.attr_extra_properties,

# DRAGON ENGINE CHANGE - TEXTURES MUST BE DECLARED IN ORDER TO MAKE SURE THE RANGE IS CORRECT
            texture_diffuse=make_texture_index(gmd_attribute_set.texture_diffuse),
            texture_multi=make_texture_index(gmd_attribute_set.texture_multi),
            texture_normal=make_texture_index(gmd_attribute_set.texture_normal),
            texture_rd=make_texture_index(gmd_attribute_set.texture_rd),
            texture_unk1=make_texture_index(gmd_attribute_set.texture_unk1),
            texture_rt=make_texture_index(gmd_attribute_set.texture_rt),
            texture_ts=make_texture_index(gmd_attribute_set.texture_rs),  # TODO: ugh, name mismatch
            texture_refl=make_texture_index(gmd_attribute_set.texture_refl),

            # NOTE(review): unk1_always_1 is set to 0 and unk2_always_0 to 1 -
            # the names and values disagree; confirm against known-good files.
            unk1_always_1=0,
            unk2_always_0=1,
            unk3_always_0=0
        ))

    file_endian_check = 1 if file_big_endian else 0
    vertex_endian_check = 1 if vertices_big_endian else 0

    flags = list(base_flags)
    # Bit 31 of flags[5] marks 16-bit bone indices in matrix strings
    if int16_bone_indices:
        flags[5] |= 0x8000_0000
    else:
        flags[5] &= ~0x8000_0000
    # TODO: This is in all(?) Yakuza Dragon files
    # It could be worth passing on the flags from original files if we're still exporting "over" them
    flags[5] |= 0x22
    flags[4] = 0x32b7c266 # TODO - wtf

    return FileData_Dragon(
        magic="GSGM",
        file_endian_check=file_endian_check,
        vertex_endian_check=vertex_endian_check,
        version_combined=version_properties.combined_version(),

        name=ChecksumStrStruct.make_from_str(scene.name),

        overall_bounds=overall_bounds,

        node_arr=node_arr,
        obj_arr=obj_arr,
        mesh_arr=mesh_arr,
        attribute_arr=attribute_arr,
        material_arr=material_arr,
        matrix_arr=rearranged_data.ordered_matrices,
        vertex_buffer_arr=vertex_buffer_arr,
        vertex_data=bytes(vertex_data_bytearray),
        texture_arr=ordered_texture_arr, # DRAGON ENGINE DIFFERENCE
        shader_arr=rearranged_data.shader_names,
        node_name_arr=rearranged_data.node_names,
        index_data=index_buffer,
        object_drawlist_bytes=bytes(drawlist_bytearray),
        mesh_matrixlist_bytes=packed_mesh_matrixlists,

        unk12=unk12_arr,
        unk13=rearranged_data.root_node_indices,
        unk14=unk14_arr,
        flags=flags,
    )
# Example 9
def arrange_data_for_export(scene: GMDScene,
                            error: ErrorReporter) -> RearrangedData:
    # Note - flags
    # many bones flag is important, but so are the others - look into which ones are supposed to be there
    # is relative-indexing set in a flag?

    # ordered_nodes = []
    # node_id_to_node_index = {}
    # ordered_matrices = []
    # node_id_to_matrix_index = {}
    # root_node_indices = []
    #
    # skinned_objects = list(scene.skinned_objects.depth_first_iterate())
    # unskinned_objects = list(scene.unskinned_objects.depth_first_iterate())

    ordered_nodes = []
    ordered_matrices = []
    ordered_skinned_objects = []
    ordered_unskinned_objects = []

    root_node_indices = []
    node_id_to_node_index = {}
    node_id_to_object_index = {}
    node_id_to_matrix_index = {}

    texture_names = set()
    shader_names = set()
    node_names = set()

    # Order the nodes
    # Depth-first indexing
    # Track touched nodes in set T(n)?
    root_gmd_nodes = scene.overall_hierarchy.roots
    for i, gmd_node in enumerate(depth_first_iterate(root_gmd_nodes)):
        # stackop = none
        # if has parent and all other children of your parent have been touched - stackop += pop
        # the depth_first_iterate iterates through children in order
        #   -> if we are the last child, all others must have been touched
        want_pop = bool(
            gmd_node.parent) and gmd_node.parent.children[-1] is gmd_node
        # if not leaf: stackop += push
        want_push = bool(gmd_node.children)

        stack_op = NodeStackOp.NoOp
        if want_pop:
            stack_op = NodeStackOp.Pop
            if want_push:
                stack_op = NodeStackOp.PopPush
        elif want_push:
            stack_op = NodeStackOp.Push

        if len(gmd_node.name.encode("shift-jis")) > 30:
            error.fatal(
                f"Node {gmd_node.name} has a name that's longer than 30 bytes long. Please shorten it!"
            )

        # emit (node, stackop)
        ordered_nodes.append((gmd_node, stack_op))
        node_id_to_node_index[id(gmd_node)] = i

        # if node is instance of GMDObject (skinned or unskinned) add to ordered_objects
        if isinstance(gmd_node, GMDSkinnedObject):
            ordered_skinned_objects.append(gmd_node)

        if isinstance(gmd_node, GMDUnskinnedObject):
            ordered_unskinned_objects.append(gmd_node)

        if isinstance(gmd_node, GMDSkinnedObject) and not gmd_node.mesh_list:
            error.fatal(
                f"Skinned Object {gmd_node.name} has no meshes, cannot export")

        if isinstance(gmd_node, GMDUnskinnedObject
                      ) and not gmd_node.children and not gmd_node.mesh_list:
            print(
                f"Unskinned object {gmd_node.name} has no meshes and no children, expected a child or mesh to be present."
            )

        # if node is bone or unskinned, emit a matrix
        if isinstance(gmd_node, (GMDBone, GMDUnskinnedObject)):
            node_id_to_matrix_index[id(gmd_node)] = len(ordered_matrices)
            ordered_matrices.append(gmd_node.matrix)
        # else:
        #     # also emit an identity matrix for skinned meshes just in case - it can't hurt
        #     node_id_to_matrix_index[id(gmd_node)] = len(ordered_matrices)
        #     ordered_matrices.append(Matrix.Identity(4))

        # if node has no parent, add index to roots
        if not gmd_node.parent:
            root_node_indices.append(i)

        # Add name to node names
        node_names.add(gmd_node.name)

    # Put unskinned objects before skinned ones
    # Skinned objects don't have matrices, so don't put them before things that do, because it's a sequential id and it could go wrong.
    ordered_objects = ordered_unskinned_objects + ordered_skinned_objects
    node_id_to_object_index = build_index_mapping(ordered_objects, key=id)

    # make sure to maintain original object order for scene
    # involves making sure the DFA happens with objects in order

    # collect meshes
    meshes: List[GMDMesh] = [
        mesh for obj in ordered_objects for mesh in obj.mesh_list
    ]

    for mesh in meshes:
        shader_names.add(mesh.attribute_set.shader.name)

        if mesh.attribute_set.texture_diffuse:
            texture_names.add(mesh.attribute_set.texture_diffuse)
        if mesh.attribute_set.texture_refl:
            texture_names.add(mesh.attribute_set.texture_refl)
        if mesh.attribute_set.texture_multi:
            texture_names.add(mesh.attribute_set.texture_multi)
        if mesh.attribute_set.texture_unk1:
            texture_names.add(mesh.attribute_set.texture_unk1)
        if mesh.attribute_set.texture_rs:
            texture_names.add(mesh.attribute_set.texture_rs)
        if mesh.attribute_set.texture_normal:
            texture_names.add(mesh.attribute_set.texture_normal)
        if mesh.attribute_set.texture_rt:
            texture_names.add(mesh.attribute_set.texture_rt)
        if mesh.attribute_set.texture_rd:
            texture_names.add(mesh.attribute_set.texture_rd)

    # build texture, node name pools
    texture_names, texture_names_index = build_pools(texture_names)
    node_names, node_names_index = build_pools(node_names)

    # Order attributesets first.
    #  then, order meshes based only on attributesets.
    #  then, order vertexlayouts independently.

    # ordering meshes:
    # build list of vertex buffer layouts to use
    # TODO - sorting order is required for Dragon Engine, but not other engines.
    # TODO - with this setup K2 kiryu has unused shader names?
    # YK2 kiryu sort order is by prefix (sd_o*, sd_d*, sd_c*, sd_b*) and then some unknown ordering within those groups.
    # This will achieve the requested ordering for prefixes, but not for other things. However, we only care about ordering transparent shaders together at the end.
    def compare_attr_sets(a1: GMDAttributeSet, a2: GMDAttributeSet):
        a1_prefix = re.match(r'^[a-z]+_[a-z]', a1.shader.name).group(0)
        a2_prefix = re.match(r'^[a-z]+_[a-z]', a2.shader.name).group(0)

        print(f"'{a1_prefix}' '{a2_prefix}'")

        if a1_prefix < a2_prefix:
            # sort by inverted prefix first
            return 1
        elif a1_prefix > a2_prefix:
            # sort by inverted prefix first
            return -1
        else:
            # just sort by name???
            if a1.shader.name > a2.shader.name:
                return 1
            elif a1.shader.name < a2.shader.name:
                return -1
            else:
                return 0

    # Order the attribute sets, and get a nice order for shaders too
    expected_attribute_set_order = sorted(
        {id(m.attribute_set): m.attribute_set
         for m in meshes}.values(),
        key=functools.cmp_to_key(compare_attr_sets))
    shader_names = [a.shader.name for a in expected_attribute_set_order]
    # remove dupes
    shader_names = list(dict.fromkeys(shader_names))
    print("\n".join(x for x in shader_names))
    print()
    shader_names, shader_names_index = build_pools(shader_names)
    print("\n".join(x.text for x in shader_names))
    print(shader_names_index)

    known_vertex_layouts_set: Set[GMDVertexBufferLayout] = {
        mesh.vertices_data.layout
        for mesh in meshes
    }
    # sort by descending flags int value (?)
    known_vertex_layouts_and_flags = [(l,
                                       generate_vertex_layout_packing_flags(l))
                                      for l in known_vertex_layouts_set]
    known_vertex_layouts_and_flags.sort(
        key=lambda l_with_flags: l_with_flags[1], reverse=True)
    vertex_layout_groups = []
    for layout, flag in known_vertex_layouts_and_flags:
        meshes_for_buffer = [
            m for m in meshes
            if m.attribute_set.shader.vertex_buffer_layout == layout
        ]
        # sort meshes by id(material) - just to group the common materials together
        meshes_for_buffer.sort(
            key=lambda m: expected_attribute_set_order.index(m.attribute_set))
        # emit buffer_layout, meshes_for_buffer
        vertex_layout_groups.append((layout, flag, meshes_for_buffer))

    #ordered_meshes = sum([ms for _, _, ms in vertex_layout_groups], [])
    ordered_meshes = meshes[:]
    ordered_meshes.sort(
        key=lambda m: expected_attribute_set_order.index(m.attribute_set))
    mesh_id_to_index = build_index_mapping(ordered_meshes, key=id)

    mesh_id_to_object_index = {}
    # These are only for skinned meshes
    mesh_id_to_matrixlist = {}
    mesh_matrixlist_set = set()
    for object_idx, object in enumerate(ordered_objects):
        for mesh in object.mesh_list:
            if id(mesh) in mesh_id_to_object_index:
                error.fatal(
                    f"Mesh is mapped to two objects {object.name} and {ordered_objects[mesh_id_to_object_index[id(mesh)]].name}"
                )
            mesh_id_to_object_index[id(mesh)] = object_idx

            if isinstance(object, GMDSkinnedObject):
                if not isinstance(mesh, GMDSkinnedMesh):
                    error.fatal(
                        f"SkinnedObject {object.name} has unskinned mesh")
                matrixlist = tuple([
                    node_id_to_matrix_index[id(bone)]
                    for bone in mesh.relevant_bones
                ])
                mesh_id_to_matrixlist[id(mesh)] = matrixlist
                mesh_matrixlist_set.add(matrixlist)

    mesh_matrixlist = list(mesh_matrixlist_set)
    mesh_matrixlist_index = build_index_mapping(mesh_matrixlist)
    # mesh_id_to_matrix_string_index = {
    #     mesh_id:mesh_matrixlist.index(matrixlist)
    #     for mesh_id, matrixlist in mesh_id_to_matrixlist.items()
    # }

    if set(mesh_id_to_index.keys()) != set(mesh_id_to_object_index.keys()):
        error.fatal(
            "Somehow the mapping of mesh -> mesh index maps different meshes than the mesh -> object index"
        )

    # Order the attribute sets
    attribute_set_id_to_mesh_index_range = {}
    ordered_attribute_sets = []
    attr_index_start = -1
    for i, m in enumerate(ordered_meshes):
        if not ordered_attribute_sets:
            ordered_attribute_sets.append(m.attribute_set)
            attr_index_start = i
        elif m.attribute_set != ordered_attribute_sets[-1]:
            curr_attribute_set = ordered_attribute_sets[-1]
            attr_index_end = i
            attribute_set_id_to_mesh_index_range[id(curr_attribute_set)] = (
                attr_index_start, attr_index_end)
            attr_index_start = i
            ordered_attribute_sets.append(m.attribute_set)
    if ordered_attribute_sets:
        attribute_set_id_to_mesh_index_range[id(
            ordered_attribute_sets[-1])] = (attr_index_start,
                                            len(ordered_meshes))
    if ordered_attribute_sets != expected_attribute_set_order:
        error.recoverable(
            f"Export Error - Attribute Sets were reordered from the intended order!"
        )

    # make index mapping for ordered_materials
    attribute_set_id_to_index = build_index_mapping(ordered_attribute_sets,
                                                    key=id)

    # Order the materials
    material_ids = set()
    ordered_materials = []
    for attribute_set in ordered_attribute_sets:
        if id(attribute_set.material) not in material_ids:
            ordered_materials.append(attribute_set.material)
            material_ids.add(id(attribute_set.material))
    material_id_to_index = build_index_mapping(ordered_materials, key=id)

    # TODO: Build drawlists?

    # Build matrixlists
    # mesh_matrix_index_list_set = set()
    # mesh_id_to_matrixlist: Dict[int, List[int]] = {}
    # for mesh in ordered_meshes:
    #     if not isinstance(mesh, GMDSkinnedMesh):
    #         continue
    #
    #     matrix_list = [node_id_to_node_index[id(bone)] for bone in mesh.relevant_bones]
    #     mesh_matrix_index_list_set.add(tuple(matrix_list))
    #     mesh_id_to_matrixlist[id(mesh)] = matrix_list
    # mesh_matrixlist = [list(s) for s in mesh_matrix_index_list_set]
    # mesh_matrixlist_index = build_index_mapping(mesh_matrixlist, key=tuple)

    # now all arrangements should be made - next is to port into the respective file formats
    # this is for tomorrow tho

    return RearrangedData(
        ordered_nodes=ordered_nodes,
        ordered_matrices=ordered_matrices,
        ordered_objects=ordered_objects,
        root_node_indices=root_node_indices,
        node_id_to_node_index=node_id_to_node_index,
        node_id_to_object_index=node_id_to_object_index,
        object_id_to_matrix_index=node_id_to_matrix_index,
        texture_names=texture_names,
        texture_names_index=texture_names_index,
        shader_names=shader_names,
        shader_names_index=shader_names_index,
        node_names=node_names,
        node_names_index=node_names_index,

        # Tuple of (layout, layout_vertex_packing_flags, meshes)
        vertex_layout_groups=vertex_layout_groups,
        ordered_meshes=ordered_meshes,
        mesh_id_to_index=mesh_id_to_index,
        mesh_id_to_object_index=mesh_id_to_object_index,
        # These are only for skinned meshes
        mesh_id_to_matrixlist=mesh_id_to_matrixlist,
        mesh_matrixlist=mesh_matrixlist,
        mesh_matrixlist_index=mesh_matrixlist_index,
        ordered_attribute_sets=ordered_attribute_sets,
        attribute_set_id_to_index=attribute_set_id_to_index,
        # List of [start, end_exclusive) ranges
        attribute_set_id_to_mesh_index_range=
        attribute_set_id_to_mesh_index_range,
        ordered_materials=ordered_materials,
        material_id_to_index=material_id_to_index,
    )
Esempio n. 10
0
def gmd_meshes_to_bmesh(gmd_meshes: Union[List[GMDMesh], List[GMDSkinnedMesh]],
                        vertex_group_indices: Dict[str, int], attr_idx: int,
                        gmd_to_blender_world: Matrix, fuse_vertices: bool,
                        error: ErrorReporter):
    """
    Merge a list of GMD meshes (all skinned or all unskinned) into a single BMesh.

    Positions/normals are transformed by gmd_to_blender_world. For skinned meshes the
    per-mesh bone indices are remapped into one shared "relevant_bones" list before the
    weights are written into a deform layer. Col0/Col1/TangentW and non-2D UVs are stored
    in vertex-color layers; the primary 2D UV goes into a real UV layer.

    :param gmd_meshes: Meshes to merge. Must be homogeneous - all GMDSkinnedMesh or all GMDMesh.
    :param vertex_group_indices: Maps bone name -> Blender vertex group index for deform weights.
    :param attr_idx: Blender material index assigned to every created face.
    :param gmd_to_blender_world: Matrix transforming GMD space into Blender world space.
    :param fuse_vertices: If True, vertices with identical (position, normal, bone weights) are merged.
    :param error: Error reporter - fatal() on invalid input, recoverable() on bad triangles/UV data.
    :return: The constructed BMesh.
    """
    if len(gmd_meshes) == 0:
        error.fatal("Called make_merged_gmd_mesh with 0 meshes!")

    is_skinned = isinstance(gmd_meshes[0], GMDSkinnedMesh)
    print(
        f"make_merged_gmd_mesh called with {gmd_meshes} skinned={is_skinned} fusing={fuse_vertices}"
    )

    # Fix up bone mappings if the meshes are skinned
    if is_skinned:
        if not all(isinstance(x, GMDSkinnedMesh) for x in gmd_meshes):
            error.fatal(
                "Called gmd_meshes_to_bmesh with a mix of skinned and unskinned meshes"
            )

        gmd_meshes = cast(List[GMDSkinnedMesh], gmd_meshes)

        # Skinned meshes are more complicated because vertices reference bones using a *per-mesh* index into that "relevant_bones" list
        # These indices have to be changed for the merged mesh, because each mesh will usually have a different "relevant_bones" list
        relevant_bones = gmd_meshes[0].relevant_bones[:]
        merged_vertex_buffer = gmd_meshes[0].vertices_data[:]
        for gmd_mesh in gmd_meshes[1:]:
            # Map this mesh's local bone index -> index in the merged relevant_bones list
            bone_index_mapping = {}
            for i, bone in enumerate(gmd_mesh.relevant_bones):
                if bone not in relevant_bones:
                    relevant_bones.append(bone)
                bone_index_mapping[i] = relevant_bones.index(bone)

            def remap_weight(bone_weight: BoneWeight):
                # If the weight is 0 the bone is unused, so map it to a consistent 0.
                if bone_weight.weight == 0:
                    return BoneWeight(0, weight=0.0)
                else:
                    return BoneWeight(bone_index_mapping[bone_weight.bone],
                                      bone_weight.weight)

            # Only the newly-appended vertices need their bone indices rewritten
            index_start_to_adjust_bones = len(merged_vertex_buffer)
            merged_vertex_buffer += gmd_mesh.vertices_data
            for i in range(index_start_to_adjust_bones,
                           len(merged_vertex_buffer)):
                old_weights = merged_vertex_buffer.bone_weights[i]
                merged_vertex_buffer.bone_weights[i] = (
                    remap_weight(old_weights[0]),
                    remap_weight(old_weights[1]),
                    remap_weight(old_weights[2]),
                    remap_weight(old_weights[3]),
                )
    else:
        if any(isinstance(x, GMDSkinnedMesh) for x in gmd_meshes):
            error.fatal(
                "Called gmd_meshes_to_bmesh with a mix of skinned and unskinned meshes"
            )

        merged_vertex_buffer = gmd_meshes[0].vertices_data[:]
        for gmd_mesh in gmd_meshes[1:]:
            merged_vertex_buffer += gmd_mesh.vertices_data
        relevant_bones = None

    # merged buffer index -> bmesh vertex index (identity unless fuse_vertices collapses duplicates)
    merged_idx_to_bmesh_idx: Dict[int, int] = {}
    # (mesh index, per-mesh vertex index) -> bmesh vertex index, used when emitting faces
    mesh_vtx_idx_to_bmesh_idx: Dict[Tuple[int, int], int] = {}
    bm = bmesh.new()
    deform = bm.verts.layers.deform.new(
        "Vertex Weights") if is_skinned else None
    if deform and (relevant_bones is None):
        error.fatal(
            f"Mismatch between deform/is_skinned, and the existence of relevant_bones"
        )

    def add_vertex_to_bmesh(i: int):
        # Create the bmesh vertex for merged buffer index i, with position, normal, and weights.
        vert = bm.verts.new(
            gmd_to_blender_world @ merged_vertex_buffer.pos[i].xyz)
        if merged_vertex_buffer.normal:
            # apply the matrix to normal.xyz.resized(4) to set the w component to 0 - normals cannot be translated!
            # Just using .xyz would make blender apply a translation (TODO - check this?)
            vert.normal = (gmd_to_blender_world @ (
                merged_vertex_buffer.normal[i].xyz.resized(4))).xyz
        if deform:
            for bone_weight in merged_vertex_buffer.bone_weights[i]:
                if bone_weight.weight > 0:
                    if bone_weight.bone >= len(relevant_bones):
                        print(
                            f"bone out of bounds - bone {bone_weight.bone} in {[b.name for b in relevant_bones]}"
                        )
                        print(f"mesh len = {len(merged_vertex_buffer)}")
                    vertex_group_index = vertex_group_indices[relevant_bones[
                        bone_weight.bone].name]
                    vert[deform][vertex_group_index] = bone_weight.weight

    # Set up the indexing table inside the bmesh so lookups work
    if fuse_vertices:
        # Find unique (position, normal, boneweight) pairs, assign to BMesh vertex indices
        vert_indices = {}
        for i in range(len(merged_vertex_buffer)):
            # BUGFIX: the layout may not have normals - guard like add_vertex_to_bmesh does,
            # otherwise indexing a None normal list crashes here.
            vert_info = (merged_vertex_buffer.pos[i].xyz.copy().freeze(),
                         merged_vertex_buffer.normal[i].xyz.copy().freeze()
                         if merged_vertex_buffer.normal else None,
                         merged_vertex_buffer.bone_weights[i]
                         if is_skinned else None)
            if vert_info in vert_indices:
                merged_idx_to_bmesh_idx[i] = vert_indices[vert_info]
            else:
                next_idx = len(bm.verts)
                vert_indices[vert_info] = next_idx
                merged_idx_to_bmesh_idx[i] = next_idx
                add_vertex_to_bmesh(i)
    else:
        # Assign each vertex in each mesh to the bmesh
        for i in range(len(merged_vertex_buffer)):
            merged_idx_to_bmesh_idx[i] = i
            add_vertex_to_bmesh(i)

    # Meshes were concatenated into the merged buffer in order, so walk them in the
    # same order to build the (mesh, local vertex) -> bmesh vertex mapping.
    merged_idx = 0
    for (m_i, gmd_mesh) in enumerate(gmd_meshes):
        for v_i in range(len(gmd_mesh.vertices_data)):
            mesh_vtx_idx_to_bmesh_idx[(
                m_i, v_i)] = merged_idx_to_bmesh_idx[merged_idx]
            merged_idx += 1

    bm.verts.ensure_lookup_table()
    bm.verts.index_update()

    # For Col0, Col1, TangentW, UVs
    #   Create layer
    # Color0
    col0_layer = None
    if merged_vertex_buffer.col0:
        col0_layer = bm.loops.layers.color.new("Color0")

    # Color1
    col1_layer = None
    if merged_vertex_buffer.col1:
        col1_layer = bm.loops.layers.color.new("Color1")

    # Normal W data
    tangent_w_layer = None
    if merged_vertex_buffer.layout.tangent_storage in [
            VecStorage.Vec4Half, VecStorage.Vec4Fixed, VecStorage.Vec4Full
    ]:
        tangent_w_layer = bm.loops.layers.color.new("TangentW")

    # UVs
    # Yakuza has 3D/4D UV coordinates. Blender doesn't support this in the UV channel.
    # The solution is to have a deterministic "primary UV" designation that can only be 2D
    # This is the only UV loaded into the actual UV layer, the rest are all loaded into the vertex colors with special names.
    primary_uv_i = merged_vertex_buffer.layout.get_primary_uv_index()
    uv_layers = []
    for i, uv in enumerate(merged_vertex_buffer.uvs):
        print(
            f"Generating layer for UV {i} with storage {merged_vertex_buffer.layout.uv_storages[i]}, componentcount = {VecStorage.component_count(merged_vertex_buffer.layout.uv_storages[i])}"
        )
        if i == primary_uv_i:
            print(f"Making layer as UV layer")
            uv_layers.append(bm.loops.layers.uv.new(f"UV_Primary"))
        elif VecStorage.component_count(
                merged_vertex_buffer.layout.uv_storages[i]) == 2:
            print(f"Making layer as UV layer")
            uv_layers.append(bm.loops.layers.uv.new(f"UV{i}"))
        else:
            # 3D/4D UVs go into a color layer - see the note above
            uv_layers.append(bm.loops.layers.color.new(f"UV{i}"))

    # For mesh in meshes
    # Set of sorted triangle index tuples already emitted, to dedupe shared faces
    triangles: Set[Tuple[int, int, int]] = set()

    def add_face_to_bmesh(face_idx: Tuple[int, int, int]):
        try:
            # This can throw ValueError if the triangle is "degenerate" - i.e. has two vertices that are the same
            # [1, 2, 3] is fine
            # [1, 2, 2] is degenerate
            # This should never be called with degenerate triangles, but if there is one we skip it and recover.
            face = bm.faces.new((bm.verts[face_idx[0]], bm.verts[face_idx[1]],
                                 bm.verts[face_idx[2]]))
        except ValueError as e:
            error.recoverable(
                f"Adding face {face_idx} resulted in ValueError - This should have been a valid triangle. Vert count: {len(bm.verts)}.\n{e}"
            )
        else:
            face.smooth = True
            face.material_index = attr_idx
            triangles.add(tuple(sorted(face_idx)))
            return face

    for m_i, gmd_mesh in enumerate(gmd_meshes):
        # For face in mesh
        for ti in range(0, len(gmd_mesh.triangle_indices), 3):
            tri_idxs = gmd_mesh.triangle_indices[ti:ti + 3]
            # 0xFFFF is the triangle-strip restart marker and must never appear in a plain index list
            if 0xFFFF in tri_idxs:
                error.recoverable(
                    f"Found an 0xFFFF index inside a triangle_indices list! That shouldn't happen."
                )
                continue

            remapped_tri_idxs = tuple(mesh_vtx_idx_to_bmesh_idx[(m_i, v_i)]
                                      for v_i in tri_idxs)
            # If face doesn't already exist, and is valid
            if len(set(remapped_tri_idxs)) != 3:
                continue
            if tuple(sorted(remapped_tri_idxs)) in triangles:
                continue
            # Create face
            face = add_face_to_bmesh(remapped_tri_idxs)
            if not face:
                # Creating the face failed for some reason
                continue
            # Apply Col0, Col1, TangentW, UV for each loop
            # Loop data is read from the ORIGINAL mesh's buffer via the un-remapped index,
            # so fused vertices can still carry per-face attribute values.
            if col0_layer:
                for (v_i, loop) in zip(tri_idxs, face.loops):
                    color = gmd_mesh.vertices_data.col0[v_i]
                    loop[col0_layer] = (color.x, color.y, color.z, color.w)

            if col1_layer:
                for (v_i, loop) in zip(tri_idxs, face.loops):
                    color = gmd_mesh.vertices_data.col1[v_i]
                    loop[col1_layer] = (color.x, color.y, color.z, color.w)

            if tangent_w_layer:
                for (v_i, loop) in zip(tri_idxs, face.loops):
                    tangent_w = gmd_mesh.vertices_data.tangent[v_i].w
                    # Convert from [-1, 1] to [0, 1]
                    # Not sure why, presumably numbers <0 aren't valid in a color? unsure tho
                    loop[tangent_w_layer] = ((tangent_w + 1) / 2, 0, 0, 0)

            for uv_i, uv_layer in enumerate(uv_layers):
                if VecStorage.component_count(
                        merged_vertex_buffer.layout.uv_storages[uv_i]) == 2:
                    for (v_i, loop) in zip(tri_idxs, face.loops):
                        original_uv = gmd_mesh.vertices_data.uvs[uv_i][v_i]
                        # Blender UVs have a flipped V axis relative to GMD
                        loop[uv_layer].uv = (original_uv.x,
                                             1.0 - original_uv.y)
                else:
                    for (v_i, loop) in zip(tri_idxs, face.loops):
                        original_uv = gmd_mesh.vertices_data.uvs[uv_i][v_i]
                        loop[uv_layer] = original_uv.resized(4)
                        if any(x < 0 or x > 1 for x in original_uv):
                            error.recoverable(
                                f"Data in UV{uv_i} is outside the range of values Blender can store. Expected values between 0 and 1, got {original_uv}"
                            )
    return bm
Esempio n. 11
0
def split_mesh_by_material(mesh_name: str, mesh: bpy.types.Mesh, object_blender_transformation: Matrix, attribute_sets: List[GMDAttributeSet], skinned: bool,
                           vertex_group_mapping: Dict[int, GMDBone], error: ErrorReporter) -> Union[
    List[SubmeshBuilder], List[SkinnedSubmeshBuilder]]:
    """
    Split a Blender mesh into one submesh builder per attribute set (material slot).

    Reads Color0/Color1/TangentW vertex-color layers, the "UV_Primary" UV layer (falling
    back to the active UV layer), and any "UV<n>" layers (UV or vertex-color), then walks
    every loop triangle and funnels its vertices into the builder for its material index.

    :param mesh_name: Name used in error messages.
    :param mesh: Triangulated Blender mesh (loop_triangles must be valid).
    :param object_blender_transformation: Object-to-world matrix applied to positions.
    :param attribute_sets: GMD attribute sets, indexed by Blender material index.
    :param skinned: If True, build SkinnedSubmeshBuilders with bone mappings.
    :param vertex_group_mapping: Blender vertex group index -> GMD bone (skinned only).
    :param error: Error reporter for recoverable per-face problems.
    :return: The non-empty builders, one per used attribute set.
    """
    col0_layer = mesh.vertex_colors["Color0"] if "Color0" in mesh.vertex_colors else None
    col1_layer = mesh.vertex_colors["Color1"] if "Color1" in mesh.vertex_colors else None
    tangent_w_layer = mesh.vertex_colors["TangentW"] if "TangentW" in mesh.vertex_colors else None

    uv_primary = "UV_Primary"
    uv_numbered_regex = re.compile(r'UV(\d+)')

    primary_uv_layer = mesh.uv_layers[uv_primary] if uv_primary in mesh.uv_layers else mesh.uv_layers.active
    # Numbered UVs can live in either UV layers (2D) or vertex-color layers (3D/4D)
    numbered_uv_layers = {}
    if mesh.uv_layers:
        for name, layer in mesh.uv_layers.items():
            match = uv_numbered_regex.match(name)
            if match:
                uv_i = int(match.group(1))
                if uv_i in numbered_uv_layers:
                    error.recoverable(f"Found multiple possible layers for UV{uv_i}, will take latest one")
                numbered_uv_layers[uv_i] = layer
    if mesh.vertex_colors:
        for name, layer in mesh.vertex_colors.items():
            match = uv_numbered_regex.match(name)
            if match:
                uv_i = int(match.group(1))
                if uv_i in numbered_uv_layers:
                    error.recoverable(f"Found multiple possible layers for UV{uv_i}, will take latest one")
                numbered_uv_layers[uv_i] = layer

    if skinned:
        #deform_layer = bm.verts.layers.deform.active

        # Build the per-submesh bone list and the vertex-group -> local-bone-index remap
        relevant_gmd_bones = []
        vertex_group_bone_index_map = {}
        for i, (vertex_group_idx, bone) in enumerate(vertex_group_mapping.items()):
            relevant_gmd_bones.append(bone)
            vertex_group_bone_index_map[vertex_group_idx] = i

        submesh_builders = [SkinnedSubmeshBuilder(attribute_set.shader.vertex_buffer_layout, i, relevant_gmd_bones)
                            for i, attribute_set in enumerate(attribute_sets)]
    else:
        #deform_layer = None
        vertex_group_bone_index_map = {}
        submesh_builders = [SubmeshBuilder(attribute_set.shader.vertex_buffer_layout, i)
                            for i, attribute_set in enumerate(attribute_sets)]

    # TODO Put these somewhere else
    # Blender -> GMD axis conversion (X flipped, Y and Z swapped)
    transformation_direction = Matrix((
        Vector((-1, 0, 0, 0)),
        Vector((0, 0, 1, 0)),
        Vector((0, 1, 0, 0)),
        Vector((0, 0, 0, 1)),
    ))
    transformation_position = transformation_direction @ object_blender_transformation

    # One fetcher per attribute set, since each may use a different vertex layout
    vertex_fetchers = []
    for attribute_set in attribute_sets:
        vertex_fetcher = VertexFetcher(mesh_name,
                                       attribute_set.shader.vertex_buffer_layout,
                                       transformation_position=transformation_position,
                                       transformation_direction=transformation_direction,
                                       vertex_group_bone_index_map=vertex_group_bone_index_map,
                                       mesh=mesh,

                                       #deform_layer=deform_layer,
                                       col0_layer=col0_layer,
                                       col1_layer=col1_layer,
                                       tangent_w_layer=tangent_w_layer,
                                       uv_primary=primary_uv_layer,
                                       uv_numbered=numbered_uv_layers,
                                       error=error)
        vertex_fetchers.append(vertex_fetcher)

    for tri_loops in mesh.loop_triangles:
        if not (0 <= tri_loops.material_index < len(attribute_sets)):
            # BUGFIX: the old message read tri_loops.vertices[i].face.material_index, which
            # raised NameError/AttributeError when the error fired. Report the value we tested.
            error.recoverable(
                f"Mesh {mesh_name} has a face with out-of-bounds material index {tri_loops.material_index}. It will be skipped!")
            continue

        builder = submesh_builders[tri_loops.material_index]
        vertex_fetcher = vertex_fetchers[tri_loops.material_index]

        def parse_loop_elem(i):
            # Deduplicates inside the builder; returns the submesh-local vertex index
            return builder.add_vertex(tri_loops.vertices[i],
                                      vertex_fetcher,
                                      tri_loops,
                                      i)

        triangle = (
            parse_loop_elem(0),
            parse_loop_elem(1),
            parse_loop_elem(2),
        )
        builder.add_triangle(triangle)

    # Drop builders for attribute sets no face actually used
    return [builder for builder in submesh_builders if len(builder.vertices)]
Esempio n. 12
0
def split_submesh_builder_by_bones(skinned_submesh_builder: SkinnedSubmeshBuilder, bone_limit: int, object_name: str, error: ErrorReporter) -> List[SkinnedSubmeshBuilder]:
    """
    Split a skinned submesh builder into pieces that each reference at most bone_limit bones.

    :param skinned_submesh_builder: Builder to split; its bone list is first reduced to used bones.
    :param bone_limit: Maximum number of bones a single submesh may reference (inclusive).
    :param object_name: Name used in error messages.
    :param error: Error reporter; fatal() if no bones are referenced or splitting stalls.
    :return: One or more builders, each referencing <= bone_limit bones.
    """
    skinned_submesh_builder.reduce_to_used_bones()
    if not skinned_submesh_builder.relevant_gmd_bones:
        error.fatal(f"A submesh of {object_name} does not reference any bones. Make sure all of the vertices of {object_name} have their bone weights correct!")
    if len(skinned_submesh_builder.relevant_gmd_bones) <= bone_limit:
        # Already within the limit - nothing to split
        return [skinned_submesh_builder]

    # Split SubmeshHelpers so that you never get >32 unique bones weighting a single submesh
    # This will always be possible, as any triangle can reference at most 12 bones (3 verts * 4 bones/vert)
    # so a naive solution of 2 triangles per SubmeshHelper will always reference at most 24 bones which is <32.

    x_too_many_bones = SkinnedSubmeshBuilderSubset.complete(skinned_submesh_builder)

    def bonesplit(x: SkinnedSubmeshBuilderSubset):
        # Greedily grow a bone set that stays within the limit
        bones = set()
        for tri in x.referenced_triangles:
            tri_bones = x.base.triangle_referenced_bones(tri)
            # BUGFIX: the old test (len(tri_bones) + len(bones) < bone_limit) double-counted
            # bones already in the set and was off-by-one - exactly bone_limit bones is
            # acceptable, matching the <= bone_limit early-return above. Test the real
            # union size instead.
            if len(bones | tri_bones) <= bone_limit:
                bones = bones.union(tri_bones)

        # Partition triangles: those fully covered by the chosen bones, and the rest
        x_withbones = SkinnedSubmeshBuilderSubset.empty(x.base)
        x_withoutbones = SkinnedSubmeshBuilderSubset.empty(x.base)
        for tri in x.referenced_triangles:
            tri_bones = x.base.triangle_referenced_bones(tri)
            if bones.issuperset(tri_bones):
                x_withbones.add_triangle(tri)
            else:
                x_withoutbones.add_triangle(tri)

        if len(x_withoutbones.referenced_triangles) == len(x.referenced_triangles):
            error.fatal("bonesplit() did not reduce triangle count!")

        return x_withbones, x_withoutbones

    # Start by selecting 32 bones.
    #   bones = {}
    #   for tri in submesh:
    #       tri_bones = tri.referenced_bones() (at max 24)
    #       if len(tri_bones) + len(bones) > 32
    #           break
    #       bones += tri_bones
    # This algorithm guarantees that at least one triangle uses ONLY those bones.
    # Then put all of the triangles that reference ONLY those bones in a new mesh.
    # Put the other triangles in a separate mesh. If they reference > 32 bones, apply the process again.
    # This splitting transformation bonesplit(x, bones) -> x_thosebones, x_otherbones will always produce x_otherbones with fewer triangles than x
    #   We know that at least one triangle uses only the selected bones
    #       => len(x_thosebones) >= 1
    #       len(x_otherbones) = len(x) - len(x_thosebones)
    #       => len(x_otherbones) <= len(x) - 1
    #       => len(x_otherbones) < len(x)
    # => applying bonesplit to x_otherbones recursively will definitely reduce the amount of triangles to 0
    # it will produce at maximum len(x) new meshes
    split_meshes = []
    while len(x_too_many_bones.referenced_triangles) > 0:
        new_submesh, x_too_many_bones = bonesplit(x_too_many_bones)
        split_meshes.append(new_submesh)

    # these can then be merged back together!!!!
    # TODO: Check if it's even worth it
    print(
        f"Mesh had >{bone_limit} bone references ({len(skinned_submesh_builder.relevant_gmd_bones)}) and was split into {len(split_meshes)} chunks")

    # Convert each triangle subset back into a standalone builder, dropping unused bones
    split_submeshes = []
    for split_mesh in split_meshes:
        print("\nSplitSubmeshSubset")
        print(f"ref-verts: {len(split_mesh.referenced_verts)} ref-tris: {len(split_mesh.referenced_triangles)}")
        split_submesh_builder = split_mesh.convert_to_submesh_builder()
        print("SplitSubmesh pre-reduce")
        print(f"ref-verts: {len(split_submesh_builder.vertices)} ref-tris: {len(split_submesh_builder.triangles)} ref-bones: {len(split_submesh_builder.relevant_gmd_bones)}")
        print("SplitSubmesh post-reduce")
        split_submesh_builder.reduce_to_used_bones()
        print(
            f"ref-verts: {len(split_submesh_builder.vertices)} ref-tris: {len(split_submesh_builder.triangles)} ref-bones: {len(split_submesh_builder.relevant_gmd_bones)}")
        print(split_submesh_builder.total_referenced_bones())
        split_submeshes.append(split_submesh_builder)
        print()

    return split_submeshes
Esempio n. 13
0
from dataclasses import dataclass