Example #1
0
def execute(dirpath, name_suffix, root_object, armature_object, skeleton_filepath, mesh_objects, model_locators,
            used_parts, used_materials, used_bones, used_terrain_points):
    """Executes export of PIM file for given data.
    :param dirpath: directory path for PIM file
    :type dirpath: str
    :param name_suffix: file name suffix
    :type name_suffix: str
    :param root_object: Blender SCS Root empty object
    :type root_object: bpy.types.Object
    :param armature_object: Blender Armature object belonging to this SCS game object
    :type armature_object: bpy.types.Object
    :param skeleton_filepath: relative file path of PIS file
    :type skeleton_filepath: str
    :param mesh_objects: all the meshes which should be exported for current game object
    :type mesh_objects: list of bpy.types.Object
    :param model_locators: all Blender empty objects which represent model locators and should be exported for current game object
    :type model_locators: list of bpy.types.Object
    :param used_parts: parts transitional structure for storing used parts inside this PIM export
    :type used_parts: io_scs_tools.exp.transition_structs.parts.PartsTrans
    :param used_materials: materials transitional structure for storing used materials inside this PIM export
    :type used_materials: io_scs_tools.exp.transition_structs.materials.MaterialsTrans
    :param used_bones: bones transitional structure for storing used bones inside this PIM export
    :type used_bones: io_scs_tools.exp.transition_structs.bones.BonesTrans
    :param used_terrain_points: terrain points transitional structure for storing used terrain points
    :type used_terrain_points: io_scs_tools.exp.transition_structs.terrain_points.TerrainPntsTrans
    :return: True if export was successful; False otherwise
    :rtype: bool
    """

    print("\n************************************")
    print("**      SCS PIM.EF Exporter      **")
    print("**      (c)2017 SCS Software      **")
    print("************************************\n")

    scs_globals = _get_scs_globals()

    # version number written into the PIM file header section
    format_version = 1

    # skinning data (bones + vertex weights) is exported only when an armature exists
    # and the SCS Root is flagged as animated
    is_skin_used = (armature_object and root_object.scs_props.scs_root_animated == "anim")

    pim_header = Header("", format_version, root_object.name)
    pim_global = Globall(used_parts.count(), skeleton_filepath)

    pim_materials = collections.OrderedDict()  # dict of Material class instances representing used materials
    """:type: dict[str, Material]"""
    pim_pieces = []  # list of Piece class instances representing mesh pieces
    """:type: list[Piece]"""
    pim_parts = {}  # list of Part class instances representing used parts
    """:type: dict[str, Part]"""
    pim_locators = []  # list of Locator class instances representing model locators
    """:type: list[Locator]"""

    objects_with_default_material = {}  # stores object names which has no material set
    missing_mappings_data = {}  # indicates if material doesn't have set any uv layer for export

    bones = skin = skin_stream = None
    if is_skin_used:
        # create bones data section
        bones = Bones()
        for bone in armature_object.data.bones:
            bones.add_bone(bone.name)
            used_bones.add(bone.name)

        # create skin data section
        skin_stream = SkinStream(SkinStream.Types.POSITION)
        skin = Skin(skin_stream)

    # create mesh object data sections
    for mesh_obj in mesh_objects:

        vert_groups = mesh_obj.vertex_groups

        # calculate faces flip state from all ancestors of current object
        # (a negative product of all inherited scale axes means the mesh is mirrored)
        scale_sign = 1
        parent = mesh_obj
        while parent and parent.scs_props.empty_object_type != "SCS_Root":

            for scale_axis in parent.scale:
                scale_sign *= scale_axis

            parent = parent.parent

        # -1 reverses vertex order of exported faces to compensate for mirrored scale
        winding_order = 1
        if scale_sign < 0:
            winding_order = -1

        # calculate transformation matrix for current object (root object transforms are always subtracted!)
        mesh_transf_mat = root_object.matrix_world.inverted() * mesh_obj.matrix_world
        """:type: mathutils.Matrix"""

        # calculate vertex position transformation matrix for this object
        pos_transf_mat = (Matrix.Scale(scs_globals.export_scale, 4) *
                          _scs_to_blend_matrix().inverted())
        """:type: mathutils.Matrix"""

        # calculate vertex normals transformation matrix for this object
        # NOTE: as normals will be read from none export prepared mesh we have to add rotation and scale from mesh transformation matrix
        _, rot, scale = mesh_transf_mat.decompose()
        scale_matrix_x = Matrix.Scale(scale.x, 3, Vector((1, 0, 0))).to_4x4()
        scale_matrix_y = Matrix.Scale(scale.y, 3, Vector((0, 1, 0))).to_4x4()
        scale_matrix_z = Matrix.Scale(scale.z, 3, Vector((0, 0, 1))).to_4x4()
        nor_transf_mat = (_scs_to_blend_matrix().inverted() *
                          rot.to_matrix().to_4x4() *
                          scale_matrix_x * scale_matrix_y * scale_matrix_z)
        """:type: mathutils.Matrix"""

        # get initial mesh and vertex groups for it
        mesh = _object_utils.get_mesh(mesh_obj)
        _mesh_utils.bm_prepare_mesh_for_export(mesh, mesh_transf_mat)

        # get extra mesh only for normals
        mesh_for_normals = _object_utils.get_mesh(mesh_obj)
        mesh_for_normals.calc_normals_split()

        missing_uv_layers = {}  # stores missing uvs specified by materials of this object
        missing_vcolor = False  # indicates if object is missing vertex color layer
        missing_vcolor_a = False  # indicates if object is missing vertex color alpha layer
        missing_skinned_verts = set()  # indicates if object is having only partial skin, which is not allowed in our models
        has_unnormalized_skin = False  # indicates if object has vertices which bones weight sum is smaller then one

        hard_edges = set()
        # one Piece per mesh object; its index is its position in the global pieces list
        mesh_piece = Piece(len(pim_pieces))
        """:type: Piece"""
        for poly in mesh.polygons:

            mat_index = poly.material_index

            # check material existence and decide what material name and effect has to be used
            if mat_index >= len(mesh_obj.material_slots) or mesh_obj.material_slots[mat_index].material is None:  # no material or invalid index
                material = None
                pim_mat_name = "_default_material_-_default_settings_"
                pim_mat_effect = "eut2.dif"
                objects_with_default_material[mesh_obj.name] = 1
            else:
                material = mesh_obj.material_slots[mat_index].material
                pim_mat_name = material.name
                pim_mat_effect = material.scs_props.mat_effect_name

            # create new pim material if material with that name doesn't yet exists
            if pim_mat_name not in pim_materials:
                pim_material = Material(len(pim_materials), pim_mat_name, pim_mat_effect, material)
                pim_materials[pim_mat_name] = pim_material
                used_materials.add(pim_mat_name, material)

            # per-polygon accumulators; filled once per loop (face corner) below
            piece_vert_indices = []
            vert_normals = []
            vert_uvs = []
            uvs_aliases = []
            uvs_names = collections.OrderedDict()
            vert_rgbas = []
            rgbas_names = collections.OrderedDict()
            tex_coord_alias_map = pim_materials[pim_mat_name].get_tex_coord_map()
            for loop_i in poly.loop_indices:

                loop = mesh.loops[loop_i]
                """:type: bpy.types.MeshLoop"""
                vert_i = loop.vertex_index

                # as we are already looping first find out if edge is hard and put it to set
                if mesh.edges[loop.edge_index].use_edge_sharp:
                    hard_edges.add(loop.edge_index)

                # get data of current vertex
                # 1. position -> mesh.vertices[loop.vertex_index].co
                position = tuple(pos_transf_mat * mesh.vertices[vert_i].co)

                # 2. normal -> loop.normal -> calc_normals_split() has to be called before
                normal = nor_transf_mat * mesh_for_normals.loops[loop_i].normal
                normal = tuple(Vector(normal).normalized())
                vert_normals.append(normal)

                # 3. uvs -> uv_lay = mesh.uv_layers[0].data; uv_lay[loop_i].uv
                uvs = []
                uvs_aliases = []
                if len(tex_coord_alias_map) < 1:  # no textures or none uses uv mapping in current material effect
                    uvs.append((0.0, 0.0))
                    uvs_names["generated"] = True
                    uvs_aliases.append(["_TEXCOORD0"])

                    # report missing mappings only on actual materials with textures using uv mappings
                    if material and pim_materials[pim_mat_name].uses_textures_with_uv():
                        if material.name not in missing_mappings_data:
                            missing_mappings_data[material.name] = {}

                        if mesh_obj.name not in missing_mappings_data[material.name]:
                            missing_mappings_data[material.name][mesh_obj.name] = 1
                else:

                    # export every uv layer of the mesh; tex_coord aliases are
                    # attached only to layers the material actually maps
                    for uv_lay_name in mesh.uv_layers.keys():

                        uv_lay = mesh.uv_layers[uv_lay_name]
                        uvs.append(_change_to_scs_uv_coordinates(uv_lay.data[loop_i].uv))
                        uvs_names[uv_lay_name] = True

                        aliases = []
                        if uv_lay_name in tex_coord_alias_map:
                            for alias_index in tex_coord_alias_map[uv_lay_name]:
                                aliases.append("_TEXCOORD" + str(alias_index))

                        uvs_aliases.append(aliases)

                vert_uvs.append(uvs)

                # 4. vcol -> vcol_lay = mesh.vertex_colors[0].data; vcol_lay[loop_i].color
                rgbas = []
                vcol_multi = mesh_obj.data.scs_props.vertex_color_multiplier
                if _MESH_consts.default_vcol not in mesh.vertex_colors:  # get RGB component of RGBA
                    vcol = (1.0,) * 3
                    missing_vcolor = True
                else:
                    color = mesh.vertex_colors[_MESH_consts.default_vcol].data[loop_i].color
                    vcol = (color[0] * 2 * vcol_multi, color[1] * 2 * vcol_multi, color[2] * 2 * vcol_multi)

                if _MESH_consts.default_vcol + _MESH_consts.vcol_a_suffix not in mesh.vertex_colors:  # get A component of RGBA
                    vcol += (1.0,)
                    missing_vcolor_a = True
                else:
                    alpha = mesh.vertex_colors[_MESH_consts.default_vcol + _MESH_consts.vcol_a_suffix].data[loop_i].color
                    vcol += ((alpha[0] + alpha[1] + alpha[2]) / 3.0 * 2 * vcol_multi,)  # take avg of colors for alpha

                rgbas.append(vcol)
                rgbas_names[_MESH_consts.default_vcol] = True

                # export rest of the vertex colors too, but do not apply extra multiplies of SCS exporter
                # as rest of the layers are just artist layers
                for vcol_layer in mesh.vertex_colors:

                    # we already computed thoose so ignore them
                    if vcol_layer.name in [_MESH_consts.default_vcol, _MESH_consts.default_vcol + _MESH_consts.vcol_a_suffix]:
                        continue

                    color = vcol_layer.data[loop_i].color
                    vcol = (color[0], color[1], color[2], 1.0)

                    rgbas.append(vcol)
                    rgbas_names[vcol_layer.name] = True

                vert_rgbas.append(rgbas)

                # save internal vertex index to array to be able to construct triangle afterwards
                piece_vert_index = mesh_piece.add_vertex(vert_i, position)
                piece_vert_indices.append(piece_vert_index)

                if is_skin_used:
                    # get skinning data for vertex and save it to skin stream
                    bone_weights = {}
                    bone_weights_sum = 0
                    for v_group_entry in mesh.vertices[vert_i].groups:
                        bone_indx = bones.get_bone_index(vert_groups[v_group_entry.group].name)
                        bone_weight = v_group_entry.weight

                        # proceed only if bone exists in our armature
                        if bone_indx != -1:
                            bone_weights[bone_indx] = bone_weight
                            bone_weights_sum += bone_weight

                    skin_entry = SkinStream.Entry(mesh_piece.get_index(), piece_vert_index, position, bone_weights, bone_weights_sum)
                    skin_stream.add_entry(skin_entry)

                    # report un-skinned vertices (no bones or zero sum weight) or badly skinned model
                    if bone_weights_sum <= 0:
                        missing_skinned_verts.add(vert_i)
                    elif bone_weights_sum < 1:
                        has_unnormalized_skin = True

                # save to terrain points storage if present in correct vertex group
                for group in mesh.vertices[vert_i].groups:

                    # if current object doesn't have vertex group found in mesh data, then ignore that group
                    # This can happen if multiple objects are using same mesh and
                    # some of them have vertex groups, but others not.
                    if group.group >= len(mesh_obj.vertex_groups):
                        continue

                    curr_vg_name = mesh_obj.vertex_groups[group.group].name

                    # if vertex group name doesn't match prescribed one ignore this vertex group
                    if not match(_OP_consts.TerrainPoints.vg_name_regex, curr_vg_name):
                        continue

                    # if node index is not in bounds ignore this vertex group
                    # NOTE: node index is encoded as the last character of the vertex group name
                    node_index = int(curr_vg_name[-1])
                    if node_index >= _PL_consts.PREFAB_NODE_COUNT_MAX:
                        continue

                    # if no variants defined add globally (without variant block)
                    if len(root_object.scs_object_variant_inventory) == 0:
                        used_terrain_points.add(-1, node_index, position, normal)
                        continue

                    # finally iterate variant parts entries to find where this part is included
                    # and add terrain points to transitional structure
                    #
                    # NOTE: variant index is donated by direct order of variants in inventory
                    # so export in PIT has to use the same order otherwise variant
                    # indices will be misplaced
                    for variant_i, variant in enumerate(root_object.scs_object_variant_inventory):

                        used_terrain_points.ensure_entry(variant_i, node_index)

                        for variant_part in variant.parts:
                            if variant_part.name == mesh_obj.scs_props.scs_part and variant_part.include:

                                used_terrain_points.add(variant_i, node_index, position, normal)
                                break

            # NOTE(review): failure is surfaced via assert, which is stripped under "python -O"
            assert mesh_piece.add_face(pim_materials[pim_mat_name],
                                       tuple(piece_vert_indices[::winding_order * -1]),  # invert indices because of conversion to scs system
                                       tuple(vert_normals[::winding_order]),
                                       tuple(vert_uvs[::winding_order]),
                                       list(uvs_names.keys()),
                                       uvs_aliases,
                                       tuple(vert_rgbas[::winding_order]),
                                       list(rgbas_names.keys())
                                       )

        # as we captured all hard edges collect them now and put it into Piece
        for hard_edge in hard_edges:
            (vert1_i, vert2_i) = mesh.edges[hard_edge].vertices
            assert mesh_piece.add_edge(vert1_i, vert2_i, blender_mesh_indices=True)

        # free normals calculations and eventually remove mesh object
        _mesh_utils.cleanup_mesh(mesh)
        _mesh_utils.cleanup_mesh(mesh_for_normals)

        # create part if it doesn't exists yet
        part_name = mesh_obj.scs_props.scs_part
        if part_name not in pim_parts:
            pim_parts[part_name] = Part(part_name)

        # put pieces of current mesh to global list
        pim_pieces.append(mesh_piece)

        # add pieces of current mesh to part
        pim_part = pim_parts[part_name]
        pim_part.add_piece(mesh_piece)

        # report missing data for each object
        if len(missing_uv_layers) > 0:
            for uv_lay_name in missing_uv_layers:
                lprint("W Object '%s' is missing UV layer '%s' specified by materials: %s\n",
                       (mesh_obj.name, uv_lay_name, missing_uv_layers[uv_lay_name]))
        if missing_vcolor:
            lprint("W Object %r is missing vertex color layer with name %r! Default RGB color will be exported (0.5, 0.5, 0.5)!",
                   (mesh_obj.name, _MESH_consts.default_vcol))
        if missing_vcolor_a:
            lprint("W Object %r is missing vertex color alpha layer with name %r! Default alpha will be exported (0.5)",
                   (mesh_obj.name, _MESH_consts.default_vcol + _MESH_consts.vcol_a_suffix))
        if len(missing_skinned_verts) > 0:
            lprint("E Object %r from SCS Root %r has %s vertices which are not skinned to any bone, expect errors during conversion!",
                   (mesh_obj.name, root_object.name, len(missing_skinned_verts)))
        if has_unnormalized_skin:
            lprint("W Object %r from SCS Root %r has unormalized skinning, exporting normalized weights!\n\t   "
                   "You can normalize weights by selecting object & executing 'Normalize All Vertex Groups'.",
                   (mesh_obj.name, root_object.name))

    # report missing data for whole model
    if len(missing_mappings_data) > 0:
        for material_name in missing_mappings_data:
            lprint("W Material '%s' is missing mapping data! Objects using it are exported with default UV:\n\t   %s",
                   (material_name, list(missing_mappings_data[material_name].keys())))
    if len(objects_with_default_material) > 0:
        lprint("W Some objects don't use any material. Default material and UV mapping is used on them:\n\t   %s",
               (list(objects_with_default_material.keys()),))

    # create locators data sections
    for loc_obj in model_locators:

        pos, qua, sca = _get_scs_transformation_components(root_object.matrix_world.inverted() * loc_obj.matrix_world)

        if sca[0] * sca[1] * sca[2] < 0:
            lprint("W Model locator %r inside SCS Root Object %r not exported because of invalid scale.\n\t   " +
                   "Model locators must have positive scale!", (loc_obj.name, root_object.name))
            continue

        name = _name_utils.tokenize_name(loc_obj.name)
        # hookup string is expected in "<prefix> : <id>" form; only the id part is exported
        hookup_string = loc_obj.scs_props.locator_model_hookup
        if hookup_string != "" and ":" in hookup_string:
            hookup = hookup_string.split(':', 1)[1].strip()
        else:
            if hookup_string != "":
                lprint("W The Hookup %r has no expected value!", hookup_string)
            hookup = None

        # create locator object for export
        locator = Locator(len(pim_locators), name, hookup)
        locator.set_position(pos)
        locator.set_rotation(qua)
        locator.set_scale(sca)

        # create part if it doesn't exists yet
        part_name = loc_obj.scs_props.scs_part
        if part_name not in pim_parts:
            assert used_parts.is_present(part_name)
            pim_parts[part_name] = Part(part_name)

        # add locator to part
        pim_part = pim_parts[part_name]
        pim_part.add_locator(locator)

        # add locator to locator list
        pim_locators.append(locator)

    # create container
    pim_container = [pim_header.get_as_section(), pim_global.get_as_section()]

    for mat_name in pim_materials:
        pim_container.append(pim_materials[mat_name].get_as_section())

    for pim_piece in pim_pieces:
        pim_container.append(pim_piece.get_as_section())

    for part_name in used_parts.get_as_list():

        # export all parts even empty ones gathered from PIC and PIP
        if part_name in pim_parts:
            pim_container.append(pim_parts[part_name].get_as_section())
        else:
            pim_container.append(Part(part_name).get_as_section())

    for locator in pim_locators:
        pim_container.append(locator.get_as_section())

    if is_skin_used:
        pim_container.append(bones.get_as_section())
        pim_container.append(skin.get_as_section())

    # write to file
    ind = "    "
    pim_filepath = os.path.join(dirpath, root_object.name + ".pim" + name_suffix)
    return _pix_container.write_data_to_file(pim_container, pim_filepath, ind)
Example #2
0
def execute(dirpath, name_suffix, root_object, armature_object,
            skeleton_filepath, mesh_objects, model_locators, used_parts,
            used_materials, used_bones, used_terrain_points):
    """Executes export of PIM file for given data.
    :param dirpath: directory path for PIM file
    :type dirpath: str
    :param name_suffix: file name suffix
    :type name_suffix: str
    :param root_object: Blender SCS Root empty object
    :type root_object: bpy.types.Object
    :param armature_object: Blender Aramture object belonging to this SCS game object
    :type armature_object: bpy.types.Object
    :param skeleton_filepath: relative file path of PIS file
    :type skeleton_filepath: str
    :param mesh_objects: all the meshes which should be exported for current game object
    :type mesh_objects: list of bpy.types.Object
    :param model_locators: all Blender empty objecs which represents model locators and should be exported for current game object
    :type model_locators: list of bpy.types.Object
    :param used_parts: parts transitional structure for storing used parts inside this PIM export
    :type used_parts: io_scs_tools.exp.transition_structs.parts.PartsTrans
    :param used_materials: materials transitional structure for storing used materials inside this PIM export
    :type used_materials: io_scs_tools.exp.transition_structs.materials.MaterialsTrans
    :param used_bones: bones transitional structure for storing used bones inside this PIM export
    :type used_bones: io_scs_tools.exp.transition_structs.bones.BonesTrans
    :param used_terrain_points: terrain points transitional structure for storing used terrain points
    :type used_terrain_points: io_scs_tools.exp.transition_structs.terrain_points.TerrainPntsTrans
    :return: True if export was successfull; False otherwise
    :rtype: bool
    """

    print("\n************************************")
    print("**      SCS PIM Exporter          **")
    print("**      (c)2017 SCS Software      **")
    print("************************************\n")

    scs_globals = _get_scs_globals()

    format_version = 5

    is_skin_used = (armature_object
                    and root_object.scs_props.scs_root_animated == "anim")

    pim_header = Header("", format_version, root_object.name)
    pim_global = Globall(used_parts.count(), skeleton_filepath)

    pim_materials = collections.OrderedDict(
    )  # dict of Material class instances representing used materials
    """:type: dict[str, Material]"""
    pim_pieces = []  # list of Piece class instances representing mesh pieces
    """:type: list[Piece]"""
    pim_parts = {}  # list of Part class instances representing used parts
    """:type: dict[str, Part]"""
    pim_locators = [
    ]  # list of Locator class instances representing model locators
    """:type: list[Locator]"""

    objects_with_default_material = {
    }  # stores object names which has no material set
    missing_mappings_data = {
    }  # indicates if material doesn't have set any uv layer for export
    invalid_objects_for_tangents = set(
    )  # stores object names which tangents calculation failed because of N-gons existence

    bones = skin = skin_stream = None
    if is_skin_used:

        invalid_bone_names = set(
        )  # set for saving bones with invalid names, they are used for reporting to user

        # create bones data section
        bones = Bones()
        for bone in armature_object.data.bones:
            bones.add_bone(bone.name)
            used_bones.add(bone.name)

            # do bones name checks
            if _name_utils.tokenize_name(bone.name) != bone.name:
                invalid_bone_names.add(bone.name)

        # create skin data section
        skin_stream = SkinStream(SkinStream.Types.POSITION)
        skin = Skin(skin_stream)

        # report invalid bone names
        if len(invalid_bone_names) > 0:
            lprint(
                "W Invalid bone names detected, max. length of valid bone name is 12 and must consists from [a-z, 0-9 and _ ] characters.\n\t   "
                "Conversion will generalize names, however expect problems by re-import! List of invalid bone names for %r:\n\t   "
                "%r", (armature_object.name, list(invalid_bone_names)))

    # create mesh object data sections
    for mesh_obj in mesh_objects:

        lprint("I Preparing mesh object: %r ...", (mesh_obj.name, ))

        vert_groups = mesh_obj.vertex_groups

        mesh_pieces = collections.OrderedDict()

        # calculate faces flip state from all ancestors of current object
        scale_sign = 1
        parent = mesh_obj
        while parent and parent.scs_props.empty_object_type != "SCS_Root":

            for scale_axis in parent.scale:
                scale_sign *= scale_axis

            parent = parent.parent

        face_flip = scale_sign < 0

        # calculate transformation matrix for current object (root object transforms are always subtracted!)
        mesh_transf_mat = root_object.matrix_world.inverted(
        ) * mesh_obj.matrix_world
        """:type: mathutils.Matrix"""

        # calculate vertex position transformation matrix for this object
        pos_transf_mat = (Matrix.Scale(scs_globals.export_scale, 4) *
                          _scs_to_blend_matrix().inverted())
        """:type: mathutils.Matrix"""

        # calculate vertex normals transformation matrix for this object
        # NOTE: as normals will be read from none export prepared mesh we have to add rotation and scale from mesh transformation matrix
        _, rot, scale = mesh_transf_mat.decompose()
        scale_matrix_x = Matrix.Scale(scale.x, 3, Vector((1, 0, 0))).to_4x4()
        scale_matrix_y = Matrix.Scale(scale.y, 3, Vector((0, 1, 0))).to_4x4()
        scale_matrix_z = Matrix.Scale(scale.z, 3, Vector((0, 0, 1))).to_4x4()
        nor_transf_mat = (_scs_to_blend_matrix().inverted() *
                          rot.to_matrix().to_4x4() * scale_matrix_x *
                          scale_matrix_y * scale_matrix_z)
        """:type: mathutils.Matrix"""

        tangent_transf_mat = _scs_to_blend_matrix().inverted()
        """:type: mathutils.Matrix"""

        # get initial mesh & extra copy of the mesh for normals
        mesh = _object_utils.get_mesh(mesh_obj)
        mesh_for_normals = mesh.copy()

        # prepare meshes
        faces_mapping = _mesh_utils.bm_prepare_mesh_for_export(
            mesh, mesh_transf_mat, triangulate=True)
        mesh_for_normals.calc_normals_split()

        missing_uv_layers = {
        }  # stores missing uvs specified by materials of this object
        missing_vcolor = False  # indicates if object is missing vertex color layer
        missing_vcolor_a = False  # indicates if object is missing vertex color alpha layer
        missing_skinned_verts = set(
        )  # indicates if object is having only partial skin, which is not allowed in our models
        has_unnormalized_skin = False  # indicates if object has vertices which bones weight sum is smaller then one

        for poly in mesh.polygons:

            mat_index = poly.material_index

            # check material existence and decide what material name and effect has to be used
            if mat_index >= len(
                    mesh_obj.material_slots
            ) or mesh_obj.material_slots[
                    mat_index].material is None:  # no material or invalid index
                material = None
                pim_mat_name = "_default_material_-_default_settings_"
                pim_mat_effect = "eut2.dif"
                objects_with_default_material[mesh_obj.name] = 1
            else:
                material = mesh_obj.material_slots[mat_index].material
                pim_mat_name = material.name
                pim_mat_effect = material.scs_props.mat_effect_name

            # create new pim material if material with that name doesn't yet exists
            if pim_mat_name not in pim_materials:
                pim_material = Material(len(pim_materials), pim_mat_name,
                                        pim_mat_effect, material)
                pim_materials[pim_mat_name] = pim_material
                used_materials.add(pim_mat_name, material)

            # create new piece if piece with this material doesn't exists yet -> split to pieces by material
            if pim_mat_name not in mesh_pieces:
                mesh_pieces[pim_mat_name] = Piece(
                    len(pim_pieces) + len(mesh_pieces),
                    pim_materials[pim_mat_name])

                nmap_uv_layer = pim_materials[pim_mat_name].get_nmap_uv_name()
                # if there is uv layer used for normal maps and that uv layer exists on mesh then calculate tangents on it otherwise report warning
                if nmap_uv_layer:

                    if nmap_uv_layer in mesh.uv_layers:
                        try:
                            mesh.calc_tangents(uvmap=nmap_uv_layer)
                        except RuntimeError:
                            invalid_objects_for_tangents.add(mesh_obj.name)
                    else:
                        lprint(
                            "W Unable to calculate normal map tangents for object %r,\n\t   "
                            "as it's missing UV layer with name: %r, expect problems!",
                            (mesh_obj.name, nmap_uv_layer))

            mesh_piece = mesh_pieces[pim_mat_name]
            """:type: Piece"""

            # get polygon loop indices for normals depending on mapped triangulated face
            if poly.index in faces_mapping:
                normals_poly_loop_indices = list(mesh_for_normals.polygons[
                    faces_mapping[poly.index]].loop_indices)
            else:
                normals_poly_loop_indices = list(
                    mesh_for_normals.polygons[poly.index].loop_indices)

            # vertex data
            triangle_pvert_indices = [
            ]  # storing vertex indices for this polygon triangle
            for loop_i in poly.loop_indices:

                loop = mesh.loops[loop_i]
                """:type: bpy.types.MeshLoop"""
                vert_i = loop.vertex_index

                # get data of current vertex
                # 1. position -> mesh.vertices[loop.vertex_index].co
                position = tuple(pos_transf_mat * mesh.vertices[vert_i].co)

                # 2. normal -> mesh_for_normals.loops[loop_i].normal -> calc_normals_split() has to be called before
                normal = (0, 0, 0)
                for i, normals_poly_loop_i in enumerate(
                        normals_poly_loop_indices):
                    normal_loop = mesh_for_normals.loops[normals_poly_loop_i]

                    # match by vertex index as triangle will for sure have three unique vertices
                    if vert_i == normal_loop.vertex_index:
                        normal = nor_transf_mat * normal_loop.normal
                        normal = tuple(Vector(normal).normalized())
                        del normals_poly_loop_indices[i]
                        break
                else:
                    lprint(
                        "E Normals data gathering went wrong, expect corrupted mesh! Shouldn't happen..."
                    )

                # 3. uvs -> uv_lay = mesh.uv_layers[0].data; uv_lay[loop_i].uv
                uvs = []
                uvs_aliases = []
                tex_coord_alias_map = pim_materials[
                    pim_mat_name].get_tex_coord_map()
                if len(
                        tex_coord_alias_map
                ) < 1:  # no textures or none uses uv mapping in current material effect
                    uvs.append((0.0, 0.0))
                    uvs_aliases.append(["_TEXCOORD0"])

                    # report missing mappings only on actual materials with textures using uv mappings
                    if material and pim_materials[
                            pim_mat_name].uses_textures_with_uv():
                        if material.name not in missing_mappings_data:
                            missing_mappings_data[material.name] = {}

                        if mesh_obj.name not in missing_mappings_data[
                                material.name]:
                            missing_mappings_data[material.name][
                                mesh_obj.name] = 1

                else:
                    for uv_lay_name in tex_coord_alias_map:

                        if uv_lay_name not in mesh.uv_layers:
                            uvs.append((0.0, 0.0))

                            # properly report missing uv layers where name of uv layout is key and materials that misses it are values
                            if uv_lay_name not in missing_uv_layers:
                                missing_uv_layers[uv_lay_name] = []

                            if pim_mat_name not in missing_uv_layers[
                                    uv_lay_name]:  # add material if not already there
                                missing_uv_layers[uv_lay_name].append(
                                    pim_mat_name)
                        else:
                            uv_lay = mesh.uv_layers[uv_lay_name]
                            uvs.append(
                                _change_to_scs_uv_coordinates(
                                    uv_lay.data[loop_i].uv))

                        aliases = []
                        for alias_index in tex_coord_alias_map[uv_lay_name]:
                            aliases.append("_TEXCOORD" + str(alias_index))

                        uvs_aliases.append(aliases)

                # 4. vcol -> vcol_lay = mesh.vertex_colors[0].data; vcol_lay[loop_i].color
                vcol_multi = mesh_obj.data.scs_props.vertex_color_multiplier
                if _MESH_consts.default_vcol not in mesh.vertex_colors:  # get RGB component of RGBA
                    vcol = (1.0, ) * 3
                    missing_vcolor = True
                else:
                    color = mesh.vertex_colors[
                        _MESH_consts.default_vcol].data[loop_i].color
                    vcol = (color[0] * 2 * vcol_multi,
                            color[1] * 2 * vcol_multi,
                            color[2] * 2 * vcol_multi)

                if _MESH_consts.default_vcol + _MESH_consts.vcol_a_suffix not in mesh.vertex_colors:  # get A component of RGBA
                    vcol += (1.0, )
                    missing_vcolor_a = True
                else:
                    alpha = mesh.vertex_colors[
                        _MESH_consts.default_vcol +
                        _MESH_consts.vcol_a_suffix].data[loop_i].color
                    vcol += ((alpha[0] + alpha[1] + alpha[2]) / 3.0 * 2 *
                             vcol_multi, )  # take avg of colors for alpha

                # 5. tangent -> loop.tangent; loop.bitangent_sign -> calc_tangents() has to be called before
                if pim_materials[pim_mat_name].get_nmap_uv_name(
                ):  # calculate tangents only if needed
                    tangent = tuple(tangent_transf_mat * loop.tangent)
                    tangent = tuple(Vector(tangent).normalized())
                    tangent = (tangent[0], tangent[1], tangent[2],
                               loop.bitangent_sign)
                else:
                    tangent = None

                # 6. There we go, vertex data collected! Now create internal vertex index, for triangle and skin stream construction
                piece_vert_index = mesh_piece.add_vertex(
                    vert_i, position, normal, uvs, uvs_aliases, vcol, tangent)

                # 7. Add vertex to triangle creation list
                triangle_pvert_indices.append(piece_vert_index)

                # 8. Get skinning data for vertex and save it to skin stream
                if is_skin_used:
                    bone_weights = {}
                    bone_weights_sum = 0
                    for v_group_entry in mesh.vertices[vert_i].groups:
                        bone_indx = bones.get_bone_index(
                            vert_groups[v_group_entry.group].name)
                        bone_weight = v_group_entry.weight

                        # proceed only if bone exists in our armature
                        if bone_indx != -1:
                            bone_weights[bone_indx] = bone_weight
                            bone_weights_sum += bone_weight

                    skin_entry = SkinStream.Entry(mesh_piece.get_index(),
                                                  piece_vert_index, position,
                                                  bone_weights,
                                                  bone_weights_sum)
                    skin_stream.add_entry(skin_entry)

                    # report un-skinned vertices (no bones or zero sum weight) or badly skinned model
                    if bone_weights_sum <= 0:
                        missing_skinned_verts.add(vert_i)
                    elif bone_weights_sum < 1:
                        has_unnormalized_skin = True

                # Addition - Terrain Points: save vertex to terrain points storage, if present in correct vertex group
                for group in mesh.vertices[vert_i].groups:

                    # if current object doesn't have vertex group found in mesh data, then ignore that group
                    # This can happen if multiple objects are using same mesh and
                    # some of them have vertex groups, but others not.
                    if group.group >= len(mesh_obj.vertex_groups):
                        continue

                    curr_vg_name = mesh_obj.vertex_groups[group.group].name

                    # if vertex group name doesn't match prescribed one ignore this vertex group
                    if not match(_OP_consts.TerrainPoints.vg_name_regex,
                                 curr_vg_name):
                        continue

                    # if node index is not in bounds ignore this vertex group
                    node_index = int(curr_vg_name[-1])
                    if node_index >= _PL_consts.PREFAB_NODE_COUNT_MAX:
                        continue

                    # if no variants defined add globally (without variant block)
                    if len(root_object.scs_object_variant_inventory) == 0:
                        used_terrain_points.add(-1, node_index, position,
                                                normal)
                        continue

                    # finally iterate variant parts entries to find where this part is included
                    # and add terrain points to transitional structure
                    #
                    # NOTE: variant index is donated by direct order of variants in inventory
                    # so export in PIT has to use the same order otherwise variant
                    # indices will be misplaced
                    for variant_i, variant in enumerate(
                            root_object.scs_object_variant_inventory):

                        used_terrain_points.ensure_entry(variant_i, node_index)

                        for variant_part in variant.parts:
                            if variant_part.name == mesh_obj.scs_props.scs_part and variant_part.include:

                                used_terrain_points.add(
                                    variant_i, node_index, position, normal)
                                break

            # triangles
            if face_flip:
                mesh_piece.add_triangle(tuple(triangle_pvert_indices))
            else:
                mesh_piece.add_triangle(
                    tuple(triangle_pvert_indices[::-1]
                          ))  # yep it's weird but it simply works vice versa

        # free normals calculations
        _mesh_utils.cleanup_mesh(mesh)
        _mesh_utils.cleanup_mesh(mesh_for_normals)

        # create part if it doesn't exists yet
        part_name = used_parts.ensure_part(mesh_obj)
        if part_name not in pim_parts:
            pim_parts[part_name] = Part(part_name)

        mesh_pieces = mesh_pieces.values()
        for piece in mesh_pieces:

            # now as pieces are created we can check for it's flaws
            if piece.get_vertex_count() > 65536:
                lprint(
                    "E Object %r has exceeded maximum vertex count (65536), expect errors during conversion!",
                    (mesh_obj.name, ))

            # put pieces of current mesh to global list
            pim_pieces.append(piece)

            # add pieces of current mesh to part
            pim_part = pim_parts[part_name]
            pim_part.add_piece(piece)

        # report missing data for each object
        if len(missing_uv_layers) > 0:
            for uv_lay_name in missing_uv_layers:
                lprint(
                    "W Object %r is missing UV layer %r specified by materials: %r",
                    (mesh_obj.name, uv_lay_name,
                     missing_uv_layers[uv_lay_name]))
        if missing_vcolor:
            lprint(
                "W Object %r is missing vertex color layer with name %r! Default RGB color will be exported (0.5, 0.5, 0.5)!",
                (mesh_obj.name, _MESH_consts.default_vcol))
        if missing_vcolor_a:
            lprint(
                "W Object %r is missing vertex color alpha layer with name %r! Default alpha will be exported (0.5)",
                (mesh_obj.name,
                 _MESH_consts.default_vcol + _MESH_consts.vcol_a_suffix))
        if len(missing_skinned_verts) > 0:
            lprint(
                "E Object %r from SCS Root %r has %s vertices which are not skinned to any bone, expect errors during conversion!",
                (mesh_obj.name, root_object.name, len(missing_skinned_verts)))
        if has_unnormalized_skin:
            lprint(
                "W Object %r from SCS Root %r has unormalized skinning, exporting normalized weights!\n\t   "
                "You can normalize weights by selecting object & executing 'Normalize All Vertex Groups'.",
                (mesh_obj.name, root_object.name))

    # report missing data for whole model
    if len(missing_mappings_data) > 0:
        for material_name in missing_mappings_data:
            lprint(
                "W Material '%s' is missing mapping data! Objects using it are exported with default UV:\n\t   %s",
                (material_name,
                 list(missing_mappings_data[material_name].keys())))
    if len(objects_with_default_material) > 0:
        lprint(
            "W Some objects don't use any material. Default material and UV mapping is used on them:\n\t   %s",
            (list(objects_with_default_material.keys()), ))

    if len(invalid_objects_for_tangents) > 0:
        lprint(
            "E N-gons present in some objects, thus normal map tangent calculation failed.\n\t   "
            "Visualization in game will be distorted for this objects:\n\t   %s",
            (list(invalid_objects_for_tangents), ))

    # create locators data sections
    for loc_obj in model_locators:

        pos, qua, sca = _get_scs_transformation_components(
            root_object.matrix_world.inverted() * loc_obj.matrix_world)

        if sca[0] * sca[1] * sca[2] < 0:
            lprint(
                "W Model locator %r inside SCS Root Object %r not exported because of invalid scale.\n\t   "
                + "Model locators must have positive scale!",
                (loc_obj.name, root_object.name))
            continue

        name = _name_utils.tokenize_name(loc_obj.name)
        hookup_string = loc_obj.scs_props.locator_model_hookup
        hookup_id = None
        if hookup_string != "":
            hookup_id = _hookup_name_to_hookup_id(hookup_string)
            if hookup_id is None:
                lprint("W Model locator %r has unexpected hookup value %r.",
                       (loc_obj.name, loc_obj.scs_props.locator_model_hookup))

        # create locator object for export
        locator = Locator(len(pim_locators), name, hookup_id)
        locator.set_position(pos)
        locator.set_rotation(qua)
        locator.set_scale(sca)

        # create part if it doesn't exists yet
        part_name = used_parts.ensure_part(loc_obj)
        if part_name not in pim_parts:
            pim_parts[part_name] = Part(part_name)

        # add locator to part
        pim_part = pim_parts[part_name]
        pim_part.add_locator(locator)

        # add locator to locator list
        pim_locators.append(locator)

    # create container
    pim_container = [pim_header.get_as_section(), pim_global.get_as_section()]

    for mat_name in pim_materials:
        pim_container.append(pim_materials[mat_name].get_as_section())

    for pim_piece in pim_pieces:
        pim_container.append(pim_piece.get_as_section())

    for part_name in used_parts.get_as_list():

        # export all parts even empty ones used only in PIC and/or PIP
        if part_name in pim_parts:
            pim_container.append(pim_parts[part_name].get_as_section())
        else:
            pim_container.append(Part(part_name).get_as_section())

    for locator in pim_locators:
        pim_container.append(locator.get_as_section())

    if is_skin_used:
        pim_container.append(bones.get_as_section())
        pim_container.append(skin.get_as_section())

    # write to file
    ind = "    "
    pim_filepath = os.path.join(dirpath,
                                root_object.name + ".pim" + name_suffix)
    return _pix_container.write_data_to_file(pim_container, pim_filepath, ind)
예제 #3
0
def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objects, model_locators,
            used_parts, used_materials, used_bones, used_terrain_points):
    """Executes export of PIM file for given data.
    :param dirpath: directory path for PIM file
    :type dirpath: str
    :param root_object: Blender SCS Root empty object
    :type root_object: bpy.types.Object
    :param armature_object: Blender Aramture object belonging to this SCS game object
    :type armature_object: bpy.types.Object
    :param skeleton_filepath: relative file path of PIS file
    :type skeleton_filepath: str
    :param mesh_objects: all the meshes which should be exported for current game object
    :type mesh_objects: list of bpy.types.Object
    :param model_locators: all Blender empty objecs which represents model locators and should be exported for current game object
    :type model_locators: list of bpy.types.Object
    :param used_parts: parts transitional structure for storing used parts inside this PIM export
    :type used_parts: io_scs_tools.exp.transition_structs.parts.PartsTrans
    :param used_materials: materials transitional structure for storing used materials inside this PIM export
    :type used_materials: io_scs_tools.exp.transition_structs.materials.MaterialsTrans
    :param used_bones: bones transitional structure for storing used bones inside this PIM export
    :type used_bones: io_scs_tools.exp.transition_structs.bones.BonesTrans
    :param used_terrain_points: terrain points transitional structure for storing used terrain points
    :type used_terrain_points: io_scs_tools.exp.transition_structs.terrain_points.TerrainPntsTrans
    :return: True if export was successfull; False otherwise
    :rtype: bool
    """

    print("\n************************************")
    print("**      SCS PIM Exporter          **")
    print("**      (c)2015 SCS Software      **")
    print("************************************\n")

    scs_globals = _get_scs_globals()

    # output type "5" selects PIM format version 5, any other value the legacy "def" format version 1
    if scs_globals.output_type == "5":
        format_version = 5
        format_type = ""
    else:
        format_version = 1
        format_type = "def"

    is_skin_used = (armature_object and root_object.scs_props.scs_root_animated == "anim")

    pim_header = Header(format_type, format_version, root_object.name)
    pim_global = Globall(skeleton_filepath)

    pim_materials = collections.OrderedDict()  # dict of Material class instances representing used materials
    """:type: dict[str, Material]"""
    pim_pieces = []  # list of Piece class instances representing mesh pieces
    """:type: list[Piece]"""
    pim_parts = {}  # list of Part class instances representing used parts
    """:type: dict[str, Part]"""
    pim_locators = []  # list of Locator class instances representing model locators
    """:type: list[Locator]"""

    objects_with_default_material = {}  # stores object names which has no material set
    missing_mappings_data = {}  # indicates if material doesn't have set any uv layer for export

    bones = skin = skin_stream = None
    if is_skin_used:
        # create bones data section
        bones = Bones()
        for bone in armature_object.data.bones:
            bones.add_bone(bone.name)
            used_bones.add(bone.name)

        # create skin data section
        skin_stream = SkinStream(SkinStream.Types.POSITION)
        skin = Skin(skin_stream)

    # create mesh object data sections
    for mesh_obj in mesh_objects:

        vert_groups = mesh_obj.vertex_groups

        mesh_pieces = collections.OrderedDict()

        # calculate faces flip state from all ancestors of current object
        # (odd number of negative scale axes in the parent chain mirrors the mesh and flips winding)
        scale_sign = 1
        parent = mesh_obj
        while parent and parent.scs_props.empty_object_type != "SCS_Root":

            for scale_axis in parent.scale:
                scale_sign *= scale_axis

            parent = parent.parent

        face_flip = scale_sign < 0

        # calculate transformation matrix for current object (root object transforms are always subtracted!)
        mesh_transf_mat = root_object.matrix_world.inverted() * mesh_obj.matrix_world

        # calculate transformation matrices for this object
        pos_transf_mat = (Matrix.Scale(scs_globals.export_scale, 4) *
                          _scs_to_blend_matrix().inverted())

        nor_transf_mat = _scs_to_blend_matrix().inverted()

        # get initial mesh and vertex groups for it
        mesh = _object_utils.get_mesh(mesh_obj)
        _mesh_utils.bm_prepare_mesh_for_export(mesh, mesh_transf_mat, face_flip)
        mesh.calc_normals_split()

        missing_uv_layers = {}  # stores missing uvs specified by materials of this object
        missing_vcolor = False  # indicates if object is missing vertex color layer
        missing_vcolor_a = False  # indicates if object is missing vertex color alpha layer

        for poly in mesh.polygons:

            mat_index = poly.material_index

            # check material existence and decide what material name and effect has to be used
            if mat_index >= len(mesh_obj.material_slots) or mesh_obj.material_slots[mat_index].material is None:  # no material or invalid index
                material = None
                pim_mat_name = "_not_existing_material_"
                pim_mat_effect = "eut2.dif"
                objects_with_default_material[mesh_obj.name] = 1
            else:
                material = mesh_obj.material_slots[mat_index].material
                pim_mat_name = material.name
                pim_mat_effect = material.scs_props.mat_effect_name

            # create new pim material if material with that name doesn't yet exists
            if pim_mat_name not in pim_materials:
                pim_material = Material(len(pim_materials), pim_mat_name, pim_mat_effect, material)
                pim_materials[pim_mat_name] = pim_material
                used_materials.add(pim_mat_name, material)

            # create new piece if piece with this material doesn't exists yet -> split to pieces by material
            if pim_mat_name not in mesh_pieces:
                mesh_pieces[pim_mat_name] = Piece(len(pim_pieces) + len(mesh_pieces), pim_materials[pim_mat_name])

                nmap_uv_layer = pim_materials[pim_mat_name].get_nmap_uv_name()
                if nmap_uv_layer:  # if there is uv layer used for normal maps then calculate tangents on it
                    mesh.calc_tangents(uvmap=nmap_uv_layer)

            mesh_piece = mesh_pieces[pim_mat_name]
            """:type: Piece"""

            piece_vert_indices = []
            for loop_i in poly.loop_indices:

                loop = mesh.loops[loop_i]
                """:type: bpy.types.MeshLoop"""
                vert_i = loop.vertex_index

                # get data of current vertex
                # 1. position -> mesh.vertices[loop.vertex_index].co
                position = tuple(pos_transf_mat * mesh.vertices[vert_i].co)

                # 2. normal -> loop.normal -> calc_normals_split() has to be called before
                normal = nor_transf_mat * loop.normal
                normal = tuple(Vector(normal).normalized())

                # 3. uvs -> uv_lay = mesh.uv_layers[0].data; uv_lay[loop_i].uv
                uvs = []
                uvs_aliases = []
                tex_coord_alias_map = pim_materials[pim_mat_name].get_tex_coord_map()
                if len(tex_coord_alias_map) < 1:  # no textures or none uses uv mapping in current material effect
                    uvs.append((0.0, 0.0))
                    uvs_aliases.append(["_TEXCOORD0"])

                    # report missing mappings only on actual materials with textures using uv mappings
                    if material and pim_materials[pim_mat_name].uses_textures_with_uv():
                        if material.name not in missing_mappings_data:
                            missing_mappings_data[material.name] = {}

                        if mesh_obj.name not in missing_mappings_data[material.name]:
                            missing_mappings_data[material.name][mesh_obj.name] = 1

                else:
                    for uv_lay_name in tex_coord_alias_map:

                        if uv_lay_name not in mesh.uv_layers:
                            uvs.append((0.0, 0.0))

                            # properly report missing uv layers where name of uv layout is key and materials that misses it are values
                            if uv_lay_name not in missing_uv_layers:
                                missing_uv_layers[uv_lay_name] = []

                            if pim_mat_name not in missing_uv_layers[uv_lay_name]:  # add material if not already there
                                missing_uv_layers[uv_lay_name].append(pim_mat_name)
                        else:
                            uv_lay = mesh.uv_layers[uv_lay_name]
                            uvs.append(_change_to_scs_uv_coordinates(uv_lay.data[loop_i].uv))

                        aliases = []
                        for alias_index in tex_coord_alias_map[uv_lay_name]:
                            aliases.append("_TEXCOORD" + str(alias_index))

                        uvs_aliases.append(aliases)

                # 4. vcol -> vcol_lay = mesh.vertex_colors[0].data; vcol_lay[loop_i].color
                vcol_multi = mesh_obj.data.scs_props.vertex_color_multiplier
                if _MESH_consts.default_vcol not in mesh.vertex_colors:  # get RGB component of RGBA
                    vcol = (1.0,) * 3
                    missing_vcolor = True
                else:
                    color = mesh.vertex_colors[_MESH_consts.default_vcol].data[loop_i].color
                    vcol = (color[0] * 2 * vcol_multi, color[1] * 2 * vcol_multi, color[2] * 2 * vcol_multi)

                if _MESH_consts.default_vcol + _MESH_consts.vcol_a_suffix not in mesh.vertex_colors:  # get A component of RGBA
                    vcol += (1.0,)
                    missing_vcolor_a = True
                else:
                    alpha = mesh.vertex_colors[_MESH_consts.default_vcol + _MESH_consts.vcol_a_suffix].data[loop_i].color
                    vcol += ((alpha[0] + alpha[1] + alpha[2]) / 3.0 * 2 * vcol_multi,)  # take avg of colors for alpha

                # 5. tangent -> loop.tangent; loop.bitangent_sign -> calc_tangents() has to be called before
                if pim_materials[pim_mat_name].get_nmap_uv_name():  # calculate tangents only if needed
                    tangent = tuple(nor_transf_mat * loop.tangent)
                    tangent = tuple(Vector(tangent).normalized())
                    tangent = (tangent[0], tangent[1], tangent[2], loop.bitangent_sign)
                else:
                    tangent = None

                # save internal vertex index to array to be able to construct triangle afterwards
                piece_vert_index = mesh_piece.add_vertex(vert_i, position, normal, uvs, uvs_aliases, vcol, tangent)
                piece_vert_indices.append(piece_vert_index)

                if is_skin_used:
                    # get skinning data for vertex and save it to skin stream
                    bone_weights = {}
                    for v_group_entry in mesh.vertices[vert_i].groups:
                        bone_indx = bones.get_bone_index(vert_groups[v_group_entry.group].name)
                        bone_weight = v_group_entry.weight

                        # proceed only if bone exists in our armature
                        if bone_indx != -1:
                            bone_weights[bone_indx] = bone_weight

                    skin_entry = SkinStream.Entry(mesh_piece.get_index(), piece_vert_index, position, bone_weights)
                    skin_stream.add_entry(skin_entry)

                # save to terrain points storage if present in correct vertex group
                for group in mesh.vertices[vert_i].groups:

                    # if current object doesn't have vertex group found in mesh data, then ignore that group
                    # This can happen if multiple objects are using same mesh and
                    # some of them have vertex groups, but others not.
                    if group.group >= len(mesh_obj.vertex_groups):
                        continue

                    curr_vg_name = mesh_obj.vertex_groups[group.group].name

                    # if vertex group name doesn't match prescribed one ignore this vertex group
                    if not match(_OP_consts.TerrainPoints.vg_name_regex, curr_vg_name):
                        continue

                    # if node index is not in bounds ignore this vertex group
                    node_index = int(curr_vg_name[-1])
                    if node_index >= _PL_consts.PREFAB_NODE_COUNT_MAX:
                        continue

                    # if no variants defined add globally (without variant block)
                    if len(root_object.scs_object_variant_inventory) == 0:
                        used_terrain_points.add(-1, node_index, position, normal)
                        continue

                    # finally iterate variant parts entries to find where this part is included
                    # and add terrain points to transitional structure
                    #
                    # NOTE: variant index is donated by direct order of variants in inventory
                    # so export in PIT has to use the same order otherwise variant
                    # indices will be misplaced
                    for variant_i, variant in enumerate(root_object.scs_object_variant_inventory):

                        used_terrain_points.ensure_entry(variant_i, node_index)

                        for variant_part in variant.parts:
                            if variant_part.name == mesh_obj.scs_props.scs_part and variant_part.include:

                                used_terrain_points.add(variant_i, node_index, position, normal)
                                break

            mesh_piece.add_triangle(tuple(piece_vert_indices[::-1]))  # invert indices because of normals flip

        # free normals calculations
        _mesh_utils.cleanup_mesh(mesh)

        # create part if it doesn't exists yet
        part_name = mesh_obj.scs_props.scs_part
        if part_name not in pim_parts:
            pim_parts[part_name] = Part(part_name)

        mesh_pieces = mesh_pieces.values()
        for piece in mesh_pieces:
            # put pieces of current mesh to global list
            pim_pieces.append(piece)

            # add pieces of current mesh to part
            pim_part = pim_parts[part_name]
            pim_part.add_piece(piece)

        # report missing data for each object
        if len(missing_uv_layers) > 0:
            for uv_lay_name in missing_uv_layers:
                lprint("W Object '%s' is missing UV layer '%s' specified by materials: %s\n",
                       (mesh_obj.name, uv_lay_name, missing_uv_layers[uv_lay_name]))
        if missing_vcolor:
            lprint("W Object %r is missing vertex color layer with name %r! Default RGB color will be exported (0.5, 0.5, 0.5)!",
                   (mesh_obj.name, _MESH_consts.default_vcol))
        if missing_vcolor_a:
            lprint("W Object %r is missing vertex color alpha layer with name %r! Default alpha will be exported (0.5)",
                   (mesh_obj.name, _MESH_consts.default_vcol + _MESH_consts.vcol_a_suffix))

    # report missing data for whole model
    if len(missing_mappings_data) > 0:
        for material_name in missing_mappings_data:
            lprint("W Material '%s' is missing mapping data! Objects using it are exported with default UV:\n\t   %s",
                   (material_name, list(missing_mappings_data[material_name].keys())))
    if len(objects_with_default_material) > 0:
        lprint("W Some objects don't use any material. Default material and UV mapping is used on them:\n\t   %s",
               (list(objects_with_default_material.keys()),))

    # create locators data sections
    for loc_obj in model_locators:

        pos, qua, sca = _get_scs_transformation_components(root_object.matrix_world.inverted() * loc_obj.matrix_world)

        if sca[0] * sca[1] * sca[2] < 0:
            lprint("W Model locator %r inside SCS Root Object %r not exported because of invalid scale.\n\t   " +
                   "Model locators must have positive scale!", (loc_obj.name, root_object.name))
            continue

        name = _name_utils.tokenize_name(loc_obj.name)
        hookup_string = loc_obj.scs_props.locator_model_hookup
        if hookup_string != "" and ":" in hookup_string:
            hookup = hookup_string.split(':', 1)[1].strip()
        else:
            if hookup_string != "":
                lprint("W The Hookup %r has no expected value!", hookup_string)
            hookup = None

        # create locator object for export
        locator = Locator(len(pim_locators), name, hookup)
        locator.set_position(pos)
        locator.set_rotation(qua)
        locator.set_scale(sca)

        # create part if it doesn't exists yet
        part_name = loc_obj.scs_props.scs_part
        if part_name not in pim_parts:
            pim_parts[part_name] = Part(part_name)

        # add locator to part
        pim_part = pim_parts[part_name]
        pim_part.add_locator(locator)

        # add locator to locator list
        pim_locators.append(locator)

    # create container
    pim_container = [pim_header.get_as_section(), pim_global.get_as_section()]

    for mat_name in pim_materials:
        pim_container.append(pim_materials[mat_name].get_as_section())

    for pim_piece in pim_pieces:
        pim_container.append(pim_piece.get_as_section())

    for part_name in used_parts.get_as_list():

        # export all parts even empty ones gathered from PIC and PIP
        if part_name in pim_parts:
            pim_container.append(pim_parts[part_name].get_as_section())
        else:
            pim_container.append(Part(part_name).get_as_section())

    for locator in pim_locators:
        pim_container.append(locator.get_as_section())

    if is_skin_used:
        pim_container.append(bones.get_as_section())
        pim_container.append(skin.get_as_section())

    # write to file
    ind = "    "
    pim_filepath = os.path.join(dirpath, root_object.name + ".pim")
    return _pix_container.write_data_to_file(pim_container, pim_filepath, ind)
예제 #4
0
def _fill_piece_sections_7(root_object, object_list, bone_list, scene, vg_list,
                           used_materials, offset_matrix, scs_globals,
                           output_type):
    """Fills up "Piece" sections for file version 7 (exchange format).

    For every object in 'object_list' one "Piece" section is assembled:
    vertex streams (positions plus optional vertex group SCALARs),
    per-face streams (normals, UV layers, vertex colors), sharp edge
    data and - when the SCS Root is marked as animated - old style
    skinning records (PIM ver. 7).

    :param root_object: SCS Root object; its 'scs_root_animated' property enables skinning export
    :param object_list: mesh objects to export (one "Piece" per object)
    :param bone_list: bones for export; matched by name against vertex groups for skinning
    :param scene: Blender scene (not referenced inside this function body)
    :param vg_list: names of vertex group layers to export as "_SCALARx" streams
    :param used_materials: all used materials; per-face material indices are looked up in this list
    :param offset_matrix: pivot point matrix; its inverse is applied to positions and normals
    :param scs_globals: SCS Tools globals (export scale, vertex color settings etc.)
    :param output_type: output format type forwarded to vertex color stream retrieval
    :return: tuple (piece_sections, global_vertex_count, global_face_count, global_edge_count,
             piece_index_obj, skin_list, skin_weights_cnt, skin_clones_cnt)
    """
    piece_sections = []  # container for all "Pieces"
    global_vertex_count = 0
    global_face_count = 0
    global_edge_count = 0
    piece_index_obj = {}  # maps piece index -> source Blender object
    skin_list = []
    skin_weights_cnt = 0
    skin_clones_cnt = 0
    for piece_index, obj in enumerate(object_list):
        mat_world = obj.matrix_world
        piece_index_obj[piece_index] = obj
        object_materials = _object_utils.get_object_materials(
            obj)  # get object materials

        # Get Object's Mesh data and list of temporarily disabled "Edge Split" Modifiers...
        mesh = _object_utils.get_mesh(obj)

        # VERTICES
        # TEMPORAL VERTEX STREAM DATA FORMAT:
        # example: ('_POSITION', [(0.0, 0.0, 0.0), (0.0, 0.0, 0.0), ...])
        # example: ('_SCALAR', [(0.0), (0.0), ...])

        stream_pos = ('_POSITION', [])
        # stream_nor = ('_NORMAL', [])
        # if scs_globals.export_vertex_groups:
        vg_layers_for_export, streams_vg = _object_utils.get_stream_vgs(
            obj)  # get Vertex Group layers (SCALARs)

        vertex_stream_count = 1  # the position stream is always present
        vertex_streams = []
        stream_vg_container = []
        # print('bone_list: %s' % str(bone_list.keys))
        for vert_i, vert in enumerate(mesh.vertices):
            position = offset_matrix.inverted() * mesh.vertices[vert_i].co
            # transform into SCS space: export scale * axis conversion * world transform
            # scs_position = io_utils.change_to_scs_xyz_coordinates(mat_world * position, scs_globals.export_scale) ## POSITION
            scs_position = Matrix.Scale(
                scs_globals.export_scale,
                4) * _convert_utils.scs_to_blend_matrix().inverted(
                ) * mat_world * position  # POSITION
            stream_pos[1].append(scs_position)
            # stream_nor[1].append(io_utils.get_vertex_normal(mesh, vert_i))              # NORMAL
            if scs_globals.export_vertex_groups:
                if streams_vg:
                    vg_i = 0
                    for vg in vg_layers_for_export:  # weights (even unused) all vertices become 0.0
                        if vg.name in vg_list:
                            vg_weight = (_object_utils.get_vertex_group(
                                vg, vert_i), )  # SCALARs
                            key = str("_SCALAR" + str(vg_i))
                            # first vertex for this layer -> create its stream,
                            # otherwise append weight to the existing one
                            if vg_i == len(stream_vg_container) and len(
                                    stream_vg_container) != len(
                                        vg_layers_for_export):
                                stream_vg_container.append(
                                    (vg.name, key, [vg_weight]))
                            else:
                                stream_vg_container[vg_i][2].append(vg_weight)
                            vg_i += 1

            # SKINNING (OLD STYLE FOR PIM VER. 7)
            # if scs_globals.export_anim_file == 'anim':
            if root_object.scs_props.scs_root_animated == 'anim':
                skin_vector = scs_position  # NOTE: Vertex position - from Maya scaled *10 (old & unused in game engine)
                skin_weights = []
                for group in vert.groups:
                    for vg in vg_layers_for_export:
                        if vg.index == group.group:
                            # bones are matched to vertex groups purely by name
                            for bone_i, bone in enumerate(bone_list):
                                if vg.name == bone.name:
                                    skin_weights.append((bone_i, group.weight))
                                    skin_weights_cnt += 1
                                    # print('vert: %i - group: %r (%i) - %s' % (vert_i, vg.name, bone_i, str(group.weight)))
                                    break
                            break
                skin_clones = ((piece_index, vert_i), )
                skin_clones_cnt += 1
                skin_list.append((skin_vector, skin_weights, skin_clones))

        vertex_streams.append(stream_pos)
        # print('\nstream_pos:\n  %s' % str(stream_pos))
        # vertex_streams.append(stream_nor)
        # print('\nstream_nor:\n  %s' % str(stream_nor))
        for vg_stream in stream_vg_container:
            vertex_stream_count += 1
            vertex_streams.append(vg_stream)
            # print('\nvg_stream:\n  %s' % str(vg_stream))
        # FACES
        # TEMPORAL FACE STREAM DATA FORMAT:
        # faces = [face_data, face_data, ...]
        # face_data = (material, [vertex indices], [face-vertex streams])
        # face_streams = [('_UV0', [(0.0, 0.0), (0.0, 0.0), ...]), ...]
        # example: [(0, [0, 1, 2], [('_UV0', [(0.0, 0.0), (0.0, 0.0)]), ('_UV0', [(0.0, 0.0), (0.0, 0.0)])]), (), ...]

        faces = []
        face_cnt = 0
        uv_layers_exists = 1
        rgb_layers_exists = 1
        # print('used_materials: %s' % str(used_materials))
        # NOTE(review): the UV/RGB layer streams below are re-fetched for every
        # face until a fetch comes back empty - looks redundant but kept as-is.
        for face_i, face in enumerate(mesh.polygons):
            face_cnt += 1
            streams_uv = None
            streams_vcolor = None
            if uv_layers_exists:
                requested_uv_layers, streams_uv = _mesh_utils.get_stream_uvs(
                    mesh, scs_globals.active_uv_only)  # get UV layers (UVs)
                if not streams_uv:
                    uv_layers_exists = 0
            if rgb_layers_exists and scs_globals.export_vertex_color:
                if scs_globals.export_vertex_color_type_7 == 'rgb':
                    rgb_all_layers, streams_vcolor = _mesh_utils.get_stream_rgb(
                        mesh, output_type,
                        False)  # get Vertex Color layers (RGB)
                elif scs_globals.export_vertex_color_type_7 == 'rgbda':
                    rgb_all_layers, streams_vcolor = _mesh_utils.get_stream_rgb(
                        mesh, output_type, True)  # get Vertex Color layers (
                    # RGBdA)
                else:
                    streams_vcolor = None  # TODO: Alpha from another layer
                if not streams_vcolor:
                    rgb_layers_exists = 0
            # face material is exported as an index into 'used_materials'
            mat_index = used_materials.index(
                object_materials[face.material_index])
            # print('face-mat_index: %s; object_materials[f-m_i]: %s; used_materials.index(o_m[f-m_i]): %s' % (face.material_index,
            # object_materials[face.material_index], used_materials.index(object_materials[face.material_index])))
            face_verts = []
            for vert in face.vertices:
                face_verts.append(vert)
            face_verts = face_verts[::-1]  # revert vertex order in face
            # print('face_verts: %s' % str(face_verts))
            face_streams = []
            stream_fv_nor = ("_NORMAL", [])
            stream_fv_uv_container = []
            stream_fv_rgb_container = []
            stream_names = {}  # maps stream key (e.g. "_UV0") -> source layer name
            for loop_index in range(face.loop_start,
                                    face.loop_start + face.loop_total):
                # edge_index = mesh.loops[loop_index].edge_index
                vert_index = mesh.loops[loop_index].vertex_index
                # print('face i.: %s\tloop i.: %s\tedge i.: %s\tvert i.: %s' % (face_i, loop_index, edge_index, vert_index))
                # Normals
                stream_fv_nor[1].append(
                    offset_matrix.inverted() *
                    Vector(_mesh_utils.get_vertex_normal(mesh, vert_index)))
                # UV Layers
                if streams_uv:
                    for uv_i, uv_l in enumerate(requested_uv_layers):
                        uv_values = _mesh_utils.get_face_vertex_uv(
                            uv_l.data, loop_index, uv_i)
                        key = str("_UV" + str(uv_i))
                        # first face-vertex for this layer -> create the stream,
                        # otherwise append to the existing one
                        if uv_i == len(stream_fv_uv_container
                                       ) and len(stream_fv_uv_container
                                                 ) != len(requested_uv_layers):
                            stream_fv_uv_container.append((key, [uv_values]))
                            stream_names[key] = uv_l.name
                        else:
                            stream_fv_uv_container[uv_i][1].append(uv_values)
                # Vertex Color Layers (RGB)
                if scs_globals.export_vertex_color:
                    if streams_vcolor:
                        # NOTE(review): if 'export_vertex_color_type_7' is neither
                        # 'rgb' nor 'rgbda', 'rgb_values'/'key' stay unbound and the
                        # container code below would raise NameError - confirm that
                        # setting is limited to those two values on this path.
                        for rgb_i, rgb_l in enumerate(rgb_all_layers):
                            if scs_globals.export_vertex_color_type_7 == 'rgb':
                                rgb_values = _mesh_utils.get_face_vertex_color(
                                    rgb_l.data, loop_index, False, rgb_i)
                                key = str("_RGB" + str(rgb_i))
                            elif scs_globals.export_vertex_color_type_7 == 'rgbda':
                                rgb_values = _mesh_utils.get_face_vertex_color(
                                    rgb_l.data, loop_index, True, rgb_i)
                                key = str("_RGBA" + str(rgb_i))
                            else:
                                streams_vcolor = None  # TODO: Alpha from another layer
                            if rgb_i == len(stream_fv_rgb_container
                                            ) and len(stream_fv_rgb_container
                                                      ) != len(rgb_all_layers):
                                stream_fv_rgb_container.append(
                                    (key, [rgb_values]))
                                stream_names[key] = rgb_l.name
                            else:
                                stream_fv_rgb_container[rgb_i][1].append(
                                    rgb_values)
            # Data Assembling
            face_streams.append(stream_fv_nor)
            for stream in stream_fv_uv_container:
                face_streams.append(stream)
            for stream in stream_fv_rgb_container:
                face_streams.append(stream)
            face_data = (mat_index, face_verts, face_streams)
            faces.append(face_data)

        # SHARP EDGES
        sharp_edges = []
        for edge in mesh.edges:
            if edge.use_edge_sharp:
                sharp_edges.append(edge.vertices[:])

        # BUILD FACE SECTION
        faces_container = _SectionData("Faces")
        # NOTE(review): 'faces[0]' assumes the mesh has at least one polygon -
        # a polygon-less mesh would raise IndexError here.
        faces_container.props.append(("StreamCount", len(faces[0][2])))
        for face_i, face_data in enumerate(faces):
            face_container = _SectionData("Face")
            face_container.props.append(("Index", face_i))
            face_container.props.append(("Material", face_data[0]))
            face_container.props.append(("Indices", face_data[1]))
            for stream in face_data[2]:
                # streams originating from a named layer carry the layer name as alias
                if stream[0] in stream_names:
                    face_container.sections.append(
                        _pix_container.make_vertex_stream(
                            stream, stream_names[stream[0]]))
                else:
                    face_container.sections.append(
                        _pix_container.make_vertex_stream(stream))
            faces_container.sections.append(face_container)

        # BUILD PIECE SECTION
        piece_section = _SectionData("Piece")
        piece_section.props.append(("Index", piece_index))
        global_vertex_count += len(stream_pos[1])
        piece_section.props.append(("VertexCount", len(stream_pos[1])))
        global_face_count += face_cnt
        piece_section.props.append(("FaceCount", face_cnt))
        global_edge_count += len(sharp_edges)
        piece_section.props.append(("EdgeCount", len(sharp_edges)))
        piece_section.props.append(("StreamCount", vertex_stream_count))
        piece_section.props.append(("", ""))
        # vertex streams... vertex group streams are 3-tuples
        # (layer name, key, data); plain streams are 2-tuples (key, data)
        for stream in vertex_streams:
            if len(stream) == 3:
                # print('\nstream:\n  %s' % str(stream))
                piece_section.sections.append(
                    _pix_container.make_vertex_stream(stream[1:], stream[0]))
            else:
                piece_section.sections.append(
                    _pix_container.make_vertex_stream(stream))
        # faces...
        piece_section.sections.append(faces_container)

        # BUILD AND STORE EDGE SECTION
        if sharp_edges:
            edges_container = _SectionData("Edges")
            for edge in sharp_edges:
                edges_container.data.append(edge)
            piece_section.sections.append(edges_container)

        # STORE PIECE SECTION
        piece_sections.append(piece_section)  # add a piece
    return piece_sections, global_vertex_count, global_face_count, global_edge_count, piece_index_obj, skin_list, skin_weights_cnt, skin_clones_cnt
예제 #5
0
def _fill_piece_sections_5(root_object, object_list, bone_list, scene,
                           used_materials, offset_matrix, scs_globals):
    """Fills up "Piece" sections for file version 5.

    Geometry is sorted per material via '_get_geometry_dict'; each
    resulting piece gets its vertex streams ('_POSITION', '_NORMAL',
    UV and RGBA layers), falling back to dummy UV/RGBA layers when the
    mesh provides none, plus a "Triangles" face stream.

    :param root_object: SCS Root Object
    :type root_object: bpy.types.Object
    :param object_list: Object for export
    :type object_list: list of Objects
    :param bone_list: Bones for export
    :type bone_list: list
    :param scene: Blender Scene to operate on
    :type scene: bpy.types.Scene
    :param used_materials: All Materials used in 'SCS Game Object'
    :type used_materials: list
    :param offset_matrix: Matrix for specifying of pivot point
    :type offset_matrix: Matrix
    :param scs_globals: SCS Tools Globals
    :type scs_globals: GlobalSCSProps
    :return: list of parameters
    :rtype: list
    """

    handle_unused_arg(__file__, _fill_piece_sections_5.__name__,
                      "used_materials", used_materials)

    # NOTE(review): the three settings below are read but never used in the
    # rest of this function.
    apply_modifiers = scs_globals.apply_modifiers
    exclude_edgesplit = scs_globals.exclude_edgesplit
    include_edgesplit = scs_globals.include_edgesplit
    piece_sections = []
    piece_index_obj = {}  # maps piece index -> source Blender object
    piece_index = global_vertex_count = global_face_count = 0

    # ----- START: SOLVE THIS!
    # NOTE(review): these are overwritten on every object iteration below, so
    # only the last object's skinning data is returned - matches the markers.
    skin_list = []
    skin_weights_cnt = skin_clones_cnt = 0
    # -----   END: SOLVE THIS!

    print_info = False  # local debug switch

    if print_info:
        print('used_materials: %s\n' % str(used_materials))
        print('object_list: %s\n' % str(object_list))

    # For each object...
    for obj_i, obj in enumerate(object_list):
        piece_index_obj[obj_i] = obj
        # Get all Materials from Object as they're set in material slots...
        object_materials = _object_utils.get_object_materials(obj)
        if print_info:
            print('  object_materials: %s' % str(object_materials))

        # Make Material dictionary (needed for getting rid of Material duplicities)...
        material_dict = _make_material_dict(object_materials)
        if print_info:
            for item in material_dict:
                print('    "%s" = %s' % (str(item), str(material_dict[item])))
            print('')

        # Get Object's Mesh data and list of temporarily disabled "Edge Split" Modifiers...
        mesh = _object_utils.get_mesh(obj)

        # SORT GEOMETRY - split mesh data into per-material pieces
        piece_dict, skin_list, skin_weights_cnt, skin_clones_cnt = _get_geometry_dict(
            root_object, obj, mesh, offset_matrix, material_dict,
            used_materials, bone_list, scs_globals)

        # DUMP (disabled debug output of the geometry dictionary)
        if 0:
            for piece in piece_dict:
                print('Pm: %r' % piece)
                for data in piece_dict[piece]:
                    print('  Da: %r' % str(data))
                    if data == 'hash_dict':
                        if len(piece_dict[piece][data]) > 0:
                            for val in piece_dict[piece][data]:
                                print('    HD: %s:%s' %
                                      (str(val),
                                       str(piece_dict[piece][data][val])))
                        else:
                            print('    NO "hash_dict" Data!')
                    elif data == 'verts':
                        if len(piece_dict[piece][data]) > 0:
                            for val in piece_dict[piece][data]:
                                print('    Ve: %s' % str(val))
                        else:
                            print('    NO "verts" Data!')
                    elif data == 'faces':
                        if len(piece_dict[piece][data]) > 0:
                            for val in piece_dict[piece][data]:
                                print('    Fa: %s' % str(val))
                        else:
                            print('    NO "faces" Data!')
                print('')
            print('')

        # CREATE SECTIONS
        for piece in piece_dict:
            vertex_count = len(piece_dict[piece]['verts'])
            global_vertex_count += vertex_count
            face_count = len(piece_dict[piece]['faces'])
            global_face_count += face_count
            stream_sections = []

            # VERTEX STREAMS
            # collect per-vertex data into streams keyed by data type
            # (e.g. '_POSITION', '_UV0', '_RGBA')
            verts_data = {}
            for val in piece_dict[piece]['verts']:
                facevert_common_data, facevert_unique_data = val
                for data_key in facevert_common_data:
                    # print(' data_key: %s' % str(data_key))
                    if data_key == '_VG':
                        pass  # vertex group data is not exported as a stream here
                    else:
                        if data_key not in verts_data:
                            verts_data[data_key] = []
                        verts_data[data_key].append(
                            facevert_common_data[data_key])
                for data_key in facevert_unique_data:
                    # print(' data_key: %s' % str(data_key))
                    if data_key == '_UV':
                        # UV layers always get an index suffix ('_UV0', '_UV1', ...)
                        for layer_i, layer in enumerate(
                                facevert_unique_data[data_key]):
                            layer_data_key = str(data_key + str(layer_i))
                            if layer_data_key not in verts_data:
                                verts_data[layer_data_key] = []
                            verts_data[layer_data_key].append(
                                facevert_unique_data[data_key][layer])
                    if data_key == '_RGBA':
                        # single RGBA layer keeps plain '_RGBA' key, multiple
                        # layers get index suffixes ('_RGBA0', '_RGBA1', ...)
                        for layer_i, layer in enumerate(
                                facevert_unique_data[data_key]):
                            if len(facevert_unique_data[data_key]) > 1:
                                layer_data_key = str(data_key + str(layer_i))
                            else:
                                layer_data_key = data_key
                            if layer_data_key not in verts_data:
                                verts_data[layer_data_key] = []
                            verts_data[layer_data_key].append(
                                facevert_unique_data[data_key][layer])
            lprint('S verts_data: %s', (str(verts_data), ))

            # fixed stream emit order for the "Piece" section
            data_types = ('_POSITION', '_NORMAL', '_UV', '_UV0', '_UV1',
                          '_UV2', '_UV3', '_UV4', '_UV5', '_UV6', '_UV7',
                          '_UV8', '_UV9', '_RGBA', '_RGBA0', '_RGBA1',
                          '_RGBA2', '_RGBA3', '_RGBA4', '_RGBA5', '_RGBA6',
                          '_RGBA7', '_RGBA8', '_RGBA9', '_VG')
            add_uv = True
            add_rgba = False
            # NOTE(review): only the plain '_RGBA' key is checked below, so when
            # several vertex color layers exist (keys '_RGBA0', '_RGBA1', ...)
            # the default RGBA layer is still appended - confirm this is intended.
            for data_type in data_types:
                if '_RGBA' not in verts_data:
                    add_rgba = True
                if data_type in verts_data:
                    if data_type.startswith('_UV'):
                        add_uv = False
                        stream_sections.append(
                            _pix_container.make_stream_section(
                                verts_data[data_type], data_type,
                                (data_type, )))
                    else:
                        stream_sections.append(
                            _pix_container.make_stream_section(
                                verts_data[data_type], data_type, ()))

            # ADD DEFAULT UV LAYER
            if add_uv:
                lprint('I Adding default UV layer.')
                uv_dummy_data = []
                for vert in range(len(piece_dict[piece]['verts'])):
                    uv_dummy_data.append(Vector((0.0, 0.0)))
                stream_sections.append(
                    _pix_container.make_stream_section(uv_dummy_data, '_UV0',
                                                       ('_UV0', )))

            # ADD DEFAULT RGBA LAYER
            if add_rgba:
                lprint('I Adding default RGBA (vertex color) layer.')
                rgba_dummy_data = []
                for vert in range(len(piece_dict[piece]['verts'])):
                    rgba_dummy_data.append(Vector((1.0, 1.0, 1.0, 1.0)))
                stream_sections.append(
                    _pix_container.make_stream_section(rgba_dummy_data,
                                                       '_RGBA', ()))

            # PIECE PROPERTIES
            piece_section = _SectionData("Piece")
            piece_section.props.append(("Index", piece_index))
            piece_section.props.append(
                ("Material", piece_dict[piece]['material_index']))
            piece_section.props.append(("VertexCount", vertex_count))
            piece_section.props.append(("TriangleCount", face_count))
            piece_section.props.append(("StreamCount", len(stream_sections)))
            piece_section.props.append(("", ""))
            piece_index += 1
            for stream_section in stream_sections:
                piece_section.sections.append(stream_section)

            # FACE STREAM
            stream_section = _SectionData("Triangles")
            stream_section.data = piece_dict[piece]['faces']
            piece_section.sections.append(stream_section)

            piece_sections.append(piece_section)

    return piece_sections, global_vertex_count, global_face_count, piece_index_obj, skin_list, skin_weights_cnt, skin_clones_cnt
예제 #6
0
def execute(dirpath, root_object, mesh_objects, model_locators, used_parts,
            used_materials):
    """Executes export of PIM file for given data.

    :param dirpath: directory path for PIM file
    :type dirpath: str
    :param root_object: Blender SCS Root empty object
    :type root_object: bpy.types.Object
    :param mesh_objects: all the meshes which should be exported for current game object
    :type mesh_objects: list of bpy.types.Object
    :param model_locators: all Blender empty objects which represents model locators and should be exported for current game object
    :type model_locators: list of bpy.types.Object
    :param used_parts: dictionary of used part names; parts used by this export are added to it
    :type used_parts: dict
    :param used_materials: list of used material names; materials used by this export are appended to it
    :type used_materials: list
    :return: True if export was successful; False otherwise
    :rtype: bool
    """

    print("\n************************************")
    print("**      SCS PIM Exporter          **")
    print("**      (c)2015 SCS Software      **")
    print("************************************\n")

    scs_globals = _get_scs_globals()

    # decide format version from global output type setting
    if scs_globals.output_type == "5":
        format_version = 5
        format_type = ""
    else:
        format_version = 1
        format_type = "def"

    pim_header = Header(format_type, format_version, root_object.name)
    pim_global = Globall(root_object.name + ".pis")

    pim_materials = collections.OrderedDict()  # dict of Material class instances representing used materials
    """:type: dict of Material"""
    pim_pieces = []  # list of Piece class instances representing mesh pieces
    """:type: list of Piece"""
    pim_parts = collections.OrderedDict()  # dict of Part class instances representing used parts
    """:type: dict of Part"""
    pim_locators = []  # list of Locator class instances representing model locators
    """:type: list of Locator"""

    objects_with_default_material = {}  # stores object names which has no material set
    missing_mappings_data = {}  # indicates if material doesn't have set any uv layer for export

    # create mesh object data sections
    for mesh_obj in mesh_objects:

        mesh_pieces = collections.OrderedDict()

        # get initial mesh, triangulate it and calculate split normals
        mesh = _object_utils.get_mesh(mesh_obj)
        _mesh_utils.bm_triangulate(mesh)
        mesh.calc_normals_split()

        # calculate transformation matrices for this object:
        # positions get export scale + axis conversion + transform relative to root
        pos_transf_mat = (Matrix.Scale(scs_globals.export_scale, 4) *
                          _scs_to_blend_matrix().inverted() *
                          root_object.matrix_world.inverted() *
                          mesh_obj.matrix_world)

        # normals/tangents only get the rotational part, so scale and
        # translation don't distort them
        nor_transf_mat = (
            _scs_to_blend_matrix().inverted() *
            root_object.matrix_world.inverted().to_quaternion().to_matrix().to_4x4() *
            mesh_obj.matrix_world.to_quaternion().to_matrix().to_4x4())

        missing_uv_layers = {}  # stores missing uvs specified by materials of this object
        missing_vcolor = False  # indicates if object is missing vertex colors

        for poly in mesh.polygons:

            mat_index = poly.material_index

            # check material existence and decide what material name and effect has to be used
            if (mat_index >= len(mesh_obj.material_slots) or
                    mesh_obj.material_slots[mat_index].material is None):  # no material or invalid index
                material = None
                pim_mat_name = "_not_existing_material_"
                pim_mat_effect = "eut2.dif"
                objects_with_default_material[mesh_obj.name] = 1
            else:
                material = mesh_obj.material_slots[mat_index].material
                pim_mat_name = material.name
                pim_mat_effect = material.scs_props.mat_effect_name

            # create new pim material if material with that name doesn't yet exists
            if pim_mat_name not in pim_materials:
                pim_material = Material(len(pim_materials), pim_mat_name,
                                        pim_mat_effect, material)
                pim_materials[pim_mat_name] = pim_material
                used_materials.append(pim_mat_name)

            # create new piece if piece with this material doesn't exists yet -> split to pieces by material
            if pim_mat_name not in mesh_pieces:
                mesh_pieces[pim_mat_name] = Piece(
                    len(pim_pieces) + len(mesh_pieces),
                    pim_materials[pim_mat_name])

                nmap_uv_layer = pim_materials[pim_mat_name].get_nmap_uv_name()
                if nmap_uv_layer:  # if there is uv layer used for normal maps then calculate tangents on it
                    mesh.calc_tangents(uvmap=nmap_uv_layer)

            mesh_piece = mesh_pieces[pim_mat_name]
            """:type: Piece"""

            piece_vert_indices = []
            for loop_i in poly.loop_indices:

                loop = mesh.loops[loop_i]
                """:type: bpy.types.MeshLoop"""
                vert_i = loop.vertex_index

                # get data of current vertex
                # 1. position -> mesh.vertices[loop.vertex_index].co
                position = tuple(pos_transf_mat * mesh.vertices[vert_i].co)

                # 2. normal -> loop.normal -> calc_normals_split() has to be called before
                normal = nor_transf_mat * loop.normal
                normal = tuple(Vector(normal).normalized())

                # 3. uvs -> uv_lay = mesh.uv_layers[0].data; uv_lay[loop_i].uv
                uvs = []
                uvs_aliases = []
                tex_coord_alias_map = pim_materials[pim_mat_name].get_tex_coord_map()
                if len(tex_coord_alias_map) < 1:  # no textures for current material effect
                    uvs.append((0.0, 0.0))
                    uvs_aliases.append(["_TEXCOORD0"])

                    # report missing mappings only on actual materials with texture entries
                    if material and pim_materials[pim_mat_name].uses_textures():
                        if material.name not in missing_mappings_data:
                            missing_mappings_data[material.name] = {}

                        if mesh_obj.name not in missing_mappings_data[material.name]:
                            missing_mappings_data[material.name][mesh_obj.name] = 1

                else:
                    for uv_lay_name in tex_coord_alias_map:

                        if uv_lay_name not in mesh.uv_layers:
                            uvs.append((0.0, 0.0))

                            # properly report missing uv layers where name of uv layout is key and materials that misses it are values
                            if uv_lay_name not in missing_uv_layers:
                                missing_uv_layers[uv_lay_name] = []

                            if pim_mat_name not in missing_uv_layers[uv_lay_name]:  # add material if not already there
                                missing_uv_layers[uv_lay_name].append(pim_mat_name)
                        else:
                            uv_lay = mesh.uv_layers[uv_lay_name]
                            uvs.append(
                                _change_to_scs_uv_coordinates(
                                    uv_lay.data[loop_i].uv))

                        aliases = []
                        for alias_index in tex_coord_alias_map[uv_lay_name]:
                            aliases.append("_TEXCOORD" + str(alias_index))

                        uvs_aliases.append(aliases)

                # 4. vcol -> vcol_lay = mesh.vertex_colors[0].data; vcol_lay[loop_i].color
                if len(mesh.vertex_colors) < 1:
                    vcol = (1.0, 1.0, 1.0, 1.0)
                    missing_vcolor = True
                else:
                    multiplier = mesh_obj.data.scs_props.vertex_color_multiplier
                    color = mesh.vertex_colors[0].data[loop_i].color
                    vcol = (color[0] * multiplier, color[1] * multiplier,
                            color[2] * multiplier, 1.0)

                # 5. tangent -> loop.tangent; loop.bitangent_sign -> calc_tangents() has to be called before
                if pim_materials[pim_mat_name].get_nmap_uv_name():  # calculate tangents only if needed
                    tangent = tuple(nor_transf_mat * loop.tangent)
                    tangent = tuple(Vector(tangent).normalized())
                    tangent = (tangent[0], tangent[1], tangent[2],
                               loop.bitangent_sign)
                else:
                    tangent = None

                # save internal vertex index to array to be able to construct triangle afterwards
                piece_vert_index = mesh_piece.add_vertex(
                    vert_i, position, normal, uvs, uvs_aliases, vcol, tangent)
                piece_vert_indices.append(piece_vert_index)

            # invert indices because of normals flip
            mesh_piece.add_triangle(tuple(piece_vert_indices[::-1]))

        # create part if it doesn't exists yet
        part_name = mesh_obj.scs_props.scs_part
        if part_name not in pim_parts:
            pim_parts[part_name] = Part(part_name)
            used_parts[part_name] = 1

        mesh_pieces = mesh_pieces.values()
        for piece in mesh_pieces:
            # put pieces of current mesh to global list
            pim_pieces.append(piece)

            # add pieces of current mesh to part
            pim_part = pim_parts[part_name]
            pim_part.add_piece(piece)

        # report missing data for each object
        if len(missing_uv_layers) > 0:
            for uv_lay_name in missing_uv_layers:
                lprint(
                    "W Object '%s' is missing UV layer '%s' specified by materials: %s\n",
                    (mesh_obj.name, uv_lay_name,
                     missing_uv_layers[uv_lay_name]))
        if missing_vcolor:
            lprint(
                "W Object '%s' is missing vertex color layer! Default color will be exported (1, 1, 1, 1)!",
                (mesh_obj.name, ))

    # report missing data for whole model
    if len(missing_mappings_data) > 0:
        for material_name in missing_mappings_data:
            lprint(
                "W Material '%s' is missing mapping data! Objects using it are exported with default UV:\n\t   %s",
                (material_name,
                 list(missing_mappings_data[material_name].keys())))
    if len(objects_with_default_material) > 0:
        lprint(
            "W Some objects don't use any material. Default material and UV mapping is used on them:\n\t   %s",
            (list(objects_with_default_material.keys()), ))

    # create locators data sections
    for loc_obj in model_locators:
        name = _name_utils.tokenize_name(loc_obj.name)
        hookup_string = loc_obj.scs_props.locator_model_hookup
        if hookup_string != "" and ":" in hookup_string:
            hookup = hookup_string.split(':', 1)[1].strip()
        else:
            if hookup_string != "":
                # FIX: format arguments are passed as a tuple, consistent with
                # every other lprint call in this module
                lprint("W The Hookup %r has no expected value!",
                       (hookup_string, ))
            hookup = None
        pos, qua, sca = _get_scs_transformation_components(
            loc_obj.matrix_world)

        # create locator object for export
        locator = Locator(len(pim_locators), name, hookup)
        locator.set_position(pos)
        locator.set_rotation(qua)
        locator.set_scale(sca)

        # create part if it doesn't exists yet
        part_name = loc_obj.scs_props.scs_part
        if part_name not in pim_parts:
            pim_parts[part_name] = Part(part_name)
            used_parts[part_name] = 1

        # add locator to part
        pim_part = pim_parts[part_name]
        pim_part.add_locator(locator)

        # add locator to locator list
        pim_locators.append(locator)

    # create container
    pim_container = [pim_header.get_as_section(), pim_global.get_as_section()]

    for mat_name in pim_materials:
        pim_container.append(pim_materials[mat_name].get_as_section())

    for pim_piece in pim_pieces:
        pim_container.append(pim_piece.get_as_section())

    for part_name in used_parts:
        pim_container.append(pim_parts[part_name].get_as_section())

    for locator in pim_locators:
        pim_container.append(locator.get_as_section())

    # write to file
    ind = "    "
    # FIX: build path with os.path.join (as the EF exporter variant does)
    # instead of manual os.sep concatenation
    pim_filepath = os.path.join(dirpath, root_object.name + ".pim")
    return _pix_container.write_data_to_file(pim_container, pim_filepath, ind)
예제 #7
0
def _fill_piece_sections_7(root_object, object_list, bone_list, scene, vg_list, used_materials, offset_matrix, scs_globals, output_type):
    """Fills up "Piece" sections for file version 7 (exchange format).

    For each mesh object one "Piece" section is built, containing: vertex
    streams ("_POSITION" plus optional vertex-group "_SCALARx" streams), a
    "Faces" section with per-face-vertex streams (normal, UVs, vertex
    colors), an optional "Edges" section with sharp edges and - when the
    SCS root is animated - old style skinning data gathered into skin_list.

    :param root_object: SCS Root Object the exported objects belong to
    :type root_object: bpy.types.Object
    :param object_list: mesh objects to export; each becomes one "Piece"
    :type object_list: list of bpy.types.Object
    :param bone_list: bones used to map vertex group names to bone indices for skinning
    :type bone_list: list
    :param scene: Blender scene to operate on (not referenced in this body)
    :type scene: bpy.types.Scene
    :param vg_list: names of vertex groups exported as "_SCALARx" vertex streams
    :type vg_list: list
    :param used_materials: all materials used in the 'SCS Game Object'; face material
        indices written to the file are indices into this list
    :type used_materials: list
    :param offset_matrix: matrix specifying the pivot point; its inverse is applied
        to vertex positions and face-vertex normals
    :type offset_matrix: mathutils.Matrix
    :param scs_globals: SCS Tools globals (export scale and export switches)
    :type scs_globals: GlobalSCSProps
    :param output_type: PIX output type forwarded to the RGB(A) stream getters
    :type output_type: str
    :return: (piece_sections, global_vertex_count, global_face_count, global_edge_count,
        piece_index_obj, skin_list, skin_weights_cnt, skin_clones_cnt)
    :rtype: tuple
    """
    piece_sections = []  # container for all "Pieces"
    global_vertex_count = 0
    global_face_count = 0
    global_edge_count = 0
    piece_index_obj = {}  # maps piece index -> source Blender object
    skin_list = []  # old style skinning entries: (position, [(bone_i, weight), ...], clones)
    skin_weights_cnt = 0
    skin_clones_cnt = 0
    for piece_index, obj in enumerate(object_list):
        mat_world = obj.matrix_world
        piece_index_obj[piece_index] = obj
        object_materials = _object_utils.get_object_materials(obj)  # get object materials

        # Get Object's Mesh data and list of temporarily disabled "Edge Split" Modifiers...
        mesh = _object_utils.get_mesh(obj)

        # VERTICES
        # TEMPORAL VERTEX STREAM DATA FORMAT:
        # example: ('_POSITION', [(0.0, 0.0, 0.0), (0.0, 0.0, 0.0), ...])
        # example: ('_SCALAR', [(0.0), (0.0), ...])

        stream_pos = ('_POSITION', [])
        # stream_nor = ('_NORMAL', [])
        # if scs_globals.export_vertex_groups:
        vg_layers_for_export, streams_vg = _object_utils.get_stream_vgs(obj)  # get Vertex Group layers (SCALARs)

        vertex_stream_count = 1
        vertex_streams = []
        stream_vg_container = []
        # print('bone_list: %s' % str(bone_list.keys))
        for vert_i, vert in enumerate(mesh.vertices):
            position = offset_matrix.inverted() * mesh.vertices[vert_i].co
            # scs_position = io_utils.change_to_scs_xyz_coordinates(mat_world * position, scs_globals.export_scale) ## POSITION
            # transform into SCS coordinate space and apply export scale
            scs_position = Matrix.Scale(scs_globals.export_scale,
                                        4) * _convert_utils.scs_to_blend_matrix().inverted() * mat_world * position  # POSITION
            stream_pos[1].append(scs_position)
            # stream_nor[1].append(io_utils.get_vertex_normal(mesh, vert_i))              # NORMAL
            if scs_globals.export_vertex_groups:
                if streams_vg:
                    vg_i = 0
                    for vg in vg_layers_for_export:  # weights (even unused) all vertices become 0.0
                        if vg.name in vg_list:
                            vg_weight = (_object_utils.get_vertex_group(vg, vert_i),)  # SCALARs
                            key = str("_SCALAR" + str(vg_i))
                            # first vertex creates the per-layer stream entry, later vertices append to it
                            if vg_i == len(stream_vg_container) and len(stream_vg_container) != len(vg_layers_for_export):
                                stream_vg_container.append((vg.name, key, [vg_weight]))
                            else:
                                stream_vg_container[vg_i][2].append(vg_weight)
                            vg_i += 1

            # SKINNING (OLD STYLE FOR PIM VER. 7)
            # if scs_globals.export_anim_file == 'anim':
            if root_object.scs_props.scs_root_animated == 'anim':
                skin_vector = scs_position  # NOTE: Vertex position - from Maya scaled *10 (old & unused in game engine)
                skin_weights = []
                # resolve (bone index, weight) pairs by matching vertex group names to bone names
                for group in vert.groups:
                    for vg in vg_layers_for_export:
                        if vg.index == group.group:
                            for bone_i, bone in enumerate(bone_list):
                                if vg.name == bone.name:
                                    skin_weights.append((bone_i, group.weight))
                                    skin_weights_cnt += 1
                                    # print('vert: %i - group: %r (%i) - %s' % (vert_i, vg.name, bone_i, str(group.weight)))
                                    break
                            break
                skin_clones = ((piece_index, vert_i), )
                skin_clones_cnt += 1
                skin_list.append((skin_vector, skin_weights, skin_clones))

        vertex_streams.append(stream_pos)
        # print('\nstream_pos:\n  %s' % str(stream_pos))
        # vertex_streams.append(stream_nor)
        # print('\nstream_nor:\n  %s' % str(stream_nor))
        for vg_stream in stream_vg_container:
            vertex_stream_count += 1
            vertex_streams.append(vg_stream)
            # print('\nvg_stream:\n  %s' % str(vg_stream))

        # FACES
        # TEMPORAL FACE STREAM DATA FORMAT:
        # faces = [face_data, face_data, ...]
        # face_data = (material, [vertex indices], [face-vertex streams])
        # face_streams = [('_UV0', [(0.0, 0.0), (0.0, 0.0), ...]), ...]
        # example: [(0, [0, 1, 2], [('_UV0', [(0.0, 0.0), (0.0, 0.0)]), ('_UV0', [(0.0, 0.0), (0.0, 0.0)])]), (), ...]

        faces = []
        face_cnt = 0
        uv_layers_exists = 1
        rgb_layers_exists = 1
        # print('used_materials: %s' % str(used_materials))
        for face_i, face in enumerate(mesh.polygons):
            face_cnt += 1
            streams_uv = None
            streams_vcolor = None
            # NOTE(review): the UV/RGB layer lookups below are loop-invariant yet re-done for
            # every face; they could likely be hoisted above the polygon loop - confirm
            if uv_layers_exists:
                requested_uv_layers, streams_uv = _mesh_utils.get_stream_uvs(mesh, scs_globals.active_uv_only)  # get UV layers (UVs)
                if not streams_uv:
                    uv_layers_exists = 0
            if rgb_layers_exists and scs_globals.export_vertex_color:
                if scs_globals.export_vertex_color_type_7 == 'rgb':
                    rgb_all_layers, streams_vcolor = _mesh_utils.get_stream_rgb(mesh, output_type, False)  # get Vertex Color layers (RGB)
                elif scs_globals.export_vertex_color_type_7 == 'rgbda':
                    rgb_all_layers, streams_vcolor = _mesh_utils.get_stream_rgb(mesh, output_type, True)  # get Vertex Color layers (
                    # RGBdA)
                else:
                    streams_vcolor = None  # TODO: Alpha from another layer
                if not streams_vcolor:
                    rgb_layers_exists = 0
            mat_index = used_materials.index(object_materials[face.material_index])
            # print('face-mat_index: %s; object_materials[f-m_i]: %s; used_materials.index(o_m[f-m_i]): %s' % (face.material_index,
            # object_materials[face.material_index], used_materials.index(object_materials[face.material_index])))
            face_verts = []
            for vert in face.vertices:
                face_verts.append(vert)
            face_verts = face_verts[::-1]  # revert vertex order in face
            # print('face_verts: %s' % str(face_verts))
            face_streams = []
            stream_fv_nor = ("_NORMAL", [])
            stream_fv_uv_container = []
            stream_fv_rgb_container = []
            stream_names = {}  # maps stream key (e.g. "_UV0") to the source layer name
            for loop_index in range(face.loop_start, face.loop_start + face.loop_total):
                # edge_index = mesh.loops[loop_index].edge_index
                vert_index = mesh.loops[loop_index].vertex_index
                # print('face i.: %s\tloop i.: %s\tedge i.: %s\tvert i.: %s' % (face_i, loop_index, edge_index, vert_index))
                # Normals
                stream_fv_nor[1].append(offset_matrix.inverted() * Vector(_mesh_utils.get_vertex_normal(mesh, vert_index)))
                # UV Layers
                if streams_uv:
                    for uv_i, uv_l in enumerate(requested_uv_layers):
                        uv_values = _mesh_utils.get_face_vertex_uv(uv_l.data, loop_index, uv_i)
                        key = str("_UV" + str(uv_i))
                        # first loop creates the per-layer stream entry, later loops append to it
                        if uv_i == len(stream_fv_uv_container) and len(stream_fv_uv_container) != len(requested_uv_layers):
                            stream_fv_uv_container.append((key, [uv_values]))
                            stream_names[key] = uv_l.name
                        else:
                            stream_fv_uv_container[uv_i][1].append(uv_values)
                # Vertex Color Layers (RGB)
                if scs_globals.export_vertex_color:
                    if streams_vcolor:
                        for rgb_i, rgb_l in enumerate(rgb_all_layers):
                            if scs_globals.export_vertex_color_type_7 == 'rgb':
                                rgb_values = _mesh_utils.get_face_vertex_color(rgb_l.data, loop_index, False, rgb_i)
                                key = str("_RGB" + str(rgb_i))
                            elif scs_globals.export_vertex_color_type_7 == 'rgbda':
                                rgb_values = _mesh_utils.get_face_vertex_color(rgb_l.data, loop_index, True, rgb_i)
                                key = str("_RGBA" + str(rgb_i))
                            else:
                                # NOTE(review): effectively unreachable - streams_vcolor is only truthy when the
                                # type is 'rgb' or 'rgbda'; if ever reached, rgb_values below would be unbound
                                streams_vcolor = None  # TODO: Alpha from another layer
                            if rgb_i == len(stream_fv_rgb_container) and len(stream_fv_rgb_container) != len(rgb_all_layers):
                                stream_fv_rgb_container.append((key, [rgb_values]))
                                stream_names[key] = rgb_l.name
                            else:
                                stream_fv_rgb_container[rgb_i][1].append(rgb_values)
            # Data Assembling
            face_streams.append(stream_fv_nor)
            for stream in stream_fv_uv_container:
                face_streams.append(stream)
            for stream in stream_fv_rgb_container:
                face_streams.append(stream)
            face_data = (mat_index, face_verts, face_streams)
            faces.append(face_data)

        # SHARP EDGES
        sharp_edges = []
        for edge in mesh.edges:
            if edge.use_edge_sharp:
                sharp_edges.append(edge.vertices[:])

        # BUILD FACE SECTION
        faces_container = _SectionData("Faces")
        # NOTE(review): faces[0] raises IndexError for a mesh without polygons -
        # confirm callers never pass face-less meshes here
        faces_container.props.append(("StreamCount", len(faces[0][2])))
        for face_i, face_data in enumerate(faces):
            face_container = _SectionData("Face")
            face_container.props.append(("Index", face_i))
            face_container.props.append(("Material", face_data[0]))
            face_container.props.append(("Indices", face_data[1]))
            for stream in face_data[2]:
                if stream[0] in stream_names:
                    face_container.sections.append(_pix_container.make_vertex_stream(stream, stream_names[stream[0]]))
                else:
                    face_container.sections.append(_pix_container.make_vertex_stream(stream))
            faces_container.sections.append(face_container)

        # BUILD PIECE SECTION
        piece_section = _SectionData("Piece")
        piece_section.props.append(("Index", piece_index))
        global_vertex_count += len(stream_pos[1])
        piece_section.props.append(("VertexCount", len(stream_pos[1])))
        global_face_count += face_cnt
        piece_section.props.append(("FaceCount", face_cnt))
        global_edge_count += len(sharp_edges)
        piece_section.props.append(("EdgeCount", len(sharp_edges)))
        piece_section.props.append(("StreamCount", vertex_stream_count))
        piece_section.props.append(("", ""))
        # vertex streams...
        for stream in vertex_streams:
            # vertex-group streams carry (name, key, data); plain streams carry (key, data)
            if len(stream) == 3:
                # print('\nstream:\n  %s' % str(stream))
                piece_section.sections.append(_pix_container.make_vertex_stream(stream[1:], stream[0]))
            else:
                piece_section.sections.append(_pix_container.make_vertex_stream(stream))
        # faces...
        piece_section.sections.append(faces_container)

        # BUILD AND STORE EDGE SECTION
        if sharp_edges:
            edges_container = _SectionData("Edges")
            for edge in sharp_edges:
                edges_container.data.append(edge)
            piece_section.sections.append(edges_container)

        # STORE PIECE SECTION
        piece_sections.append(piece_section)  # add a piece
    return piece_sections, global_vertex_count, global_face_count, global_edge_count, piece_index_obj, skin_list, skin_weights_cnt, skin_clones_cnt
# Example #8 (score: 0)
def _fill_piece_sections_5(root_object, object_list, bone_list, scene, used_materials, offset_matrix, scs_globals):
    """Fills up "Piece" sections for file version 5.

    Each object's geometry is sorted into per-material piece dictionaries by
    _get_geometry_dict(); for every resulting piece one "Piece" section is
    assembled with its vertex streams (adding default "_UV0"/"_RGBA" layers
    when the geometry carries none) and a "Triangles" face stream.

    :param root_object: SCS Root Object
    :type root_object: bpy.types.Object
    :param object_list: Object for export
    :type object_list: list of Objects
    :param bone_list: Bones for export
    :type bone_list: list
    :param scene: Blender Scene to operate on
    :type scene: bpy.types.Scene
    :param used_materials: All Materials used in 'SCS Game Object'
    :type used_materials: list
    :param offset_matrix: Matrix for specifying of pivot point
    :type offset_matrix: Matrix
    :param scs_globals: SCS Tools Globals
    :type scs_globals: GlobalSCSProps
    :return: (piece_sections, global_vertex_count, global_face_count, piece_index_obj,
        skin_list, skin_weights_cnt, skin_clones_cnt)
    :rtype: tuple
    """

    # NOTE(review): used_materials is declared unused here, yet it IS forwarded to
    # _get_geometry_dict() below - confirm whether this declaration is stale
    handle_unused_arg(__file__, _fill_piece_sections_5.__name__, "used_materials", used_materials)

    # NOTE(review): these three locals are not referenced anywhere else in this
    # function body - presumably leftovers from an older revision
    apply_modifiers = scs_globals.apply_modifiers
    exclude_edgesplit = scs_globals.exclude_edgesplit
    include_edgesplit = scs_globals.include_edgesplit
    piece_sections = []
    piece_index_obj = {}  # maps piece index -> source Blender object
    piece_index = global_vertex_count = global_face_count = 0

    # ----- START: SOLVE THIS!
    # NOTE(review): skin_list/skin_weights_cnt/skin_clones_cnt are overwritten on every
    # loop iteration below, so only the LAST object's skinning data is returned - confirm
    skin_list = []
    skin_weights_cnt = skin_clones_cnt = 0
    # -----   END: SOLVE THIS!

    print_info = False  # hard-coded debug switch for console dumps

    if print_info:
        print('used_materials: %s\n' % str(used_materials))
        print('object_list: %s\n' % str(object_list))

    # For each object...
    for obj_i, obj in enumerate(object_list):
        piece_index_obj[obj_i] = obj
        # Get all Materials from Object as they're set in material slots...
        object_materials = _object_utils.get_object_materials(obj)
        if print_info:
            print('  object_materials: %s' % str(object_materials))

        # Make Material dictionary (needed for getting rid of Material duplicities)...
        material_dict = _make_material_dict(object_materials)
        if print_info:
            for item in material_dict:
                print('    "%s" = %s' % (str(item), str(material_dict[item])))
            print('')

        # Get Object's Mesh data and list of temporarily disabled "Edge Split" Modifiers...
        mesh = _object_utils.get_mesh(obj)

        # SORT GEOMETRY
        piece_dict, skin_list, skin_weights_cnt, skin_clones_cnt = _get_geometry_dict(root_object,
                                                                                      obj,
                                                                                      mesh,
                                                                                      offset_matrix,
                                                                                      material_dict,
                                                                                      used_materials,
                                                                                      bone_list,
                                                                                      scs_globals)

        # DUMP (disabled debug print of the geometry dictionary)
        if 0:
            for piece in piece_dict:
                print('Pm: %r' % piece)
                for data in piece_dict[piece]:
                    print('  Da: %r' % str(data))
                    if data == 'hash_dict':
                        if len(piece_dict[piece][data]) > 0:
                            for val in piece_dict[piece][data]:
                                print('    HD: %s:%s' % (str(val), str(piece_dict[piece][data][val])))
                        else:
                            print('    NO "hash_dict" Data!')
                    elif data == 'verts':
                        if len(piece_dict[piece][data]) > 0:
                            for val in piece_dict[piece][data]:
                                print('    Ve: %s' % str(val))
                        else:
                            print('    NO "verts" Data!')
                    elif data == 'faces':
                        if len(piece_dict[piece][data]) > 0:
                            for val in piece_dict[piece][data]:
                                print('    Fa: %s' % str(val))
                        else:
                            print('    NO "faces" Data!')
                print('')
            print('')

        # CREATE SECTIONS
        for piece in piece_dict:
            vertex_count = len(piece_dict[piece]['verts'])
            global_vertex_count += vertex_count
            face_count = len(piece_dict[piece]['faces'])
            global_face_count += face_count
            stream_sections = []

            # VERTEX STREAMS
            # regroup per-vertex records into per-stream lists keyed by data type
            # (e.g. '_POSITION' -> [pos, pos, ...], '_UV0' -> [uv, uv, ...])
            verts_data = {}
            for val in piece_dict[piece]['verts']:
                facevert_common_data, facevert_unique_data = val
                for data_key in facevert_common_data:
                    # print(' data_key: %s' % str(data_key))
                    if data_key == '_VG':
                        # vertex group data is not emitted as a stream in ver. 5
                        pass
                    else:
                        if data_key not in verts_data:
                            verts_data[data_key] = []
                        verts_data[data_key].append(facevert_common_data[data_key])
                for data_key in facevert_unique_data:
                    # print(' data_key: %s' % str(data_key))
                    if data_key == '_UV':
                        # UV layers always get a numeric suffix ('_UV0', '_UV1', ...)
                        for layer_i, layer in enumerate(facevert_unique_data[data_key]):
                            layer_data_key = str(data_key + str(layer_i))
                            if layer_data_key not in verts_data:
                                verts_data[layer_data_key] = []
                            verts_data[layer_data_key].append(facevert_unique_data[data_key][layer])
                    if data_key == '_RGBA':
                        # RGBA keeps the bare '_RGBA' key for a single layer, suffixed keys otherwise
                        for layer_i, layer in enumerate(facevert_unique_data[data_key]):
                            if len(facevert_unique_data[data_key]) > 1:
                                layer_data_key = str(data_key + str(layer_i))
                            else:
                                layer_data_key = data_key
                            if layer_data_key not in verts_data:
                                verts_data[layer_data_key] = []
                            verts_data[layer_data_key].append(facevert_unique_data[data_key][layer])
            lprint('S verts_data: %s', (str(verts_data),))

            # emit streams in this fixed order so the output file is deterministic
            data_types = (
                '_POSITION', '_NORMAL', '_UV', '_UV0', '_UV1', '_UV2', '_UV3', '_UV4', '_UV5', '_UV6', '_UV7', '_UV8', '_UV9', '_RGBA', '_RGBA0',
                '_RGBA1', '_RGBA2', '_RGBA3', '_RGBA4', '_RGBA5', '_RGBA6', '_RGBA7', '_RGBA8', '_RGBA9', '_VG')
            add_uv = True
            add_rgba = False
            for data_type in data_types:
                # NOTE(review): this check is loop-invariant and only looks for the bare
                # '_RGBA' key; with multiple RGBA layers (keys '_RGBA0'...) a dummy layer
                # would still be added on top of the real ones - confirm intent
                if '_RGBA' not in verts_data:
                    add_rgba = True
                if data_type in verts_data:
                    if data_type.startswith('_UV'):
                        add_uv = False
                        stream_sections.append(_pix_container.make_stream_section(verts_data[data_type], data_type, (data_type,)))
                    else:
                        stream_sections.append(_pix_container.make_stream_section(verts_data[data_type], data_type, ()))

            # ADD DEFAULT UV LAYER
            if add_uv:
                lprint('I Adding default UV layer.')
                uv_dummy_data = []
                for vert in range(len(piece_dict[piece]['verts'])):
                    uv_dummy_data.append(Vector((0.0, 0.0)))
                stream_sections.append(_pix_container.make_stream_section(uv_dummy_data, '_UV0', ('_UV0',)))

            # ADD DEFAULT RGBA LAYER
            if add_rgba:
                lprint('I Adding default RGBA (vertex color) layer.')
                rgba_dummy_data = []
                for vert in range(len(piece_dict[piece]['verts'])):
                    rgba_dummy_data.append(Vector((1.0, 1.0, 1.0, 1.0)))
                stream_sections.append(_pix_container.make_stream_section(rgba_dummy_data, '_RGBA', ()))

            # PIECE PROPERTIES
            piece_section = _SectionData("Piece")
            piece_section.props.append(("Index", piece_index))
            piece_section.props.append(("Material", piece_dict[piece]['material_index']))
            piece_section.props.append(("VertexCount", vertex_count))
            piece_section.props.append(("TriangleCount", face_count))
            piece_section.props.append(("StreamCount", len(stream_sections)))
            piece_section.props.append(("", ""))
            piece_index += 1
            for stream_section in stream_sections:
                piece_section.sections.append(stream_section)

            # FACE STREAM
            stream_section = _SectionData("Triangles")
            stream_section.data = piece_dict[piece]['faces']
            piece_section.sections.append(stream_section)

            piece_sections.append(piece_section)

    return piece_sections, global_vertex_count, global_face_count, piece_index_obj, skin_list, skin_weights_cnt, skin_clones_cnt