def finalize(self):
        self.armatures.save()
        self.materials.save()
        try:
            path_reference_copy(self.copy_set, self.__copy_report)
        except PermissionError:
            self.warning('ERROR: While copying textures: %s' % self.copy_set, "textures")

        for file_src, file_dst in self.copy_set:
            try:
                size = os.path.getsize(file_dst)
            except FileNotFoundError:
                size = 0
            self.stats.textures.append({
                "name": os.path.basename(file_dst),
                "size": size,
            })
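For reference, path_reference_copy() from bpy_extras.io_utils takes a set of (source, destination) absolute path pairs plus an optional report callable; missing source files are reported through that callable. A minimal standalone sketch (the paths are made up):

from bpy_extras.io_utils import path_reference_copy

copy_set = {("/tmp/project/textures/diffuse.png",
             "/tmp/export/textures/diffuse.png")}
path_reference_copy(copy_set, report=print)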
Example #2
    def add_collada_image(self, img):

        #--- Add image to collada.images
        if img.name not in self.collada.images:

            #--- Copy image to a subdir in the export dir
            if self.copy_images:

                # Image source path and basename
                img_sourcepath = bpy.path.abspath(img.filepath)
                img_basename = bpy.path.basename(img_sourcepath)

                # Copy image to subdir in export dir
                export_filename = bpy.path.display_name_from_filepath(
                    self.export_path)
                subdir = export_filename + "_textures"

                img_destpath = os.path.join(self.export_dir, subdir,
                                            img_basename)
                copy_set = {(img_sourcepath, img_destpath)}
                io_utils.path_reference_copy(copy_set)

                # Relative path to the copied image
                path = io_utils.path_reference(img_destpath, self.source_dir,
                                               self.export_dir, 'RELATIVE')

            #--- Reference image where it is
            else:
                if self.use_relative_path:
                    path_mode = 'RELATIVE'
                else:
                    path_mode = 'ABSOLUTE'
                path = io_utils.path_reference(img.filepath, self.source_dir,
                                               self.export_dir, path_mode)

            #--- Create and add CImage object
            c_img = material.CImage(img.name, path)
            self.collada.images.append(c_img)

        #--- Image already added to collada.images
        else:
            c_img = self.collada.images[img.name]

        return c_img
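As an aside, io_utils.path_reference() can also do the copy bookkeeping itself: called with mode='COPY' it returns the (relative) reference and adds the (source, destination) pair to a caller-supplied copy_set, which is flushed later with path_reference_copy(). A minimal sketch of that alternative, assuming the same attributes the method above uses:

copy_set = set()
path = io_utils.path_reference(img.filepath, self.source_dir, self.export_dir,
                               mode='COPY', copy_subdir="textures",
                               copy_set=copy_set)
io_utils.path_reference_copy(copy_set)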
Example #3
    def add_collada_image(self, img):

        #--- Add image to collada.images
        if img.name not in self.collada.images:

            #--- Copy image to a subdir in the export dir
            if self.copy_images:

                # Image source path and basename
                img_sourcepath = bpy.path.abspath(img.filepath)
                img_basename = bpy.path.basename(img_sourcepath)

                # Copy image to a subdir in the export dir
                export_filename = bpy.path.display_name_from_filepath(self.export_path)
                #subdir = export_filename + "_textures"
                subdir = "textures"

                img_destpath = os.path.join(self.export_dir, subdir, img_basename)
                copy_set = {(img_sourcepath, img_destpath)}
                io_utils.path_reference_copy(copy_set)

                # Relative path to the copied image
                path = io_utils.path_reference(img_destpath, self.source_dir, self.export_dir, 'RELATIVE')

            #--- Reference image where it is
            else:
                if self.use_relative_path:
                    path_mode = 'RELATIVE'
                else:
                    path_mode = 'ABSOLUTE'
                path = io_utils.path_reference(img.filepath, self.source_dir, self.export_dir, path_mode)

            #--- Create and add CImage object
            c_img = material.CImage(img.name, path)
            self.collada.images.append(c_img)

        #--- Image already added to collada.images
        else:
            c_img = self.collada.images[img.name]

        return c_img
Example #4
def write_file(filepath, objects, depsgraph, scene,
               EXPORT_TRI=False,
               EXPORT_EDGES=False,
               EXPORT_SMOOTH_GROUPS=False,
               EXPORT_SMOOTH_GROUPS_BITFLAGS=False,
               EXPORT_NORMALS=False,
               EXPORT_UV=True,
               EXPORT_MTL=True,
               EXPORT_APPLY_MODIFIERS=True,
               EXPORT_APPLY_MODIFIERS_RENDER=False,
               EXPORT_BLEN_OBS=True,
               EXPORT_GROUP_BY_OB=False,
               EXPORT_GROUP_BY_MAT=False,
               EXPORT_KEEP_VERT_ORDER=False,
               EXPORT_POLYGROUPS=False,
               EXPORT_CURVE_AS_NURBS=True,
               EXPORT_GLOBAL_MATRIX=None,
               EXPORT_PATH_MODE='AUTO',
               progress=ProgressReport(),
               ):
    """
    Basic write function. The context and options must be already set
    This can be accessed externaly
    eg.
    write( 'c:\\test\\foobar.obj', Blender.Object.GetSelected() ) # Using default options.
    """
    if EXPORT_GLOBAL_MATRIX is None:
        EXPORT_GLOBAL_MATRIX = Matrix()

    def veckey3d(v):
        return round(v.x, 4), round(v.y, 4), round(v.z, 4)

    def veckey2d(v):
        return round(v[0], 4), round(v[1], 4)

    def findVertexGroupName(face, vWeightMap):
        """
        Searches the vertexDict to see what groups is assigned to a given face.
        We use a frequency system in order to sort out the name because a given vertex can
        belong to two or more groups at the same time. To find the right name for the face
        we list all the possible vertex group names with their frequency and then sort by
        frequency in descend order. The top element is the one shared by the highest number
        of vertices is the face's group
        """
        weightDict = {}
        for vert_index in face.vertices:
            vWeights = vWeightMap[vert_index]
            for vGroupName, weight in vWeights:
                weightDict[vGroupName] = weightDict.get(vGroupName, 0.0) + weight

        if weightDict:
            return max((weight, vGroupName) for vGroupName, weight in weightDict.items())[1]
        else:
            return '(null)'
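    # For example (hypothetical weights): if a face's three vertices carry
    # [("Arm", 0.9)], [("Arm", 0.4), ("Torso", 0.3)] and [("Torso", 0.2)],
    # the accumulated totals are Arm=1.3 and Torso=0.5, so findVertexGroupName
    # returns "Arm".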

    with ProgressReportSubstep(progress, 2, "OBJ Export path: %r" % filepath, "OBJ Export Finished") as subprogress1:
        with open(filepath, "w", encoding="utf8", newline="\n") as f:
            fw = f.write

            # Write Header
            fw('# Blender v%s OBJ File: %r\n' % (bpy.app.version_string, os.path.basename(bpy.data.filepath)))
            fw('# www.blender.org\n')

            # Tell the obj file what material file to use.
            if EXPORT_MTL:
                mtlfilepath = os.path.splitext(filepath)[0] + ".mtl"
                # filepath can contain non utf8 chars, use repr
                fw('mtllib %s\n' % repr(os.path.basename(mtlfilepath))[1:-1])

            # Initialize totals, these are updated each object
            totverts = totuvco = totno = 1

            face_vert_index = 1

            # A Dict of Materials
            # (material.name, image.name):matname_imagename # matname_imagename has gaps removed.
            mtl_dict = {}
            # Used to reduce the usage of matname_texname materials, which can become annoying
            # over repeated exports/imports, while still keeping material names unique per key.
            # mtl_name: (material.name, image.name)
            mtl_rev_dict = {}
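            # e.g. (hypothetical entries):
            #   mtl_dict[("My Material", None)] = ("My_Material", <bpy.types.Material>)
            #   mtl_rev_dict["My_Material"] = ("My Material", None)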

            copy_set = set()

            # Get all meshes
            subprogress1.enter_substeps(len(objects))
            for i, ob_main in enumerate(objects):
                # ignore dupli children
                if ob_main.parent and ob_main.parent.instance_type in {'VERTS', 'FACES'}:
                    subprogress1.step("Ignoring %s, dupli child..." % ob_main.name)
                    continue

                obs = [(ob_main, ob_main.matrix_world)]
                if ob_main.is_instancer:
                    obs += [(dup.instance_object.original, dup.matrix_world.copy())
                            for dup in depsgraph.object_instances
                            if dup.parent and dup.parent.original == ob_main]
                    # ~ print(ob_main.name, 'has', len(obs) - 1, 'dupli children')

                subprogress1.enter_substeps(len(obs))
                for ob, ob_mat in obs:
                    with ProgressReportSubstep(subprogress1, 6) as subprogress2:
                        uv_unique_count = no_unique_count = 0

                        # Nurbs curve support
                        if EXPORT_CURVE_AS_NURBS and test_nurbs_compat(ob):
                            ob_mat = EXPORT_GLOBAL_MATRIX @ ob_mat
                            totverts += write_nurb(fw, ob, ob_mat)
                            continue
                        # END NURBS

                        ob_for_convert = ob.evaluated_get(depsgraph) if EXPORT_APPLY_MODIFIERS else ob.original

                        try:
                            me = ob_for_convert.to_mesh()
                        except RuntimeError:
                            me = None

                        if me is None:
                            continue

                        # _must_ do this before applying transformation, else tessellation may differ
                        if EXPORT_TRI:
                            # _must_ do this first since it re-allocs arrays
                            mesh_triangulate(me)

                        me.transform(EXPORT_GLOBAL_MATRIX @ ob_mat)
                        # If negative scaling, we have to invert the normals...
                        if ob_mat.determinant() < 0.0:
                            me.flip_normals()

                        if EXPORT_UV:
                            faceuv = len(me.uv_layers) > 0
                            if faceuv:
                                uv_layer = me.uv_layers.active.data[:]
                        else:
                            faceuv = False

                        me_verts = me.vertices[:]

                        # Make our own list so it can be sorted to reduce context switching
                        face_index_pairs = [(face, index) for index, face in enumerate(me.polygons)]

                        if EXPORT_EDGES:
                            edges = me.edges
                        else:
                            edges = []

                        if not (len(face_index_pairs) + len(edges) + len(me.vertices)):  # Make sure there is something to write
                            # clean up
                            ob_for_convert.to_mesh_clear()
                            continue  # don't bother with this mesh.

                        if EXPORT_NORMALS and face_index_pairs:
                            me.calc_normals_split()
                            # No need to call me.free_normals_split later, as this mesh is deleted anyway!

                        loops = me.loops

                        if (EXPORT_SMOOTH_GROUPS or EXPORT_SMOOTH_GROUPS_BITFLAGS) and face_index_pairs:
                            smooth_groups, smooth_groups_tot = me.calc_smooth_groups(use_bitflags=EXPORT_SMOOTH_GROUPS_BITFLAGS)
                            if smooth_groups_tot <= 1:
                                smooth_groups, smooth_groups_tot = (), 0
                        else:
                            smooth_groups, smooth_groups_tot = (), 0

                        materials = me.materials[:]
                        material_names = [m.name if m else None for m in materials]

                        # avoid bad index errors
                        if not materials:
                            materials = [None]
                            material_names = [name_compat(None)]

                        # Sort by material, then images,
                        # so we don't over-context-switch in the obj file.
                        if EXPORT_KEEP_VERT_ORDER:
                            pass
                        else:
                            if len(materials) > 1:
                                if smooth_groups:
                                    sort_func = lambda a: (a[0].material_index,
                                                           smooth_groups[a[1]] if a[0].use_smooth else False)
                                else:
                                    sort_func = lambda a: (a[0].material_index,
                                                           a[0].use_smooth)
                            else:
                                # no materials
                                if smooth_groups:
                                    sort_func = lambda a: smooth_groups[a[1] if a[0].use_smooth else False]
                                else:
                                    sort_func = lambda a: a[0].use_smooth

                            face_index_pairs.sort(key=sort_func)

                            del sort_func

                        # Set the default mat to no material and no image.
                        contextMat = 0, 0  # Can never be this, so we will label a new material the first chance we get.
                        contextSmooth = None  # Will be either True or False; start with an invalid value to force the first switch.

                        if EXPORT_BLEN_OBS or EXPORT_GROUP_BY_OB:
                            name1 = ob.name
                            name2 = ob.data.name
                            if name1 == name2:
                                obnamestring = name_compat(name1)
                            else:
                                obnamestring = '%s_%s' % (name_compat(name1), name_compat(name2))

                            if EXPORT_BLEN_OBS:
                                fw('o %s\n' % obnamestring)  # Write Object name
                            else:  # if EXPORT_GROUP_BY_OB:
                                fw('g %s\n' % obnamestring)

                        subprogress2.step()

                        # Vert
                        for v in me_verts:
                            fw('v %.6f %.6f %.6f\n' % v.co[:])

                        subprogress2.step()

                        # UV
                        if faceuv:
                            # Pre-define these so the del below doesn't fail if the loop never runs.
                            uv = f_index = uv_index = uv_key = uv_val = uv_ls = None

                            uv_face_mapping = [None] * len(face_index_pairs)

                            uv_dict = {}
                            uv_get = uv_dict.get
                            for f, f_index in face_index_pairs:
                                uv_ls = uv_face_mapping[f_index] = []
                                for uv_index, l_index in enumerate(f.loop_indices):
                                    uv = uv_layer[l_index].uv
                                    # include the vertex index in the key so we don't share UV's between vertices,
                                    # allowed by the OBJ spec but can cause issues for other importers, see: T47010.

                                    # this works too, shared UV's for all verts
                                    #~ uv_key = veckey2d(uv)
                                    uv_key = loops[l_index].vertex_index, veckey2d(uv)

                                    uv_val = uv_get(uv_key)
                                    if uv_val is None:
                                        uv_val = uv_dict[uv_key] = uv_unique_count
                                        fw('vt %.6f %.6f\n' % uv[:])
                                        uv_unique_count += 1
                                    uv_ls.append(uv_val)

                            del uv_dict, uv, f_index, uv_index, uv_ls, uv_get, uv_key, uv_val
                            # Only need uv_unique_count and uv_face_mapping

                        subprogress2.step()

                        # NORMAL, Smooth/Non smoothed.
                        if EXPORT_NORMALS:
                            no_key = no_val = None
                            normals_to_idx = {}
                            no_get = normals_to_idx.get
                            loops_to_normals = [0] * len(loops)
                            for f, f_index in face_index_pairs:
                                for l_idx in f.loop_indices:
                                    no_key = veckey3d(loops[l_idx].normal)
                                    no_val = no_get(no_key)
                                    if no_val is None:
                                        no_val = normals_to_idx[no_key] = no_unique_count
                                        fw('vn %.4f %.4f %.4f\n' % no_key)
                                        no_unique_count += 1
                                    loops_to_normals[l_idx] = no_val
                            del normals_to_idx, no_get, no_key, no_val
                        else:
                            loops_to_normals = []

                        subprogress2.step()

                        # XXX
                        if EXPORT_POLYGROUPS:
                            # Retrieve the list of vertex groups
                            vertGroupNames = ob.vertex_groups.keys()
                            if vertGroupNames:
                                currentVGroup = ''
                                # Build a list, indexed by vertex, of the (group name, weight) pairs each vertex belongs to
                                vgroupsMap = [[] for _i in range(len(me_verts))]
                                for v_idx, v_ls in enumerate(vgroupsMap):
                                    v_ls[:] = [(vertGroupNames[g.group], g.weight) for g in me_verts[v_idx].groups]

                        for f, f_index in face_index_pairs:
                            f_smooth = f.use_smooth
                            if f_smooth and smooth_groups:
                                f_smooth = smooth_groups[f_index]
                            f_mat = min(f.material_index, len(materials) - 1)

                            # MAKE KEY
                            key = material_names[f_mat], None  # No image, use None instead.

                            # Write the vertex group
                            if EXPORT_POLYGROUPS:
                                if vertGroupNames:
                                    # find what vertex group the face belongs to
                                    vgroup_of_face = findVertexGroupName(f, vgroupsMap)
                                    if vgroup_of_face != currentVGroup:
                                        currentVGroup = vgroup_of_face
                                        fw('g %s\n' % vgroup_of_face)

                            # CHECK FOR CONTEXT SWITCH
                            if key == contextMat:
                                pass  # Context already switched, don't do anything
                            else:
                                if key[0] is None and key[1] is None:
                                    # Write a null material, since we know the context has changed.
                                    if EXPORT_GROUP_BY_MAT:
                                        # can be mat_image or (null)
                                        fw("g %s_%s\n" % (name_compat(ob.name), name_compat(ob.data.name)))
                                    if EXPORT_MTL:
                                        fw("usemtl (null)\n")  # mat, image

                                else:
                                    mat_data = mtl_dict.get(key)
                                    if not mat_data:
                                        # First add to global dict so we can export to mtl
                                        # Then write mtl

                                        # Make a new name from the mat and image name,
                                        # converting any spaces to underscores with name_compat.

                                        # If there is no image, don't bother adding it to the name.
                                        # Try to avoid as much as possible adding texname (or other things)
                                        # to the mtl name (see [#32102])...
                                        mtl_name = "%s" % name_compat(key[0])
                                        if mtl_rev_dict.get(mtl_name, None) not in {key, None}:
                                            if key[1] is None:
                                                tmp_ext = "_NONE"
                                            else:
                                                tmp_ext = "_%s" % name_compat(key[1])
                                            i = 0
                                            while mtl_rev_dict.get(mtl_name + tmp_ext, None) not in {key, None}:
                                                i += 1
                                                tmp_ext = "_%3d" % i
                                            mtl_name += tmp_ext
                                        mat_data = mtl_dict[key] = mtl_name, materials[f_mat]
                                        mtl_rev_dict[mtl_name] = key

                                    if EXPORT_GROUP_BY_MAT:
                                        # can be mat_image or (null)
                                        fw("g %s_%s_%s\n" % (name_compat(ob.name), name_compat(ob.data.name), mat_data[0]))
                                    if EXPORT_MTL:
                                        fw("usemtl %s\n" % mat_data[0])  # can be mat_image or (null)

                            contextMat = key
                            if f_smooth != contextSmooth:
                                if f_smooth:  # smoothing on (or smooth group changed)
                                    if smooth_groups:
                                        f_smooth = smooth_groups[f_index]
                                        fw('s %d\n' % f_smooth)
                                    else:
                                        fw('s 1\n')
                                else:  # smoothing off
                                    fw('s off\n')
                                contextSmooth = f_smooth

                            f_v = [(vi, me_verts[v_idx], l_idx)
                                   for vi, (v_idx, l_idx) in enumerate(zip(f.vertices, f.loop_indices))]

                            fw('f')
                            if faceuv:
                                if EXPORT_NORMALS:
                                    for vi, v, li in f_v:
                                        fw(" %d/%d/%d" % (totverts + v.index,
                                                          totuvco + uv_face_mapping[f_index][vi],
                                                          totno + loops_to_normals[li],
                                                          ))  # vert, uv, normal
                                else:  # No Normals
                                    for vi, v, li in f_v:
                                        fw(" %d/%d" % (totverts + v.index,
                                                       totuvco + uv_face_mapping[f_index][vi],
                                                       ))  # vert, uv

                                face_vert_index += len(f_v)

                            else:  # No UV's
                                if EXPORT_NORMALS:
                                    for vi, v, li in f_v:
                                        fw(" %d//%d" % (totverts + v.index, totno + loops_to_normals[li]))
                                else:  # No Normals
                                    for vi, v, li in f_v:
                                        fw(" %d" % (totverts + v.index))

                            fw('\n')

                        subprogress2.step()

                        # Write edges.
                        if EXPORT_EDGES:
                            for ed in edges:
                                if ed.is_loose:
                                    fw('l %d %d\n' % (totverts + ed.vertices[0], totverts + ed.vertices[1]))

                        # Make the indices global rather than per-mesh
                        totverts += len(me_verts)
                        totuvco += uv_unique_count
                        totno += no_unique_count

                        # clean up
                        ob_for_convert.to_mesh_clear()

                subprogress1.leave_substeps("Finished writing geometry of '%s'." % ob_main.name)
            subprogress1.leave_substeps()

        subprogress1.step("Finished exporting geometry, now exporting materials")

        # Now we have all our materials, save them
        if EXPORT_MTL:
            write_mtl(scene, mtlfilepath, EXPORT_PATH_MODE, copy_set, mtl_dict)

        # copy all collected files.
        io_utils.path_reference_copy(copy_set)
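The docstring's Blender.Object.GetSelected() call dates from the 2.4x API; with the 2.8x+ API used in the body above, a caller might look like the following sketch (operator context and option values are assumptions, and it relies on the same module-level imports as write_file itself):

def save(context, filepath):
    depsgraph = context.evaluated_depsgraph_get()
    with ProgressReport(context.window_manager) as progress:
        write_file(filepath, context.selected_objects, depsgraph, context.scene,
                   EXPORT_TRI=True,
                   EXPORT_NORMALS=True,
                   EXPORT_GLOBAL_MATRIX=Matrix(),
                   progress=progress)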
Example #5
def write_file(
        filepath,
        objects,
        depsgraph,
        scene,
        EXPORT_TRI=False,
        EXPORT_EDGES=False,
        EXPORT_SMOOTH_GROUPS=False,
        EXPORT_SMOOTH_GROUPS_BITFLAGS=False,
        EXPORT_NORMALS=False,
        EXPORT_UV=True,
        EXPORT_APPLY_MODIFIERS=True,
        EXPORT_APPLY_MODIFIERS_RENDER=False,
        EXPORT_GROUP_BY_OB=False,
        EXPORT_KEEP_VERT_ORDER=False,
        EXPORT_POLYGROUPS=False,
        EXPORT_GLOBAL_MATRIX=None,
        EXPORT_PATH_MODE='AUTO',
        progress=ProgressReport(),
):
    """
	Basic write function. The context and options must be already set
	This can be accessed externaly
	eg.
	write( 'c:\\test\\foobar.brk', Blender.Object.GetSelected() ) # Using default options.
	"""
    if EXPORT_GLOBAL_MATRIX is None:
        EXPORT_GLOBAL_MATRIX = Matrix()

    def veckey3d(v):
        return round(v.x, 4), round(v.y, 4), round(v.z, 4)

    def veckey2d(v):
        return round(v[0], 4), round(v[1], 4)

    def findVertexGroupName(face, vWeightMap):
        """
		Searches the vertexDict to see what groups is assigned to a given face.
		We use a frequency system in order to sort out the name because a given vertex can
		belong to two or more groups at the same time. To find the right name for the face
		we list all the possible vertex group names with their frequency and then sort by
		frequency in descend order. The top element is the one shared by the highest number
		of vertices is the face's group
		"""
        weightDict = {}
        for vert_index in face.vertices:
            vWeights = vWeightMap[vert_index]
            for vGroupName, weight in vWeights:
                weightDict[vGroupName] = weightDict.get(vGroupName,
                                                        0.0) + weight

        if weightDict:
            return max((weight, vGroupName)
                       for vGroupName, weight in weightDict.items())[1]
        else:
            return '(null)'

    with ProgressReportSubstep(progress, 2, "BRK Export path: %r" % filepath,
                               "BRK Export Finished") as subprogress1:
        with open(filepath, "w", encoding="utf8", newline="\n") as f:
            fw = f.write

            # Write Header
            fw('# BrickCAD v%s BRK File: %r\n' %
               (bpy.app.version_string, os.path.basename(bpy.data.filepath)))

            # Initialize totals, these are updated each object
            totverts = totuvco = totno = 1

            face_vert_index = 1

            copy_set = set()

            # Get all meshes
            subprogress1.enter_substeps(len(objects))
            for i, ob_main in enumerate(objects):
                # ignore dupli children
                if ob_main.parent and ob_main.parent.instance_type in {
                        'VERTS', 'FACES'
                }:
                    subprogress1.step("Ignoring %s, dupli child..." %
                                      ob_main.name)
                    continue

                obs = [(ob_main, ob_main.matrix_world)]
                if ob_main.is_instancer:
                    obs += [(dup.instance_object.original,
                             dup.matrix_world.copy())
                            for dup in depsgraph.object_instances
                            if dup.parent and dup.parent.original == ob_main]
                    # ~ print(ob_main.name, 'has', len(obs) - 1, 'dupli children')

                subprogress1.enter_substeps(len(obs))
                for ob, ob_mat in obs:
                    #print(ob.type)
                    with ProgressReportSubstep(subprogress1,
                                               6) as subprogress2:
                        uv_unique_count = no_unique_count = 0

                        ob_for_convert = ob.evaluated_get(
                            depsgraph
                        ) if EXPORT_APPLY_MODIFIERS else ob.original

                        try:
                            me = ob_for_convert.to_mesh()
                        except RuntimeError:
                            me = None

                        if me is None:
                            # The object is an empty; it is used to mark the location of a stud.
                            obnamestring = name_compat(ob.name)

                            #check if object has a parent
                            parent = ob.parent
                            #print("Parent: " + str(parent.name))

                            if parent is None:
                                fw('st %s %.6f %.6f %.6f\n' %
                                   (obnamestring,
                                    ob.matrix_world.translation[0],
                                    ob.matrix_world.translation[1],
                                    ob.matrix_world.translation[2])
                                   )  # Write Object name and location
                            else:
                                fw(
                                    'st %s %.6f %.6f %.6f p %s\n' %
                                    (obnamestring,
                                     ob.matrix_world.translation[0],
                                     ob.matrix_world.translation[1],
                                     ob.matrix_world.translation[2],
                                     name_compat(parent.name))
                                )  # Write Object name, location, and parent name
                            continue

                        # _must_ do this before applying transformation, else tessellation may differ
                        if EXPORT_TRI:
                            # _must_ do this first since it re-allocs arrays
                            mesh_triangulate(me)

                        me.transform(EXPORT_GLOBAL_MATRIX @ ob_mat)
                        # If negative scaling, we have to invert the normals...
                        if ob_mat.determinant() < 0.0:
                            me.flip_normals()

                        if EXPORT_UV:
                            faceuv = len(me.uv_layers) > 0
                            if faceuv:
                                uv_layer = me.uv_layers.active.data[:]
                        else:
                            faceuv = False

                        me_verts = me.vertices[:]

                        # Make our own list so it can be sorted to reduce context switching
                        face_index_pairs = [
                            (face, index)
                            for index, face in enumerate(me.polygons)
                        ]

                        if EXPORT_EDGES:
                            edges = me.edges
                        else:
                            edges = []

                        if not (len(face_index_pairs) + len(edges) +
                                len(me.vertices)
                                ):  # Make sure there is something to write
                            # clean up
                            bpy.data.meshes.remove(me)
                            continue  # don't bother with this mesh.

                        if EXPORT_NORMALS and face_index_pairs:
                            me.calc_normals_split()
                            # No need to call me.free_normals_split later, as this mesh is deleted anyway!

                        loops = me.loops

                        if (EXPORT_SMOOTH_GROUPS
                                or EXPORT_SMOOTH_GROUPS_BITFLAGS
                            ) and face_index_pairs:
                            smooth_groups, smooth_groups_tot = me.calc_smooth_groups(
                                use_bitflags=EXPORT_SMOOTH_GROUPS_BITFLAGS)
                            if smooth_groups_tot <= 1:
                                smooth_groups, smooth_groups_tot = (), 0
                        else:
                            smooth_groups, smooth_groups_tot = (), 0

                        contextSmooth = None  # Will be either True or False; start with an invalid value to force the first switch.

                        name = ob.name
                        obnamestring = name_compat(name)

                        fw('o %s\n' % obnamestring)  # Write Object name

                        subprogress2.step()

                        # Vert
                        for v in me_verts:
                            fw('v %.6f %.6f %.6f\n' % v.co[:])

                        subprogress2.step()

                        # UV
                        if faceuv:
                            # Pre-define these so the del below doesn't fail if the loop never runs.
                            uv = f_index = uv_index = uv_key = uv_val = uv_ls = None

                            uv_face_mapping = [None] * len(face_index_pairs)

                            uv_dict = {}
                            uv_get = uv_dict.get
                            for f, f_index in face_index_pairs:
                                uv_ls = uv_face_mapping[f_index] = []
                                for uv_index, l_index in enumerate(
                                        f.loop_indices):
                                    uv = uv_layer[l_index].uv
                                    # include the vertex index in the key so we don't share UV's between vertices,
                                    # allowed by the OBJ spec but can cause issues for other importers, see: T47010.

                                    # this works too, shared UV's for all verts
                                    #~ uv_key = veckey2d(uv)
                                    uv_key = loops[
                                        l_index].vertex_index, veckey2d(uv)

                                    uv_val = uv_get(uv_key)
                                    if uv_val is None:
                                        uv_val = uv_dict[
                                            uv_key] = uv_unique_count
                                        fw('vt %.6f %.6f\n' % uv[:])
                                        uv_unique_count += 1
                                    uv_ls.append(uv_val)

                            del uv_dict, uv, f_index, uv_index, uv_ls, uv_get, uv_key, uv_val
                            # Only need uv_unique_count and uv_face_mapping

                        subprogress2.step()

                        # NORMAL, Smooth/Non smoothed.
                        if EXPORT_NORMALS:
                            no_key = no_val = None
                            normals_to_idx = {}
                            no_get = normals_to_idx.get
                            loops_to_normals = [0] * len(loops)
                            for f, f_index in face_index_pairs:
                                for l_idx in f.loop_indices:
                                    no_key = veckey3d(loops[l_idx].normal)
                                    no_val = no_get(no_key)
                                    if no_val is None:
                                        no_val = normals_to_idx[
                                            no_key] = no_unique_count
                                        fw('vn %.4f %.4f %.4f\n' % no_key)
                                        no_unique_count += 1
                                    loops_to_normals[l_idx] = no_val
                            del normals_to_idx, no_get, no_key, no_val
                        else:
                            loops_to_normals = []

                        subprogress2.step()

                        # XXX
                        if EXPORT_POLYGROUPS:
                            # Retrieve the list of vertex groups
                            vertGroupNames = ob.vertex_groups.keys()
                            if vertGroupNames:
                                currentVGroup = ''
                                # Build a list, indexed by vertex, of the (group name, weight) pairs each vertex belongs to
                                vgroupsMap = [[]
                                              for _i in range(len(me_verts))]
                                for v_idx, v_ls in enumerate(vgroupsMap):
                                    v_ls[:] = [(vertGroupNames[g.group],
                                                g.weight)
                                               for g in me_verts[v_idx].groups]

                        for f, f_index in face_index_pairs:
                            f_smooth = f.use_smooth
                            if f_smooth and smooth_groups:
                                f_smooth = smooth_groups[f_index]

                            # Write the vertex group
                            if EXPORT_POLYGROUPS:
                                if vertGroupNames:
                                    # find what vertex group the face belongs to
                                    vgroup_of_face = findVertexGroupName(
                                        f, vgroupsMap)
                                    if vgroup_of_face != currentVGroup:
                                        currentVGroup = vgroup_of_face
                                        fw('g %s\n' % vgroup_of_face)

                            if f_smooth != contextSmooth:
                                if f_smooth:  # smoothing on (or smooth group changed)
                                    if smooth_groups:
                                        f_smooth = smooth_groups[f_index]
                                        fw('s %d\n' % f_smooth)
                                    else:
                                        fw('s 1\n')
                                else:  # smoothing off
                                    fw('s off\n')
                                contextSmooth = f_smooth

                            f_v = [(vi, me_verts[v_idx], l_idx)
                                   for vi, (v_idx, l_idx) in enumerate(
                                       zip(f.vertices, f.loop_indices))]

                            fw('f')
                            if faceuv:
                                if EXPORT_NORMALS:
                                    for vi, v, li in f_v:
                                        fw(" %d/%d/%d" % (
                                            totverts + v.index,
                                            totuvco +
                                            uv_face_mapping[f_index][vi],
                                            totno + loops_to_normals[li],
                                        ))  # vert, uv, normal
                                else:  # No Normals
                                    for vi, v, li in f_v:
                                        fw(" %d/%d" % (
                                            totverts + v.index,
                                            totuvco +
                                            uv_face_mapping[f_index][vi],
                                        ))  # vert, uv

                                face_vert_index += len(f_v)

                            else:  # No UV's
                                if EXPORT_NORMALS:
                                    for vi, v, li in f_v:
                                        fw(" %d//%d" %
                                           (totverts + v.index,
                                            totno + loops_to_normals[li]))
                                else:  # No Normals
                                    for vi, v, li in f_v:
                                        fw(" %d" % (totverts + v.index))

                            fw('\n')

                        subprogress2.step()

                        # Write edges.
                        if EXPORT_EDGES:
                            for ed in edges:
                                if ed.is_loose:
                                    fw('l %d %d\n' %
                                       (totverts + ed.vertices[0],
                                        totverts + ed.vertices[1]))

                        # Make the indices global rather than per-mesh
                        totverts += len(me_verts)
                        totuvco += uv_unique_count
                        totno += no_unique_count

                        # clean up
                        ob_for_convert.to_mesh_clear()

                subprogress1.leave_substeps(
                    "Finished writing geometry of '%s'." % ob_main.name)
            subprogress1.leave_substeps()

        # copy all collected files.
        io_utils.path_reference_copy(copy_set)
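Both this exporter and the OBJ one above rely on a name_compat() helper that is not shown in the snippets; in Blender's stock OBJ exporter it simply maps None to 'None' and replaces spaces with underscores, roughly:

def name_compat(name):
    if name is None:
        return 'None'
    return name.replace(' ', '_')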
Example #6
def write_file(
        filepath,
        objects,
        depsgraph,
        scene,
        EXPORT_TRI=False,
        EXPORT_EDGES=False,
        EXPORT_SMOOTH_GROUPS=False,
        EXPORT_SMOOTH_GROUPS_BITFLAGS=False,
        EXPORT_NORMALS=False,
        EXPORT_UV=True,
        EXPORT_MTL=True,
        EXPORT_APPLY_MODIFIERS=True,
        EXPORT_APPLY_MODIFIERS_RENDER=False,
        EXPORT_BLEN_OBS=True,
        EXPORT_GROUP_BY_OB=False,
        EXPORT_GROUP_BY_MAT=False,
        EXPORT_KEEP_VERT_ORDER=False,
        EXPORT_POLYGROUPS=False,
        EXPORT_CURVE_AS_NURBS=True,
        EXPORT_GLOBAL_MATRIX=None,
        EXPORT_PATH_MODE='AUTO',
        progress=ProgressReport(),
        EXPORT_VERTEX_COLORS=True,
):
    """
    Basic write function. The context and options must be already set
    This can be accessed externaly
    eg.
    write( 'c:\\test\\foobar.obj', Blender.Object.GetSelected() ) # Using default options.
    """
    if EXPORT_GLOBAL_MATRIX is None:
        EXPORT_GLOBAL_MATRIX = Matrix()

    with ProgressReportSubstep(progress, 2, "WC1 Export path: %r" % filepath,
                               "WC1 Export Finished") as subprogress1:
        with open(filepath, "wb") as fhnd:
            fw = fhnd.write

            # Write placeholder Header
            fw(get_binary_u64(0))

            # Initialize totals, these are updated each object
            totverts = totmeshes = 0

            face_vert_index = 1

            copy_set = set()

            # Get all meshes
            subprogress1.enter_substeps(len(objects))
            for i, ob_main in enumerate(objects):

                # ignore dupli children
                if ob_main.parent and ob_main.parent.instance_type in {
                        'VERTS', 'FACES'
                }:
                    subprogress1.step("Ignoring %s, dupli child..." %
                                      ob_main.name)
                    continue

                obs = [(ob_main, ob_main.matrix_world)]
                if ob_main.is_instancer:
                    obs += [(dup.instance_object.original,
                             dup.matrix_world.copy())
                            for dup in depsgraph.object_instances
                            if dup.parent and dup.parent.original == ob_main]
                    # ~ print(ob_main.name, 'has', len(obs) - 1, 'dupli children')

                subprogress1.enter_substeps(len(obs))

                # write Header
                end_pos = fhnd.tell()
                fhnd.seek(0)
                fw(get_binary_u64(len(obs)))
                fhnd.seek(end_pos)

                for ob, ob_mat in obs:

                    with ProgressReportSubstep(subprogress1,
                                               5) as subprogress2:
                        object_pos = fhnd.tell()

                        # Write placeholder Object Header
                        ## vertex count
                        fw(get_binary_u64(0))
                        ## index count
                        fw(get_binary_u64(0))

                        ## flags
                        flags = 0
                        if EXPORT_VERTEX_COLORS:
                            flags |= 1 << 0
                        fw(get_binary_u64(flags))

                        ob_for_convert = ob.evaluated_get(
                            depsgraph
                        ) if EXPORT_APPLY_MODIFIERS else ob.original

                        try:
                            me = ob_for_convert.to_mesh()
                        except RuntimeError:
                            me = None

                        if me is None:
                            continue

                        # _must_ do this before applying transformation, else tessellation may differ
                        if EXPORT_TRI:
                            # _must_ do this first since it re-allocs arrays
                            mesh_triangulate(me)

                        me.transform(EXPORT_GLOBAL_MATRIX @ ob_mat)
                        # If negative scaling, we have to invert the normals...
                        if ob_mat.determinant() < 0.0:
                            me.flip_normals()

                        me_verts = me.vertices[:]
                        me_cols = me.vertex_colors[:]

                        # Make our own list so it can be sorted to reduce context switching
                        face_index_pairs = [
                            (face, index)
                            for index, face in enumerate(me.polygons)
                        ]

                        if not (len(face_index_pairs) + len(me.vertices)
                                ):  # Make sure there is something to write
                            # clean up
                            bpy.data.meshes.remove(me)
                            continue  # don't bother with this mesh.

                        subprogress2.step()

                        # Vert
                        for v in me_verts:
                            fw(get_binary_f64(v.co[0]))
                            fw(get_binary_f64(-v.co[1]))
                            fw(get_binary_f64(v.co[2]))

                        subprogress2.step()

                        for col_layer in me_cols:
                            for col in col_layer.data:
                                fw(get_binary_f64(col.color[0]))
                                fw(get_binary_f64(col.color[1]))
                                fw(get_binary_f64(col.color[2]))
                                fw(get_binary_f64(col.color[3]))

                        subprogress2.step()

                        obj_indices = 0
                        for f, f_index in face_index_pairs:

                            f_v = [(vi, me_verts[v_idx], l_idx)
                                   for vi, (v_idx, l_idx) in enumerate(
                                       zip(f.vertices, f.loop_indices))]

                            for vi, v, li in f_v:
                                fw(get_binary_u64(totverts + v.index))
                                obj_indices += 1

                        subprogress2.step()

                        # Make the indices global rather than per-mesh
                        totverts += len(me_verts)

                        # write object header
                        end_pos = fhnd.tell()
                        fhnd.seek(object_pos)
                        fw(get_binary_u64(len(me_verts)))
                        fw(get_binary_u64(obj_indices))
                        fhnd.seek(end_pos)

                        # clean up
                        ob_for_convert.to_mesh_clear()

                subprogress1.leave_substeps(
                    "Finished writing geometry of '%s'." % ob_main.name)

            subprogress1.leave_substeps()

        subprogress1.step(
            "Finished exporting geometry, now exporting materials")

        # copy all collected files.
        io_utils.path_reference_copy(copy_set)
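The get_binary_u64()/get_binary_f64() helpers are not part of the snippet; assuming little-endian packing (an assumption, the real byte order may differ), a minimal sketch would be:

import struct

def get_binary_u64(value):
    # unsigned 64-bit integer, little-endian
    return struct.pack('<Q', value)

def get_binary_f64(value):
    # IEEE-754 double, little-endian
    return struct.pack('<d', value)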
def blender_to_skm(mesh, rig, WRITE_MDF, progress=ProgressReport()):
    # progress: a bpy_extras.wm_utils.progress_report.ProgressReport, used below for status updates
    skm_data = SkmFile()

    contextMaterial = None
    context_mat_wrap = None
    contextMatrix_rot = None

    contextObName = "ToEE Model"
    rigObName = "ToEE Rig"
    armatureName = "ToEE Model Skeleton"

    TEXTURE_DICT = {}
    MATDICT = {}
    WRAPDICT = {}
    copy_set = set()  # set of files to copy (texture images...)

    def mesh_to_skm_mesh(skm_data):
        '''
        Converts the 'ToEE Model' Blender mesh's vertex/face/UV data into SKM data
        '''
        bmesh = bpy.data.meshes['ToEE Model']

        vertex_count = len(bmesh.vertices)
        face_count = len(bmesh.polygons)

        print("%d vertices, %d faces" % (vertex_count, face_count))

        # Create vertices
        for vtx in bmesh.vertices:
            skm_vtx = SkmVertex()
            skm_vtx.pos = vtx.co.to_tuple() + (0.0, )
            skm_vtx.normal = vtx.normal.to_tuple() + (0.0, )
            skm_data.vertex_data.append(skm_vtx)
        assert len(skm_data.vertex_data) == vertex_count

        # Create faces (triangles). Note: faces must be triangles only!
        for p in bmesh.polygons:
            loop_start = p.loop_start
            loop_total = p.loop_total
            assert loop_total == 3, "Faces must be triangles!"
            face = bmesh.loops[loop_start + 0].vertex_index, bmesh.loops[
                loop_start + 1].vertex_index, bmesh.loops[loop_start +
                                                          2].vertex_index
            skm_face = SkmFace()
            skm_face.vertex_ids = face
            skm_data.face_data.append(skm_face)
        assert len(skm_data.face_data) == face_count

        # Get UV coordinates for each polygon's vertices
        print("Setting UVs")
        uvl = bmesh.uv_layers[0].data[:]
        for fidx, fa in enumerate(skm_data.face_data):
            fa.material_id = bmesh.polygons[fidx].material_index

        for fidx, pl in enumerate(bmesh.polygons):
            face = skm_data.face_data[fidx]
            v1, v2, v3 = face.vertex_ids

            skm_data.vertex_data[v1].uv = uvl[pl.loop_start + 0].uv
            skm_data.vertex_data[v2].uv = uvl[pl.loop_start + 1].uv
            skm_data.vertex_data[v3].uv = uvl[pl.loop_start + 2].uv

    def rig_to_skm_bones(skm_data):
        '''
        Converts rig/armature objects to SKM Bones
        '''
        # Bones
        print("Getting bones")
        obj = bpy.data.objects[contextObName]
        barm = bpy.data.armatures[armatureName]
        rig = bpy.data.objects[rigObName]

        bpy.context.view_layer.objects.active = rig
        rig.select_set(True)
        bpy.ops.object.mode_set(
            mode='EDIT')  # set to Edit Mode so bones can be accessed

        bone_ids = {}

        for bone_id, bone in enumerate(barm.edit_bones):
            bone_name = bone.name
            bone_ids[bone_name] = bone_id

            skm_bone = SkmBone(Name=bone_name)

            if bone.parent is None:
                skm_bone.parent_id = -1
            else:
                skm_bone.parent_id = bone_ids[bone.parent.name]

            world = bone.matrix
            wi = world.inverted_safe()
            skm_bone.world_inverse = matrix4_to_3x4_array(wi)

            skm_data.bone_data.append(skm_bone)

        # Exit edit mode
        if bpy.ops.object.mode_set.poll():
            bpy.ops.object.mode_set(mode='OBJECT')

        for vidx, vtx in enumerate(obj.data.vertices):
            for i, vg in enumerate(vtx.groups):
                bone_id = vg.group
                bone_wt = vg.weight
                skm_data.vertex_data[vidx].attachment_bones.append(bone_id)
                skm_data.vertex_data[vidx].attachment_weights.append(bone_wt)
            if skm_data.vertex_data[vidx].attachment_count > 6:
                raise Exception(
                    f"Too many bone attachments for vertex {vidx}! Max is 6")

        return

    def material_to_skm_mat(mat_wrap, mdf_file_path):
        skm_mat = SkmMaterial(mdf_file_path)
        return skm_mat

    ## Create materials
    progress.enter_substeps(3, "Processing data...")
    progress.step("Processing Materials and images...")
    for mm in bpy.data.materials:  #skm_data.material_data:

        material_name = mm.name
        if not material_name.lower().endswith('mdf'):
            print('Skipping material whose name doesn\'t end with .mdf: %r' %
                  material_name)
            continue

        assert mm.use_nodes, "export_ska assumes use_nodes = True!"
        contextMaterial = mm

        mat_wrap = node_shader_utils.PrincipledBSDFWrapper(contextMaterial,
                                                           is_readonly=False)
        assert mat_wrap.use_nodes == True, "huh? no use_nodes in wrapper?"
        context_mat_wrap = mat_wrap

        print("Converting material to SKM format: %s" % material_name)

        skm_mat = material_to_skm_mat(mat_wrap, material_name)
        if WRITE_MDF:
            mat_to_mdf_file(mat_wrap, skm_mat.id)

        MATDICT[material_name] = contextMaterial
        WRAPDICT[contextMaterial] = context_mat_wrap

        skm_data.material_data.append(skm_mat)

    # Convert Mesh object
    progress.step("Processing Mesh...")
    mesh_to_skm_mesh(skm_data)

    # Create Rig
    progress.step("Processing Rig...")
    rig_to_skm_bones(skm_data)

    # copy all collected files.
    io_utils.path_reference_copy(copy_set)

    progress.leave_substeps("Finished SKM conversion.")
    return skm_data
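A hypothetical driver for the conversion above (names and flow are assumptions, not part of the snippet): it looks up the objects the code expects to exist ("ToEE Model" and "ToEE Rig") and supplies the ProgressReport the function uses for status updates.

from bpy_extras.wm_utils.progress_report import ProgressReport

def export_skm(context, write_mdf=True):
    mesh_ob = bpy.data.objects["ToEE Model"]
    rig_ob = bpy.data.objects["ToEE Rig"]
    with ProgressReport(context.window_manager) as progress:
        return blender_to_skm(mesh_ob.data, rig_ob, write_mdf, progress=progress)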