def build_objects(object_layers, object_surfs, object_tags, object_name, add_subd_mod, skel_to_arm):
    '''Using the gathered data, create the objects.'''
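    # object_layers holds per-layer mesh data gathered by the parser; object_surfs maps
    # surface names to surface data; object_tags is the file's list of tag strings,
    # indexed by the per-layer surface tags; add_subd_mod and skel_to_arm are flags
    # controlling the optional SubSurf modifier and armature creation below.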
    ob_dict= {}  # Used for the parenting setup.
    print("Adding %d Materials" % len(object_surfs))

    for surf_key in object_surfs:
        surf_data= object_surfs[surf_key]
        surf_data.bl_mat= bpy.data.materials.new(surf_data.name)
        surf_data.bl_mat.diffuse_color= (surf_data.colr[:])
        surf_data.bl_mat.diffuse_intensity= surf_data.diff
        surf_data.bl_mat.emit= surf_data.lumi
        surf_data.bl_mat.specular_intensity= surf_data.spec
        if surf_data.refl != 0.0:
            surf_data.bl_mat.raytrace_mirror.use= True
        surf_data.bl_mat.raytrace_mirror.reflect_factor= surf_data.refl
        surf_data.bl_mat.raytrace_mirror.gloss_factor= 1.0-surf_data.rblr
        if surf_data.tran != 0.0:
            surf_data.bl_mat.use_transparency= True
            surf_data.bl_mat.transparency_method= 'RAYTRACE'
        surf_data.bl_mat.alpha= 1.0 - surf_data.tran
        surf_data.bl_mat.raytrace_transparency.ior= surf_data.rind
        surf_data.bl_mat.raytrace_transparency.gloss_factor= 1.0 - surf_data.tblr
        surf_data.bl_mat.translucency= surf_data.trnl
        surf_data.bl_mat.specular_hardness= int(4*((10*surf_data.glos)*(10*surf_data.glos)))+4
        # The Gloss is as close as possible given the differences.
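        # (With the formula above, glossiness 0.0 maps to hardness 4 and 1.0 to 404.)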

    # Single layer objects use the object file's name instead.
    if len(object_layers) and object_layers[-1].name == 'Layer 1':
        object_layers[-1].name= object_name
        print("Building '%s' Object" % object_name)
    else:
        print("Building %d Objects" % len(object_layers))

    # Before adding any meshes or armatures, go into Object mode.
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='OBJECT')

    for layer_data in object_layers:
        me= bpy.data.meshes.new(layer_data.name)
        me.vertices.add(len(layer_data.pnts))
        me.faces.add(len(layer_data.pols))
        # for vi in range(len(layer_data.pnts)):
        #     me.vertices[vi].co= layer_data.pnts[vi]

        # Faster than the commented-out loop above; an array.array would be faster still.
        me.vertices.foreach_set("co", [axis for co in layer_data.pnts for axis in co])
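        # foreach_set() expects a flat [x0, y0, z0, x1, y1, z1, ...] sequence, hence
        # the nested comprehension; an array.array of floats would cut the list overhead.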

        ngons= {}   # To keep the FaceIdx consistent, handle NGons later.
        edges= []   # Holds the FaceIdx of the 2-point polys.
        for fi, fpol in enumerate(layer_data.pols):
            fpol.reverse()   # Reversing gives correct normal directions
            # PointID 0 in the last element causes Blender to think it's unused.
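            # Rotating the poly below keeps vertex 0 out of that last slot.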
            if fpol[-1] == 0:
                fpol.insert(0, fpol[-1])
                del fpol[-1]

            vlen= len(fpol)
            if vlen == 3 or vlen == 4:
                for i in range(vlen):
                    me.faces[fi].vertices_raw[i]= fpol[i]
            elif vlen == 2:
                edges.append(fi)
            elif vlen != 1:
                ngons[fi]= fpol  # Deal with them later

        ob= bpy.data.objects.new(layer_data.name, me)
        bpy.context.scene.objects.link(ob)
        ob_dict[layer_data.index]= [ob, layer_data.parent_index]

        # Move the object so the pivot is in the right place.
        ob.location= layer_data.pivot

        # Create the Material Slots and assign the MatIndex to the correct faces.
        mat_slot= 0
        for surf_key in layer_data.surf_tags:
            if object_tags[surf_key] in object_surfs:
                me.materials.append(object_surfs[object_tags[surf_key]].bl_mat)

                for fi in layer_data.surf_tags[surf_key]:
                    me.faces[fi].material_index= mat_slot
                    me.faces[fi].use_smooth= object_surfs[object_tags[surf_key]].smooth

                mat_slot+=1

        # Create the Vertex Groups (LW's Weight Maps).
        if len(layer_data.wmaps) > 0:
            print("Adding %d Vertex Groups" % len(layer_data.wmaps))
            for wmap_key in layer_data.wmaps:
                vgroup= ob.vertex_groups.new()
                vgroup.name= wmap_key
                wlist= layer_data.wmaps[wmap_key]
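                # Each entry in wlist is a (point_index, weight) pair.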
                for pvp in wlist:
                    vgroup.add((pvp[0], ), pvp[1], 'REPLACE')

        # Create the Shape Keys (LW's Endomorphs).
        if len(layer_data.morphs) > 0:
            print("Adding %d Shapes Keys" % len(layer_data.morphs))
            ob.shape_key_add('Basis')   # Got to have a Base Shape.
            for morph_key in layer_data.morphs:
                skey= ob.shape_key_add(morph_key)
                dlist= layer_data.morphs[morph_key]
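                # Each entry in dlist is the point index followed by that point's
                # co-ordinates in the shape key.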
                for pdp in dlist:
                    me.shape_keys.key_blocks[skey.name].data[pdp[0]].co= [pdp[1], pdp[2], pdp[3]]

        # Create the Vertex Color maps.
        if len(layer_data.colmaps) > 0:
            print("Adding %d Vertex Color Maps" % len(layer_data.colmaps))
            for cmap_key in layer_data.colmaps:
                map_pack= create_mappack(layer_data, cmap_key, "COLOR")
                vcol= me.vertex_colors.new(cmap_key)
                if not vcol:
                    break
                for fi in map_pack:
                    if fi >= len(vcol.data):  # guard against out-of-range face indices
                        continue
                    face= map_pack[fi]
                    colf= vcol.data[fi]

                    if len(face) > 2:
                        colf.color1= face[0]
                        colf.color2= face[1]
                        colf.color3= face[2]
                    if len(face) == 4:
                        colf.color4= face[3]

        # Create the UV Maps.
        if len(layer_data.uvmaps) > 0:
            print("Adding %d UV Textures" % len(layer_data.uvmaps))
            for uvmap_key in layer_data.uvmaps:
                map_pack= create_mappack(layer_data, uvmap_key, "UV")
                uvm= me.uv_textures.new(uvmap_key)
                if not uvm:
                    break
                for fi in map_pack:
                    if fi >= len(uvm.data):  # guard against out-of-range face indices
                        continue
                    face= map_pack[fi]
                    uvf= uvm.data[fi]

                    if len(face) > 2:
                        uvf.uv1= face[0]
                        uvf.uv2= face[1]
                        uvf.uv3= face[2]
                    if len(face) == 4:
                        uvf.uv4= face[3]

        # Now add the NGons.
        if len(ngons) > 0:
            for ng_key in ngons:
                face_offset= len(me.faces)
                ng= ngons[ng_key]
                v_locs= []
                for vi in range(len(ng)):
                    v_locs.append(mathutils.Vector(layer_data.pnts[ngons[ng_key][vi]]))
                tris= tesselate_polygon([v_locs])
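                # tesselate_polygon() returns index triples into v_locs; ng[] maps
                # them back to the layer's original PointIDs below.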
                me.faces.add(len(tris))
                for tri in tris:
                    face= me.faces[face_offset]
                    face.vertices_raw[0]= ng[tri[0]]
                    face.vertices_raw[1]= ng[tri[1]]
                    face.vertices_raw[2]= ng[tri[2]]
                    face.material_index= me.faces[ng_key].material_index
                    face.use_smooth= me.faces[ng_key].use_smooth
                    face_offset+= 1

        # FaceIDs are no longer a concern, so now update the mesh.
        has_edges= len(edges) > 0 or len(layer_data.edge_weights) > 0
        me.update(calc_edges=has_edges)
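        # Edge calculation is only requested when the 2-point polys or edge weights
        # below will actually need edge data.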

        # Add the edges.
        edge_offset= len(me.edges)
        me.edges.add(len(edges))
        for edge_fi in edges:
            me.edges[edge_offset].vertices[0]= layer_data.pols[edge_fi][0]
            me.edges[edge_offset].vertices[1]= layer_data.pols[edge_fi][1]
            edge_offset+= 1

        # Apply the Edge Weighting.
        if len(layer_data.edge_weights) > 0:
            for edge in me.edges:
                edge_sa= "{0} {1}".format(edge.vertices[0], edge.vertices[1])
                edge_sb= "{0} {1}".format(edge.vertices[1], edge.vertices[0])
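                # edge_weights is keyed by "<vertA> <vertB>" strings, so test both orderings.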
                if edge_sa in layer_data.edge_weights:
                    edge.crease= layer_data.edge_weights[edge_sa]
                elif edge_sb in layer_data.edge_weights:
                    edge.crease= layer_data.edge_weights[edge_sb]

        # Unfortunately we can't exclude certain faces from the subdivision.
        if layer_data.has_subds and add_subd_mod:
            ob.modifiers.new(name="Subsurf", type='SUBSURF')

        # Should we build an armature from the embedded rig?
        if len(layer_data.bones) > 0 and skel_to_arm:
            bpy.ops.object.armature_add()
            arm_object= bpy.context.active_object
            arm_object.name= "ARM_" + layer_data.name
            arm_object.data.name= arm_object.name
            arm_object.location= layer_data.pivot
            bpy.ops.object.mode_set(mode='EDIT')
            build_armature(layer_data, arm_object.data.edit_bones)
            bpy.ops.object.mode_set(mode='OBJECT')

        # Clear out the dictionaries for this layer.
        layer_data.bone_names.clear()
        layer_data.bone_rolls.clear()
        layer_data.wmaps.clear()
        layer_data.colmaps.clear()
        layer_data.uvmaps.clear()
        layer_data.morphs.clear()
        layer_data.surf_tags.clear()

        # We may have some invalid mesh data; see bug report [#27916].
        # keep this last!
        print("validating mesh: %r..." % me.name)
        me.validate(verbose=1)
        print("done!")

    # With the objects made, set up the parents and re-adjust the locations.
    for ob_key in ob_dict:
        if ob_dict[ob_key][1] != -1 and ob_dict[ob_key][1] in ob_dict:
            parent_ob = ob_dict[ob_dict[ob_key][1]]
            ob_dict[ob_key][0].parent= parent_ob[0]
            ob_dict[ob_key][0].location-= parent_ob[0].location
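            # Subtracting the parent's location keeps the child at the same world
            # position now that its location is interpreted relative to the parent.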

    bpy.context.scene.update()

    print("Done Importing LWO File")
def BPyMesh_ngon(from_data, indices, PREF_FIX_LOOPS=True):
    '''
    Takes a polyline of indices (fgon)
    and returns a list of face index lists.
    Designed to be used for importers that need indices for an fgon to create from existing verts.

    from_data: either a mesh, or a list/tuple of vectors.
    indices: a list of indices to use; this list is the ordered closed polyline to fill and can be a subset of the data given.
    PREF_FIX_LOOPS: if enabled, polylines that use loops to make multiple polylines are dealt with correctly.
    '''
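    # A minimal usage sketch (illustrative assumption, not taken from the importer:
    # it passes a planar quad as a plain list of coordinate tuples):
    #     quad = [(0.0, 0.0, 0.0), (1.0, 0.0, 0.0), (1.0, 1.0, 0.0), (0.0, 1.0, 0.0)]
    #     tris = BPyMesh_ngon(quad, [0, 1, 2, 3])
    #     # tris is a list of index triples into the quad, e.g. [(0, 1, 2), (0, 2, 3)];
    #     # the exact triangulation depends on the tessellator.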

    if not set:  # Need sets for this, otherwise do a normal fill.
        PREF_FIX_LOOPS = False

    Vector = mathutils.Vector
    if not indices:
        return []

    #    return []
    def rvec(co):
        return round(co.x, 6), round(co.y, 6), round(co.z, 6)

    def mlen(co):
        return abs(co[0]) + abs(co[1]) + abs(co[2])  # Manhattan length of a vector; faster than the true length

    def vert_treplet(v, i):
        return v, rvec(v), i, mlen(v)

    def ed_key_mlen(v1, v2):
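        # Order the two rounded positions by Manhattan length so that the same edge
        # yields the same key regardless of traversal direction.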
        if v1[3] > v2[3]:
            return v2[1], v1[1]
        else:
            return v1[1], v2[1]

    if not PREF_FIX_LOOPS:
        '''
        Normal single concave loop filling
        '''
        if type(from_data) in (tuple, list):
            verts = [Vector(from_data[i]) for ii, i in enumerate(indices)]
        else:
            verts = [from_data.vertices[i].co for ii, i in enumerate(indices)]

        for i in range(len(verts) - 1, 0, -1):  # same as reversed(range(1, len(verts))):
            if verts[i][1] == verts[i - 1][0]:
                verts.pop(i - 1)

        fill = fill_polygon([verts])

    else:
        '''
        Separate this loop into multiple loops by finding edges that are used twice.
        This is common in LightWave LWO files.
        '''

        if type(from_data) in (tuple, list):
            verts = [vert_treplet(Vector(from_data[i]), ii) for ii, i in enumerate(indices)]
        else:
            verts = [vert_treplet(from_data.vertices[i].co, ii) for ii, i in enumerate(indices)]

        edges = [(i, i - 1) for i in range(len(verts))]
        if edges:
            edges[0] = (0, len(verts) - 1)
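        # Each edge links vert i to vert i-1; the first entry is remapped so that the
        # polyline is closed by joining vert 0 to the last vert.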

        if not verts:
            return []

        edges_used = set()
        edges_doubles = set()
        # Check whether any edges are used twice, based on vertex location.
        for ed in edges:
            edkey = ed_key_mlen(verts[ed[0]], verts[ed[1]])
            if edkey in edges_used:
                edges_doubles.add(edkey)
            else:
                edges_used.add(edkey)

        # Store a list of unconnected loop segments split by double edges;
        # they will be joined later.
        loop_segments = []

        v_prev = verts[0]
        context_loop = [v_prev]
        loop_segments = [context_loop]

        for v in verts:
            if v != v_prev:
                # Are we crossing an edge we removed?
                if ed_key_mlen(v, v_prev) in edges_doubles:
                    context_loop = [v]
                    loop_segments.append(context_loop)
                else:
                    if context_loop and context_loop[-1][1] == v[1]:
                        #raise "as"
                        pass
                    else:
                        context_loop.append(v)

                v_prev = v
        # Now join loop segments

        def join_seg(s1, s2):
            if s2[-1][1] == s1[0][1]:
                s1, s2 = s2, s1
            elif s1[-1][1] == s2[0][1]:
                pass
            else:
                return False

            # If we're still here, s1 and s2 are two segments of the same polyline.
            s1.pop()  # remove the last vert from s1
            s1.extend(s2)  # add segment 2 to segment 1

            if s1[0][1] == s1[-1][1]:  # remove doubled endpoint
                s1.pop()

            s2[:] = []  # Empty this segment s2 so we don't use it again.
            return True

        joining_segments = True
        while joining_segments:
            joining_segments = False
            segcount = len(loop_segments)

            for j in range(segcount - 1, -1, -1):  # reversed(range(segcount)):
                seg_j = loop_segments[j]
                if seg_j:
                    for k in range(j - 1, -1, -1):  # reversed(range(j)):
                        if not seg_j:
                            break
                        seg_k = loop_segments[k]

                        if seg_k and join_seg(seg_j, seg_k):
                            joining_segments = True

        loop_list = loop_segments

        for verts in loop_list:
            while verts and verts[0][1] == verts[-1][1]:
                verts.pop()

        loop_list = [verts for verts in loop_list if len(verts) > 2]
        # DONE DEALING WITH LOOP FIXING

        # vert mapping
        vert_map = [None] * len(indices)
        ii = 0
        for verts in loop_list:
            if len(verts) > 2:
                for i, vert in enumerate(verts):
                    vert_map[i + ii] = vert[2]
                ii += len(verts)

        fill = tesselate_polygon([[v[0] for v in loop] for loop in loop_list])
        #draw_loops(loop_list)
        #raise 'done loop'
        # map to original indices
        fill = [[vert_map[i] for i in reversed(f)] for f in fill]

    if not fill:
        print('Warning: cannot scanfill, falling back on a triangle fan.')
        fill = [[0, i - 1, i] for i in range(2, len(indices))]
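        # The fan pivots on vertex 0: (0, 1, 2), (0, 2, 3), ... across the ngon.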
    else:
        # Use the real scanfill result.
        # See if it's flipped the wrong way.
        flip = None
        for fi in fill:
            if flip is not None:
                break
            for i, vi in enumerate(fi):
                if vi == 0 and fi[i - 1] == 1:
                    flip = False
                    break
                elif vi == 1 and fi[i - 1] == 0:
                    flip = True
                    break
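        # Note: flip stays None when neither the 0-1 nor the 1-0 edge shows up in the
        # fill; "not flip" is then True and the triangles are reversed in that case too.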

        if not flip:
            for i, fi in enumerate(fill):
                fill[i] = tuple([ii for ii in reversed(fi)])

    return fill