Example #1
0
    def load(self, context):
        pcv = context.object.point_cloud_visualizer
        p = os.path.abspath(bpy.path.abspath(pcv.filepath))
        if (not os.path.exists(p)):
            self.report({'WARNING'}, "File does not exist")
            return {'CANCELLED'}

        points = BinPlyPointCloudReader(p).points

        rnd = random.Random()
        random.shuffle(points, rnd.random)

        # process points
        vertices = []
        colors = []
        for i, p in enumerate(points):
            v = Vector(p[:3])
            vertices.extend(v.to_tuple())
            c = [v / 255 for v in p[3:]]
            colors.extend(c)

        # make buffers
        length = len(points)
        vertex_buffer = bgl.Buffer(bgl.GL_FLOAT, len(vertices), vertices)
        color_buffer = bgl.Buffer(bgl.GL_FLOAT, len(colors), colors)

        o = context.object
        m = o.matrix_world
        matrix = []
        for v in m.transposed():
            matrix.extend(list(v.to_tuple()))
        matrix_buffer = bgl.Buffer(bgl.GL_FLOAT, len(matrix), matrix)

        d = PCVCache.new()
        u = str(uuid.uuid1())
        d['uuid'] = u
        d['path'] = pcv.filepath
        d['ready'] = True
        d['length'] = length
        d['vertex_buffer'] = vertex_buffer
        d['color_buffer'] = color_buffer
        d['matrix'] = m
        d['matrix_buffer'] = matrix_buffer
        d['object'] = o
        d['display_percent'] = pcv.display_percent
        PCVCache.add(d)

        pcv.uuid = u
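
The shuffle above is what lets the draw step render only the first display_percent of the buffers and still show an even subset of the cloud. A minimal sketch of how a legacy (Blender 2.7x) bgl draw callback would typically consume such a cache entry; draw_cached_cloud is a hypothetical helper, assumes RGB colors and a 0-100 percentage, and is not part of the add-on:

def draw_cached_cloud(d):
    # Illustrative only: draw one PCVCache entry with the 2.7x-era bgl API.
    count = int(d['length'] * d['display_percent'] / 100.0)   # assuming a 0-100 percentage
    bgl.glPushMatrix()
    bgl.glMultMatrixf(d['matrix_buffer'])          # stored object matrix
    bgl.glEnableClientState(bgl.GL_VERTEX_ARRAY)
    bgl.glEnableClientState(bgl.GL_COLOR_ARRAY)
    bgl.glVertexPointer(3, bgl.GL_FLOAT, 0, d['vertex_buffer'])
    bgl.glColorPointer(3, bgl.GL_FLOAT, 0, d['color_buffer'])
    bgl.glDrawArrays(bgl.GL_POINTS, 0, count)      # shuffled data -> uniform subset
    bgl.glDisableClientState(bgl.GL_COLOR_ARRAY)
    bgl.glDisableClientState(bgl.GL_VERTEX_ARRAY)
    bgl.glPopMatrix()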
    def make_coil():

        # variables
        amp = prad
        th = height / n_turns
        ipt = n_iter
        radius = crad
        diameter = radius * 2
        section_angle = 360.0 / n_verts
        rad_slice = 2.0 * pi / ipt
        total_segments = (ipt * n_turns) + 1
        z_jump = height / total_segments

        x_rotation = atan2(th / 2, diameter)

        n = n_verts
        Verts = []
        for segment in range(total_segments):
            rad_angle = rad_slice * segment

            for i in range(n):

                # create the vector
                this_angle = section_angle * i
                x_float = amp * sin(radians(this_angle)) + radius
                z_float = amp * cos(radians(this_angle))
                v1 = Vector((x_float, 0.0, z_float))

                # rotate it
                some_euler = Euler((-x_rotation, 0.0, -rad_angle), 'XYZ')
                v1.rotate(some_euler)

                # add extra z height per segment
                v1 += Vector((0, 0, (segment * z_jump)))

                # append it
                Verts.append(v1.to_tuple())

        Faces = []
        for t in range(total_segments - 1):
            for i in range(n - 1):
                p0 = i + (n * t)
                p1 = i + (n * t) + 1
                p2 = i + (n * t + n) + 1
                p3 = i + (n * t + n)
                Faces.append([p0, p1, p2, p3])
            p0 = n * t
            p1 = n * t + n
            p2 = n * t + (2 * n) - 1
            p3 = n * t + n - 1
            Faces.append([p0, p1, p2, p3])

        return Verts, Faces
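
make_coil() only builds plain vertex and face lists, so the remaining step is a standard from_pydata conversion. A short usage sketch assuming the Blender 2.7x API used elsewhere in these examples; the helper name is illustrative, and make_coil's closed-over parameters (prad, crad, height, n_turns, n_iter, n_verts) are expected to be in scope as in the original operator:

def coil_object_from_pydata(name="Coil"):
    # Illustrative only: wrap the make_coil() output in a new mesh object.
    verts, faces = make_coil()
    me = bpy.data.meshes.new(name + ".Mesh")
    me.from_pydata(verts, [], faces)
    me.update(calc_edges=True)
    ob = bpy.data.objects.new(name, me)
    bpy.context.scene.objects.link(ob)   # 2.7x; 2.80+ links through a collection
    return ob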
def uvsphere(u, v, sphereRadius):
    circ = math.pi * 2

    lp = []
    lf = []

    index = 0

    lp.append((0, 0, -sphereRadius))
    index += 1
    lastLayer = [0]

    for ui in range(1, u):
        phi = circ / u * ui
        radius = math.sin(phi / 2) * sphereRadius
        layer = []
        for vi in range(v):
            theta = circ / v * vi
            x = math.cos(theta) * radius
            y = math.sin(theta) * radius
            z = -sphereRadius + sphereRadius * 2 / u * ui

            co = Vector((x, y, z))
            ce = Vector((0, 0, 0))
            dl = co - ce
            co = ce + dl.normalized() * sphereRadius

            lp.append(co.to_tuple())
            layer.append(index)
            index += 1
        if len(lastLayer) == len(layer):
            for i in range(v - 1):
                lf.append(
                    (lastLayer[i], lastLayer[i + 1], layer[i + 1], layer[i]))
            lf.append((layer[0], layer[v - 1], lastLayer[v - 1], lastLayer[0]))
        else:
            for i in range(v - 1):
                lf.append((0, layer[i + 1], layer[i]))
            lf.append((0, layer[0], layer[v - 1]))
        lastLayer = layer

    lp.append((0, 0, sphereRadius))
    index += 1
    for i in range(v - 1):
        lf.append((layer[i], layer[i + 1], index - 1))
    lf.append((layer[0], index - 1, layer[v - 1]))

    return lp, lf
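
For reference, the topology uvsphere() produces is easy to check: a u by v sphere has (u - 1) * v + 2 vertices (two poles plus u - 1 rings) and u * v faces (v triangles for each cap, v quads for each of the u - 2 intermediate bands). An illustrative sanity check:

lp, lf = uvsphere(8, 16, 1.0)
assert len(lp) == (8 - 1) * 16 + 2   # 114 vertices
assert len(lf) == 8 * 16             # 128 faces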
Example #6
0
def rotate_camera(p_rotation):
	p_camera = bpy.data.objects['Camera']
	#camera position
	loc = Vector((0,3.0,6.0)) * mathutils.Matrix.Rotation(radians(p_rotation), 4, 'Z')
	p_camera.location = loc.to_tuple()
	
	rx = 35.264 #isometric angle
	mat_rot = mathutils.Matrix.Rotation(radians(180-p_rotation), 4, 'Z')
	mat_rot *= mathutils.Matrix.Rotation(radians(rx), 4, 'X')
	#print(mat_rot)
	#print(mat_rot.to_euler())
	p_camera.rotation_euler = mat_rot.to_euler()
	
	fov = 50.0
	# Set camera fov in degrees
	p_camera.data.angle = radians(fov)
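
The 35.264 degree tilt is atan(1 / sqrt(2)), the classic isometric camera angle. A short usage sketch, assuming a scene containing the 'Camera' object referenced above:

# Turntable-style keyframing of the isometric camera (illustrative only).
cam = bpy.data.objects['Camera']
for step, angle in enumerate(range(0, 360, 45)):
    bpy.context.scene.frame_set(step * 10)
    rotate_camera(angle)
    cam.keyframe_insert(data_path='location')
    cam.keyframe_insert(data_path='rotation_euler')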
    def add_uv_sphere(self, u, v):
        lv = []
        circ = math.pi * 2
        index = 1
        lv.append(self.new_vertex(Vector((0, 0, -1))))
        lastLayer = [0]
        for ui in range(1, u):
            phi = circ / u * ui
            radius = math.sin(phi / 2)
            layer = []
            for vi in range(v):
                theta = circ / v * vi
                x = math.cos(theta) * radius
                y = math.sin(theta) * radius
                z = -1 + 1 * 2 / u * ui
                co = Vector((x, y, z))
                ce = Vector((0, 0, 0))
                dl = co - ce
                co = ce + dl.normalized()
                lv.append(self.new_vertex(Vector((co.to_tuple()))))
                layer.append(index)
                index += 1
            if len(lastLayer) == len(layer):
                for i in range(v - 1):
                    self.new_face([
                        lv[lastLayer[i]], lv[lastLayer[i + 1]],
                        lv[layer[i + 1]], lv[layer[i]]
                    ])
                self.new_face([
                    lv[layer[0]], lv[layer[v - 1]], lv[lastLayer[v - 1]],
                    lv[lastLayer[0]]
                ])
            else:
                for i in range(v - 1):
                    self.new_face([lv[0], lv[layer[i + 1]], lv[layer[i]]])
                self.new_face([lv[0], lv[layer[0]], lv[layer[v - 1]]])
            lastLayer = layer
        lv.append(self.new_vertex(Vector((0, 0, 1))))
        index += 1
        for i in range(v - 1):
            self.new_face([lv[layer[i]], lv[layer[i + 1]], lv[index - 1]])
        self.new_face([lv[layer[0]], lv[index - 1], lv[layer[v - 1]]])
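
add_uv_sphere() is written against a host object exposing new_vertex() and new_face(), which is not shown here. The sketch below only illustrates what those hooks could map to with bmesh; the SphereBuilder class and its method names are hypothetical:

import bmesh
import bpy

class SphereBuilder:
    def __init__(self):
        self.bm = bmesh.new()

    def new_vertex(self, co):
        # Vector in, BMVert out - matching how add_uv_sphere() uses the result.
        return self.bm.verts.new(co)

    def new_face(self, verts):
        return self.bm.faces.new(verts)

    def to_object(self, name="UVSphere"):
        me = bpy.data.meshes.new(name)
        self.bm.to_mesh(me)
        return bpy.data.objects.new(name, me)

# SphereBuilder.add_uv_sphere = add_uv_sphere   # attach the method above
# builder = SphereBuilder(); builder.add_uv_sphere(8, 16)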
Example #10
0
def tassellate(ob0, ob1, offset, zscale, gen_modifiers, com_modifiers, mode, scale_mode, randomize, rand_seed, fill_mode):
    random.seed(rand_seed)

    if gen_modifiers:
        me0 = ob0.to_mesh(bpy.context.scene, apply_modifiers=True, settings = 'PREVIEW')
    else: me0 = ob0.data

    if com_modifiers:
        me1 = ob1.to_mesh(bpy.context.scene, apply_modifiers=True, settings = 'PREVIEW')
    else: me1 = ob1.data

    verts0 = me0.vertices

    n_verts = len(me1.vertices)
    n_edges = len(me1.edges)
    n_faces = len(me1.polygons)

    loc = ob1.location
    dim = ob1.dimensions
    scale = ob1.scale

    new_verts = []
    new_edges = []
    new_faces = []
    new_verts_np = np.array(())

    min = Vector((0,0,0))
    max = Vector((0,0,0))

    first = True

    for v in me1.vertices:
        vert = ( ob1.matrix_world * v.co )

        if vert[0] < min[0] or first:
            min[0] = vert[0]
        if vert[1] < min[1] or first:
            min[1] = vert[1]
        if vert[2] < min[2] or first:
            min[2] = vert[2]
        if vert[0] > max[0] or first:
            max[0] = vert[0]
        if vert[1] > max[1] or first:
            max[1] = vert[1]
        if vert[2] > max[2] or first:
            max[2] = vert[2]
        first = False

    bb = max-min

    verts1 = []

    for v in me1.vertices:
        if mode=="ADAPTIVE":
            vert = ( ob1.matrix_world * v.co ) - min
            vert[0] = vert[0] / bb[0]
            vert[1] = vert[1] / bb[1]
            vert[2] = (vert[2] + (-0.5 + offset*0.5)*bb[2])*zscale
        else:
            vert = v.co
            vert[2] *= zscale

        verts1.append(vert)

    # component vertices
    vs1 = np.array([v for v in verts1]).reshape(len(verts1),3,1)
    vx = vs1[:,0]
    vy = vs1[:,1]
    vz = vs1[:,2]

    # component polygons
    fs1 = [[i for i in p.vertices] for p in me1.polygons]
    new_faces = fs1[:]

    j = 0

    if fill_mode == 'FAN':
        fan_verts = [v.co.to_tuple() for v in me0.vertices]
        fan_polygons = []
        for p in me0.polygons:
            fan_center = Vector((0,0,0))
            for v in p.vertices:
                fan_center += me0.vertices[v].co
            fan_center /= len(p.vertices)
            last_vert = len(fan_verts)
            fan_verts.append(fan_center.to_tuple())
            for i in range(len(p.vertices)):
                fan_polygons.append((p.vertices[i], p.vertices[(i+1)%len(p.vertices)], last_vert, last_vert))
        print(fan_verts)
        print(fan_polygons)
        fan_me = bpy.data.meshes.new('Fan.Mesh')
        fan_me.from_pydata(tuple(fan_verts), [], tuple(fan_polygons))
        me0 = fan_me
        verts0 = me0.vertices


    for p in me0.polygons:

        #polygon vertices

        if randomize:
            shifted_vertices = []
            n_poly_verts = len(p.vertices)
            rand = random.randint(0,n_poly_verts)
            for i in range(n_poly_verts):
                shifted_vertices.append(p.vertices[(i+rand)%n_poly_verts])
            vs0 = np.array([verts0[i].co for i in shifted_vertices])
            nvs0 = np.array([verts0[i].normal for i in shifted_vertices])
        else:
            vs0 = np.array([verts0[i].co for i in p.vertices])
            nvs0 = np.array([verts0[i].normal for i in p.vertices])

        vs0 = np.array((vs0[0], vs0[1], vs0[2], vs0[-1]))
        #polygon normals

        nvs0 = np.array((nvs0[0], nvs0[1], nvs0[2], nvs0[-1]))

        v0 = vs0[0] + (vs0[1] -vs0[0])*vx
        v1 = vs0[3] + (vs0[2] -vs0[3])*vx
        v2 = v0 + (v1 - v0)*vy

        nv0 = nvs0[0] + (nvs0[1] -nvs0[0])*vx
        nv1 = nvs0[3] + (nvs0[2] -nvs0[3])*vx
        nv2 = nv0 + (nv1 - nv0)*vy

        v3 = v2 + nv2*vz*(sqrt(p.area) if scale_mode == "ADAPTIVE" else 1)

        if j == 0: new_verts_np = v3
        else:
            new_verts_np = np.concatenate((new_verts_np, v3), axis=0)
            for p in fs1: new_faces.append([i+n_verts*j for i in p])

        j+=1

    new_verts = new_verts_np.tolist()

    new_name = ob0.name + "_" + ob1.name
    new_me = bpy.data.meshes.new(new_name)
    new_me.from_pydata(new_verts, [], new_faces)
    #new_me.from_pydata(new_verts, new_edges, [])
    new_me.update()

    return new_me
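
This version of tassellate() returns a bare mesh datablock, so the caller still wraps and links it. An illustrative call using the same Blender 2.7x API the function relies on (the object names and the non-'FAN' fill value are assumptions):

base = bpy.context.active_object         # generator surface
comp = bpy.data.objects['Component']     # assumed component object name
me = tassellate(base, comp, offset=0.0, zscale=1.0,
                gen_modifiers=False, com_modifiers=False,
                mode='ADAPTIVE', scale_mode='ADAPTIVE',
                randomize=False, rand_seed=1,
                fill_mode='QUAD')         # anything other than 'FAN' skips the fan pass
ob = bpy.data.objects.new('Tessellation', me)
bpy.context.scene.objects.link(ob)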
Example #11
0
def tassellate(ob0, ob1, offset, zscale, gen_modifiers, com_modifiers, mode,
               scale_mode, rotation_mode, rand_seed, fill_mode,
               bool_vertex_group, bool_selection):
    random.seed(rand_seed)

    print(ob0.tissue_tessellate.offset)

    old_me0 = ob0.data
    if gen_modifiers:
        me0 = ob0.to_mesh(bpy.context.scene,
                          apply_modifiers=True,
                          settings='PREVIEW')
    else:
        me0 = ob0.data
    ob0.data = me0

    if com_modifiers:
        me1 = ob1.to_mesh(bpy.context.scene,
                          apply_modifiers=True,
                          settings='PREVIEW')
    else:
        me1 = ob1.data

    verts0 = me0.vertices

    n_verts = len(me1.vertices)
    n_edges = len(me1.edges)
    n_faces = len(me1.polygons)

    loc = ob1.location
    dim = ob1.dimensions
    scale = ob1.scale

    new_verts = []
    new_edges = []
    new_faces = []
    new_verts_np = np.array(())

    min = Vector((0, 0, 0))
    max = Vector((0, 0, 0))

    first = True

    for v in me1.vertices:
        vert = (ob1.matrix_world * v.co)

        if vert[0] < min[0] or first:
            min[0] = vert[0]
        if vert[1] < min[1] or first:
            min[1] = vert[1]
        if vert[2] < min[2] or first:
            min[2] = vert[2]
        if vert[0] > max[0] or first:
            max[0] = vert[0]
        if vert[1] > max[1] or first:
            max[1] = vert[1]
        if vert[2] > max[2] or first:
            max[2] = vert[2]
        first = False

    bb = max - min

    verts1 = []

    for v in me1.vertices:
        if mode == "ADAPTIVE":
            vert = (ob1.matrix_world * v.co) - min
            vert[0] = vert[0] / bb[0]
            vert[1] = vert[1] / bb[1]
            vert[2] = (vert[2] + (-0.5 + offset * 0.5) * bb[2]) * zscale
        else:
            vert = v.co.xyz
            vert[2] *= zscale

        verts1.append(vert)

    # component vertices
    vs1 = np.array([v for v in verts1]).reshape(len(verts1), 3, 1)
    vx = vs1[:, 0]
    vy = vs1[:, 1]
    vz = vs1[:, 2]

    # component polygons
    fs1 = [[i for i in p.vertices] for p in me1.polygons]
    new_faces = fs1[:]

    j = 0

    # active vertex group

    if bool_vertex_group:
        weight = []
        active_vertex_group = ob0.vertex_groups[ob0.vertex_groups.active_index]
        for v in me0.vertices:
            try:
                weight.append(active_vertex_group.weight(v.index))
            except:
                weight.append(0)

    if fill_mode == 'FAN':
        fan_verts = [v.co.to_tuple() for v in me0.vertices]
        fan_polygons = []
        selected_faces = []

        for p in me0.polygons:

            #if bool_selection and not p.select: continue

            fan_center = Vector((0, 0, 0))
            for v in p.vertices:
                fan_center += me0.vertices[v].co
            fan_center /= len(p.vertices)
            last_vert = len(fan_verts)
            fan_verts.append(fan_center.to_tuple())

            # vertex group
            if bool_vertex_group:
                center_weight = sum([weight[i]
                                     for i in p.vertices]) / len(p.vertices)
                weight.append(center_weight)

            for i in range(len(p.vertices)):
                fan_polygons.append(
                    (p.vertices[i], p.vertices[(i + 1) % len(p.vertices)],
                     last_vert, last_vert))
                if bool_selection: selected_faces.append(p.select)

        #print(fan_verts)
        #print(fan_polygons)
        fan_me = bpy.data.meshes.new('Fan.Mesh')
        fan_me.from_pydata(tuple(fan_verts), [], tuple(fan_polygons))
        me0 = fan_me
        verts0 = me0.vertices
        for i in range(len(selected_faces)):
            fan_me.polygons[i].select = selected_faces[i]

    count = 0  # necessary for UV calculation

    for p in me0.polygons:
        if bool_selection and not p.select: continue

        # active vertex group
        '''
        ws0 = []
        active_vertex_group = ob0.vertex_groups[ob0.vertex_groups.active_index]
        for v in p.vertices:
            try:
                ws0.append(active_vertex_group.weight(v.index))
            except:
                ws0.append(0)
        '''

        #polygon vertices

        if rotation_mode == 'RANDOM':
            shifted_vertices = []
            n_poly_verts = len(p.vertices)
            rand = random.randint(0, n_poly_verts)
            for i in range(n_poly_verts):
                shifted_vertices.append(p.vertices[(i + rand) % n_poly_verts])
            vs0 = np.array([verts0[i].co for i in shifted_vertices])
            nvs0 = np.array([verts0[i].normal for i in shifted_vertices])

            # vertex weight
            if bool_vertex_group:
                ws0 = []
                for i in shifted_vertices:
                    try:
                        ws0.append(weight[i])
                    except:
                        ws0.append(0)
                ws0 = np.array(ws0)

        elif rotation_mode == 'UV' and len(
                ob0.data.uv_layers) > 0 and fill_mode != 'FAN':
            i = p.index

            v01 = (me0.uv_layers.active.data[count].uv +
                   me0.uv_layers.active.data[count + 1].uv)  #/2
            if len(p.vertices) > 3:
                v32 = (me0.uv_layers.active.data[count + 3].uv +
                       me0.uv_layers.active.data[count + 2].uv)  #/2
            else:
                v32 = (me0.uv_layers.active.data[count].uv +
                       me0.uv_layers.active.data[count + 2].uv)
            v0132 = v32 - v01
            v0132.normalize()

            v12 = (me0.uv_layers.active.data[count + 1].uv +
                   me0.uv_layers.active.data[count + 2].uv)  #/2
            if len(p.vertices) > 3:
                v03 = (me0.uv_layers.active.data[count].uv +
                       me0.uv_layers.active.data[count + 3].uv)  #/2
            else:
                v03 = (me0.uv_layers.active.data[count].uv +
                       me0.uv_layers.active.data[count].uv)  #/2
            v1203 = v03 - v12
            v1203.normalize()

            vertUV = []
            dot1203 = v1203.x  #.dot(Vector((1,0)))
            dot0132 = v0132.x  #.dot(Vector((1,0)))
            if (abs(dot1203) < abs(dot0132)):
                if (dot0132 > 0): vertUV = p.vertices[1:] + p.vertices[:1]
                else: vertUV = p.vertices[3:] + p.vertices[:3]
            else:
                if (dot1203 < 0): vertUV = p.vertices[:]
                else: vertUV = p.vertices[2:] + p.vertices[:2]
            vs0 = np.array([verts0[i].co for i in vertUV])
            nvs0 = np.array([verts0[i].normal for i in vertUV])

            # vertex weight
            if bool_vertex_group:
                ws0 = []
                for i in vertUV:
                    try:
                        ws0.append(weight[i])
                    except:
                        ws0.append(0)
                ws0 = np.array(ws0)

            count += len(p.vertices)

        else:
            vs0 = np.array([verts0[i].co for i in p.vertices])
            nvs0 = np.array([verts0[i].normal for i in p.vertices])

            # vertex weight
            if bool_vertex_group:
                ws0 = []
                for i in p.vertices:
                    try:
                        ws0.append(weight[i])
                    except:
                        ws0.append(0)
                ws0 = np.array(ws0)

        # considering only 4 vertices
        vs0 = np.array((vs0[0], vs0[1], vs0[2], vs0[-1]))
        nvs0 = np.array((nvs0[0], nvs0[1], nvs0[2], nvs0[-1]))

        # remapped vertex coordinates
        v0 = vs0[0] + (vs0[1] - vs0[0]) * vx
        v1 = vs0[3] + (vs0[2] - vs0[3]) * vx
        v2 = v0 + (v1 - v0) * vy

        # remapped vertex normal
        nv0 = nvs0[0] + (nvs0[1] - nvs0[0]) * vx
        nv1 = nvs0[3] + (nvs0[2] - nvs0[3]) * vx
        nv2 = nv0 + (nv1 - nv0) * vy

        # vertex z to normal
        v3 = v2 + nv2 * vz * (sqrt(p.area) if scale_mode == "ADAPTIVE" else 1)

        if bool_vertex_group:
            ws0 = np.array((ws0[0], ws0[1], ws0[2], ws0[-1]))
            # interpolate vertex weight
            w0 = ws0[0] + (ws0[1] - ws0[0]) * vx
            w1 = ws0[3] + (ws0[2] - ws0[3]) * vx
            w2 = w0 + (w1 - w0) * vy

        if j == 0:
            new_verts_np = v3
            if bool_vertex_group: new_vertex_group_np = w2
        else:
            new_verts_np = np.concatenate((new_verts_np, v3), axis=0)
            if bool_vertex_group:
                new_vertex_group_np = np.concatenate((new_vertex_group_np, w2),
                                                     axis=0)
            for p in fs1:
                new_faces.append([i + n_verts * j for i in p])

        j += 1

    new_verts = new_verts_np.tolist()

    new_name = ob0.name + "_" + ob1.name
    new_me = bpy.data.meshes.new(new_name)
    new_me.from_pydata(new_verts, [], new_faces)
    #new_me.from_pydata(new_verts, new_edges, [])
    new_me.update()
    new_ob = bpy.data.objects.new("generated", new_me)

    # vertex group
    if bool_vertex_group:
        new_ob.vertex_groups.new("generator_group")
        for i in range(len(new_vertex_group_np)):
            new_ob.vertex_groups["generator_group"].add([i],
                                                        new_vertex_group_np[i],
                                                        "ADD")

    ob0.data = old_me0
    return new_ob
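
Unlike the previous version, this one returns a ready Object and, when bool_vertex_group is enabled, bakes the interpolated weights of the generator's active vertex group into a "generator_group" on the result. A short continuation of the call sketch above; 'DEFAULT' is only a placeholder, since any value other than 'RANDOM' or 'UV' takes the default rotation branch:

new_ob = tassellate(base, comp, 0.0, 1.0, False, False, 'ADAPTIVE', 'ADAPTIVE',
                    'DEFAULT', 1, 'FAN',
                    bool_vertex_group=True, bool_selection=False)
bpy.context.scene.objects.link(new_ob)
assert 'generator_group' in new_ob.vertex_groups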
Example #12
0
    def execute(self, context):
        scene = context.scene
        props = scene.gcode_settings
        # manage data
        if props.speed_mode == 'SPEED':
            props.feed = props.speed * 60
            props.feed_vertical = props.speed_vertical * 60
            props.feed_horizontal = props.speed_horizontal * 60
        feed = props.feed
        feed_v = props.feed_vertical
        feed_h = props.feed_horizontal
        layer = props.layer_height
        flow_mult = props.flow_mult
        use_curve_thickness = props.use_curve_thickness
        if context.object.type != 'CURVE': use_curve_thickness = False
        #if context.object.type != 'CURVE':
        #    self.report({'ERROR'}, 'Please select a Curve object')
        #    return {'CANCELLED'}
        ob = context.object
        matr = ob.matrix_world
        if ob.type == 'MESH':
            dg = context.evaluated_depsgraph_get()
            mesh = ob.evaluated_get(dg).data
            edges = [list(e.vertices) for e in mesh.edges]
            verts = [v.co for v in mesh.vertices]
            radii = [1] * len(verts)
            ordered_verts = find_curves(edges, len(mesh.vertices))
            ob = curve_from_pydata(verts,
                                   radii,
                                   ordered_verts,
                                   name='__temp_curve__',
                                   merge_distance=0.1,
                                   set_active=False)

        vertices = [[matr @ p.co.xyz for p in s.points]
                    for s in ob.data.splines]
        if use_curve_thickness:
            bevel_depth = ob.data.bevel_depth
            var_height = [[p.radius * bevel_depth for p in s.points]
                          for s in ob.data.splines]
        cyclic_u = [s.use_cyclic_u for s in ob.data.splines]

        if ob.name == '__temp_curve__': bpy.data.objects.remove(ob)

        if len(vertices) == 1: props.gcode_mode = 'CONT'
        export = True

        # open file
        if (export):
            if props.folder == '':
                folder = '//' + os.path.splitext(
                    bpy.path.basename(bpy.context.blend_data.filepath))[0]
            else:
                folder = props.folder
            if '.gcode' not in folder: folder += '.gcode'
            path = bpy.path.abspath(folder)
            file = open(path, 'w')
            try:
                for line in bpy.data.texts[props.start_code].lines:
                    file.write(line.body + '\n')
            except:
                pass

        #if props.gcode_mode == 'RETR':

        # sort layers (Z)
        if props.auto_sort_layers:
            sorted_verts = []
            if use_curve_thickness:
                sorted_height = []
            for i, curve in enumerate(vertices):
                # mean z
                listz = [v[2] for v in curve]
                meanz = np.mean(listz)
                # store curve and meanz
                sorted_verts.append((curve, meanz))
                if use_curve_thickness:
                    sorted_height.append((var_height[i], meanz))
            vertices = [
                data[0]
                for data in sorted(sorted_verts, key=lambda height: height[1])
            ]
            if use_curve_thickness:
                var_height = [
                    data[0] for data in sorted(sorted_height,
                                               key=lambda height: height[1])
                ]

        # sort vertices (XY)
        if props.auto_sort_points:
            # curves median point
            median_points = [np.mean(verts, axis=0) for verts in vertices]

            # chose starting point for each curve
            for j, curve in enumerate(vertices):
                # for closed curves finds the best starting point
                if cyclic_u[j]:
                    # create kd tree
                    kd = mathutils.kdtree.KDTree(len(curve))
                    for i, v in enumerate(curve):
                        kd.insert(v, i)
                    kd.balance()

                    if props.gcode_mode == 'RETR':
                        if j == 0:
                            # close to next two curves median point
                            co_find = np.mean(median_points[j + 1:j + 3],
                                              axis=0)
                        elif j < len(vertices) - 1:
                            co_find = np.mean(
                                [median_points[j - 1], median_points[j + 1]],
                                axis=0)
                        else:
                            co_find = np.mean(median_points[j - 2:j], axis=0)
                        #flow_mult[j] = flow_mult[j][index:]+flow_mult[j][:index]
                        #layer[j] = layer[j][index:]+layer[j][:index]
                    else:
                        if j == 0:
                            # close to next two curves median point
                            co_find = np.mean(median_points[j + 1:j + 3],
                                              axis=0)
                        else:
                            co_find = vertices[j - 1][-1]
                    co, index, dist = kd.find(co_find)
                    vertices[j] = vertices[j][index:] + vertices[j][:index + 1]
                    if use_curve_thickness:
                        var_height[j] = var_height[j][index:] + var_height[
                            j][:index + 1]
                else:
                    if j > 0:
                        p0 = curve[0]
                        p1 = curve[-1]
                        last = vertices[j - 1][-1]
                        d0 = (last - p0).length
                        d1 = (last - p1).length
                        if d1 < d0:
                            vertices[j].reverse()
                            if use_curve_thickness:
                                var_height[j].reverse()

        # calc bounding box
        min_corner = np.min(vertices[0], axis=0)
        max_corner = np.max(vertices[0], axis=0)
        for i in range(1, len(vertices)):
            eval_points = vertices[i] + [min_corner]
            min_corner = np.min(eval_points, axis=0)
            eval_points = vertices[i] + [max_corner]
            max_corner = np.max(eval_points, axis=0)

        # initialize variables
        e = 0
        last_vert = Vector((0, 0, 0))
        maxz = 0
        path_length = 0
        travel_length = 0

        printed_verts = []
        printed_edges = []
        travel_verts = []
        travel_edges = []

        # write movements
        for i in range(len(vertices)):
            curve = vertices[i]
            first_id = len(printed_verts)
            for j in range(len(curve)):
                v = curve[j]
                v_flow_mult = flow_mult  #[i][j]
                v_layer = layer  #[i][j]
                if use_curve_thickness:
                    v_layer = var_height[i][j] * 2

                # record max z
                maxz = np.max((maxz, v[2]))
                #maxz = max(maxz,v[2])

                # first point of the gcode
                if i == j == 0:
                    printed_verts.append(v)
                    if (export):
                        file.write('G92 E0 \n')
                        params = v[:3] + (feed, )
                        to_write = 'G1 X{0:.4f} Y{1:.4f} Z{2:.4f} F{3:.0f}\n'.format(
                            *params)
                        file.write(to_write)
                else:
                    # start after retraction
                    if j == 0 and props.gcode_mode == 'RETR':
                        if (export):
                            params = v[:2] + (maxz + props.dz, ) + (feed_h, )
                            to_write = 'G1 X{0:.4f} Y{1:.4f} Z{2:.4f} F{3:.0f}\n'.format(
                                *params)
                            file.write(to_write)
                            params = v[:3] + (feed_v, )
                            to_write = 'G1 X{0:.4f} Y{1:.4f} Z{2:.4f} F{3:.0f}\n'.format(
                                *params)
                            file.write(to_write)
                            to_write = 'G1 F{:.0f}\n'.format(feed)
                            file.write(to_write)
                            if props.retraction_mode == 'GCODE':
                                e += props.push
                                file.write('G1 E' + format(e, '.4f') + '\n')
                            else:
                                file.write('G11\n')
                        printed_verts.append((v[0], v[1], maxz + props.dz))
                        travel_edges.append(
                            (len(printed_verts) - 1, len(printed_verts) - 2))
                        travel_length += (Vector(printed_verts[-1]) -
                                          Vector(printed_verts[-2])).length
                        printed_verts.append(v)
                        travel_edges.append(
                            (len(printed_verts) - 1, len(printed_verts) - 2))
                        travel_length += maxz + props.dz - v[2]
                    # regular extrusion
                    else:
                        printed_verts.append(v)
                        v1 = Vector(v)
                        v0 = Vector(curve[j - 1])
                        dist = (v1 - v0).length
                        area = v_layer * props.nozzle + pi * (
                            v_layer / 2)**2  # rectangle + circle
                        cylinder = pi * (props.filament / 2)**2
                        flow = area / cylinder * (0 if j == 0 else 1)
                        e += dist * v_flow_mult * flow
                        params = v[:3] + (e, )
                        if (export):
                            to_write = 'G1 X{0:.4f} Y{1:.4f} Z{2:.4f} E{3:.4f}\n'.format(
                                *params)
                            file.write(to_write)
                        path_length += dist
                        printed_edges.append(
                            [len(printed_verts) - 1,
                             len(printed_verts) - 2])
            if props.gcode_mode == 'RETR':
                v0 = Vector(curve[-1])
                if props.close_all and False:
                    #printed_verts.append(v0)
                    printed_edges.append([len(printed_verts) - 1, first_id])

                    v1 = Vector(curve[0])
                    dist = (v0 - v1).length
                    area = v_layer * props.nozzle + pi * (
                        v_layer / 2)**2  # rectangle + circle
                    cylinder = pi * (props.filament / 2)**2
                    flow = area / cylinder
                    e += dist * v_flow_mult * flow
                    params = v1[:3] + (e, )
                    if (export):
                        to_write = 'G1 X{0:.4f} Y{1:.4f} Z{2:.4f} E{3:.4f}\n'.format(
                            *params)
                        file.write(to_write)
                    path_length += dist
                    v0 = v1
                if i < len(vertices) - 1:
                    if (export):
                        if props.retraction_mode == 'GCODE':
                            e -= props.pull
                            file.write('G0 E' + format(e, '.4f') + '\n')
                        else:
                            file.write('G10\n')
                        params = v0[:2] + (maxz + props.dz, ) + (feed_v, )
                        to_write = 'G1 X{0:.4f} Y{1:.4f} Z{2:.4f} F{3:.0f}\n'.format(
                            *params)
                        file.write(to_write)
                    printed_verts.append(v0.to_tuple())
                    printed_verts.append((v0.x, v0.y, maxz + props.dz))
                    travel_edges.append(
                        (len(printed_verts) - 1, len(printed_verts) - 2))
                    travel_length += maxz + props.dz - v0.z
        if (export):
            # end code
            try:
                for line in bpy.data.texts[props.end_code].lines:
                    file.write(line.body + '\n')
            except:
                pass
            file.close()
            print("Saved gcode to " + path)
        bb = list(min_corner) + list(max_corner)
        info = 'Bounding Box:\n'
        info += '\tmin\tX: {0:.1f}\tY: {1:.1f}\tZ: {2:.1f}\n'.format(*bb)
        info += '\tmax\tX: {3:.1f}\tY: {4:.1f}\tZ: {5:.1f}\n'.format(*bb)
        info += 'Extruded Filament: ' + format(e, '.2f') + '\n'
        info += 'Extruded Volume: ' + format(e * pi * (props.filament / 2)**2,
                                             '.2f') + '\n'
        info += 'Printed Path Length: ' + format(path_length, '.2f') + '\n'
        info += 'Travel Length: ' + format(travel_length, '.2f')
        '''
        # animate
        if scene.animate:
            scene = bpy.context.scene
            try:
                param = (scene.frame_current - scene.frame_start)/(scene.frame_end - scene.frame_start)
            except:
                param = 1
            last_vert = max(int(param*len(printed_verts)),1)
            printed_verts = printed_verts[:last_vert]
            printed_edges = [e for e in printed_edges if e[0] < last_vert and e[1] < last_vert]
            travel_edges = [e for e in travel_edges if e[0] < last_vert and e[1] < last_vert]
        '''
        return {'FINISHED'}
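
For reference, the E values written above follow a simple bead model: each deposited segment's cross-section is a rectangle of layer height by nozzle width plus a circle with the layer height as diameter, converted to filament length through the filament's own cross-section. A standalone restatement with an illustrative helper name:

from math import pi

def extrusion_delta(dist, layer_height, nozzle, filament, flow_mult=1.0):
    # Same flow model as the exporter: rectangle + circle bead cross-section.
    area = layer_height * nozzle + pi * (layer_height / 2) ** 2
    cylinder = pi * (filament / 2) ** 2
    return dist * flow_mult * area / cylinder

# A 10 mm move at 0.2 mm layer height, 0.4 mm nozzle and 1.75 mm filament
# advances the extruder by roughly 0.46 mm:
# extrusion_delta(10, 0.2, 0.4, 1.75) -> ~0.463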
Example #13
0
def tassellate(ob0, ob1, offset, zscale, gen_modifiers, com_modifiers, mode, scale_mode, rotation_mode, rand_seed, fill_mode, bool_vertex_group, bool_selection):
    random.seed(rand_seed)

    print(ob0.tissue_tessellate.offset)

    old_me0 = ob0.data
    if gen_modifiers:
        me0 = ob0.to_mesh(bpy.context.scene, apply_modifiers=True, settings = 'PREVIEW')
    else: me0 = ob0.data
    ob0.data = me0

    if com_modifiers:
        me1 = ob1.to_mesh(bpy.context.scene, apply_modifiers=True, settings = 'PREVIEW')
    else: me1 = ob1.data

    verts0 = me0.vertices

    n_verts = len(me1.vertices)
    n_edges = len(me1.edges)
    n_faces = len(me1.polygons)

    loc = ob1.location
    dim = ob1.dimensions
    scale = ob1.scale

    new_verts = []
    new_edges = []
    new_faces = []
    new_verts_np = np.array(())

    min = Vector((0,0,0))
    max = Vector((0,0,0))

    first = True

    for v in me1.vertices:
        vert = ( ob1.matrix_world * v.co )

        if vert[0] < min[0] or first:
            min[0] = vert[0]
        if vert[1] < min[1] or first:
            min[1] = vert[1]
        if vert[2] < min[2] or first:
            min[2] = vert[2]
        if vert[0] > max[0] or first:
            max[0] = vert[0]
        if vert[1] > max[1] or first:
            max[1] = vert[1]
        if vert[2] > max[2] or first:
            max[2] = vert[2]
        first = False

    bb = max-min

    verts1 = []

    for v in me1.vertices:
        if mode=="ADAPTIVE":
            vert = ( ob1.matrix_world * v.co ) - min
            vert[0] = vert[0] / bb[0]
            vert[1] = vert[1] / bb[1]
            vert[2] = (vert[2] + (-0.5 + offset*0.5)*bb[2])*zscale
        else:
            vert = v.co.xyz
            vert[2] *= zscale

        verts1.append(vert)

    # component vertices
    vs1 = np.array([v for v in verts1]).reshape(len(verts1),3,1)
    vx = vs1[:,0]
    vy = vs1[:,1]
    vz = vs1[:,2]

    # component polygons
    fs1 = [[i for i in p.vertices] for p in me1.polygons]
    new_faces = fs1[:]

    j = 0


    # active vertex group

    if bool_vertex_group:
        weight = []
        active_vertex_group = ob0.vertex_groups[ob0.vertex_groups.active_index]
        for v in me0.vertices:
            try:
                weight.append(active_vertex_group.weight(v.index))
            except:
                weight.append(0)



    if fill_mode == 'FAN':
        fan_verts = [v.co.to_tuple() for v in me0.vertices]
        fan_polygons = []
        selected_faces = []

        for p in me0.polygons:

            #if bool_selection and not p.select: continue

            fan_center = Vector((0,0,0))
            for v in p.vertices:
                fan_center += me0.vertices[v].co
            fan_center /= len(p.vertices)
            last_vert = len(fan_verts)
            fan_verts.append(fan_center.to_tuple())

            # vertex group
            if bool_vertex_group:
                center_weight = sum([weight[i] for i in p.vertices])/len(p.vertices)
                weight.append(center_weight)

            for i in range(len(p.vertices)):
                fan_polygons.append((p.vertices[i], p.vertices[(i+1)%len(p.vertices)], last_vert, last_vert))
                if bool_selection: selected_faces.append(p.select)

        #print(fan_verts)
        #print(fan_polygons)
        fan_me = bpy.data.meshes.new('Fan.Mesh')
        fan_me.from_pydata(tuple(fan_verts), [], tuple(fan_polygons))
        me0 = fan_me
        verts0 = me0.vertices
        for i in range(len(selected_faces)):
            fan_me.polygons[i].select = selected_faces[i]


    count = 0   # necessary for UV calculation


    for p in me0.polygons:
        if bool_selection and not p.select: continue


        # active vertex group
        '''
        ws0 = []
        active_vertex_group = ob0.vertex_groups[ob0.vertex_groups.active_index]
        for v in p.vertices:
            try:
                ws0.append(active_vertex_group.weight(v.index))
            except:
                ws0.append(0)
        '''


        #polygon vertices

        if rotation_mode == 'RANDOM':
            shifted_vertices = []
            n_poly_verts = len(p.vertices)
            rand = random.randint(0,n_poly_verts)
            for i in range(n_poly_verts):
                shifted_vertices.append(p.vertices[(i+rand)%n_poly_verts])
            vs0 = np.array([verts0[i].co for i in shifted_vertices])
            nvs0 = np.array([verts0[i].normal for i in shifted_vertices])

            # vertex weight
            if bool_vertex_group:
                ws0 = []
                for i in shifted_vertices:
                    try: ws0.append(weight[i])
                    except: ws0.append(0)
                ws0 = np.array(ws0)

        elif rotation_mode == 'UV' and len(ob0.data.uv_layers) > 0 and fill_mode != 'FAN':
            i = p.index

            v01 = (me0.uv_layers.active.data[count].uv + me0.uv_layers.active.data[count+1].uv)#/2
            if len(p.vertices) > 3: v32 = (me0.uv_layers.active.data[count+3].uv + me0.uv_layers.active.data[count+2].uv)#/2
            else: v32 = (me0.uv_layers.active.data[count].uv + me0.uv_layers.active.data[count+2].uv)
            v0132 = v32-v01
            v0132.normalize()

            v12 = (me0.uv_layers.active.data[count+1].uv + me0.uv_layers.active.data[count+2].uv)#/2
            if len(p.vertices) > 3: v03 = (me0.uv_layers.active.data[count].uv + me0.uv_layers.active.data[count+3].uv)#/2
            else: v03 = (me0.uv_layers.active.data[count].uv + me0.uv_layers.active.data[count].uv)#/2
            v1203 = v03 - v12
            v1203.normalize()

            vertUV = []
            dot1203 = v1203.x#.dot(Vector((1,0)))
            dot0132 = v0132.x#.dot(Vector((1,0)))
            if(abs(dot1203) < abs(dot0132)):
                if(dot0132 > 0): vertUV = p.vertices[1:] + p.vertices[:1]
                else: vertUV = p.vertices[3:] + p.vertices[:3]
            else:
                if(dot1203 < 0): vertUV = p.vertices[:]
                else: vertUV = p.vertices[2:] + p.vertices[:2]
            vs0 = np.array([verts0[i].co for i in vertUV])
            nvs0 = np.array([verts0[i].normal for i in vertUV])

            # vertex weight
            if bool_vertex_group:
                ws0 = []
                for i in vertUV:
                    try: ws0.append(weight[i])
                    except: ws0.append(0)
                ws0 = np.array(ws0)

            count += len(p.vertices)

        else:
            vs0 = np.array([verts0[i].co for i in p.vertices])
            nvs0 = np.array([verts0[i].normal for i in p.vertices])

            # vertex weight
            if bool_vertex_group:
                ws0 = []
                for i in p.vertices:
                    try: ws0.append(weight[i])
                    except: ws0.append(0)
                ws0 = np.array(ws0)

        # considering only 4 vertices
        vs0 = np.array((vs0[0], vs0[1], vs0[2], vs0[-1]))
        nvs0 = np.array((nvs0[0], nvs0[1], nvs0[2], nvs0[-1]))

        # remapped vertex coordinates
        v0 = vs0[0] + (vs0[1] -vs0[0])*vx
        v1 = vs0[3] + (vs0[2] -vs0[3])*vx
        v2 = v0 + (v1 - v0)*vy

        # remapped vertex normal
        nv0 = nvs0[0] + (nvs0[1] -nvs0[0])*vx
        nv1 = nvs0[3] + (nvs0[2] -nvs0[3])*vx
        nv2 = nv0 + (nv1 - nv0)*vy

        # vertex z to normal
        v3 = v2 + nv2*vz*(sqrt(p.area) if scale_mode == "ADAPTIVE" else 1)


        if bool_vertex_group:
            ws0 = np.array((ws0[0], ws0[1], ws0[2], ws0[-1]))
            # interpolate vertex weight
            w0 = ws0[0] + (ws0[1] -ws0[0])*vx
            w1 = ws0[3] + (ws0[2] -ws0[3])*vx
            w2 = w0 + (w1 - w0)*vy




        if j == 0:
            new_verts_np = v3
            if bool_vertex_group: new_vertex_group_np = w2
        else:
            new_verts_np = np.concatenate((new_verts_np, v3), axis=0)
            if bool_vertex_group: new_vertex_group_np = np.concatenate((new_vertex_group_np, w2), axis=0)
            for p in fs1: new_faces.append([i+n_verts*j for i in p])

        j+=1

    new_verts = new_verts_np.tolist()

    new_name = ob0.name + "_" + ob1.name
    new_me = bpy.data.meshes.new(new_name)
    new_me.from_pydata(new_verts, [], new_faces)
    #new_me.from_pydata(new_verts, new_edges, [])
    new_me.update()
    new_ob = bpy.data.objects.new("generated", new_me)

    # vertex group
    if bool_vertex_group:
        new_ob.vertex_groups.new("generator_group")
        for i in range(len(new_vertex_group_np)):
            new_ob.vertex_groups["generator_group"].add([i], new_vertex_group_np[i], "ADD")

    ob0.data = old_me0
    return new_ob
Example #14
0
def tassellate(ob0, ob1, offset, zscale, gen_modifiers, com_modifiers, mode, scale_mode, rotation_mode, rand_seed, fill_mode, bool_vertex_group, bool_selection, bool_shapekeys):
    random.seed(rand_seed)

    old_me0 = ob0.data      # store generator mesh
    if gen_modifiers:       # apply generator modifiers
        me0 = ob0.to_mesh(bpy.context.scene, apply_modifiers=True, settings = 'PREVIEW')
    else: me0 = ob0.data
    ob0.data = me0

    base_polygons = []

    # check if zero selected faces
    if bool_selection:
        for p in ob0.data.polygons:
            if p.select: base_polygons.append(p)
    else:
        base_polygons = ob0.data.polygons

    if len(base_polygons) == 0: return 0


    if com_modifiers:       # apply component modifiers
        me1 = ob1.to_mesh(bpy.context.scene, apply_modifiers=True, settings = 'PREVIEW')
    else: me1 = ob1.data

    verts0 = me0.vertices   # collect generator vertices

    # component statistics
    n_verts = len(me1.vertices)
    n_edges = len(me1.edges)
    n_faces = len(me1.polygons)

    # component transformations
    loc = ob1.location
    dim = ob1.dimensions
    scale = ob1.scale

    # create empty lists
    new_verts = []
    new_edges = []
    new_faces = []
    new_verts_np = np.array(())

    ### Component Bounding Box ###

    min = Vector((0,0,0))
    max = Vector((0,0,0))

    first = True

    for v in me1.vertices:
        vert = v.co#( ob1.matrix_world * v.co )

        if vert[0] < min[0] or first:
            min[0] = vert[0]
        if vert[1] < min[1] or first:
            min[1] = vert[1]
        if vert[2] < min[2] or first:
            min[2] = vert[2]
        if vert[0] > max[0] or first:
            max[0] = vert[0]
        if vert[1] > max[1] or first:
            max[1] = vert[1]
        if vert[2] > max[2] or first:
            max[2] = vert[2]
        first = False

    bb = max-min

    ### Component Bounding Box - END ###


    # adaptive XY

    verts1 = []

    for v in me1.vertices:
        if mode=="ADAPTIVE":
            vert = v.co - min#( ob1.matrix_world * v.co ) - min
            vert[0] = (vert[0] / bb[0] if bb[0] != 0 else 0.5)
            vert[1] = (vert[1] / bb[1] if bb[1] != 0 else 0.5)
            vert[2] = (vert[2] + (-0.5 + offset*0.5)*bb[2])*zscale
        else:
            vert = v.co.xyz
            vert[2] = (vert[2] - min[2] + (-0.5 + offset*0.5)*bb[2])*zscale

        verts1.append(vert)

    # component vertices
    vs1 = np.array([v for v in verts1]).reshape(len(verts1),3,1)
    vx = vs1[:,0]
    vy = vs1[:,1]
    vz = vs1[:,2]

    # component polygons
    fs1 = [[i for i in p.vertices] for p in me1.polygons]
    new_faces = fs1[:]

    # component edges
    es1 = [[i for i in e.vertices] for e in me1.edges if e.is_loose]
    new_edges = es1[:]
    
    j = 0




    ### SHAPE KEYS ###

    shapekeys = []

    if me1.shape_keys is not None and bool_shapekeys:
        if len(me1.shape_keys.key_blocks) > 1:

            # read active key
            active_key = ob1.active_shape_key_index
            if active_key == 0: active_key = 1

            for v in me1.shape_keys.key_blocks[active_key].data:
                if mode=="ADAPTIVE":
                    vert = ( ob1.matrix_world * v.co ) - min
                    vert[0] = vert[0] / bb[0]
                    vert[1] = vert[1] / bb[1]
                    vert[2] = (vert[2] + (-0.5 + offset*0.5)*bb[2])*zscale
                else:
                    vert = v.co.xyz
                    vert[2] = (vert[2] - min[2] + (-0.5 + offset*0.5)*bb[2])*zscale

                shapekeys.append(vert)

            # component vertices
            key1 = np.array([v for v in shapekeys]).reshape(len(shapekeys),3,1)
            vx_key = key1[:,0]
            vy_key = key1[:,1]
            vz_key = key1[:,2]

    ### SHAPE KEYS - END ###





    # active vertex group

    if bool_vertex_group:
        try:
            weight = []
            active_vertex_group = ob0.vertex_groups[ob0.vertex_groups.active_index]
            for v in me0.vertices:
                try:
                    weight.append(active_vertex_group.weight(v.index))
                except:
                    weight.append(0)
        except:
            bool_vertex_group = False



    if fill_mode == 'FAN':
        fan_verts = [v.co.to_tuple() for v in me0.vertices]
        fan_polygons = []
        selected_faces = []

        #for p in me0.polygons:
        for p in base_polygons:

            #if bool_selection and not p.select: continue

            fan_center = Vector((0,0,0))
            for v in p.vertices:
                fan_center += me0.vertices[v].co
            fan_center /= len(p.vertices)
            last_vert = len(fan_verts)
            fan_verts.append(fan_center.to_tuple())

            # vertex group
            if bool_vertex_group:
                center_weight = sum([weight[i] for i in p.vertices])/len(p.vertices)
                weight.append(center_weight)

            for i in range(len(p.vertices)):
                fan_polygons.append((p.vertices[i], p.vertices[(i+1)%len(p.vertices)], last_vert, last_vert))
                ###if bool_selection: selected_faces.append(p.select)

        fan_me = bpy.data.meshes.new('Fan.Mesh')
        fan_me.from_pydata(tuple(fan_verts), [], tuple(fan_polygons))
        me0 = fan_me
        verts0 = me0.vertices
        base_polygons = me0.polygons
        #for i in range(len(selected_faces)):
        #    fan_me.polygons[i].select = selected_faces[i]


    count = 0   # necessary for UV calculation

    ### TESSELLATION ###

    #for p in me0.polygons:
    for p in base_polygons:
        #if bool_selection and not p.select: continue


        # active vertex group
        '''
        ws0 = []
        active_vertex_group = ob0.vertex_groups[ob0.vertex_groups.active_index]
        for v in p.vertices:
            try:
                ws0.append(active_vertex_group.weight(v.index))
            except:
                ws0.append(0)
        '''


        #polygon vertices

        ### RANDOM ROTATION ###

        if rotation_mode == 'RANDOM':
            shifted_vertices = []
            n_poly_verts = len(p.vertices)
            rand = random.randint(0,n_poly_verts)
            for i in range(n_poly_verts):
                shifted_vertices.append(p.vertices[(i+rand)%n_poly_verts])
            vs0 = np.array([verts0[i].co for i in shifted_vertices])
            nvs0 = np.array([verts0[i].normal for i in shifted_vertices])

            # vertex weight
            if bool_vertex_group:
                ws0 = []
                for i in shifted_vertices:
                    try: ws0.append(weight[i])
                    except: ws0.append(0)
                ws0 = np.array(ws0)

        ### UV ROTATION ###

        elif rotation_mode == 'UV' and len(ob0.data.uv_layers) > 0 and fill_mode != 'FAN':
            i = p.index

            v01 = (me0.uv_layers.active.data[count].uv + me0.uv_layers.active.data[count+1].uv)#/2
            if len(p.vertices) > 3: v32 = (me0.uv_layers.active.data[count+3].uv + me0.uv_layers.active.data[count+2].uv)#/2
            else: v32 = (me0.uv_layers.active.data[count].uv + me0.uv_layers.active.data[count+2].uv)
            v0132 = v32-v01
            v0132.normalize()

            v12 = (me0.uv_layers.active.data[count+1].uv + me0.uv_layers.active.data[count+2].uv)#/2
            if len(p.vertices) > 3: v03 = (me0.uv_layers.active.data[count].uv + me0.uv_layers.active.data[count+3].uv)#/2
            else: v03 = (me0.uv_layers.active.data[count].uv + me0.uv_layers.active.data[count].uv)#/2
            v1203 = v03 - v12
            v1203.normalize()

            vertUV = []
            dot1203 = v1203.x#.dot(Vector((1,0)))
            dot0132 = v0132.x#.dot(Vector((1,0)))
            if(abs(dot1203) < abs(dot0132)):
                if(dot0132 > 0): vertUV = p.vertices[1:] + p.vertices[:1]
                else: vertUV = p.vertices[3:] + p.vertices[:3]
            else:
                if(dot1203 < 0): vertUV = p.vertices[:]
                else: vertUV = p.vertices[2:] + p.vertices[:2]
            vs0 = np.array([verts0[i].co for i in vertUV])
            nvs0 = np.array([verts0[i].normal for i in vertUV])

            # vertex weight
            if bool_vertex_group:
                ws0 = []
                for i in vertUV:
                    try: ws0.append(weight[i])
                    except: ws0.append(0)
                ws0 = np.array(ws0)

            count += len(p.vertices)

        ### DEFAULT ROTATION ###

        else:
            vs0 = np.array([verts0[i].co for i in p.vertices])
            nvs0 = np.array([verts0[i].normal for i in p.vertices])

            # vertex weight
            if bool_vertex_group:
                ws0 = []
                for i in p.vertices:
                    try: ws0.append(weight[i])
                    except: ws0.append(0)
                ws0 = np.array(ws0)

        ### INTERPOLATING ###

        # considering only 4 vertices
        vs0 = np.array((vs0[0], vs0[1], vs0[2], vs0[-1]))
        nvs0 = np.array((nvs0[0], nvs0[1], nvs0[2], nvs0[-1]))

        # remapped vertex coordinates
        v0 = vs0[0] + (vs0[1] -vs0[0])*vx
        v1 = vs0[3] + (vs0[2] -vs0[3])*vx
        v2 = v0 + (v1 - v0)*vy

        # remapped vertex normal
        nv0 = nvs0[0] + (nvs0[1] -nvs0[0])*vx
        nv1 = nvs0[3] + (nvs0[2] -nvs0[3])*vx
        nv2 = nv0 + (nv1 - nv0)*vy

        # vertex z to normal
        v3 = v2 + nv2*vz*(sqrt(p.area) if scale_mode == "ADAPTIVE" else 1)


        if bool_vertex_group:
            ws0 = np.array((ws0[0], ws0[1], ws0[2], ws0[-1]))
            # interpolate vertex weight
            w0 = ws0[0] + (ws0[1] -ws0[0])*vx
            w1 = ws0[3] + (ws0[2] -ws0[3])*vx
            w2 = w0 + (w1 - w0)*vy

            ### SHAPE KEYS ###

            if me1.shape_keys is not None and bool_shapekeys:

                # remapped vertex coordinates
                v0 = vs0[0] + (vs0[1] -vs0[0])*vx_key
                v1 = vs0[3] + (vs0[2] -vs0[3])*vx_key
                v2 = v0 + (v1 - v0)*vy_key

                # remapped vertex normal
                nv0 = nvs0[0] + (nvs0[1] -nvs0[0])*vx_key
                nv1 = nvs0[3] + (nvs0[2] -nvs0[3])*vx_key
                nv2 = nv0 + (nv1 - nv0)*vy_key

                # vertex z to normal
                v3_key = v2 + nv2*vz_key*(sqrt(p.area) if scale_mode == "ADAPTIVE" else 1)
                v3 = v3 + (v3_key - v3) * w2




        if j == 0:
            new_verts_np = v3
            if bool_vertex_group: new_vertex_group_np = w2
        else:
            new_verts_np = np.concatenate((new_verts_np, v3), axis=0)   # appending vertices
            if bool_vertex_group: new_vertex_group_np = np.concatenate((new_vertex_group_np, w2), axis=0)   # appending vertex group
            for p in fs1: new_faces.append([i+n_verts*j for i in p])    # appending faces
            for e in es1: new_edges.append([i+n_verts*j for i in e])    # appending edges

        j+=1

    new_verts = new_verts_np.tolist()

    new_name = ob0.name + "_" + ob1.name
    new_me = bpy.data.meshes.new(new_name)
    new_me.from_pydata(new_verts, new_edges, new_faces)
    #new_me.from_pydata(new_verts, new_edges, [])
    new_me.update(calc_edges=True)
    new_ob = bpy.data.objects.new("tessellate_temp", new_me)

    # vertex group
    if bool_vertex_group:
        new_ob.vertex_groups.new("generator_group")
        for i in range(len(new_vertex_group_np)):
            new_ob.vertex_groups["generator_group"].add([i], new_vertex_group_np[i], "ADD")

    ob0.data = old_me0
    return new_ob
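
Note: the interpolation step above treats the first three and the last vertices of each base polygon as a quad, places every component vertex on it by bilinear interpolation of its (vx, vy) coordinates, and then offsets the result along the interpolated normal by vz (scaled by sqrt(p.area) in ADAPTIVE mode). A minimal standalone sketch of that remapping, with illustrative names that are not part of the add-on:

import numpy as np

def remap_to_quad(quad_cos, quad_normals, vx, vy, vz, area_scale=1.0):
    # quad_cos, quad_normals: (4, 3) arrays of corner positions and normals
    # vx, vy, vz: component coordinates, typically in [0, 1]
    v0 = quad_cos[0] + (quad_cos[1] - quad_cos[0]) * vx
    v1 = quad_cos[3] + (quad_cos[2] - quad_cos[3]) * vx
    v2 = v0 + (v1 - v0) * vy                      # bilinear position
    n0 = quad_normals[0] + (quad_normals[1] - quad_normals[0]) * vx
    n1 = quad_normals[3] + (quad_normals[2] - quad_normals[3]) * vx
    n2 = n0 + (n1 - n0) * vy                      # interpolated normal
    return v2 + n2 * vz * area_scale              # offset along the normal

# the centre of a unit quad, lifted by 0.5 along +Z:
quad = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0], [0, 1, 0]], dtype=float)
normals = np.array([[0, 0, 1]] * 4, dtype=float)
print(remap_to_quad(quad, normals, 0.5, 0.5, 0.5))   # -> [0.5 0.5 0.5]

In the add-on code the same formulas are applied to whole (N, 1) arrays of component coordinates at once, so every copy of the component is remapped in one vectorized pass per base polygon.
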
Example #15
0
    def execute(self, context):
        log(
            'Export Realflow Particles (.bin)',
            0,
            LogStyles.MESSAGE,
        )
        log(
            'use_velocity: {}, use_size: {}, size: {}, use_uv: {}, uv_layer: "{}"'
            .format(
                self.use_velocity,
                self.use_size,
                self.size,
                self.use_uv,
                self.uv_layer,
            ),
            1,
        )

        o = context.active_object
        ps = o.particle_systems.active
        pset = ps.settings

        # no particles (particle count set to zero) or no alive particles > cancel the export
        if (len(ps.particles) == 0):
            log(
                "particle system {} has no particles".format(ps.name),
                1,
                LogStyles.ERROR,
            )
            self.report(
                {'ERROR'},
                "particle system {} has no particles".format(ps.name),
            )
            return {'CANCELLED'}
        ok = False
        for p in ps.particles:
            if (p.alive_state == "ALIVE"):
                ok = True
                break
        if (not ok):
            log(
                "particle system {} has no 'ALIVE' particles".format(ps.name),
                1,
                LogStyles.ERROR,
            )
            self.report(
                {'ERROR'},
                "particle system {} has no 'ALIVE' particles".format(ps.name),
            )
            return {'CANCELLED'}

        mat = o.matrix_world.copy()
        mat.invert()

        locs = []
        vels = []
        sizes = []

        # location, velocity and size from alive particles
        for part in ps.particles:
            if (part.alive_state == "ALIVE"):
                l = part.location.copy()
                l = mat * l
                locs.append(l)
                if (self.use_velocity):
                    v = part.velocity.copy()
                    v = mat * v
                    vels.append(v)
                else:
                    vels.append(Vector((0.0, 0.0, 0.0)))
                # size per particle
                if (self.use_size):
                    sizes.append(part.size / 2)
                else:
                    sizes.append(self.size / 2)

        # transform
        # TODO: axis conversion is overly complicated, or is it?
        ROTATE_X_90 = Matrix.Rotation(math.radians(90.0), 4, 'X')
        rfms = Matrix.Scale(1.0, 4)
        rfms[0][0] = -1.0
        rfmr = Matrix.Rotation(math.radians(-90.0), 4, 'Z')
        rfm = rfms * rfmr * ROTATE_X_90
        mry90 = Matrix.Rotation(math.radians(90.0), 4, 'Y')
        for i, l in enumerate(locs):
            locs[i] = Vector(l * rfm).to_tuple()
        if (self.use_velocity):
            for i, v in enumerate(vels):
                vels[i] = Vector(v * rfm).to_tuple()

        # particle uvs
        if (self.uv_layer != "" and self.use_uv):
            uv_no = 0
            for i, uv in enumerate(o.data.uv_textures):
                if (self.uv_layer == uv.name):
                    uv_no = i
                    break

            uv_locs = tuple()

            if (len(ps.child_particles) > 0):
                # NOTE: cannot use a bvhtree to build uvs for child particles (as with hair): child particle locations are not accessible, so no uvs
                log(
                    "child particles uvs are not supported yet..",
                    1,
                    LogStyles.WARNING,
                )
                self.report({'WARNING'},
                            "child particle uvs are not supported yet..")
            else:
                # no child particles, use 'uv_on_emitter'
                nc0 = len(ps.particles)
                nc1 = len(ps.child_particles) - nc0
                uv_no = 0
                for i, uv in enumerate(o.data.uv_textures):
                    if (self.uv_layer == uv.name):
                        uv_no = i
                        break
                mod = None
                for m in o.modifiers:
                    if (m.type == 'PARTICLE_SYSTEM'):
                        if (m.particle_system == ps):
                            mod = m
                            break
                uv_locs = tuple()
                for i, p in enumerate(ps.particles):
                    co = ps.uv_on_emitter(
                        mod,
                        p,
                        particle_no=i,
                        uv_no=uv_no,
                    )
                    # (x, y, 0.0, )
                    t = co.to_tuple() + (0.0, )
                    uv_locs += (
                        t[0],
                        1.0 - t[1],
                        t[2],
                    )
                if (nc1 != 0):
                    # duplicate uvs only when extra (child) particles need covering;
                    # keeps 'ex' defined before it is used
                    ex = int(nc1 / nc0)
                    for i in range(ex):
                        uv_locs += uv_locs
            has_uvs = True
        else:
            uv_locs = [0.0] * (len(ps.particles) * 3)
            if (self.use_uv):
                log(
                    "emitter has no UVs or no UV is selected to be used.. UVs will be exported, but set to (0.0, 0.0)",
                    1,
                    LogStyles.WARNING,
                )
                self.report(
                    {'WARNING'},
                    "emitter has no UVs or no UV is selected to be used.. UVs will be exported, but set to (0.0, 0.0)",
                )

        flip_xy = Matrix(((-1.0, 0.0, 0.0, 0.0), (0.0, -1.0, 0.0, 0.0),
                          (0.0, 0.0, 1.0, 0.0), (0.0, 0.0, 0.0, 1.0)))
        fv = Vector((-1.0, -1.0, 0.0))
        particles = []
        for i, ploc in enumerate(locs):
            # normal from velocity
            pnor = Vector(vels[i])
            pnor.normalize()
            uv = uv_locs[i * 3:(i * 3) + 3]
            uvv = Vector(uv).reflect(fv) * flip_xy
            uvt = (
                uvv.z,
                uvv.x,
                uvv.y,
            )
            particles.append((i, ) + tuple(ploc[:3]) + pnor.to_tuple() +
                             tuple(vels[i][:3]) + (sizes[i], ) + uvt, )

        # and now.. export!
        h, t = os.path.split(self.filepath)
        n, e = os.path.splitext(t)
        # remove frame number automatically added in ui
        n = n[:-6]

        cf = bpy.context.scene.frame_current
        prms = {
            'directory': bpy.path.abspath(h),
            'name': "{}".format(n),
            'frame': cf,
            'particles': particles,
            'fps': bpy.context.scene.render.fps,
            # blender's size is in fact diameter, but we need radius..
            'size': 1.0 if self.use_size else self.size / 2,
            'log_indent': 1,
        }
        rfbw = RFBinWriter(**prms)

        log(
            'done.',
            1,
        )

        return {'FINISHED'}
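
Note: the transform block above converts Blender's Z-up frame to RealFlow's by composing a mirror on X, a -90 degree rotation about Z and a +90 degree rotation about X, and then multiplies each location and velocity as a row vector (Blender 2.7x mathutils, where '*' is matrix multiplication). A quick sanity check of that composition, reusing the same calls:

import math
from mathutils import Matrix, Vector

ROTATE_X_90 = Matrix.Rotation(math.radians(90.0), 4, 'X')
rfms = Matrix.Scale(1.0, 4)
rfms[0][0] = -1.0                                    # mirror on X
rfmr = Matrix.Rotation(math.radians(-90.0), 4, 'Z')  # -90 deg about Z
rfm = rfms * rfmr * ROTATE_X_90

# Blender's up axis (+Z) should come out pointing along +Y
print(Vector((0.0, 0.0, 1.0)) * rfm)
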
Example #16
0
    def load(self, context):
        pcv = context.object.point_cloud_visualizer
        p = os.path.abspath(bpy.path.abspath(pcv.filepath))
        if (not os.path.exists(p)):
            self.report({'WARNING'}, "File does not exist")
            return {'CANCELLED'}

        # load points
        i = BinPlyPointCloudInfo(p, True)
        c = i.creator
        # l = i.vertices
        # f = pcv.load_from
        # t = pcv.load_to
        # f, t = clamp(f, t, l)
        # pcv.load_from = f
        # pcv.load_to = t
        # points = BinPlyPointCloudReader(p, c, f, f + t, ).vertices
        points = BinPlyPointCloudReader(
            p,
            c,
            0,
            0,
        ).vertices

        rnd = random.Random()
        # rnd.seed(seed)
        random.shuffle(points, rnd.random)

        # process points
        vertices = []
        colors = []
        for i, p in enumerate(points):
            v = Vector(p[:3])
            vertices.extend(v.to_tuple())
            # ply exported from meshlab also carries an alpha value, throw it away..
            c = [v / 255 for v in p[6:9]]
            colors.extend(c)

        # make buffers
        length = len(points)
        vertex_buffer = bgl.Buffer(bgl.GL_FLOAT, len(vertices), vertices)
        color_buffer = bgl.Buffer(bgl.GL_FLOAT, len(colors), colors)

        o = context.object
        m = o.matrix_world
        matrix = []
        for v in m.transposed():
            matrix.extend(list(v.to_tuple()))
        matrix_buffer = bgl.Buffer(bgl.GL_FLOAT, len(matrix), matrix)

        d = PCVCache.new()
        u = str(uuid.uuid1())
        d['uuid'] = u
        d['path'] = pcv.filepath
        d['ready'] = True
        d['length'] = length
        d['vertex_buffer'] = vertex_buffer
        d['color_buffer'] = color_buffer
        d['matrix'] = m
        d['matrix_buffer'] = matrix_buffer
        d['object'] = o
        d['display_percent'] = pcv.display_percent
        PCVCache.add(d)

        pcv.uuid = u
Example #17
0
def create_atom(pos: Vector, atom_size):
    bpy.ops.mesh.primitive_uv_sphere_add(location=pos.to_tuple(),
                                         size=atom_size,
                                         segments=QUALITY)
    return bpy.context.object
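
Note: create_atom() above only wraps bpy.ops.mesh.primitive_uv_sphere_add (the Blender 2.7x operator, whose radius argument is named size) and returns the object the operator leaves active; QUALITY is assumed to be a module-level segment count. A minimal usage sketch under that assumption, with create_atom() and QUALITY living in the same module:

from mathutils import Vector

QUALITY = 24  # assumed segment count picked up by create_atom()

# two spheres of different radius along the X axis
hydrogen = create_atom(Vector((0.0, 0.0, 0.0)), 0.3)
oxygen = create_atom(Vector((1.5, 0.0, 0.0)), 0.6)
oxygen.name = "Oxygen"
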
Example #18
0
    def execute(self, context):
        obj = context.selected_objects[0]  #Selected object, should only be Scene Root
        export_info = {  #Dict of empty nodes
            'nodes': {}
        }
        obj_models = get_nodes_by_empty(
            obj, export_info['nodes'])  #Select entire hierarchy

        if len(obj_models):  #If meshes even exist
            bb_min = Vector((10000.0, 10000.0, 10000.0))  #Bounding box min
            bb_max = Vector((-10000.0, -10000.0, -10000.0))  #Bounding box max

            export_info['exporter_version'] = '%s.%s.%s' % bl_info[
                'version']  #For .temp_model, save version of blender primitives exporter

            for obj_model in obj_models:
                if not obj_model.data.uv_layers:  #If model does not have a uv layer
                    self.report({'ERROR'}, 'mesh.uv_layers is None')
                    if self.debug_mode:
                        print('[Export Error] mesh.uv_layers is None')
                    return {'CANCELLED'}

                if not len(obj_model.data.materials):  #If model does not have a material
                    self.report({'ERROR'}, 'mesh.materials is None')
                    if self.debug_mode:
                        print('[Export Error] mesh.materials is None')
                    return {'CANCELLED'}
                #Get min and max bounding box
                bb_min.x = min(
                    obj_model.location.x + obj_model.bound_box[0][0], bb_min.x)
                bb_min.z = min(
                    obj_model.location.y + obj_model.bound_box[0][1], bb_min.z)
                bb_min.y = min(
                    obj_model.location.z + obj_model.bound_box[0][2], bb_min.y)

                bb_max.x = max(
                    obj_model.location.x + obj_model.bound_box[6][0], bb_max.x)
                bb_max.z = max(
                    obj_model.location.y + obj_model.bound_box[6][1], bb_max.z)
                bb_max.y = max(
                    obj_model.location.z + obj_model.bound_box[6][2], bb_max.y)
            #Save bounding box dimensions
            export_info['bb_min'] = bb_min.to_tuple()
            export_info['bb_max'] = bb_max.to_tuple()

            from .export_bw_primitives import BigWorldModelExporter

            try:
                bw_exporter = BigWorldModelExporter()
                bw_exporter.export(obj_models, self.filepath, export_info,
                                   self.debug_mode)
            except:
                self.report({'ERROR'}, 'Error in export %s!' %
                            os.path.basename(self.filepath))
                import traceback
                traceback.print_exc()
                return {'CANCELLED'}
        print('=' * 48)  #Divider
        print('[Export Info] Export %s' %
              os.path.basename(self.filepath))  #Filename info
        return {'FINISHED'}
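
Note: the bounding-box block above grows a pair of min/max vectors from each model's location plus the opposite corners of its local bound_box (indices 0 and 6), swapping Y and Z for the target coordinate system along the way. The accumulation itself is a per-axis min/max reduction; a plain-Python sketch of that reduction, without the axis swap and with illustrative names:

def accumulate_bounds(models):
    # models: iterable of (location, local_min_corner, local_max_corner) triples
    INF = float('inf')
    bb_min, bb_max = [INF] * 3, [-INF] * 3
    for loc, lo, hi in models:
        for axis in range(3):
            bb_min[axis] = min(bb_min[axis], loc[axis] + lo[axis])
            bb_max[axis] = max(bb_max[axis], loc[axis] + hi[axis])
    return tuple(bb_min), tuple(bb_max)

# two unit cubes, the second shifted by (2, 0, 0)
print(accumulate_bounds([((0, 0, 0), (-1, -1, -1), (1, 1, 1)),
                         ((2, 0, 0), (-1, -1, -1), (1, 1, 1))]))
# -> ((-1, -1, -1), (3, 1, 1))
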
Example #19
0
def tassellate(ob0, ob1, offset, zscale, gen_modifiers, com_modifiers, mode,
               scale_mode, randomize, rand_seed, fill_mode):
    random.seed(rand_seed)

    if gen_modifiers:
        me0 = ob0.to_mesh(bpy.context.scene,
                          apply_modifiers=True,
                          settings='PREVIEW')
    else:
        me0 = ob0.data

    if com_modifiers:
        me1 = ob1.to_mesh(bpy.context.scene,
                          apply_modifiers=True,
                          settings='PREVIEW')
    else:
        me1 = ob1.data

    verts0 = me0.vertices

    n_verts = len(me1.vertices)
    n_edges = len(me1.edges)
    n_faces = len(me1.polygons)

    loc = ob1.location
    dim = ob1.dimensions
    scale = ob1.scale

    new_verts = []
    new_edges = []
    new_faces = []
    new_verts_np = np.array(())

    min = Vector((0, 0, 0))
    max = Vector((0, 0, 0))

    first = True

    for v in me1.vertices:
        vert = (ob1.matrix_world * v.co)

        if vert[0] < min[0] or first:
            min[0] = vert[0]
        if vert[1] < min[1] or first:
            min[1] = vert[1]
        if vert[2] < min[2] or first:
            min[2] = vert[2]
        if vert[0] > max[0] or first:
            max[0] = vert[0]
        if vert[1] > max[1] or first:
            max[1] = vert[1]
        if vert[2] > max[2] or first:
            max[2] = vert[2]
        first = False

    bb = max - min

    verts1 = []

    for v in me1.vertices:
        if mode == "ADAPTIVE":
            vert = (ob1.matrix_world * v.co) - min
            vert[0] = vert[0] / bb[0]
            vert[1] = vert[1] / bb[1]
            vert[2] = (vert[2] + (-0.5 + offset * 0.5) * bb[2]) * zscale
        else:
            vert = v.co.xyz
            vert[2] *= zscale

        verts1.append(vert)

    # component vertices
    vs1 = np.array([v for v in verts1]).reshape(len(verts1), 3, 1)
    vx = vs1[:, 0]
    vy = vs1[:, 1]
    vz = vs1[:, 2]

    # component polygons
    fs1 = [[i for i in p.vertices] for p in me1.polygons]
    new_faces = fs1[:]

    j = 0

    if fill_mode == 'FAN':
        fan_verts = [v.co.to_tuple() for v in me0.vertices]
        fan_polygons = []
        for p in me0.polygons:
            fan_center = Vector((0, 0, 0))
            for v in p.vertices:
                fan_center += me0.vertices[v].co
            fan_center /= len(p.vertices)
            last_vert = len(fan_verts)
            fan_verts.append(fan_center.to_tuple())
            for i in range(len(p.vertices)):
                fan_polygons.append(
                    (p.vertices[i], p.vertices[(i + 1) % len(p.vertices)],
                     last_vert, last_vert))
        #print(fan_verts)
        #print(fan_polygons)
        fan_me = bpy.data.meshes.new('Fan.Mesh')
        fan_me.from_pydata(tuple(fan_verts), [], tuple(fan_polygons))
        me0 = fan_me
        verts0 = me0.vertices

    for p in me0.polygons:

        #polygon vertices

        if randomize:
            shifted_vertices = []
            n_poly_verts = len(p.vertices)
            rand = random.randint(0, n_poly_verts)
            for i in range(n_poly_verts):
                shifted_vertices.append(p.vertices[(i + rand) % n_poly_verts])
            vs0 = np.array([verts0[i].co for i in shifted_vertices])
            nvs0 = np.array([verts0[i].normal for i in shifted_vertices])
        else:
            vs0 = np.array([verts0[i].co for i in p.vertices])
            nvs0 = np.array([verts0[i].normal for i in p.vertices])

        vs0 = np.array((vs0[0], vs0[1], vs0[2], vs0[-1]))
        #polygon normals

        nvs0 = np.array((nvs0[0], nvs0[1], nvs0[2], nvs0[-1]))

        v0 = vs0[0] + (vs0[1] - vs0[0]) * vx
        v1 = vs0[3] + (vs0[2] - vs0[3]) * vx
        v2 = v0 + (v1 - v0) * vy

        nv0 = nvs0[0] + (nvs0[1] - nvs0[0]) * vx
        nv1 = nvs0[3] + (nvs0[2] - nvs0[3]) * vx
        nv2 = nv0 + (nv1 - nv0) * vy

        v3 = v2 + nv2 * vz * (sqrt(p.area) if scale_mode == "ADAPTIVE" else 1)

        if j == 0: new_verts_np = v3
        else:
            new_verts_np = np.concatenate((new_verts_np, v3), axis=0)
            for p in fs1:
                new_faces.append([i + n_verts * j for i in p])

        j += 1

    new_verts = new_verts_np.tolist()

    new_name = ob0.name + "_" + ob1.name
    new_me = bpy.data.meshes.new(new_name)
    new_me.from_pydata(new_verts, [], new_faces)
    #new_me.from_pydata(new_verts, new_edges, [])
    new_me.update()

    return new_me
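
Note: in ADAPTIVE mode the component vertices are first normalised into the bounding box of ob1, so that x and y run from 0 to 1 and z is re-centred by the offset parameter before being scaled; those normalised coordinates are what the bilinear remap then consumes. A standalone numpy sketch of that normalisation step, with illustrative names and assuming a non-degenerate bounding box (as the add-on does):

import numpy as np

def normalize_to_unit_box(world_cos, offset=0.0, zscale=1.0):
    # world_cos: (N, 3) world-space coordinates of the component vertices
    cos = np.asarray(world_cos, dtype=float)
    bb_min = cos.min(axis=0)
    bb = cos.max(axis=0) - bb_min            # bounding-box dimensions
    out = cos - bb_min
    out[:, 0] /= bb[0]                       # x mapped to [0, 1]
    out[:, 1] /= bb[1]                       # y mapped to [0, 1]
    # z re-centred around the offset and scaled, as in the ADAPTIVE branch
    out[:, 2] = (out[:, 2] + (-0.5 + offset * 0.5) * bb[2]) * zscale
    return out

print(normalize_to_unit_box([[0, 0, 0], [2, 4, 1], [1, 2, 0.5]], offset=1.0))
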
Example #20
0
def tassellate(ob0, ob1, offset, zscale, gen_modifiers, com_modifiers, mode, scale_mode, rotation_mode, rand_seed, fill_mode):
    random.seed(rand_seed)

    print(ob0.tissue_tessellate.offset)

    if gen_modifiers:
        me0 = ob0.to_mesh(bpy.context.scene, apply_modifiers=True, settings = 'PREVIEW')
    else: me0 = ob0.data

    if com_modifiers:
        me1 = ob1.to_mesh(bpy.context.scene, apply_modifiers=True, settings = 'PREVIEW')
    else: me1 = ob1.data

    verts0 = me0.vertices

    n_verts = len(me1.vertices)
    n_edges = len(me1.edges)
    n_faces = len(me1.polygons)

    loc = ob1.location
    dim = ob1.dimensions
    scale = ob1.scale

    new_verts = []
    new_edges = []
    new_faces = []
    new_verts_np = np.array(())

    min = Vector((0,0,0))
    max = Vector((0,0,0))

    first = True

    for v in me1.vertices:
        vert = ( ob1.matrix_world * v.co )

        if vert[0] < min[0] or first:
            min[0] = vert[0]
        if vert[1] < min[1] or first:
            min[1] = vert[1]
        if vert[2] < min[2] or first:
            min[2] = vert[2]
        if vert[0] > max[0] or first:
            max[0] = vert[0]
        if vert[1] > max[1] or first:
            max[1] = vert[1]
        if vert[2] > max[2] or first:
            max[2] = vert[2]
        first = False

    bb = max-min

    verts1 = []

    for v in me1.vertices:
        if mode=="ADAPTIVE":
            vert = ( ob1.matrix_world * v.co ) - min
            vert[0] = vert[0] / bb[0]
            vert[1] = vert[1] / bb[1]
            vert[2] = (vert[2] + (-0.5 + offset*0.5)*bb[2])*zscale
        else:
            vert = v.co.xyz
            vert[2] *= zscale

        verts1.append(vert)

    # component vertices
    vs1 = np.array([v for v in verts1]).reshape(len(verts1),3,1)
    vx = vs1[:,0]
    vy = vs1[:,1]
    vz = vs1[:,2]

    # component polygons
    fs1 = [[i for i in p.vertices] for p in me1.polygons]
    new_faces = fs1[:]

    j = 0

    if fill_mode == 'FAN':
        fan_verts = [v.co.to_tuple() for v in me0.vertices]
        fan_polygons = []
        for p in me0.polygons:
            fan_center = Vector((0,0,0))
            for v in p.vertices:
                fan_center += me0.vertices[v].co
            fan_center /= len(p.vertices)
            last_vert = len(fan_verts)
            fan_verts.append(fan_center.to_tuple())
            for i in range(len(p.vertices)):
                fan_polygons.append((p.vertices[i], p.vertices[(i+1)%len(p.vertices)], last_vert, last_vert))
        #print(fan_verts)
        #print(fan_polygons)
        fan_me = bpy.data.meshes.new('Fan.Mesh')
        fan_me.from_pydata(tuple(fan_verts), [], tuple(fan_polygons))
        me0 = fan_me
        verts0 = me0.vertices


    count = 0   # necessary for UV calculation
    for p in me0.polygons:

        #polygon vertices

        if rotation_mode == 'RANDOM':
            shifted_vertices = []
            n_poly_verts = len(p.vertices)
            rand = random.randint(0,n_poly_verts)
            for i in range(n_poly_verts):
                shifted_vertices.append(p.vertices[(i+rand)%n_poly_verts])
            vs0 = np.array([verts0[i].co for i in shifted_vertices])
            nvs0 = np.array([verts0[i].normal for i in shifted_vertices])
        elif rotation_mode == 'UV' and len(ob0.data.uv_layers) > 0 and fill_mode != 'FAN':
            i = p.index

            v01 = (me0.uv_layers.active.data[count].uv + me0.uv_layers.active.data[count+1].uv)#/2
            if len(p.vertices) > 3: v32 = (me0.uv_layers.active.data[count+3].uv + me0.uv_layers.active.data[count+2].uv)#/2
            else: v32 = (me0.uv_layers.active.data[count].uv + me0.uv_layers.active.data[count+2].uv)
            v0132 = v32-v01
            v0132.normalize()

            v12 = (me0.uv_layers.active.data[count+1].uv + me0.uv_layers.active.data[count+2].uv)#/2
            if len(p.vertices) > 3: v03 = (me0.uv_layers.active.data[count].uv + me0.uv_layers.active.data[count+3].uv)#/2
            else: v03 = (me0.uv_layers.active.data[count].uv + me0.uv_layers.active.data[count].uv)#/2
            v1203 = v03 - v12
            v1203.normalize()

            vertUV = []
            dot1203 = v1203.x#.dot(Vector((1,0)))
            dot0132 = v0132.x#.dot(Vector((1,0)))
            if(abs(dot1203) < abs(dot0132)):
                if(dot0132 > 0): vertUV = p.vertices[1:] + p.vertices[:1]
                else: vertUV = p.vertices[3:] + p.vertices[:3]
            else:
                if(dot1203 < 0): vertUV = p.vertices[:]
                else: vertUV = p.vertices[2:] + p.vertices[:2]
            vs0 = np.array([verts0[i].co for i in vertUV])
            nvs0 = np.array([verts0[i].normal for i in vertUV])

            count += len(p.vertices)

        else:
            vs0 = np.array([verts0[i].co for i in p.vertices])
            nvs0 = np.array([verts0[i].normal for i in p.vertices])


        vs0 = np.array((vs0[0], vs0[1], vs0[2], vs0[-1]))
        #polygon normals

        nvs0 = np.array((nvs0[0], nvs0[1], nvs0[2], nvs0[-1]))

        v0 = vs0[0] + (vs0[1] -vs0[0])*vx
        v1 = vs0[3] + (vs0[2] -vs0[3])*vx
        v2 = v0 + (v1 - v0)*vy

        nv0 = nvs0[0] + (nvs0[1] -nvs0[0])*vx
        nv1 = nvs0[3] + (nvs0[2] -nvs0[3])*vx
        nv2 = nv0 + (nv1 - nv0)*vy

        v3 = v2 + nv2*vz*(sqrt(p.area) if scale_mode == "ADAPTIVE" else 1)

        if j == 0: new_verts_np = v3
        else:
            new_verts_np = np.concatenate((new_verts_np, v3), axis=0)
            for p in fs1: new_faces.append([i+n_verts*j for i in p])

        j+=1

    new_verts = new_verts_np.tolist()

    new_name = ob0.name + "_" + ob1.name
    new_me = bpy.data.meshes.new(new_name)
    new_me.from_pydata(new_verts, [], new_faces)
    #new_me.from_pydata(new_verts, new_edges, [])
    new_me.update()

    return new_me