def make_blobs(context, gridob, groundob, samples2D, display_radius):
    blob_group_clear(context)
    blobs = []

    imat = groundob.matrix_world.inverted()

    blobtree = KDTree(len(gridob.data.vertices))
    for i, v in enumerate(gridob.data.vertices):
        co = gridob.matrix_world * v.co
        # note: only using 2D coordinates, otherwise weights get distorted by z offset
        blobtree.insert((co[0], co[1], 0.0), i)
    blobtree.balance()

    for v in gridob.data.vertices:
        co = gridob.matrix_world * v.co
        ok, loc, nor, poly_index = project_on_ground(groundob, co)
        blobs.append(Blob(loc, nor, poly_index) if ok else None)

    with progress.ProgressContext("Grouping Samples", 0, len(samples2D)):
        mpolys = groundob.data.polygons
        mverts = groundob.data.vertices
        for xy in samples2D:
            progress.progress_add(1)

            # note: use only 2D coordinates for weighting, z component should be 0
            index = assign_blob(blobtree, (xy[0], xy[1], 0.0), nor)
            if index < 0:
                continue
            blob = blobs[index]
            if blob is None:
                continue

            # project samples onto the ground object
            ok, sloc, snor, spoly = project_on_ground(groundob, xy[0:2] + (0,))
            if not ok:
                continue

            # calculate barycentric vertex weights on the poly
            poly = mpolys[spoly]
            sverts = list(poly.vertices)
            # note: coordinate space has to be consistent, use sloc in object space
            sweights = poly_3d_calc(tuple(mverts[i].co for i in sverts), imat * sloc)

            blob.add_sample(sloc, snor, spoly, sverts, sweights)

    blobs_to_customprops(groundob.meadow, blobs)

    make_blob_visualizer(context, groundob, blobs, display_radius, hide=True)
def build_tree(self):
    tree = KDTree(self.nmax)
    vnum = 0
    for i, v in enumerate(self.obj.data.vertices):
        tree.insert(v.co, i)
        vnum += 1
    #co,index,dist = kd.find(co)
    #co,index,dist = kd.find(co,10)
    #co, index, dist = kd.find_range(co,0.5)
    tree.balance()
    self.tree = tree
    self.vnum = vnum
    return
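# The commented-out lines above hint at the three query modes of Blender's mathutils
# KDTree. A minimal query sketch, assuming `kd` is a balanced tree built as in
# build_tree() and `query_co` is any 3D coordinate (both are placeholders here).
query_co = (0.0, 0.0, 0.0)

co, index, dist = kd.find(query_co)        # single nearest inserted point
neighbours = kd.find_n(query_co, 10)       # ten nearest (co, index, dist) tuples
in_radius = kd.find_range(query_co, 0.5)   # everything within a 0.5 radius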
def connect_verts(bm, z_idx, v1, verts2_bm, connections, max_rho):
    tree = KDTree(len(verts2_bm))
    for i, v2 in enumerate(verts2_bm):
        tree.insert(v2.co, i)
    tree.balance()

    for co, i, dist in tree.find_n(v1.co, connections):
        if dist <= max_rho:
            v2 = verts2_bm[i]
            if bm.edges.get((v1, v2)) is None:
                bm.edges.new((v1, v2))
                bm.edges.ensure_lookup_table()
def gen(seed, num):
    random.seed(seed)
    data = []
    tree = KDTree(0)
    tree.balance()
    for i in range(num):
        best = best_candidate(num_candidates, tree)
        if not best:
            break
        yield best
        data.append((best[0], best[1], 0.0))
        tree = KDTree(len(data))
        for i, p in enumerate(data):
            tree.insert(p, i)
        tree.balance()
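# gen() above depends on a best_candidate() helper and a num_candidates global that
# are not shown here. A self-contained sketch of one plausible implementation
# (Mitchell's best-candidate: keep the random candidate whose nearest existing sample
# is farthest away), assuming Blender's mathutils; the helper name matches the call
# above, but the 0..1 sampling domain is an illustrative assumption.
import random
from mathutils.kdtree import KDTree

def best_candidate(num_candidates, tree):
    """Return the 2D candidate farthest from every point already in `tree`."""
    best, best_dist = None, -1.0
    for _ in range(num_candidates):
        x, y = random.random(), random.random()
        co, index, dist = tree.find((x, y, 0.0))
        if co is None:  # empty tree: the first candidate is as good as any
            return x, y
        if dist > best_dist:
            best, best_dist = (x, y), dist
    return best

# With num_candidates defined (e.g. 32), list(gen(0, 100)) would then yield up to
# 100 well-spaced 2D points.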
class Tile3DFinder:
    def __init__(self, objects=None):
        self.cached = {}
        self.objects = objects or [
            c for c in t3d.root.children if c.layers[t3d.layer]
        ]

        size = len(self.objects)
        self.kd = KDTree(size)

        for i, obj in enumerate(self.objects):
            self.kd.insert(obj.pos, i)
        self.kd.balance()

    def get_tiles_at(self, pos):
        vec = pos.copy().freeze()
        if vec in self.cached:
            return self.cached[vec]
        else:
            objs = [
                self.objects[index]
                for pos, index, dist in self.kd.find_range(pos, TOLERANCE)
            ]
            self.cached[vec] = objs
            return objs
def build_kdtree_from_verts(verts):
    # Create a kd-tree from verts
    size = len(verts)
    kd = KDTree(size)
    for i, vtx in enumerate(verts):
        # exclude hidden geometry
        if not vtx.hide:
            kd.insert(vtx.co, i)
    kd.balance()
    return kd
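# A minimal usage sketch, assuming this runs inside Blender where bmesh and
# mathutils are available; the cube is only illustrative geometry.
import bmesh

bm = bmesh.new()
bmesh.ops.create_cube(bm, size=2.0)
bm.verts.ensure_lookup_table()

kd = build_kdtree_from_verts(bm.verts)

# all (unhidden) vertices within 2 units of the origin -- the eight cube corners here
for co, index, dist in kd.find_range((0.0, 0.0, 0.0), 2.0):
    print(index, tuple(co), dist)

bm.free()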
def set_gt_points(cls, gt_points: List[Vector] = None) -> None:
    """Set the ground truth point cloud.

    Automatically creates the KDTree to speed up point cloud operations.

    Keyword Arguments:
        gt_points {List[Vector]} -- ground truth point cloud. If {None} both the
                                    point cloud and the KDTree are cleared.
                                    (default: {None})
    """
    cls.unload_deleted()
    # cls.gt_points = gt_points
    if gt_points is not None:
        # build KDTree for target point cloud to speed up the nearest neighbor search
        cls.gt_kdtree = KDTree(len(gt_points))
        for i, v in enumerate(gt_points):
            cls.gt_kdtree.insert(v, i)
        cls.gt_kdtree.balance()
def _execute(self, context):
    ob = context.object
    verts = ob.data.vertices

    kd = KDTree(len(verts))
    for i, v in enumerate(verts):
        kd.insert(ob.matrix_world @ v.co, i)
    kd.balance()

    for o in context.selected_objects:
        if ob is o or o.type != 'MESH':
            continue
        for v in o.data.vertices:
            nearest = kd.find_range(o.matrix_world @ v.co, self.dist)
            for _, idx, _ in nearest:
                verts[idx].select = True
class ParticleManager:
    _particle_types = {}

    @classmethod
    def register_particle_type(cls, type, name):
        cls._particle_types[name] = type

    @classmethod
    def unregister_particle_type(cls, name):
        del cls._particle_types[name]

    def __init__(self, context):
        self.obj = context.active_object
        self.particles = []
        self.edges = []
        self.draw_layer = grease_draw.StrokeLayer(context)
        self.kd_tree = None

    def create_particle(self, type, location, radius=0.03):
        if type in self._particle_types:
            p = self._particle_types[type](radius, location, self)
            self.particles.append(p)
            return p
        else:
            raise KeyError("No such particle registered: %s" % type)

    def build_kd_tree(self):
        self.kd_tree = KDTree(len(self.particles))
        for index, particle in enumerate(self.particles):
            self.kd_tree.insert(particle.location, index)
        self.kd_tree.balance()

    def nearest_n_particles(self, location, n):
        for location, index, distance in self.kd_tree.find_n(location, n):
            particle = self.particles[index]
            yield particle, distance

    def nearest_n_tag_particles(self, location, n, tag):
        def tag_filter(i):
            return self.particles[i].tag is tag

        for location, index, distance in self.kd_tree.find_n(
                location, n, filter=tag_filter):
            particle = self.particles[index]
            yield particle, distance

    def step(self, speed):
        for particle in self.particles:
            particle.step(speed)

    def sample_obj(self, location):
        return self.obj.closest_point_on_mesh(location)
def init():
    nonlocal tree, total_faces, deleted_faces, face_index

    bm.faces.ensure_lookup_table()

    # tree = BVHTree.FromObject(ob, context.scene)
    # tree = BVHTree.FromBMesh(bm, epsilon=0.00001)
    tree = KDTree(len(bm.faces))
    for i, face in enumerate(bm.faces):
        tree.insert(face.calc_center_median(), i)
    tree.balance()

    total_faces = len(bm.faces)
    deleted_faces = [False] * total_faces
    face_index = 0
def my_handler(scene):
    from_name = 'tire'
    to_name = 'tire.001'
    tgt_name = 'tire.002'
    brush_name = 'Cube'

    from_obj = scene.objects[from_name]
    to_obj = scene.objects[to_name]
    tgt_obj = scene.objects[tgt_name]
    brush_obj = scene.objects[brush_name]

    from_mesh = from_obj.data
    to_mesh = to_obj.data
    tgt_mesh = tgt_obj.data
    brush_mesh = brush_obj.data

    tgt_kd = KDTree(len(tgt_mesh.vertices))
    for i in range(len(tgt_mesh.vertices)):
        co = tgt_obj.matrix_world * tgt_mesh.vertices[i].co
        tgt_kd.insert(co, i)
    tgt_kd.balance()
    # return tgt_kd

    min_x = float('inf')
    max_x = float('-inf')
    # brush_mesh.vertices[0].co.x
    for i in range(len(brush_mesh.vertices)):
        co = brush_mesh.vertices[i].co * brush_obj.matrix_world
        max_x = max(co.x, max_x)
        min_x = min(co.x, min_x)
    radius = (max_x - min_x) / 2
    # raise ValueError('min_x: %f, max_x: %f, radius: %f' % (min_x, max_x, radius))

    pts = tgt_kd.find_range(brush_obj.location, radius)
    # raise ValueError('len(pts): %d' % len(pts))
    for (co, idx, dist) in pts:
        tgt_obj.vertex_groups[0].add([idx], 1.0, 'REPLACE')

    for i in range(len(from_mesh.vertices)):
        t = tgt_obj.vertex_groups[0].weight(i)
        co = from_mesh.vertices[i].co * (1 - t) + \
            to_mesh.vertices[i].co * (t)
        no = from_mesh.vertices[i].normal * (1 - t) + \
            to_mesh.vertices[i].normal * (t)
        tgt_mesh.vertices[i].co = co
        tgt_mesh.vertices[i].normal = no

    scene.update()
def unique_points(points, eps=1e-4):
    kdt = KDTree(len(points))
    for i, p in enumerate(points):
        kdt.insert(p, i)
    kdt.balance()

    unique = []
    repeating = []
    mask = []
    for p in points:
        found = kdt.find_n(p, 2)
        if len(found) > 1:
            loc, idx, distance = found[1]
            ok = distance > eps
            mask.append(ok)
            if ok:
                unique.append(p)
            else:
                repeating.append(p)
    return mask, unique, repeating
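# A small usage sketch for unique_points, assuming Blender's mathutils is importable;
# the coordinates are arbitrary test data.
points = [(0.0, 0.0, 0.0),
          (1.0, 0.0, 0.0),
          (1.0, 0.0, 0.0),   # exact duplicate
          (2.0, 0.0, 0.0)]

mask, unique, repeating = unique_points(points)
print(mask)       # [True, False, False, True]
print(unique)     # [(0.0, 0.0, 0.0), (2.0, 0.0, 0.0)]
print(repeating)  # both copies of (1.0, 0.0, 0.0)
# Note that *both* copies of a duplicated point end up in `repeating`: the function
# flags every point whose nearest neighbour lies within eps, it does not keep one
# representative per duplicate group.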
def init_guess(curve, points_from, samples=50):
    u_min, u_max = curve.get_u_bounds()
    us = np.linspace(u_min, u_max, num=samples)

    points = curve.evaluate_array(us).tolist()
    #print("P:", points)

    kdt = KDTree(len(us))
    for i, v in enumerate(points):
        kdt.insert(v, i)
    kdt.balance()

    us_out = []
    nearest_out = []
    for point_from in points_from:
        nearest, i, distance = kdt.find(point_from)
        us_out.append(us[i])
        nearest_out.append(tuple(nearest))

    return us_out, nearest_out
def to_point(self, amplitude, coefficient, vertex, centers, direction):
    vertex = Vector(vertex)
    n = len(centers)
    if self.point_mode == 'AVG' or n <= 1:
        vectors = []
        for center in centers:
            vector = Vector(center) - vertex
            vector = self.falloff(amplitude, coefficient, vector.length) * vector.normalized()
            vectors.append(vector)
        result = get_avg_vector(vectors)
        return result.length, result.normalized()
    else:
        kdt = KDTree(n)
        for i, center in enumerate(centers):
            kdt.insert(Vector(center), i)
        kdt.balance()
        nearest_co, nearest_idx, nearest_distance = kdt.find(vertex)
        vector = nearest_co - vertex
        coeff = self.falloff(amplitude, coefficient, nearest_distance)
        return coeff, vector.normalized()
def invoke(self, context, event):
    self.ob = context.active_object.data
    self.bm = bmesh.new()
    self.bm.from_mesh(self.ob)
    self.bm.verts.ensure_lookup_table()

    links = []
    for vert in self.bm.verts:
        l = []
        links.append(l)
        for v in n_ring(vert, 100):
            l.append(v.index)

    immediate_edges = [len(vert.link_edges) for vert in self.bm.verts]

    bmesh.ops.triangulate(self.bm, faces=self.bm.faces)
    self.bm.verts.ensure_lookup_table()

    co = [tuple(v.co) for v in self.bm.verts]
    t = [tuple(v.index for v in f.verts) for f in self.bm.faces]

    kd = KDTree(len(self.bm.verts))
    for vert in self.bm.verts:
        kd.insert(vert.co, vert.index)
    kd.balance()
    x_mirr_table = [kd.find((vert.co[0] * -1, vert.co[1], vert.co[2]))[1]
                    for vert in self.bm.verts]

    self.engine = softwrap_core.ShapeEngine(co, t, links, co, t,
                                            immediate_edges, x_mirr_table)
    self.engine.random_co()
    self.engine.add_pin(co=(10, 0, 0), vert_index=0, stiffness=50,
                        twisty=False, x_mirr=True)

    context.window_manager.modal_handler_add(self)
    return {"RUNNING_MODAL"}
def execute(self, vertices, clusters, connections, minDistance, maxDistance):
    minDistance = max(0, minDistance)
    maxDistance = max(minDistance, maxDistance)

    verticesAmount = len(vertices)
    kdTree = KDTree(verticesAmount)
    for i, vector in enumerate(vertices):
        kdTree.insert(vector, i)
    kdTree.balance()

    edges = []
    for searchIndex in range(min(verticesAmount, clusters)):
        added = 0
        for (vector, foundIndex, distance) in kdTree.find_range(vertices[searchIndex], maxDistance):
            if searchIndex != foundIndex and distance > minDistance:
                if added >= connections:
                    break
                if foundIndex > searchIndex:
                    edge = (searchIndex, foundIndex)
                else:
                    edge = (foundIndex, searchIndex)
                edges.append(edge)
                added += 1

    return list(set(edges))
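# Since the body above never touches `self`, it can be exercised directly outside its
# node class. A hypothetical call (assuming Blender's mathutils and KDTree in scope);
# the vertex layout, cluster/connection counts and distances are arbitrary test values.
from mathutils import Vector

verts = [Vector((x * 0.5, 0.0, 0.0)) for x in range(10)]
edges = execute(None, verts, clusters=10, connections=2,
                minDistance=0.1, maxDistance=1.0)
print(edges)  # (lower, higher) vertex index pairs that ended up linked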
def getDefaultValue(cls):
    kdTree = KDTree(0)
    kdTree.balance()
    return kdTree
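# A quick check of how such an empty default behaves (a sketch, assuming Blender's
# mathutils); queries on a balanced zero-size tree simply report a miss.
from mathutils.kdtree import KDTree

empty = KDTree(0)
empty.balance()
print(empty.find((0.0, 0.0, 0.0)))              # (None, None, None)
print(empty.find_range((0.0, 0.0, 0.0), 10.0))  # []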
class Converter(object): TARGET_NUM_FACET = 2000 DEFAULT_OCTREE = 3 @elapsed def __init__(self, src): self.src = src self.decimated = None self.src_kd = None self.voxel_list = Manager().list() self.mesh_list = Manager().list() self.color_dict = {} self.parent = None self.block_map = Manager().list() self.unit = None self.join = True # Initial procedure self.__calc_decimated() self.__build_src_kd() self.__create_color_dict() bpy.ops.object.select_all(action="DESELECT") @elapsed def __calc_decimated(self): num_facet = len(self.src.data.polygons) ratio = float(Converter.TARGET_NUM_FACET) / float(num_facet) mesh = bpy.data.meshes.new("Decimated") self.decimated = bpy.data.objects.new("Decimated", mesh) self.decimated.data = self.src.data.copy() self.decimated.scale = self.src.scale self.decimated.location = self.src.location bpy.context.scene.objects.link(self.decimated) self.decimated.select = True self.decimated.modifiers.new("Decimate", "DECIMATE") self.decimated.modifiers["Decimate"].ratio = ratio bpy.ops.object.modifier_apply(apply_as="DATA", modifier="DECIMATE") @elapsed def __build_src_kd(self): mesh = self.decimated.data size = len(mesh.vertices) self.src_kd = KDTree(size) for i, v in enumerate(mesh.vertices): self.src_kd.insert(v.co, i) self.src_kd.balance() @elapsed def __create_color_dict(self): for i, loop in enumerate(self.decimated.data.loops): vi = loop.vertex_index if vi not in self.color_dict: self.color_dict[vi] = i @elapsed def apply_join(self): if self.join: bpy.ops.object.join() @elapsed def cleanup(self): bpy.context.scene.objects.unlink(self.decimated) @staticmethod def create_new_octree(box): box0 = ( box[0], (box[0] + box[1])/2.0, (box[0] + box[2])/2.0, (box[0] + box[3])/2.0, (box[0] + box[4])/2.0, (box[0] + box[5])/2.0, (box[0] + box[6])/2.0, (box[0] + box[7])/2.0, ) box1 = ( # Left side (box[0] + box[1])/2.0, box[1], (box[1] + box[2])/2.0, (box[0] + box[2])/2.0, # Right side (box[0] + box[5])/2.0, (box[1] + box[5])/2.0, (box[1] + box[6])/2.0, (box[0] + box[6])/2.0, ) box2 = ( # Left side (box[0] + box[2])/2.0, (box[1] + box[2])/2.0, box[2], (box[2] + box[3])/2.0, # Right side (box[0] + box[6])/2.0, (box[1] + box[6])/2.0, (box[2] + box[6])/2.0, (box[3] + box[6])/2.0 ) box3 = ( # Left side (box[0] + box[3])/2.0, (box[0] + box[2])/2.0, (box[2] + box[3])/2.0, box[3], # Right side (box[0] + box[7])/2.0, (box[0] + box[6])/2.0, (box[3] + box[6])/2.0, (box[3] + box[7])/2.0, ) box4 = ( # Left side (box[0] + box[4])/2.0, (box[0] + box[5])/2.0, (box[0] + box[6])/2.0, (box[0] + box[7])/2.0, # Right side box[4], (box[4] + box[5])/2.0, (box[4] + box[6])/2.0, (box[4] + box[7])/2.0, ) box5 = ( # Left side (box[0] + box[5])/2.0, (box[1] + box[5])/2.0, (box[1] + box[6])/2.0, (box[0] + box[6])/2.0, # Right side (box[4] + box[5])/2.0, box[5], (box[5] + box[6])/2.0, (box[4] + box[6])/2.0, ) box6 = ( # Left side (box[0] + box[6])/2.0, (box[1] + box[6])/2.0, (box[2] + box[6])/2.0, (box[3] + box[6])/2.0, # Right side (box[4] + box[6])/2.0, (box[5] + box[6])/2.0, box[6], (box[6] + box[7])/2.0, ) box7 = ( # Left side (box[0] + box[7])/2.0, (box[0] + box[6])/2.0, (box[3] + box[6])/2.0, (box[3] + box[7])/2.0, # Right side (box[4] + box[7])/2.0, (box[4] + box[6])/2.0, (box[6] + box[7])/2.0, box[7], ) return box0, box1, box2, box3, box4, box5, box6, box7 @staticmethod def get_bvhtree_from_box(box): mesh_data = bpy.data.meshes.new("cube_mesh_data") faces = [(0, 1, 2, 3), (4, 7, 6, 5), (0, 4, 5, 1), (1, 5, 6, 2), (2, 3, 7, 6), (4, 0, 3, 7)] mesh_data.from_pydata([x.to_tuple() for x in box], [], 
faces) mesh_data.update() bm = bmesh.new() bm.from_mesh(mesh_data) return bvh.BVHTree.FromBMesh(bm) @staticmethod def check_if_overlap(obj, box): bvh_tree1 = bvh.BVHTree.FromObject(obj, bpy.context.scene) bvh_tree2 = Converter.get_bvhtree_from_box(box) return bvh_tree1.overlap(bvh_tree2) @elapsed def invoke(self, obj, box, max_depth): try: self.invoke_create_voxel(obj, box, max_depth) self.draw_voxel(origin=box[0]) finally: # Post procedure self.apply_join() self.cleanup() return list(self.block_map) @elapsed def invoke_create_voxel(self, obj, box, max_depth): # Calc unit length self.unit = (box[1].z - box[0].z) / float(2 ** max_depth) overlap = Converter.check_if_overlap(obj, box) if overlap: boxes = Converter.create_new_octree(box) jobs = [] for child in boxes: p = Process( target=self.create_voxel, args=(obj, child, 1, self.voxel_list, max_depth) ) jobs.append(p) p.start() [job.join() for job in jobs] def create_voxel(self, obj, box, depth, queue, max_depth=3): """For multiprocessing :param obj: :param box: :param depth: :param queue: :param max_depth: :return: """ depth += 1 overlap = Converter.check_if_overlap(obj, box) if overlap: if depth == max_depth: queue.append([x.to_tuple() for x in box]) else: boxes = Converter.create_new_octree(box) for _child in boxes: self.create_voxel(obj, _child, depth, queue, max_depth) def calc_mesh_and_color(self, voxel_list, mesh_list, block_list, origin): """For multiprocessing :param list voxel_list: :param list mesh_list: :param list block_list: :param mathutils.Vector origin: """ faces = ((0, 1, 2, 3), (4, 7, 6, 5), (0, 4, 5, 1), (1, 5, 6, 2), (2, 3, 7, 6), (4, 0, 3, 7)) for i, voxel in enumerate(voxel_list): mesh = bpy.data.meshes.new("cube_mesh_data") mesh.from_pydata(voxel, [], faces) mesh.update() # Find closest color co, index, dist = self.src_kd.find(voxel[0]) if self.decimated.data.vertex_colors: rgb = self.decimated.data.vertex_colors["Col"].data[self.color_dict[index]].color else: rgb = (1.0, 1.0, 1.0) # White mesh_list.append((voxel, tuple(rgb))) ix = int(round((voxel[0][0] - origin.x) / self.unit)) iy = int(round((voxel[0][1] - origin.y) / self.unit)) iz = int(round((voxel[0][2] - origin.z) / self.unit)) col_def = BlockDef.find_nearest_color_block(Vector(rgb)) block_list.append(BlockInfo( has_block=True, block_type=col_def.block_def[0], color=col_def.block_def[1], pos=(ix, iy, iz) )) @elapsed def draw_voxel(self, origin): # Add null object self.parent = bpy.data.objects.new("Voxcel", bpy.data.meshes.new("Voxcel")) bpy.context.scene.objects.link(self.parent) bpy.context.scene.objects.active = self.parent self.parent.select = True def chunks(l, n): """Yield successive n-sized chunks from l.""" for i in range(0, len(l), n): yield l[i:i+n] parallels = 8 chunk_list = chunks( self.voxel_list, len(self.voxel_list)//parallels ) jobs = [] for chunk in chunk_list: job = Process( target=self.calc_mesh_and_color, args=(chunk, self.mesh_list, self.block_map, origin) ) jobs.append(job) job.start() [job.join() for job in jobs] @elapsed def add_voxels(): for i, item in enumerate(self.mesh_list): vertices = item[0] color = item[1] name = "Cube.%010d" % i voxel.Voxel(name, vertices, color).create().add( scene=bpy.context.scene, parent=self.parent ) add_voxels()
def getValue(self):
    kdTree = KDTree(0)
    kdTree.balance()
    return kdTree
def kd_from_points(points):
    tree = KDTree(len(points))
    for i, p in enumerate(points):
        tree.insert(p, i)
    tree.balance()
    return tree
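# A minimal usage sketch for kd_from_points, assuming Blender's mathutils is
# importable; the sample points are arbitrary.
from mathutils.kdtree import KDTree  # kd_from_points expects this name in scope

points = [(0.0, 0.0, 0.0), (1.0, 1.0, 0.0), (2.0, 0.0, 1.0)]
tree = kd_from_points(points)

co, index, dist = tree.find((0.9, 0.9, 0.1))
print(index)  # 1 -- the second sample point is the nearest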
class SvSolidTopology(object): class Item(object): def __init__(self, item): self.item = item def __hash__(self): return self.item.hashCode() def __eq__(self, other): return self.item.isSame(other.item) def __repr__(self): return f"<Item: {type(self.item).__name__} #{self.item.hashCode()}>" def __init__(self, solid): self.solid = solid self._init() def __repr__(self): v = len(self.solid.Vertexes) e = len(self.solid.Edges) f = len(self.solid.Faces) return f"<Solid topology: {v} vertices, {e} edges, {f} faces>" def _init(self): self._faces_by_vertex = defaultdict(set) self._faces_by_edge = defaultdict(set) self._edges_by_vertex = defaultdict(set) for face in self.solid.Faces: for vertex in face.Vertexes: self._faces_by_vertex[SvSolidTopology.Item(vertex)].add( SvSolidTopology.Item(face)) for edge in face.Edges: self._faces_by_edge[SvSolidTopology.Item(edge)].add( SvSolidTopology.Item(face)) for edge in self.solid.Edges: for vertex in edge.Vertexes: self._edges_by_vertex[SvSolidTopology.Item(vertex)].add( SvSolidTopology.Item(edge)) self._tree = KDTree(len(self.solid.Vertexes)) for i, vertex in enumerate(self.solid.Vertexes): co = (vertex.X, vertex.Y, vertex.Z) self._tree.insert(co, i) self._tree.balance() def tessellate(self, precision): self._points_by_edge = defaultdict(list) self._points_by_face = defaultdict(list) for edge in self.solid.Edges: points = edge.discretize(Deflection=precision) i_edge = SvSolidTopology.Item(edge) for pt in points: self._points_by_edge[i_edge].append((pt.x, pt.y, pt.z)) for face in self.solid.Faces: data = face.tessellate(precision) i_face = SvSolidTopology.Item(face) for pt in data[0]: self._points_by_face[i_face].append((pt.x, pt.y, pt.z)) def calc_normals(self): self._normals_by_face = dict() for face in self.solid.Faces: #face.tessellate(precision) #u_min, u_max, v_min, v_max = face.ParameterRange sum_normal = Base.Vector(0, 0, 0) for u, v in face.getUVNodes(): normal = face.normalAt(u, v) sum_normal = sum_normal + normal sum_normal = np.array([sum_normal.x, sum_normal.y, sum_normal.z]) sum_normal = sum_normal / np.linalg.norm(sum_normal) self._normals_by_face[SvSolidTopology.Item(face)] = sum_normal def get_normal_by_face(self, face): return self._normals_by_face[SvSolidTopology.Item(face)] def get_vertices_by_location(self, condition): to_tuple = lambda v: (v.X, v.Y, v.Z) return [ to_tuple(v) for v in self.solid.Vertexes if condition(to_tuple(v)) ] def get_vertices_by_location_mask(self, condition): to_tuple = lambda v: (v.X, v.Y, v.Z) return [condition(to_tuple(v)) for v in self.solid.Vertexes] def get_points_by_edge(self, edge): return self._points_by_edge[SvSolidTopology.Item(edge)] def get_points_by_face(self, face): return self._points_by_face[SvSolidTopology.Item(face)] def get_edges_by_location_mask(self, condition, include_partial): # condition is vectorized check = any if include_partial else all mask = [] for edge in self.solid.Edges: test = condition( np.array(self._points_by_edge[SvSolidTopology.Item(edge)])) mask.append(check(test)) return mask def get_faces_by_location_mask(self, condition, include_partial): # condition is vectorized check = any if include_partial else all mask = [] for face in self.solid.Faces: test = condition( np.array(self._points_by_face[SvSolidTopology.Item(face)])) mask.append(check(test)) return mask def get_faces_by_vertex(self, vertex): return [ i.item for i in self._faces_by_vertex[SvSolidTopology.Item(vertex)] ] def get_faces_by_vertices_mask(self, vertices, include_partial=True): if include_partial: good = 
set() for vertex in vertices: new = self._faces_by_vertex[SvSolidTopology.Item(vertex)] good.update(new) return [ SvSolidTopology.Item(face) in good for face in self.solid.Faces ] else: vertices = set([SvSolidTopology.Item(v) for v in vertices]) mask = [] for face in self.solid.Faces: ok = all( SvSolidTopology.Item(v) in vertices for v in face.Vertexes) mask.append(ok) return mask def get_faces_by_edge(self, edge): return [ i.item for i in self._faces_by_edge[SvSolidTopology.Item(edge)] ] def get_faces_by_edges_mask(self, edges, include_partial=True): if include_partial: good = set() for edge in edges: new = self._faces_by_edge[SvSolidTopology.Item(edge)] good.update(new) return [ SvSolidTopology.Item(face) in good for face in self.solid.Faces ] else: edges = set([SvSolidTopology.Item(e) for e in edges]) mask = [] for face in self.solid.Faces: ok = all(SvSolidTopology.Item(e) in edges for e in face.Edges) mask.append(ok) return mask def get_edges_by_vertex(self, vertex): return [ i.item for i in self._edges_by_vertex[SvSolidTopology.Item(vertex)] ] def get_edges_by_vertices_mask(self, vertices, include_partial=True): if include_partial: good = set() for vertex in vertices: new = self._edges_by_vertex[SvSolidTopology.Item(vertex)] good.update(new) return [ SvSolidTopology.Item(edge) in good for edge in self.solid.Edges ] else: vertices = set([SvSolidTopology.Item(v) for v in vertices]) mask = [] for edge in self.solid.Edges: ok = all( SvSolidTopology.Item(v) in vertices for v in edge.Vertexes) mask.append(ok) return mask def get_edges_by_faces_mask(self, faces): good = set() for face in faces: new = set([SvSolidTopology.Item(e) for e in face.Edges]) good.update(new) return [ SvSolidTopology.Item(edge) in good for edge in self.solid.Edges ] def get_vertices_by_faces_mask(self, faces): good = set() for face in faces: new = set([SvSolidTopology.Item(v) for v in face.Vertexes]) good.update(new) return [ SvSolidTopology.Item(vertex) in good for vertex in self.solid.Vertexes ] def get_vertices_by_edges_mask(self, edges): good = set() for edge in edges: new = set([SvSolidTopology.Item(v) for v in edge.Vertexes]) good.update(new) return [ SvSolidTopology.Item(vertex) in good for vertex in self.solid.Vertexes ] def get_vertices_within_range(self, origin, distance): found = self._tree.find_range(tuple(origin), distance) idxs = [item[1] for item in found] vertices = [self.solid.Vertexes[i] for i in idxs] return vertices def get_vertices_within_range_mask(self, origin, distance): found = self._tree.find_range(tuple(origin), distance) idxs = set([item[1] for item in found]) return [i in idxs for i in range(len(self.solid.Vertexes))]
def finish(self, context): #ray cast the entire grid into if 'Posterior Plane' in bpy.data.objects: Plane = bpy.data.objects['Posterior Plane'] Plane.hide = False else: me = bpy.data.meshes.new('Posterior Plane') Plane = bpy.data.objects.new('Posterior Plane', me) context.scene.objects.link(Plane) pbme = bmesh.new() pbme.verts.ensure_lookup_table() pbme.edges.ensure_lookup_table() pbme.faces.ensure_lookup_table() bmesh.ops.create_grid(pbme, x_segments = 200, y_segments = 200, size = 39.9) pbme.to_mesh(Plane.data) pt, pno = calculate_plane(self.crv.b_pts) if self.splint.jaw_type == 'MANDIBLE': Zw = Vector((0,0,-1)) Xw = Vector((1,0,0)) Yw = Vector((0,-1,1)) else: Zw = Vector((0,0,1)) Xw = Vector((1,0,0)) Yw = Vector((0,1,0)) Z = pno Z.normalize() if Zw.dot(Z) < 0: Z *= -1 Y = Z.cross(Xw) X = Y.cross(Z) R = Matrix.Identity(3) #make the columns of matrix U, V, W R[0][0], R[0][1], R[0][2] = X[0] ,Y[0], Z[0] R[1][0], R[1][1], R[1][2] = X[1], Y[1], Z[1] R[2][0] ,R[2][1], R[2][2] = X[2], Y[2], Z[2] R = R.to_4x4() T = Matrix.Translation(pt - 5 * Z) Plane.matrix_world = T * R pmx = Plane.matrix_world ipmx = pmx.inverted() bme_pln = bmesh.new() bme_pln.from_mesh(Plane.data) bme_pln.verts.ensure_lookup_table() bme_pln.edges.ensure_lookup_table() bme_pln.faces.ensure_lookup_table() bvh = BVHTree.FromBMesh(bme_pln) #we are going to raycast the user world coordinate points #into a grid, and identify all points in the grid from the local Z direction #Then we will store the local location of the user picked coordinate in a dictionary key_verts = {} for loc in self.crv.b_pts: res = bvh.ray_cast(ipmx * loc, -Z, 30) if res[0] != None: f = bme_pln.faces[res[2]] for v in f.verts: key_verts[v] = ipmx * loc v.select_set(True) continue res = bvh.ray_cast(ipmx * loc, Z, 30) if res[0] != None: f = bme_pln.faces[res[2]] for v in f.verts: key_verts[v] = ipmx * loc v.select_set(True) continue #bme_pln.to_mesh(Plane.data) #bme_pln.free() #return kdtree = KDTree(len(key_verts)) for v in key_verts.keys(): kdtree.insert(v.co, v.index) kdtree.balance() #raycast the shell if we can raycast_shell = False if 'Splint Shell' in bpy.data.objects: shell = bpy.data.objects.get('Splint Shell') bvh_shell = BVHTree.FromObject(shell, context.scene) mx_shell = shell.matrix_world imx_shell = mx_shell.inverted() Z_shell = imx_shell.to_3x3()*Z raycast_shell = True right_side = set() left_side = set() ray_casted = set() to_delete = set() for v in bme_pln.verts: if v in key_verts: v.co[2] = key_verts[v][2] if v.co[1] > 0: left_side.add(v) else: right_side.add(v) continue results = kdtree.find_range(v.co, .5) if len(results): N = len(results) r_total = 0 v_new = Vector((0,0,0)) for res in results: r_total += 1/res[2] v_new += (1/res[2]) * key_verts[bme_pln.verts[res[1]]] v_new *= 1/r_total v.co[2] = v_new[2] if v.co[1] > 0: left_side.add(v) else: right_side.add(v) continue results = kdtree.find_range(v.co, 6) if len(results): N = len(results) r_total = 0 v_new = Vector((0,0,0)) for res in results: r_total += (1/res[2])**2 v_new += ((1/res[2])**2) * key_verts[bme_pln.verts[res[1]]] v_new *= 1/r_total v.co[2] = v_new[2] if v.co[1] > 0: left_side.add(v) else: right_side.add(v) continue loc, no, index, d = bvh_shell.ray_cast(imx_shell * pmx * v.co, Z_shell) if loc: ray_casted.add(v) results = kdtree.find_n(v.co, 4) N = len(results) r_total = 0 v_new = Vector((0,0,0)) for res in results: r_total += (1/res[2])**2 v_new += ((1/res[2])**2) * key_verts[bme_pln.verts[res[1]]] v_new *= 1/r_total v.co[2] = v_new[2] continue total_verts = ray_casted | 
left_side | right_side ant_left = max(left_side, key = lambda x: x.co[0]) ant_right = max(right_side, key = lambda x: x.co[0]) new_verts = set() dilation_verts = set() for v in total_verts: for ed in v.link_edges: v_new = ed.other_vert(v) if v_new in total_verts or v_new in new_verts: continue else: new_verts.add(v_new) print('adding %i new verts' % len(new_verts)) total_verts.update(new_verts) dilation_verts.update(new_verts) #for v in ray_casted: # if v.co[1] > 0: # if v.co[0] > ant_left.co[0]: # to_delete.add(v) # else: # if v.co[0] > ant_right.co[0]: # to_delete.add(v) #print('added %i ray_casted' % len(ray_casted)) #total_verts = ray_casted | left_side | right_side #total_verts.difference_update(to_delete) #new_verts = set() #for v in total_verts: # for ed in v.link_edges: # v_new = ed.other_vert(v) # if v_new in total_verts: continue # if v_new.co[1] > 0 and v_new.co[0] < ant_left.co[0]: # if v in to_delete: # new_verts.add(v) # if v_new.co[1] <= 0 and v_new.co[0] < ant_right.co[0]: # if v in to_delete: # new_verts.add(v) #to_delete.difference_update(new_verts) #print('adding %i new verts' % len(new_verts)) for j in range(0,3): newer_verts = set() for v in new_verts: for ed in v.link_edges: v_new = ed.other_vert(v) if v_new in total_verts or v_new in newer_verts: continue newer_verts.add(v_new) total_verts.update(newer_verts) dilation_verts.update(newer_verts) new_verts = newer_verts to_delete = set(bme_pln.verts[:]) - total_verts #filter out anteior dilation for v in dilation_verts: if v.co[1] > 0 and v.co[0] > ant_left.co[0]: to_delete.add(v) continue if v.co[1] <= 0 and v.co[0] > ant_right.co[0]: to_delete.add(v) continue results = kdtree.find_n(v.co, 4) N = len(results) r_total = 0 v_new = Vector((0,0,0)) for res in results: r_total += (1/res[2])**2 v_new += ((1/res[2])**2) * key_verts[bme_pln.verts[res[1]]] v_new *= 1/r_total v.co[2] = v_new[2] #filter out anteior dilation for v in ray_casted: if v.co[1] > 0 and v.co[0] > ant_left.co[0]: to_delete.add(v) continue if v.co[1] <= 0 and v.co[0] > ant_right.co[0]: to_delete.add(v) continue bmesh.ops.delete(bme_pln, geom = list(to_delete), context = 1) bme_pln.to_mesh(Plane.data) Plane.data.update() smod = Plane.modifiers.new('Smooth', type = 'SMOOTH') smod.iterations = 5 smod.factor = 1 self.splint.ops_string += 'Mark Posterior Cusps:'
class ParticleManager: def __init__(self, obj): self.particles = [] self.obj = obj self.field = vector_fields.FrameField(obj) self.inv_mat = obj.matrix_world.inverted() self.bm = self.field.bm self.kd_tree = KDTree(0) self.kd_tree.balance() self.draw_obj = draw_3d.DrawObject() self.triangle_mode = False def build_field(self, context, use_gp, x_mirror): self.field.build_major_curvatures() frame = get_gp_frame(context) if frame: self.field.from_grease_pencil(frame, mat=self.inv_mat, x_mirror=x_mirror) self.field.marching_growth() self.field.smooth(2) else: self.field.erase_part(2) self.field.marching_growth() self.field.smooth() def build_kdtree(self): tree = KDTree(len(self.particles)) for id, p in enumerate(self.particles): tree.insert(p.location, id) tree.balance() self.kd_tree = tree def initialize_particles_from_gp(self, resolution, adaptive, context): scale = max(self.obj.dimensions) / max(self.obj.scale) target_resolution = scale / resolution frame = get_gp_frame(context) created_particles = 0 if not frame: return created_particles for stroke in frame.strokes: co = self.inv_mat * stroke.points[0].co last_particle = self.create_particle(Partile, co) last_particle.target_resolution = target_resolution last_particle.radius = target_resolution / ( last_particle.last_hit.curvature * adaptive + (1 - adaptive)) last_particle.adaptive = adaptive created_particles += 1 for point in stroke.points: co = self.inv_mat * point.co if (co - last_particle.location ).length >= last_particle.radius * 2: last_particle = self.create_particle(Partile, co) last_particle.target_resolution = target_resolution last_particle.radius = target_resolution / ( last_particle.last_hit.curvature * adaptive + (1 - adaptive)) last_particle.adaptive = adaptive created_particles += 1 return created_particles def initialize_from_features(self, verts, resolution=20, adaptive=0, count=50): scale = max(self.obj.dimensions) / max(self.obj.scale) target_resolution = scale / resolution verts = sorted(self.field.bm.verts, key=lambda v: self.field.sharpness_field.get( v.index, float("inf")), reverse=True) for i in range(min(count, len(self.field.bm.verts))): vert = verts[i] co = vert.co.copy() p1 = self.create_particle(Partile, co) p1.radius = target_resolution p1.target_resolution = target_resolution p1.adaptive = adaptive def initialize_from_verts(self, verts, adaptive): for vert in verts: p = self.create_particle(Partile, vert.co) p.adaptive = adaptive def initialize_grid(self, verts, resolution=20, use_x_mirror=True, adaptive=0): particle_locations = set() scale = max(self.obj.dimensions) target_resolution = 1 / ((1 / scale) * resolution) for vert in verts: co = vert.co.copy() co /= scale co *= resolution x = int(co.x) y = int(co.y) z = int(co.z) if use_x_mirror: if x > 0: particle_locations.add((x, y, z)) else: particle_locations.add((x, y, z)) for location in particle_locations: co = Vector(location) co *= scale co /= resolution hit = self.sample_surface(co) p1 = self.create_particle(Partile, hit.co) p1.adaptive = adaptive p1.target_resolution = target_resolution if use_x_mirror: hit.co.x *= -1 p2 = self.create_particle(Partile, hit.co) p2.adaptive = adaptive p2.target_resolution = target_resolution p1.counter_pair, p2.counter_pair = p2, p1 def mirror_particles(self, any_side=False): new_particles = [] for particle in self.particles: if particle.location.x > particle.radius or any_side: co = particle.location.copy() co.x *= -1 p1 = particle p2 = Partile(co, self) p2.radius = p1.radius p2.tag = p1.tag p2.adaptive = p1.adaptive 
p2.target_resolution = p1.target_resolution p1.counter_pair, p2.counter_pair = p2, p1 new_particles.append(p2) new_particles.append(p1) elif -particle.radius * 0.5 < particle.location.x < particle.radius * 0.5: new_particles.append(particle) particle.lock_x = True self.particles = new_particles self.build_kdtree() def create_particle(self, type, location, prepend=False): p = type(location, self) if not prepend: self.particles.append(p) else: self.particles.insert(0, p) return p def remove_particle(self, particle): self.particles.remove(particle) def step( self, speed, ): new_tree = KDTree(len(self.particles)) self.draw_obj.commands.clear() for id, particle in enumerate(self.particles): particle.step(speed) particle.draw() new_tree.insert(particle.location, id) new_tree.balance() self.kd_tree = new_tree def spread_step(self): count = 0 new_particles = [] self.draw_obj.commands.clear() for particle in self.particles: new_particles += particle.spread() count += len(new_particles) for particle in self.particles: if not particle.tag == "REMOVE": new_particles.append(particle) self.particles = new_particles new_tree = KDTree(len(self.particles)) for id, particle in enumerate(self.particles): particle.draw() new_tree.insert(particle.location, id) new_tree.balance() self.kd_tree = new_tree return count def get_nearest(self, location, n): for location, index, dist in self.kd_tree.find_n(location, n): yield self.particles[index], dist def sample_surface(self, location): return self.field.sample_point(location) def draw(self): self.draw_obj.commands.clear() for particle in self.particles: particle.draw() def simplify_mesh(self, bm): class Ownership: def __init__(self, particle, dist): self.particle = particle self.distance = dist self.valid = False bmesh.ops.triangulate(bm, faces=bm.faces) last_edges = float("+inf") while True: edges = set() for edge in bm.edges: le = (edge.verts[0].co - edge.verts[1].co).length_squared center = edge.verts[0].co + edge.verts[1].co center /= 2 for p, dist in self.get_nearest(center, 1): if p.radius**2 < le: edges.add(edge) if not len(edges) < last_edges: break last_edges = len(edges) bmesh.ops.subdivide_edges(bm, edges=list(edges), cuts=1) bmesh.ops.triangulate(bm, faces=bm.faces) bm.faces.ensure_lookup_table() bm.verts.ensure_lookup_table() tree = KDTree(len(bm.verts)) for vert in bm.verts: tree.insert(vert.co, vert.index) tree.balance() ownership_mapping = {} ownership_validation_front = set() for vert in bm.verts: for p, dist in self.get_nearest(vert.co, 1): ownership_mapping[vert] = Ownership(p, dist) for particle in self.particles: location, index, dist = tree.find(particle.location) vert = bm.verts[index] if vert in ownership_mapping: if ownership_mapping[vert].particle == particle: ownership_mapping[vert].valid = True ownership_validation_front.add(vert) while True: new_front = set() for vert in ownership_validation_front: for edge in vert.link_edges: other_vert = edge.other_vert(vert) if other_vert not in ownership_mapping: continue if ownership_mapping[other_vert].valid: continue if other_vert in ownership_mapping: if ownership_mapping[ vert].particle is ownership_mapping[ other_vert].particle: new_front.add(other_vert) ownership_mapping[other_vert].valid = True ownership_validation_front = new_front if not new_front: break new_bm = bmesh.new() for particle in self.particles: particle.vert = new_bm.verts.new(particle.location) for face in bm.faces: connections = set() for vert in face.verts: if vert in ownership_mapping: if ownership_mapping[vert].valid: p = 
ownership_mapping[vert].particle connections.add(p) if len(connections) == 3: try: new_bm.faces.new( [particle.vert for particle in connections]) except ValueError: pass while True: stop = True for vert in new_bm.verts: if len(vert.link_edges) < 3: new_bm.verts.remove(vert) stop = False if stop: break bmesh.ops.holes_fill(new_bm, edges=new_bm.edges) bmesh.ops.triangulate(new_bm, faces=new_bm.faces) bmesh.ops.recalc_face_normals(new_bm, faces=new_bm.faces) if not self.triangle_mode: bmesh.ops.join_triangles(new_bm, faces=new_bm.faces, angle_face_threshold=1.0, angle_shape_threshold=3.14) return new_bm
class Particle_system: def __init__(self, guide, ground, scale): self.GUIDE_STRENGTH = 1.0 * scale self.TURBULENCE_FREQUENCY = 10 * scale self.TURBULENCE_STRENGTH = 1.0 * scale self.AVOID_THRESHOLD = 0.01 * scale self.AVOID_STRENGTH = 0.2 * scale self.frame = 0 self.particles = [] self.guide = guide # self.vertex_distance = (self.guide.data.vertices[0].co - self.guide.data.vertices[1].co).length_squared self.guide_tree = KDTree(len(self.guide.data.vertices)) for v in self.guide.data.vertices: self.guide_tree.insert(v.co, v.index) self.guide_tree.balance() self.ground = ground self.scale = scale # bpy.ops.mesh.primitive_ico_sphere_add(location=(0,0,0), size=0.01) # self.instance_obj = bpy.context.object # self.instance_obj = bpy.data.objects['Fleche'] self.instance_obj = bpy.data.objects[bpy.context.scene.ant_instance] self.instance_mesh = self.instance_obj.data # self.instance_mesh.materials.append(bpy.data.materials['noir']) def add_particles(self, particles_number): '''Add a new particle to the system''' for p in range(particles_number): ind = randint(1, len(self.guide.data.vertices)-2) self.particles.append(Particle(ind, self.scale, self.guide.data.vertices[ind].co)) def kill_particle(self, part): self.particles.remove(part) def create_tree(self): self.parts_tree = KDTree(len(self.particles)) for i, p in enumerate(self.particles): self.parts_tree.insert(p.location, i) self.parts_tree.balance() def step(self): '''Simulate next frame''' self.frame += 1 self.create_tree() for part in self.particles: if part.active: previous_velocity = part.velocity.copy() #guide vector guide_vector = self.guide.data.vertices[part.guide_index].co - part.location guide_vector = guide_vector.normalized() * self.GUIDE_STRENGTH #turbulence vector turbulence = noise.turbulence_vector(part.noise_seed+part.location, 2, False, 1, self.TURBULENCE_STRENGTH, self.TURBULENCE_FREQUENCY) # part.noise_seed += turbulence / 50 # if part.velocity.length_squared < 0.0001: # part.noise_seed = noise.random_unit_vector() part.noise_seed.z += 0.01 #boid-like vector too_close = self.parts_tree.find_range(part.location, self.AVOID_THRESHOLD) avoid_vector = Vector() for p in too_close: other_vec = part.location - p[0] if other_vec.length_squared < 0.0001: continue other_vec /= other_vec.length avoid_vector += other_vec # avoid_vector.normalize() # avoid_vector -= part.velocity avoid_vector *= self.AVOID_STRENGTH #velocity change part.velocity += avoid_vector part.velocity += turbulence * (1.0-part.behaviour) part.velocity += guide_vector * part.behaviour #limit velocity (drag and shit) if part.velocity.length > part.MAX_VEL: part.velocity.length = part.MAX_VEL # limit rotation rotation_scalar = previous_velocity.dot(part.velocity) * 0.5 + 0.5 # normalized 0-1 # rotation_scalar **= 3 if rotation_scalar > 0.1: rotation_scalar = 0.1 # rotation_scalar = 0 part.velocity *= (rotation_scalar) part.velocity += previous_velocity * (1-rotation_scalar) # put that shit on the ground closest = self.ground.closest_point_on_mesh(part.location) part.location = closest[0] # velocity parallel to the ground vel_norm = part.velocity.length inter = part.velocity.cross(closest[1]) part.velocity = closest[1].cross(inter) part.velocity.length = vel_norm # print(part.velocity) # SET NEW LOCATION part.location += part.velocity # behaviour change part.behaviour += random()*0.1-0.05 if part.behaviour < 0.8: part.behaviour = 0.8 if part.behaviour > 0.9: part.behaviour = 0.9 # # set goal to next vertex if close enough pt, ind, dist = 
self.guide_tree.find(part.location) if fabs(ind - part.guide_index) < 2: part.guide_index += part.direction # if self.frame % 20 == 0: # part.guide_index += part.direction # if next_point_distance.length_squared < self.vertex_distance: # part.guide_index += 1 # switch direction if end reached if part.guide_index >= len(self.guide.data.vertices)-1 or part.guide_index == 1: # part.active = False # self.kill_particle(part) part.direction = -part.direction part.guide_index += part.direction self.create_frame(self.frame) def create_frame(self, frame): ''' For each frame: - create a new instance of the object to duplicate (eg. a sphere) - get a list of vertices from particles' positions - create a new generator objects, use the vertex list to generate mesh - this object will be used for duplication - parent the object to duplicate to the generator object - animate the visibility of both objects ''' instance_obj_frame = bpy.data.objects.new('instance_{:05}'.format(frame), self.instance_mesh) bpy.context.scene.objects.link(instance_obj_frame) vertices = [(p.location, p.velocity) for p in self.particles] generator_mesh = bpy.data.meshes.new('generator_{:05}'.format(frame)) # generator_mesh.from_pydata(vertices, [], []) ## Track to camera # cam = bpy.context.scene.camera for v in vertices: generator_mesh.vertices.add(1) generator_mesh.vertices[-1].co = v[0] generator_mesh.vertices[-1].normal = v[1] # generator_mesh.vertices[-1].normal = cam.location - v generator_obj = bpy.data.objects.new('generator_{:05}'.format(frame), generator_mesh) bpy.context.scene.objects.link(generator_obj) instance_obj_frame.parent = generator_obj generator_obj.dupli_type = "VERTS" generator_obj.use_dupli_vertices_rotation = True #anim generator_obj.keyframe_insert('hide', frame=frame) generator_obj.keyframe_insert('hide_render', frame=frame) generator_obj.hide = True generator_obj.hide_render = True generator_obj.keyframe_insert('hide', frame=frame+1) generator_obj.keyframe_insert('hide_render', frame=frame+1) generator_obj.keyframe_insert('hide', frame=frame-1) generator_obj.keyframe_insert('hide_render', frame=frame-1)
def initialize(self):
    self.frozen = True

    nodes = self.nodes

    tree = KDTree(len(nodes))
    for i, node in enumerate(nodes):
        tree.insert(node.point, i)
    tree.balance()

    processed = set()
    final_nodes = []
    groups = []

    for i in range(len(nodes)):
        if i in processed:
            continue

        # Find points to merge
        pending = [i]
        merge_set = set(pending)

        while pending:
            added = set()
            for j in pending:
                for co, idx, dist in tree.find_range(nodes[j].point, self.epsilon):
                    added.add(idx)
            pending = added.difference(merge_set)
            merge_set.update(added)

        assert merge_set.isdisjoint(processed)
        processed.update(merge_set)

        # Group the points
        merge_list = [nodes[i] for i in merge_set]
        merge_list.sort(key=lambda x: x.name)

        group_class = merge_list[0].group_class
        for item in merge_list[1:]:
            cls = item.group_class
            if issubclass(cls, group_class):
                group_class = cls
            elif not issubclass(group_class, cls):
                raise MetarigError(
                    'Group class conflict: {} and {} from {} of {}'.format(
                        group_class, cls, item.name, item.rig.base_bone,
                    ))

        group = group_class(merge_list)
        group.build(final_nodes)
        groups.append(group)

    self.final_nodes = self.rigify_sub_objects = final_nodes
    self.groups = groups
class BoundaryAlignedRemesher: def get_hold_edges(self, obj): sc = bpy.context.scene props = sc.ba_remesh split_edge_l = [] # create layer if props.use_edge_bevel_weight: if self.bm.edges.layers.bevel_weight: bevelweight_Layer = self.bm.edges.layers.bevel_weight.verify() if props.use_edge_crease: if self.bm.edges.layers.crease: crease_Layer = self.bm.edges.layers.crease.verify() if props.use_edge_freestyle: if self.bm.edges.layers.freestyle: freestyle_Layer = self.bm.edges.layers.freestyle.verify() # find edge for edge in self.bm.edges: # 選択 if props.use_edge_select: if edge.select: split_edge_l.append(edge) # 角度 if props.use_edge_angle: try: if math.degrees( edge.calc_face_angle()) >= props.edge_angle: split_edge_l.append(edge) except: pass # シーム if props.use_edge_seam: if edge.seam: split_edge_l.append(edge) # シャープ if props.use_edge_sharp: if not edge.smooth: # sharp split_edge_l.append(edge) # ベベルウェイト if props.use_edge_bevel_weight: if self.bm.edges.layers.bevel_weight: if edge[bevelweight_Layer]: split_edge_l.append(edge) # クリース if props.use_edge_crease: if self.bm.edges.layers.crease: if edge[crease_Layer]: split_edge_l.append(edge) # Freestyle if props.use_edge_freestyle: if self.bm.edges.layers.freestyle: if edge[freestyle_Layer]: split_edge_l.append(edge) # 重複を削除 new_split_edge_l = [] for i in split_edge_l: if not i in new_split_edge_l: new_split_edge_l.append(i) return new_split_edge_l def split_feature_edges(self, obj): new_split_edge_l = self.get_hold_edges(obj) if new_split_edge_l: bmesh.ops.split_edges(self.bm, edges=new_split_edge_l) def __init__(self, obj): self.obj = object self.bm = bmesh.new() self.bm.from_mesh(obj.data) self.bvh = BVHTree.FromBMesh(self.bm) # ホールドエッジ self.split_feature_edges(obj) # 開いたエッジのガイド # Boundary_data is a list of directions and locations of boundaries. # This data will serve as guidance for the alignment self.boundary_data = [] # Fill the data using boundary edges as source of directional data. 
for edge in self.bm.edges: if edge.is_boundary: vec = (edge.verts[0].co - edge.verts[1].co).normalized() center = (edge.verts[0].co + edge.verts[1].co) / 2 self.boundary_data.append((center, vec)) # Create a Kd Tree to easily locate the nearest boundary point self.boundary_kd_tree = KDTree(len(self.boundary_data)) for index, (center, vec) in enumerate(self.boundary_data): self.boundary_kd_tree.insert(center, index) self.boundary_kd_tree.balance() def nearest_boundary_vector(self, location): """ Gets the nearest boundary direction """ location, index, dist = self.boundary_kd_tree.find(location) location, vec = self.boundary_data[index] return vec def enforce_edge_length(self, edge_length=0.05, bias=0.333): """ Replicates dyntopo behaviour """ upper_length = edge_length + edge_length * bias lower_length = edge_length - edge_length * bias # Subdivide Long edges subdivide = [] for edge in self.bm.edges: if edge.calc_length() > upper_length: subdivide.append(edge) bmesh.ops.subdivide_edges(self.bm, edges=subdivide, cuts=1) bmesh.ops.triangulate(self.bm, faces=self.bm.faces) # Remove verts with less than 5 edges, this helps inprove mesh quality dissolve_verts = [] for vert in self.bm.verts: if len(vert.link_edges) < 5: if not vert.is_boundary: dissolve_verts.append(vert) bmesh.ops.dissolve_verts(self.bm, verts=dissolve_verts) bmesh.ops.triangulate(self.bm, faces=self.bm.faces) # 外側エッジを固定 # Collapse short edges but ignore boundaries and never collapse two chained edges lock_verts = set(vert for vert in self.bm.verts if vert.is_boundary) collapse = [] for edge in self.bm.edges: if edge.calc_length() < lower_length and not edge.is_boundary: verts = set(edge.verts) if verts & lock_verts: continue collapse.append(edge) lock_verts |= verts bmesh.ops.collapse(self.bm, edges=collapse) bmesh.ops.beautify_fill(self.bm, faces=self.bm.faces, method="ANGLE") def align_verts(self, rule=(-1, -2, -3, -4)): # Align verts to the nearest boundary by averaging neigbor vert locations selected # by a specific rule, # Rules work by sorting edges by angle relative to the boundary. # Eg1. (0, 1) stands for averagiing the biggest angle and the 2nd biggest angle edges. # Eg2. (-1, -2, -3, -4), averages the four smallest angle edges for vert in self.bm.verts: if not vert.is_boundary: vec = self.nearest_boundary_vector(vert.co) neighbor_locations = [ edge.other_vert(vert).co for edge in vert.link_edges ] best_locations = sorted( neighbor_locations, key=lambda n_loc: abs( (n_loc - vert.co).normalized().dot(vec))) co = vert.co.copy() le = len(vert.link_edges) for i in rule: co += best_locations[i % le] co /= len(rule) + 1 co -= vert.co co -= co.dot(vert.normal) * vert.normal vert.co += co def reproject(self): """ Recovers original shape """ for vert in self.bm.verts: location, normal, index, dist = self.bvh.find_nearest(vert.co) if location: vert.co = location def remesh(self, edge_length=0.05, iterations=30, quads=True): """ Coordenates remeshing """ if quads: rule = (-1, -2, 0, 1) else: rule = (0, 1, 2, 3) for _ in range(iterations): self.enforce_edge_length(edge_length=edge_length) try: self.align_verts(rule=rule) except: pass self.reproject() if quads: bmesh.ops.join_triangles(self.bm, faces=self.bm.faces, angle_face_threshold=3.14, angle_shape_threshold=3.14) bmesh.ops.remove_doubles(self.bm, verts=self.bm.verts, dist=0.001) return self.bm
def __init__(self, source_bm, target_bm=None, max_springs=300,
             x_mirror=False, immediate_edges_max=6):
    self.max_springs = max_springs
    self.immediate_edges_max = immediate_edges_max
    self.bm = source_bm
    self.target_bm = target_bm
    self.n = len(source_bm.verts)
    self.co = np.array(list(tuple(v.co) for v in source_bm.verts), dtype=np.float64)
    self.last_co = self.co.copy()
    self.springs = np.zeros((self.n, max_springs), dtype=np.int64)
    self.immediate_edges = np.full((self.n, immediate_edges_max), -1, dtype=np.int64)
    self.lengths = np.zeros((self.n, max_springs), dtype=np.float64)
    self.sizing = 1
    self.pins = []
    self.out_cache = DummyObj()

    if target_bm:
        target_bm.faces.ensure_lookup_table()
        self.bvh = BVHTree.FromBMesh(target_bm)
    else:
        self.bvh = None

    source_bm.verts.ensure_lookup_table()
    source_bm.faces.ensure_lookup_table()

    if x_mirror:
        self.mirror_table = np.full((self.n,), -1, dtype=np.int64)
        self.x_mirr = True
        kd = KDTree(self.n)
        for vert in source_bm.verts:
            kd.insert(vert.co, vert.index)
        kd.balance()
    else:
        self.x_mirr = False
        self.mirror_table = None  # keep the attribute name consistent with the x_mirror branch

    for vert in source_bm.verts:
        for j, edge in enumerate(vert.link_edges):
            if not j < immediate_edges_max:
                break
            other = edge.other_vert(vert)
            if not vert.is_boundary or other.is_boundary == vert.is_boundary:
                self.immediate_edges[vert.index, j] = other.index

        for j, other in enumerate(n_ring(vert, self.max_springs)):
            self.springs[vert.index, j] = other.index
            self.lengths[vert.index, j] = (other.co - vert.co).length

        if self.x_mirr:
            # pair each vert with the vert nearest to its X-negated location
            co = vert.co.copy()
            co.x *= -1
            mirrco, mirri, dist = kd.find(co)
            self.mirror_table[vert.index] = mirri

    self.immediate_edges_invalid_places = self.immediate_edges == -1
    self.immediate_edges_number = (
        immediate_edges_max - self.immediate_edges_invalid_places.sum(axis=1))
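
# Minimal sketch of the x-mirror pairing used above: each vertex is matched with the
# KDTree hit at its X-negated location. Standalone illustration only; the epsilon
# tolerance is an assumption, the solver above simply trusts kd.find().
import bmesh
from mathutils.kdtree import KDTree

def build_x_mirror_table(bm, epsilon=1e-4):
    bm.verts.ensure_lookup_table()
    kd = KDTree(len(bm.verts))
    for vert in bm.verts:
        kd.insert(vert.co, vert.index)
    kd.balance()

    table = {}
    for vert in bm.verts:
        co = vert.co.copy()
        co.x *= -1                      # reflect across the YZ plane
        mirr_co, mirr_index, dist = kd.find(co)
        # only accept a partner if a genuinely mirrored vertex exists
        table[vert.index] = mirr_index if dist < epsilon else -1
    return table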
def execute(self, context):
    source = context.object
    try:
        sdata = source.data
        sgeom = sdata.polygons
    except AttributeError:
        self.report(
            {'ERROR_INVALID_INPUT'},
            "The active object needs to have a mesh data block.",
        )
        return {'CANCELLED'}

    # get comparison values for source
    scvals = get_vecs(sgeom, 'center')
    if self.in_wrld_crds:
        # transform source to world coordinates
        mat = np.array(source.matrix_world)
        scvals = transf_pts(mat, scvals)

    # build KD-Tree from comparison values
    kd = KDTree(len(sgeom))
    for i, v in enumerate(scvals):
        kd.insert(v, i)
    kd.balance()

    # get values to transfer from source
    stvals = get_scalars(sgeom, 'material_index', np.int8)

    all_meshless = True  # for error-reporting
    for target in context.selected_objects:
        if target is source:
            continue
        try:
            tdata = target.data
            tgeom = tdata.polygons
            all_meshless = False
        except AttributeError:
            continue

        # get comparison values for target
        tcvals = get_vecs(tgeom, 'center')
        if self.in_wrld_crds:
            # transform target to world coordinates
            mat = np.array(target.matrix_world)
            tcvals = transf_pts(mat, tcvals)

        ttvals = np.empty(len(tgeom), dtype=np.int32)
        # for every comparison point in target, find closest in
        # source and copy over its transfer value
        for ti, tv in enumerate(tcvals):
            _, si, _ = kd.find(tv)
            ttvals[ti] = stvals[si]

        # set values to transfer to target
        set_vals(tgeom, ttvals, 'material_index')

        tmats = tdata.materials
        if self.assign_mat:
            # transfer assigned materials
            for i, m in enumerate(sdata.materials):
                if i < len(tmats):
                    tmats[i] = m
                else:
                    tmats.append(m)

    if all_meshless:
        self.report(
            {'ERROR_INVALID_INPUT'},
            "No selected target object has a mesh data block.",
        )
        return {'CANCELLED'}
    return {'FINISHED'}
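
# The heart of the operator above is a nearest-centre lookup: build a KDTree over the
# source polygon centres, then give every target polygon the value of its closest
# source polygon. A helper-free sketch of just that mapping (get_vecs / set_vals /
# transf_pts are the add-on's own utilities and are not reproduced here):
import numpy as np
from mathutils.kdtree import KDTree

def nearest_value_transfer(src_centers, src_values, dst_centers):
    """src_centers (N, 3), src_values (N,), dst_centers (M, 3) -> mapped (M,) values."""
    kd = KDTree(len(src_centers))
    for i, co in enumerate(src_centers):
        kd.insert(co, i)
    kd.balance()

    out = np.empty(len(dst_centers), dtype=np.asarray(src_values).dtype)
    for ti, co in enumerate(dst_centers):
        _, si, _ = kd.find(co)    # closest source polygon
        out[ti] = src_values[si]
    return out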
import math

import bpy
import bmesh
from mathutils.bvhtree import BVHTree
from mathutils.kdtree import KDTree


class BoundaryAlignedRemesher:
    def __init__(self, obj):
        self.obj = obj
        mode = obj.mode
        self.edit_mode = mode == 'EDIT'
        # hack to update the mesh data
        bpy.ops.object.mode_set(mode='OBJECT')
        bpy.ops.object.mode_set(mode=mode)
        self.bm = bmesh.new()
        self.bm.from_mesh(obj.data)
        self.bm1 = None
        if self.edit_mode:
            # self.bm keeps only the selected part, self.bm1 keeps the rest
            self.bm1 = self.bm.copy()
            remove, remove1 = [], []
            for vert in self.bm.verts:
                if all(not f.select for f in vert.link_faces):
                    remove.append(vert)
            for vert in remove:
                self.bm.verts.remove(vert)
            for vert in self.bm1.verts:
                if all(f.select for f in vert.link_faces):
                    remove1.append(vert)
            for vert in remove1:
                self.bm1.verts.remove(vert)
            remove1 = [f for f in self.bm1.faces if f.select]
            for face in remove1:
                self.bm1.faces.remove(face)
        self.bvh = BVHTree.FromBMesh(self.bm)

        # Boundary_data is a list of directions and locations of boundaries.
        # This data will serve as guidance for the alignment.
        self.boundary_data = []
        # Fill the data using boundary edges as source of directional data.
        for edge in self.bm.edges:
            if edge.is_boundary:
                vec = (edge.verts[0].co - edge.verts[1].co).normalized()
                center = (edge.verts[0].co + edge.verts[1].co) / 2
                self.boundary_data.append((center, vec))

        # Create a KD Tree to easily locate the nearest boundary point
        self.boundary_kd_tree = KDTree(len(self.boundary_data))
        for index, (center, vec) in enumerate(self.boundary_data):
            self.boundary_kd_tree.insert(center, index)
        self.boundary_kd_tree.balance()

    def nearest_boundary_vector(self, location):
        """ Gets the nearest boundary direction """
        location, index, dist = self.boundary_kd_tree.find(location)
        location, vec = self.boundary_data[index]
        return vec

    def enforce_edge_length(self, edge_length=0.05, bias=0.333):
        """ Replicates dyntopo behavior """
        upper_length = edge_length + edge_length * bias
        lower_length = edge_length - edge_length * bias

        # Subdivide long edges
        subdivide = []
        for edge in self.bm.edges:
            if edge.calc_length() > upper_length:
                subdivide.append(edge)
        bmesh.ops.subdivide_edges(self.bm, edges=subdivide, cuts=1)
        bmesh.ops.triangulate(self.bm, faces=self.bm.faces)
        if self.edit_mode:
            subdivide = []
            for edge in self.bm1.edges:
                if edge.select and edge.calc_length() > upper_length:
                    subdivide.append(edge)
            bmesh.ops.subdivide_edges(self.bm1, edges=subdivide, cuts=1)

        # Remove verts with less than 5 edges, this helps improve mesh quality
        dissolve_verts = []
        for vert in self.bm.verts:
            if len(vert.link_edges) < 5:
                if not vert.is_boundary:
                    dissolve_verts.append(vert)
        bmesh.ops.dissolve_verts(self.bm, verts=dissolve_verts)
        bmesh.ops.triangulate(self.bm, faces=self.bm.faces)

        # Collapse short edges but ignore boundaries and never collapse two chained edges
        lock_verts = set(vert for vert in self.bm.verts if vert.is_boundary)
        collapse = []
        for edge in self.bm.edges:
            if edge.calc_length() < lower_length and not edge.is_boundary:
                verts = set(edge.verts)
                if verts & lock_verts:
                    continue
                collapse.append(edge)
                lock_verts |= verts
        bmesh.ops.collapse(self.bm, edges=collapse)
        bmesh.ops.beautify_fill(self.bm, faces=self.bm.faces, method="ANGLE")

    def align_verts(self, rule=(-1, -2, -3, -4)):
        # Align verts to the nearest boundary by averaging neighbor vert locations
        # selected by a specific rule.
        # Rules work by sorting edges by angle relative to the boundary.
        # Eg1. (0, 1) averages the edges with the biggest and 2nd biggest angles.
        # Eg2. (-1, -2, -3, -4) averages the four smallest-angle edges.
        for vert in self.bm.verts:
            if not vert.is_boundary:
                # min_edge = min(vert.link_edges, key=lambda e: e.calc_length())
                # other = min_edge.other_vert(vert)
                # vec = other.co - vert.co
                # vert.co -= vec * 0.1
                vec = self.nearest_boundary_vector(vert.co)
                neighbor_locations = [
                    edge.other_vert(vert).co for edge in vert.link_edges
                ]
                best_locations = sorted(
                    neighbor_locations,
                    key=lambda n_loc: abs(
                        (n_loc - vert.co).normalized().dot(vec)))
                co = vert.co.copy()
                le = len(vert.link_edges)
                for i in rule:
                    co += best_locations[i % le]
                co /= len(rule) + 1
                co -= vert.co
                co -= co.dot(vert.normal) * vert.normal
                vert.co += co
        self.reproject()

    def reproject(self):
        """ Recovers original shape """
        for vert in self.bm.verts:
            if vert.is_boundary:
                continue
            location, normal, index, dist = self.bvh.find_nearest(vert.co)
            if location:
                vert.co = location

    def remesh(self, edge_length=0.05, iterations=30, quads=True):
        """ Coordinates remeshing """
        if self.edit_mode:
            bpy.ops.object.mode_set(mode='OBJECT')
        if quads:
            rule = (-1, -2, 0, 1)
        else:
            rule = (0, 1, 2, 3)
        for _ in range(iterations):
            self.enforce_edge_length(edge_length=edge_length)
            self.align_verts(rule=rule)
            self.reproject()
        if quads:
            bmesh.ops.join_triangles(self.bm, faces=self.bm.faces,
                                     angle_face_threshold=3.14,
                                     angle_shape_threshold=3.14)
        for vert in self.bm.verts:
            vert.select = True
        for face in self.bm.faces:
            face.select = True
        if self.bm1:
            # merge the untouched part back in and weld the seam
            self.bm1.to_mesh(self.obj.data)
            self.bm.from_mesh(self.obj.data)
            bmesh.ops.remove_doubles(
                self.bm,
                verts=[v for v in self.bm.verts if v.select],
                dist=0.00001)
        self.bm.to_mesh(self.obj.data)
        if self.edit_mode:
            bpy.ops.object.mode_set(mode='EDIT')
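
# Usage sketch (illustrative, not the add-on's operator): unlike the first variant,
# this class writes the result back to obj.data itself and handles the OBJECT/EDIT
# mode toggle, so once a mesh object is active a single call is enough.
import bpy

def run_edit_mode_remesh(edge_length=0.05, iterations=10):
    obj = bpy.context.active_object   # mesh object, optionally in Edit Mode
    BoundaryAlignedRemesher(obj).remesh(edge_length=edge_length,
                                        iterations=iterations,
                                        quads=True)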
def execute(self, context):
    C = context
    me = C.object.to_mesh(C.scene, apply_modifiers=True, settings='PREVIEW')
    bme = bmesh.new()
    bme.from_mesh(me)
    bme.verts.ensure_lookup_table()
    bme.edges.ensure_lookup_table()

    # verts in order
    loops = edge_loops_from_bmedges(bme, [ed.index for ed in bme.edges])
    if len(loops) > 1:
        print('need a single loop')
    loop = loops[0]
    loop.pop()  # cyclic, don't need that one any more

    coords = [bme.verts[i].co for i in loop]
    spaced_verts, spaced_eds = space_evenly_on_path(coords, [(0, 1), (1, 0)], 300)
    bme.free()
    print(len(spaced_verts))

    # build our search tree
    kd = KDTree(len(spaced_verts))
    for i, v in enumerate(spaced_verts):
        kd.insert(v, i)
    kd.balance()

    bme2 = bmesh.new()
    bme2.verts.ensure_lookup_table()
    bme2.edges.ensure_lookup_table()
    for v in spaced_verts:
        bme2.verts.new(v)
    bme2.verts.ensure_lookup_table()
    bme2.edges.ensure_lookup_table()
    for ed in spaced_eds:
        v0, v1 = ed
        bme2.edges.new((bme2.verts[v0], bme2.verts[v1]))
    bme2.verts.index_update()
    bme2.verts.ensure_lookup_table()
    bme2.edges.index_update()
    bme2.edges.ensure_lookup_table()

    loops = edge_loops_from_bmedges(bme2, [ed.index for ed in bme2.edges])
    loop = loops[0]
    loop.pop()

    def euc_dist(v1, v2):
        return (v1.co - v2.co).length

    def split_loop(vert_loop_inds):
        best_pairs = {}

        def geo_dist(v1, v2):
            N = len(vert_loop_inds)
            n = vert_loop_inds.index(v1.index)
            m = vert_loop_inds.index(v2.index)
            return min(math.fmod(N + m - n, N), math.fmod(N + n - m, N))

        for i in vert_loop_inds:
            v1 = bme2.verts[i]
            pfactor = 0
            match = None
            link_verts = [ed.other_vert(v1) for ed in v1.link_edges]
            for loc, ind, dist in kd.find_range(v1.co, self.max_edge):
                if ind == i:
                    continue  # prevent divide by 0
                if ind not in vert_loop_inds:
                    continue  # filter by this loop
                v2 = bme2.verts[ind]
                if v2 in link_verts:
                    continue  # prevent neighbors
                fac = geo_dist(v1, v2) / euc_dist(v1, v2)
                if fac > pfactor:  # if a better match is found, keep it
                    pfactor = fac
                    match = v2
            best_pairs[v1] = (match, pfactor)

        vs = [bme2.verts[i] for i in vert_loop_inds]
        v1 = max(vs, key=lambda x: best_pairs[x][1])  # connect the best pair
        v2, pfactor = best_pairs[v1]
        try:
            # split the index loop into 2
            ind1 = min(vert_loop_inds.index(v1.index), vert_loop_inds.index(v2.index))
            ind2 = max(vert_loop_inds.index(v1.index), vert_loop_inds.index(v2.index))
            print('splitting loop at ind1: %i and ind2: %i' % (ind1, ind2))
            print('creating edge between vert: %i and vert: %i' % (v1.index, v2.index))
            loop0 = vert_loop_inds[ind1:ind2 + 1]
            loop1 = vert_loop_inds[0:ind1 + 1] + vert_loop_inds[ind2:]
            print(vert_loop_inds)
            print('\n')
            print(loop0)
            print('\n')
            print(loop1)
            bme2.edges.new((v1, v2))
            bme2.verts.ensure_lookup_table()
            bme2.edges.ensure_lookup_table()
            return loop0, loop1
        except:
            print("can't add edge between vert: %i and vert: %i" % (v1.index, v2.index))
            return vert_loop_inds, []

    loops = [loop]
    for n in range(0, self.n_partitions):
        print('\n')
        print('PARTITION # %i' % (n + 1))
        biggest_loop = max(loops, key=len)
        if len(biggest_loop) < 20:
            break
        loop1, loop2 = split_loop(biggest_loop)
        if loop2 != []:
            loops.remove(biggest_loop)
            loops += [loop1, loop2]
        else:
            break

    new_faces = []
    bme2.faces.ensure_lookup_table()
    for loop in loops:
        new_faces.append(bme2.faces.new([bme2.verts[i] for i in loop]))
    bme2.faces.ensure_lookup_table()
    bmesh.ops.triangulate(bme2, faces=new_faces)

    new_ob = bpy.data.objects.new('Partitioned', me)
    C.scene.objects.link(new_ob)
    bme2.to_mesh(me)
    bme2.free()
    return {'FINISHED'}
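
# The pair score used by split_loop above is "steps apart along the loop divided by
# straight-line distance": a high ratio marks two verts that are far apart along the
# boundary yet close in space, i.e. a good chord to bridge and split across. A toy,
# bmesh-free check of that scoring on a loop squashed along Y (illustration only):
import math
from mathutils import Vector

def pair_factor(coords, i, j):
    """coords: ordered loop of Vectors; returns geodesic steps / euclidean distance."""
    N = len(coords)
    geo = min((j - i) % N, (i - j) % N)
    euc = (coords[i] - coords[j]).length
    return geo / euc if euc > 0 else 0.0

# an ellipse squashed along Y: verts facing each other across the narrow side win
loop = [Vector((math.cos(t), 0.2 * math.sin(t), 0.0))
        for t in (2 * math.pi * k / 64 for k in range(64))]
best = max(((i, j) for i in range(64) for j in range(i + 2, 64)),
           key=lambda p: pair_factor(loop, *p))
print(best)  # indices roughly half a loop apart, straddling the narrow dimension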
def repeal_particles(self, iterations=20, factor=0.01):
    particles = list(self.particles)
    tree = KDTree(len(particles))
    for index, particle in enumerate(particles):
        tree.insert(particle.co, index)
    tree.balance()

    for i in range(iterations):
        new_tree = KDTree(len(self.particles))
        for index, particle in enumerate(particles):
            if particle.tag in {"SHARP", "GREASE"}:
                continue
            d = Vector()
            for loc, other_index, dist in tree.find_n(particle.co, 3):
                if dist == 0:
                    continue
                other = particles[other_index]
                vec = particle.co - other.co
                d += (vec / (dist ** 3))
                if not self.triangle_mode:
                    u = particle.dir
                    v = u.cross(particle.normal)
                    for vec in (u + v, u - v, -u + v, -u - v):
                        vec *= particle.radius
                        vec += other.co
                        vec -= particle.co
                        dist = vec.length
                        d -= vec * 0.3 / (dist ** 3)
            d.normalize()
            location, normal, dir, s, c = self.field.sample_point(
                particle.co + (d * factor * particle.radius))
            if location:
                particle.co = location
                particle.normal = normal
                self.grid.update(particle)
                particle.dir = dir
            new_tree.insert(particle.co, index)
        new_tree.balance()
        tree = new_tree
        yield i
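
# The repulsion above sums inverse-cube pushes from the nearest neighbours returned
# by KDTree.find_n. A stripped-down, field-free sketch of that accumulation for a
# plain list of mathutils Vectors (illustration only; the generator above also
# samples a cross field, reprojects the particle and updates a spatial grid):
from mathutils import Vector
from mathutils.kdtree import KDTree

def repulsion_directions(points, neighbors=3):
    kd = KDTree(len(points))
    for i, co in enumerate(points):
        kd.insert(co, i)
    kd.balance()

    directions = []
    for co in points:
        d = Vector()
        for loc, index, dist in kd.find_n(co, neighbors):
            if dist == 0:
                continue                 # skip the point itself
            d += (co - loc) / (dist ** 3)
        d.normalize()                    # only the direction is used
        directions.append(d)
    return directions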
def buildKDTree(self, points):
    kdTree = KDTree(len(points))
    for i, vector in enumerate(points):
        kdTree.insert(vector, i)
    kdTree.balance()
    return kdTree
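
# Usage sketch for the helper above (it is written as a method, so `self` goes unused
# when calling it standalone): build the tree once, then query it with find, find_n,
# or find_range.
from mathutils import Vector

points = [Vector((x, 0.0, 0.0)) for x in range(10)]
tree = buildKDTree(None, points)                            # `self` is not referenced
co, index, dist = tree.find(Vector((3.2, 0.0, 0.0)))        # single nearest point
nearest_three = tree.find_n(Vector((3.2, 0.0, 0.0)), 3)     # three nearest points
in_radius = tree.find_range(Vector((3.2, 0.0, 0.0)), 1.5)   # everything within 1.5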