def execute(self, vertices, clusters, connections, minDistance, maxDistance):
    """Connect nearby points with edges using a KD-tree range search.

    For each of the first ``min(len(vertices), clusters)`` points, link it to at
    most ``connections`` neighbours whose distance lies in
    ``(minDistance, maxDistance]``.  Returns a deduplicated list of
    ``(lowIndex, highIndex)`` tuples.
    """
    # Sanitize the distance window: non-negative, and max >= min.
    minDistance = max(0, minDistance)
    maxDistance = max(minDistance, maxDistance)

    count = len(vertices)
    tree = KDTree(count)
    for index, point in enumerate(vertices):
        tree.insert(point, index)
    tree.balance()

    edges = []
    for source in range(min(count, clusters)):
        linked = 0
        for _point, target, dist in tree.find_range(vertices[source], maxDistance):
            # Skip the query point itself and anything at or inside minDistance.
            if target == source or dist <= minDistance:
                continue
            if linked >= connections:
                break
            # Store each edge with the smaller index first so duplicates collapse.
            edges.append((source, target) if source < target else (target, source))
            linked += 1
    return list(set(edges))
def initialize(self):
    """Cluster nodes that lie within ``self.epsilon`` of each other and build
    a group object for each cluster.

    Populates ``self.final_nodes`` / ``self.rigify_sub_objects`` with the
    nodes produced by each group's ``build()`` and ``self.groups`` with the
    group instances.  Sets ``self.frozen`` so no further nodes are added.
    """
    self.frozen = True
    nodes = self.nodes
    # KD-tree over node positions for fast epsilon-radius neighbour queries.
    tree = KDTree(len(nodes))
    for i, node in enumerate(nodes):
        tree.insert(node.point, i)
    tree.balance()
    processed = set()
    final_nodes = []
    groups = []
    for i in range(len(nodes)):
        if i in processed:
            continue
        # Find points to merge: flood-fill outward from node i, repeatedly
        # adding every node within epsilon of any node already in the cluster.
        pending = [i]
        merge_set = set(pending)
        while pending:
            added = set()
            for j in pending:
                for co, idx, dist in tree.find_range(nodes[j].point, self.epsilon):
                    added.add(idx)
            pending = added.difference(merge_set)
            merge_set.update(added)
        # Transitive closure means clusters never overlap; this guards the invariant.
        assert merge_set.isdisjoint(processed)
        processed.update(merge_set)
        # Group the points.  Sort by name so the group's member order (and
        # therefore which node provides the initial group_class) is deterministic.
        merge_list = [nodes[i] for i in merge_set]
        merge_list.sort(key=lambda x: x.name)
        # Pick the most derived group_class among the members; classes that are
        # neither ancestors nor descendants of each other are a hard conflict.
        group_class = merge_list[0].group_class
        for item in merge_list[1:]:
            cls = item.group_class
            if issubclass(cls, group_class):
                group_class = cls
            elif not issubclass(group_class, cls):
                raise MetarigError(
                    'Group class conflict: {} and {} from {} of {}'.format(
                        group_class, cls, item.name, item.rig.base_bone,
                    )
                )
        group = group_class(merge_list)
        group.build(final_nodes)
        groups.append(group)
    self.final_nodes = self.rigify_sub_objects = final_nodes
    self.groups = groups
def _execute(self, context):
    """Select every vertex of the active mesh that lies within ``self.dist``
    (world space) of any vertex of another selected mesh object.
    """
    active = context.object
    active_verts = active.data.vertices

    # Index the active object's vertices in world coordinates.
    tree = KDTree(len(active_verts))
    for index, vert in enumerate(active_verts):
        tree.insert(active.matrix_world @ vert.co, index)
    tree.balance()

    # Probe with every vertex of the other selected mesh objects.
    for obj in context.selected_objects:
        if obj is active or obj.type != 'MESH':
            continue
        world = obj.matrix_world
        for vert in obj.data.vertices:
            for _co, index, _dist in tree.find_range(world @ vert.co, self.dist):
                active_verts[index].select = True
def my_handler(scene):
    """Frame-change handler: morph 'tire.002' between 'tire' and 'tire.001',
    weighted by proximity to the 'Cube' brush object.

    Vertices of the target within the brush radius get weight 1.0 in vertex
    group 0; every target vertex is then interpolated between the two source
    meshes by its weight.  (Pre-2.8 Blender API: matrix transforms use ``*``.)
    """
    from_name = 'tire'
    to_name = 'tire.001'
    tgt_name = 'tire.002'
    brush_name = 'Cube'
    from_obj = scene.objects[from_name]
    to_obj = scene.objects[to_name]
    tgt_obj = scene.objects[tgt_name]
    brush_obj = scene.objects[brush_name]
    from_mesh = from_obj.data
    to_mesh = to_obj.data
    tgt_mesh = tgt_obj.data
    brush_mesh = brush_obj.data

    # KD-tree of target vertices in world space.
    tgt_kd = KDTree(len(tgt_mesh.vertices))
    for i in range(len(tgt_mesh.vertices)):
        co = tgt_obj.matrix_world * tgt_mesh.vertices[i].co
        tgt_kd.insert(co, i)
    tgt_kd.balance()

    # Brush radius = half the world-space X extent of the brush mesh.
    min_x = float('inf')
    max_x = float('-inf')
    for i in range(len(brush_mesh.vertices)):
        # BUGFIX: was `co * matrix` (row-vector order); the transform is
        # `matrix * co`, matching the target-vertex loop above.
        co = brush_obj.matrix_world * brush_mesh.vertices[i].co
        max_x = max(co.x, max_x)
        min_x = min(co.x, min_x)
    radius = (max_x - min_x) / 2

    # Paint full weight on target vertices inside the brush sphere.
    pts = tgt_kd.find_range(brush_obj.location, radius)
    for (co, idx, dist) in pts:
        tgt_obj.vertex_groups[0].add([idx], 1.0, 'REPLACE')

    # Blend co/normal between the two source meshes by the painted weight.
    # NOTE(review): vertex_groups[0].weight(i) raises if vertex i is not in
    # the group — assumes every vertex was pre-added; confirm with setup code.
    for i in range(len(from_mesh.vertices)):
        t = tgt_obj.vertex_groups[0].weight(i)
        co = from_mesh.vertices[i].co * (1 - t) + \
            to_mesh.vertices[i].co * (t)
        no = from_mesh.vertices[i].normal * (1 - t) + \
            to_mesh.vertices[i].normal * (t)
        tgt_mesh.vertices[i].co = co
        tgt_mesh.vertices[i].normal = no
    scene.update()
def my_handler(scene):
    """Frame-change handler: morph 'tire.002' between 'tire' and 'tire.001',
    weighted by proximity to the 'Cube' brush object.

    Vertices of the target within the brush radius get weight 1.0 in vertex
    group 0; every target vertex is then interpolated between the two source
    meshes by its weight.  (Pre-2.8 Blender API: matrix transforms use ``*``.)
    """
    from_name = 'tire'
    to_name = 'tire.001'
    tgt_name = 'tire.002'
    brush_name = 'Cube'
    from_obj = scene.objects[from_name]
    to_obj = scene.objects[to_name]
    tgt_obj = scene.objects[tgt_name]
    brush_obj = scene.objects[brush_name]
    from_mesh = from_obj.data
    to_mesh = to_obj.data
    tgt_mesh = tgt_obj.data
    brush_mesh = brush_obj.data

    # KD-tree of target vertices in world space.
    tgt_kd = KDTree(len(tgt_mesh.vertices))
    for i in range(len(tgt_mesh.vertices)):
        co = tgt_obj.matrix_world * tgt_mesh.vertices[i].co
        tgt_kd.insert(co, i)
    tgt_kd.balance()

    # Brush radius = half the world-space X extent of the brush mesh.
    min_x = float('inf')
    max_x = float('-inf')
    for i in range(len(brush_mesh.vertices)):
        # BUGFIX: was `co * matrix` (row-vector order); the transform is
        # `matrix * co`, matching the target-vertex loop above.
        co = brush_obj.matrix_world * brush_mesh.vertices[i].co
        max_x = max(co.x, max_x)
        min_x = min(co.x, min_x)
    radius = (max_x - min_x) / 2

    # Paint full weight on target vertices inside the brush sphere.
    pts = tgt_kd.find_range(brush_obj.location, radius)
    for (co, idx, dist) in pts:
        tgt_obj.vertex_groups[0].add([idx], 1.0, 'REPLACE')

    # Blend co/normal between the two source meshes by the painted weight.
    # NOTE(review): vertex_groups[0].weight(i) raises if vertex i is not in
    # the group — assumes every vertex was pre-added; confirm with setup code.
    for i in range(len(from_mesh.vertices)):
        t = tgt_obj.vertex_groups[0].weight(i)
        co = from_mesh.vertices[i].co * (1 - t) + \
            to_mesh.vertices[i].co * (t)
        no = from_mesh.vertices[i].normal * (1 - t) + \
            to_mesh.vertices[i].normal * (t)
        tgt_mesh.vertices[i].co = co
        tgt_mesh.vertices[i].normal = no
    scene.update()
class Tile3DFinder:
    """KD-tree lookup of tile objects by position, with per-position caching."""

    def __init__(self, objects=None):
        # Cache maps a frozen position vector -> list of nearby objects.
        self.cached = {}
        self.objects = objects or [
            child for child in t3d.root.children if child.layers[t3d.layer]
        ]
        self.kd = KDTree(len(self.objects))
        for index, obj in enumerate(self.objects):
            self.kd.insert(obj.pos, index)
        self.kd.balance()

    def get_tiles_at(self, pos):
        """Return all indexed objects within TOLERANCE of ``pos`` (cached)."""
        key = pos.copy().freeze()  # frozen copy is hashable and immutable
        hit = self.cached.get(key)
        if hit is not None:
            return hit
        found = [
            self.objects[index]
            for _co, index, _dist in self.kd.find_range(pos, TOLERANCE)
        ]
        self.cached[key] = found
        return found
class SvSolidTopology(object):
    """Topology index for a FreeCAD Solid.

    Builds vertex/edge/face adjacency maps plus a KD-tree over vertex
    positions, so membership and proximity queries do not rescan the solid.
    """

    class Item(object):
        """Hashable wrapper around a FreeCAD shape element.

        FreeCAD shape objects are not hashable/comparable directly; this
        delegates to their hashCode()/isSame() so they can be dict keys.
        """
        def __init__(self, item):
            self.item = item

        def __hash__(self):
            return self.item.hashCode()

        def __eq__(self, other):
            return self.item.isSame(other.item)

        def __repr__(self):
            return f"<Item: {type(self.item).__name__} #{self.item.hashCode()}>"

    def __init__(self, solid):
        self.solid = solid
        self._init()

    def __repr__(self):
        v = len(self.solid.Vertexes)
        e = len(self.solid.Edges)
        f = len(self.solid.Faces)
        return f"<Solid topology: {v} vertices, {e} edges, {f} faces>"

    def _init(self):
        """Build adjacency maps and the vertex KD-tree."""
        self._faces_by_vertex = defaultdict(set)
        self._faces_by_edge = defaultdict(set)
        self._edges_by_vertex = defaultdict(set)
        for face in self.solid.Faces:
            for vertex in face.Vertexes:
                self._faces_by_vertex[SvSolidTopology.Item(vertex)].add(
                    SvSolidTopology.Item(face))
            for edge in face.Edges:
                self._faces_by_edge[SvSolidTopology.Item(edge)].add(
                    SvSolidTopology.Item(face))
        for edge in self.solid.Edges:
            for vertex in edge.Vertexes:
                self._edges_by_vertex[SvSolidTopology.Item(vertex)].add(
                    SvSolidTopology.Item(edge))
        # KD-tree over vertex coordinates for get_vertices_within_range*().
        self._tree = KDTree(len(self.solid.Vertexes))
        for i, vertex in enumerate(self.solid.Vertexes):
            co = (vertex.X, vertex.Y, vertex.Z)
            self._tree.insert(co, i)
        self._tree.balance()

    def tessellate(self, precision):
        """Discretize edges and faces into point samples; required before the
        *_by_location_mask queries can be used."""
        self._points_by_edge = defaultdict(list)
        self._points_by_face = defaultdict(list)
        for edge in self.solid.Edges:
            points = edge.discretize(Deflection=precision)
            i_edge = SvSolidTopology.Item(edge)
            for pt in points:
                self._points_by_edge[i_edge].append((pt.x, pt.y, pt.z))
        for face in self.solid.Faces:
            data = face.tessellate(precision)
            i_face = SvSolidTopology.Item(face)
            # data[0] is the vertex list of the tessellation.
            for pt in data[0]:
                self._points_by_face[i_face].append((pt.x, pt.y, pt.z))

    def calc_normals(self):
        """Compute one averaged, unit-length normal per face from its UV nodes."""
        self._normals_by_face = dict()
        for face in self.solid.Faces:
            #face.tessellate(precision)
            #u_min, u_max, v_min, v_max = face.ParameterRange
            sum_normal = Base.Vector(0, 0, 0)
            for u, v in face.getUVNodes():
                normal = face.normalAt(u, v)
                sum_normal = sum_normal + normal
            sum_normal = np.array([sum_normal.x, sum_normal.y, sum_normal.z])
            sum_normal = sum_normal / np.linalg.norm(sum_normal)
            self._normals_by_face[SvSolidTopology.Item(face)] = sum_normal

    def get_normal_by_face(self, face):
        # Requires calc_normals() to have been called first.
        return self._normals_by_face[SvSolidTopology.Item(face)]

    def get_vertices_by_location(self, condition):
        """Return (x, y, z) tuples of vertices satisfying ``condition``."""
        to_tuple = lambda v: (v.X, v.Y, v.Z)
        return [
            to_tuple(v) for v in self.solid.Vertexes if condition(to_tuple(v))
        ]

    def get_vertices_by_location_mask(self, condition):
        """Boolean mask over solid.Vertexes for ``condition``."""
        to_tuple = lambda v: (v.X, v.Y, v.Z)
        return [condition(to_tuple(v)) for v in self.solid.Vertexes]

    def get_points_by_edge(self, edge):
        # Requires tessellate() to have been called first.
        return self._points_by_edge[SvSolidTopology.Item(edge)]

    def get_points_by_face(self, face):
        # Requires tessellate() to have been called first.
        return self._points_by_face[SvSolidTopology.Item(face)]

    def get_edges_by_location_mask(self, condition, include_partial):
        # condition is vectorized: it receives an (N, 3) array of edge sample
        # points and returns a boolean array.
        check = any if include_partial else all
        mask = []
        for edge in self.solid.Edges:
            test = condition(
                np.array(self._points_by_edge[SvSolidTopology.Item(edge)]))
            mask.append(check(test))
        return mask

    def get_faces_by_location_mask(self, condition, include_partial):
        # condition is vectorized (see get_edges_by_location_mask).
        check = any if include_partial else all
        mask = []
        for face in self.solid.Faces:
            test = condition(
                np.array(self._points_by_face[SvSolidTopology.Item(face)]))
            mask.append(check(test))
        return mask

    def get_faces_by_vertex(self, vertex):
        """Faces incident to ``vertex`` (unwrapped FreeCAD objects)."""
        return [
            i.item for i in self._faces_by_vertex[SvSolidTopology.Item(vertex)]
        ]

    def get_faces_by_vertices_mask(self, vertices, include_partial=True):
        """Mask over solid.Faces: True where the face touches the given
        vertices (any vertex if include_partial, else all of its vertices)."""
        if include_partial:
            good = set()
            for vertex in vertices:
                new = self._faces_by_vertex[SvSolidTopology.Item(vertex)]
                good.update(new)
            return [
                SvSolidTopology.Item(face) in good for face in self.solid.Faces
            ]
        else:
            vertices = set([SvSolidTopology.Item(v) for v in vertices])
            mask = []
            for face in self.solid.Faces:
                ok = all(
                    SvSolidTopology.Item(v) in vertices for v in face.Vertexes)
                mask.append(ok)
            return mask

    def get_faces_by_edge(self, edge):
        """Faces incident to ``edge`` (unwrapped FreeCAD objects)."""
        return [
            i.item for i in self._faces_by_edge[SvSolidTopology.Item(edge)]
        ]

    def get_faces_by_edges_mask(self, edges, include_partial=True):
        """Mask over solid.Faces relative to the given edges (see
        get_faces_by_vertices_mask for include_partial semantics)."""
        if include_partial:
            good = set()
            for edge in edges:
                new = self._faces_by_edge[SvSolidTopology.Item(edge)]
                good.update(new)
            return [
                SvSolidTopology.Item(face) in good for face in self.solid.Faces
            ]
        else:
            edges = set([SvSolidTopology.Item(e) for e in edges])
            mask = []
            for face in self.solid.Faces:
                ok = all(SvSolidTopology.Item(e) in edges for e in face.Edges)
                mask.append(ok)
            return mask

    def get_edges_by_vertex(self, vertex):
        """Edges incident to ``vertex`` (unwrapped FreeCAD objects)."""
        return [
            i.item for i in self._edges_by_vertex[SvSolidTopology.Item(vertex)]
        ]

    def get_edges_by_vertices_mask(self, vertices, include_partial=True):
        """Mask over solid.Edges relative to the given vertices."""
        if include_partial:
            good = set()
            for vertex in vertices:
                new = self._edges_by_vertex[SvSolidTopology.Item(vertex)]
                good.update(new)
            return [
                SvSolidTopology.Item(edge) in good for edge in self.solid.Edges
            ]
        else:
            vertices = set([SvSolidTopology.Item(v) for v in vertices])
            mask = []
            for edge in self.solid.Edges:
                ok = all(
                    SvSolidTopology.Item(v) in vertices for v in edge.Vertexes)
                mask.append(ok)
            return mask

    def get_edges_by_faces_mask(self, faces):
        """Mask over solid.Edges: True where the edge belongs to any given face."""
        good = set()
        for face in faces:
            new = set([SvSolidTopology.Item(e) for e in face.Edges])
            good.update(new)
        return [
            SvSolidTopology.Item(edge) in good for edge in self.solid.Edges
        ]

    def get_vertices_by_faces_mask(self, faces):
        """Mask over solid.Vertexes: True where the vertex belongs to any given face."""
        good = set()
        for face in faces:
            new = set([SvSolidTopology.Item(v) for v in face.Vertexes])
            good.update(new)
        return [
            SvSolidTopology.Item(vertex) in good
            for vertex in self.solid.Vertexes
        ]

    def get_vertices_by_edges_mask(self, edges):
        """Mask over solid.Vertexes: True where the vertex belongs to any given edge."""
        good = set()
        for edge in edges:
            new = set([SvSolidTopology.Item(v) for v in edge.Vertexes])
            good.update(new)
        return [
            SvSolidTopology.Item(vertex) in good
            for vertex in self.solid.Vertexes
        ]

    def get_vertices_within_range(self, origin, distance):
        """Vertices within ``distance`` of ``origin`` via the KD-tree."""
        found = self._tree.find_range(tuple(origin), distance)
        idxs = [item[1] for item in found]  # (co, index, dist) triples
        vertices = [self.solid.Vertexes[i] for i in idxs]
        return vertices

    def get_vertices_within_range_mask(self, origin, distance):
        """Boolean mask version of get_vertices_within_range."""
        found = self._tree.find_range(tuple(origin), distance)
        idxs = set([item[1] for item in found])
        return [i in idxs for i in range(len(self.solid.Vertexes))]
def finish(self, context):
    """Fit the 'Posterior Plane' grid to the user-picked cusp points.

    Builds (or reuses) a 200x200 grid object, orients it to the best-fit
    plane of ``self.crv.b_pts``, snaps grid heights to the picked points via
    inverse-distance weighting over a KD-tree, dilates the kept region a few
    rings outward, trims everything anterior of the most-anterior picked
    points, deletes the rest, and adds a Smooth modifier.
    (Pre-2.8 Blender API throughout: ``matrix * vector``.)
    """
    #ray cast the entire grid into
    # Reuse the existing plane object if present, else create and link one.
    if 'Posterior Plane' in bpy.data.objects:
        Plane = bpy.data.objects['Posterior Plane']
        Plane.hide = False
    else:
        me = bpy.data.meshes.new('Posterior Plane')
        Plane = bpy.data.objects.new('Posterior Plane', me)
        context.scene.objects.link(Plane)
    pbme = bmesh.new()
    pbme.verts.ensure_lookup_table()
    pbme.edges.ensure_lookup_table()
    pbme.faces.ensure_lookup_table()
    bmesh.ops.create_grid(pbme, x_segments = 200, y_segments = 200, size = 39.9)
    pbme.to_mesh(Plane.data)
    # Best-fit plane (point, normal) through the user-picked points.
    pt, pno = calculate_plane(self.crv.b_pts)
    # World-axis hints; Z flips for mandible so the plane faces the occlusion.
    # NOTE(review): Yw is set but never used below — possibly vestigial.
    if self.splint.jaw_type == 'MANDIBLE':
        Zw = Vector((0,0,-1))
        Xw = Vector((1,0,0))
        Yw = Vector((0,-1,1))
    else:
        Zw = Vector((0,0,1))
        Xw = Vector((1,0,0))
        Yw = Vector((0,1,0))
    Z = pno
    Z.normalize()
    # Keep the plane normal on the same side as the jaw-dependent world Z.
    if Zw.dot(Z) < 0:
        Z *= -1
    Y = Z.cross(Xw)
    X = Y.cross(Z)
    #make the columns of matrix U, V, W
    R = Matrix.Identity(3)
    R[0][0], R[0][1], R[0][2] = X[0], Y[0], Z[0]
    R[1][0], R[1][1], R[1][2] = X[1], Y[1], Z[1]
    R[2][0], R[2][1], R[2][2] = X[2], Y[2], Z[2]
    R = R.to_4x4()
    # Place the grid 5 units below the fit point along the normal.
    T = Matrix.Translation(pt - 5 * Z)
    Plane.matrix_world = T * R
    pmx = Plane.matrix_world
    ipmx = pmx.inverted()
    bme_pln = bmesh.new()
    bme_pln.from_mesh(Plane.data)
    bme_pln.verts.ensure_lookup_table()
    bme_pln.edges.ensure_lookup_table()
    bme_pln.faces.ensure_lookup_table()
    bvh = BVHTree.FromBMesh(bme_pln)
    #we are going to raycast the user world coordinate points
    #into a grid, and identify all points in the grid from the local Z direction
    #Then we will store the local location of the user picked coordinate in a dictionary
    key_verts = {}
    for loc in self.crv.b_pts:
        # Try hitting the grid from below first, then from above.
        res = bvh.ray_cast(ipmx * loc, -Z, 30)
        if res[0] != None:
            f = bme_pln.faces[res[2]]
            for v in f.verts:
                key_verts[v] = ipmx * loc
                v.select_set(True)
            continue
        res = bvh.ray_cast(ipmx * loc, Z, 30)
        if res[0] != None:
            f = bme_pln.faces[res[2]]
            for v in f.verts:
                key_verts[v] = ipmx * loc
                v.select_set(True)
            continue
    #bme_pln.to_mesh(Plane.data)
    #bme_pln.free()
    #return
    # KD-tree over the key (picked-point-adjacent) grid vertices.
    kdtree = KDTree(len(key_verts))
    for v in key_verts.keys():
        kdtree.insert(v.co, v.index)
    kdtree.balance()
    #raycast the shell if we can
    raycast_shell = False
    if 'Splint Shell' in bpy.data.objects:
        shell = bpy.data.objects.get('Splint Shell')
        bvh_shell = BVHTree.FromObject(shell, context.scene)
        mx_shell = shell.matrix_world
        imx_shell = mx_shell.inverted()
        Z_shell = imx_shell.to_3x3() * Z
        raycast_shell = True
    right_side = set()
    left_side = set()
    ray_casted = set()
    to_delete = set()
    # Assign a height (local Z) to every grid vertex:
    #   1. key verts take their picked point's height directly;
    #   2. verts within 0.5 use 1/d inverse-distance weighting;
    #   3. verts within 6 use (1/d)^2 weighting;
    #   4. remaining verts that ray-hit the shell use the 4 nearest keys.
    # NOTE(review): branch 4 uses bvh_shell/Z_shell unconditionally — if no
    # 'Splint Shell' object exists this raises NameError; raycast_shell is
    # never checked.  Confirm the shell always exists at this point.
    for v in bme_pln.verts:
        if v in key_verts:
            v.co[2] = key_verts[v][2]
            if v.co[1] > 0:
                left_side.add(v)
            else:
                right_side.add(v)
            continue
        results = kdtree.find_range(v.co, .5)
        if len(results):
            N = len(results)
            r_total = 0
            v_new = Vector((0,0,0))
            for res in results:
                r_total += 1/res[2]
                v_new += (1/res[2]) * key_verts[bme_pln.verts[res[1]]]
            v_new *= 1/r_total
            v.co[2] = v_new[2]
            if v.co[1] > 0:
                left_side.add(v)
            else:
                right_side.add(v)
            continue
        results = kdtree.find_range(v.co, 6)
        if len(results):
            N = len(results)
            r_total = 0
            v_new = Vector((0,0,0))
            for res in results:
                r_total += (1/res[2])**2
                v_new += ((1/res[2])**2) * key_verts[bme_pln.verts[res[1]]]
            v_new *= 1/r_total
            v.co[2] = v_new[2]
            if v.co[1] > 0:
                left_side.add(v)
            else:
                right_side.add(v)
            continue
        loc, no, index, d = bvh_shell.ray_cast(imx_shell * pmx * v.co, Z_shell)
        if loc:
            ray_casted.add(v)
            results = kdtree.find_n(v.co, 4)
            N = len(results)
            r_total = 0
            v_new = Vector((0,0,0))
            for res in results:
                r_total += (1/res[2])**2
                v_new += ((1/res[2])**2) * key_verts[bme_pln.verts[res[1]]]
            v_new *= 1/r_total
            v.co[2] = v_new[2]
            continue
    total_verts = ray_casted | left_side | right_side
    # Most-anterior (max local X) vertex on each side; used to trim the front.
    ant_left = max(left_side, key = lambda x: x.co[0])
    ant_right = max(right_side, key = lambda x: x.co[0])
    # First dilation ring: one-edge neighbours of the kept region.
    new_verts = set()
    dilation_verts = set()
    for v in total_verts:
        for ed in v.link_edges:
            v_new = ed.other_vert(v)
            if v_new in total_verts or v_new in new_verts:
                continue
            else:
                new_verts.add(v_new)
    print('adding %i new verts' % len(new_verts))
    total_verts.update(new_verts)
    dilation_verts.update(new_verts)
    #for v in ray_casted:
    #    if v.co[1] > 0:
    #        if v.co[0] > ant_left.co[0]:
    #            to_delete.add(v)
    #    else:
    #        if v.co[0] > ant_right.co[0]:
    #            to_delete.add(v)
    #print('added %i ray_casted' % len(ray_casted))
    #total_verts = ray_casted | left_side | right_side
    #total_verts.difference_update(to_delete)
    #new_verts = set()
    #for v in total_verts:
    #    for ed in v.link_edges:
    #        v_new = ed.other_vert(v)
    #        if v_new in total_verts: continue
    #        if v_new.co[1] > 0 and v_new.co[0] < ant_left.co[0]:
    #            if v in to_delete:
    #                new_verts.add(v)
    #        if v_new.co[1] <= 0 and v_new.co[0] < ant_right.co[0]:
    #            if v in to_delete:
    #                new_verts.add(v)
    #to_delete.difference_update(new_verts)
    #print('adding %i new verts' % len(new_verts))
    # Three more dilation rings outward from the previous ring.
    for j in range(0,3):
        newer_verts = set()
        for v in new_verts:
            for ed in v.link_edges:
                v_new = ed.other_vert(v)
                if v_new in total_verts or v_new in newer_verts:
                    continue
                newer_verts.add(v_new)
        total_verts.update(newer_verts)
        dilation_verts.update(newer_verts)
        new_verts = newer_verts
    to_delete = set(bme_pln.verts[:]) - total_verts
    #filter out anteior dilation
    # Dilated verts in front of the anterior markers are dropped; the rest get
    # heights from their 4 nearest key verts.
    for v in dilation_verts:
        if v.co[1] > 0 and v.co[0] > ant_left.co[0]:
            to_delete.add(v)
            continue
        if v.co[1] <= 0 and v.co[0] > ant_right.co[0]:
            to_delete.add(v)
            continue
        results = kdtree.find_n(v.co, 4)
        N = len(results)
        r_total = 0
        v_new = Vector((0,0,0))
        for res in results:
            r_total += (1/res[2])**2
            v_new += ((1/res[2])**2) * key_verts[bme_pln.verts[res[1]]]
        v_new *= 1/r_total
        v.co[2] = v_new[2]
    #filter out anteior dilation
    for v in ray_casted:
        if v.co[1] > 0 and v.co[0] > ant_left.co[0]:
            to_delete.add(v)
            continue
        if v.co[1] <= 0 and v.co[0] > ant_right.co[0]:
            to_delete.add(v)
            continue
    # context=1 deletes vertices (old bmesh.ops numeric enum).
    bmesh.ops.delete(bme_pln, geom = list(to_delete), context = 1)
    bme_pln.to_mesh(Plane.data)
    Plane.data.update()
    smod = Plane.modifiers.new('Smooth', type = 'SMOOTH')
    smod.iterations = 5
    smod.factor = 1
    self.splint.ops_string += 'Mark Posterior Cusps:'
class Particle_system:
    """Guide-following particle simulation rendered via per-frame dupliverts.

    Particles chase successive vertices of a guide curve mesh, are pushed by
    turbulence, avoid each other, and are constrained to slide along the
    ground mesh.  Each simulated frame bakes the particle state into a
    generator object that dupliverts an instance mesh (pre-2.8 Blender API).
    """

    def __init__(self, guide, ground, scale):
        # Tunables, all proportional to the scene scale.
        self.GUIDE_STRENGTH = 1.0 * scale
        self.TURBULENCE_FREQUENCY = 10 * scale
        self.TURBULENCE_STRENGTH = 1.0 * scale
        self.AVOID_THRESHOLD = 0.01 * scale
        self.AVOID_STRENGTH = 0.2 * scale
        self.frame = 0
        self.particles = []
        self.guide = guide
        # self.vertex_distance = (self.guide.data.vertices[0].co - self.guide.data.vertices[1].co).length_squared
        # KD-tree over guide vertices; used to find the closest guide vertex.
        self.guide_tree = KDTree(len(self.guide.data.vertices))
        for v in self.guide.data.vertices:
            self.guide_tree.insert(v.co, v.index)
        self.guide_tree.balance()
        self.ground = ground
        self.scale = scale
        # bpy.ops.mesh.primitive_ico_sphere_add(location=(0,0,0), size=0.01)
        # self.instance_obj = bpy.context.object
        # self.instance_obj = bpy.data.objects['Fleche']
        # Mesh to duplicate at each particle; picked by name from scene prop.
        self.instance_obj = bpy.data.objects[bpy.context.scene.ant_instance]
        self.instance_mesh = self.instance_obj.data
        # self.instance_mesh.materials.append(bpy.data.materials['noir'])

    def add_particles(self, particles_number):
        '''Add a new particle to the system'''
        for p in range(particles_number):
            # Start on a random interior guide vertex (ends are turnaround points).
            ind = randint(1, len(self.guide.data.vertices)-2)
            self.particles.append(Particle(ind, self.scale, self.guide.data.vertices[ind].co))

    def kill_particle(self, part):
        # Remove a particle from the simulation entirely.
        self.particles.remove(part)

    def create_tree(self):
        """Rebuild the particle-position KD-tree (used for mutual avoidance)."""
        self.parts_tree = KDTree(len(self.particles))
        for i, p in enumerate(self.particles):
            self.parts_tree.insert(p.location, i)
        self.parts_tree.balance()

    def step(self):
        '''Simulate next frame'''
        self.frame += 1
        self.create_tree()
        for part in self.particles:
            if part.active:
                previous_velocity = part.velocity.copy()
                # Guide vector: steer toward the current target guide vertex.
                guide_vector = self.guide.data.vertices[part.guide_index].co - part.location
                guide_vector = guide_vector.normalized() * self.GUIDE_STRENGTH
                # Turbulence vector: procedural noise sampled per particle.
                turbulence = noise.turbulence_vector(part.noise_seed+part.location, 2, False, 1, self.TURBULENCE_STRENGTH, self.TURBULENCE_FREQUENCY)
                # part.noise_seed += turbulence / 50
                # if part.velocity.length_squared < 0.0001:
                #     part.noise_seed = noise.random_unit_vector()
                part.noise_seed.z += 0.01
                # Boid-like separation from particles inside AVOID_THRESHOLD.
                too_close = self.parts_tree.find_range(part.location, self.AVOID_THRESHOLD)
                avoid_vector = Vector()
                for p in too_close:
                    other_vec = part.location - p[0]
                    # Ignore self / coincident particles to avoid div-by-zero.
                    if other_vec.length_squared < 0.0001:
                        continue
                    other_vec /= other_vec.length
                    avoid_vector += other_vec
                # avoid_vector.normalize()
                # avoid_vector -= part.velocity
                avoid_vector *= self.AVOID_STRENGTH
                # Velocity update: behaviour in [0.8, 0.9] blends turbulence
                # (wander) against the guide attraction (purpose).
                part.velocity += avoid_vector
                part.velocity += turbulence * (1.0-part.behaviour)
                part.velocity += guide_vector * part.behaviour
                # Clamp speed (acts as drag).
                if part.velocity.length > part.MAX_VEL:
                    part.velocity.length = part.MAX_VEL
                # Limit turn rate: blend back toward the previous velocity.
                rotation_scalar = previous_velocity.dot(part.velocity) * 0.5 + 0.5  # normalized 0-1
                # rotation_scalar **= 3
                if rotation_scalar > 0.1:
                    rotation_scalar = 0.1
                # rotation_scalar = 0
                part.velocity *= (rotation_scalar)
                part.velocity += previous_velocity * (1-rotation_scalar)
                # Constrain the particle onto the ground surface.
                closest = self.ground.closest_point_on_mesh(part.location)
                part.location = closest[0]
                # Project velocity into the ground's tangent plane, keeping speed.
                vel_norm = part.velocity.length
                inter = part.velocity.cross(closest[1])
                part.velocity = closest[1].cross(inter)
                part.velocity.length = vel_norm
                # print(part.velocity)
                # Advance position.
                part.location += part.velocity
                # Random walk of behaviour, clamped to [0.8, 0.9].
                part.behaviour += random()*0.1-0.05
                if part.behaviour < 0.8:
                    part.behaviour = 0.8
                if part.behaviour > 0.9:
                    part.behaviour = 0.9
                # Advance the goal to the next guide vertex once close enough
                # (nearest guide vertex index is within 1 of the target).
                pt, ind, dist = self.guide_tree.find(part.location)
                if fabs(ind - part.guide_index) < 2:
                    part.guide_index += part.direction
                # if self.frame % 20 == 0:
                #     part.guide_index += part.direction
                # if next_point_distance.length_squared < self.vertex_distance:
                #     part.guide_index += 1
                # Bounce: reverse direction at either end of the guide.
                if part.guide_index >= len(self.guide.data.vertices)-1 or part.guide_index == 1:
                    # part.active = False
                    # self.kill_particle(part)
                    part.direction = -part.direction
                    part.guide_index += part.direction
        self.create_frame(self.frame)

    def create_frame(self, frame):
        '''
        For each frame:
        - create a new instance of the object to duplicate (eg. a sphere)
        - get a list of vertices from particles' positions
        - create a new generator objects, use the vertex list to generate mesh
          - this object will be used for duplication
        - parent the object to duplicate to the generator object
        - animate the visibility of both objects
        '''
        instance_obj_frame = bpy.data.objects.new('instance_{:05}'.format(frame), self.instance_mesh)
        bpy.context.scene.objects.link(instance_obj_frame)
        vertices = [(p.location, p.velocity) for p in self.particles]
        generator_mesh = bpy.data.meshes.new('generator_{:05}'.format(frame))
        # generator_mesh.from_pydata(vertices, [], [])
        ## Track to camera
        # cam = bpy.context.scene.camera
        # One mesh vertex per particle; normal carries the velocity so
        # dupliverts rotation can orient the instances.
        for v in vertices:
            generator_mesh.vertices.add(1)
            generator_mesh.vertices[-1].co = v[0]
            generator_mesh.vertices[-1].normal = v[1]
            # generator_mesh.vertices[-1].normal = cam.location - v
        generator_obj = bpy.data.objects.new('generator_{:05}'.format(frame), generator_mesh)
        bpy.context.scene.objects.link(generator_obj)
        instance_obj_frame.parent = generator_obj
        generator_obj.dupli_type = "VERTS"
        generator_obj.use_dupli_vertices_rotation = True
        # Visibility animation: visible only on its own frame — keyframe the
        # current (visible) state at `frame`, then hide and keyframe at frame±1.
        generator_obj.keyframe_insert('hide', frame=frame)
        generator_obj.keyframe_insert('hide_render', frame=frame)
        generator_obj.hide = True
        generator_obj.hide_render = True
        generator_obj.keyframe_insert('hide', frame=frame+1)
        generator_obj.keyframe_insert('hide_render', frame=frame+1)
        generator_obj.keyframe_insert('hide', frame=frame-1)
        generator_obj.keyframe_insert('hide_render', frame=frame-1)
def sample_points_poissondisk(self, number_of_points, init_factor=5, approximate=False):
    """Yield (point, normal) pairs Poisson-disk-distributed over the mesh.

    Oversamples the surface uniformly by ``init_factor`` and then applies
    weighted sample elimination (Yuksel 2015) until ``number_of_points``
    remain.  With ``approximate=True``, bulk-removes samples in batches first
    for speed, then finishes exactly.

    This is a generator: invalid input logs an error and yields nothing.
    """
    logger = logging.getLogger("SamplePointsPoissonDisk")
    if number_of_points < 1:
        logger.error("zero or negative number of points")
        return
    # BUGFIX: was `if not self.triangles.any:` — `.any` without parentheses is
    # a bound-method object, which is always truthy, so the guard never fired.
    if not self.triangles.any():
        logger.error("input mesh has no triangles")
        return
    if init_factor < 1:
        logger.error(
            "please provide either a point cloud or an init_factor greater than 0"
        )
        # BUGFIX: previously fell through and sampled with the invalid factor;
        # now bails out like the other guards.
        return
    all_points, normals = self.sample_points_uniformlyImpl(
        init_factor * number_of_points)
    # Set-up sample elimination
    alpha = 8  # constant defined in paper
    beta = 0.5  # constant defined in paper
    gamma = 1.5  # constant defined in paper
    pcl_size = len(all_points)
    ratio = number_of_points / pcl_size
    # r_max: radius of the target Poisson-disk spacing; r_min shrinks with the
    # oversampling ratio (eq. from the paper).
    r_max = 2 * sqrt(
        (self.surface_area / number_of_points) / (2 * sqrt(3.0)))
    r_min = r_max * beta * (1 - pow(ratio, gamma))
    deleted = [False] * pcl_size
    kd = KDTree(len(all_points))
    for i, v in enumerate(all_points):
        kd.insert(v, i)
    kd.balance()

    def weight_fcn(d):
        # Per-neighbour weight: clamp distance below r_min, falloff to 0 at r_max.
        if d < r_min:
            d = r_min
        return pow(1 - d / r_max, alpha)

    def compute_point_weight(pidx0):
        # Sum the weights of all live neighbours within r_max of point pidx0.
        nbs = kd.find_range(all_points[pidx0], r_max)
        weight = 0
        for neighbour, nb_idx, nb_dist in nbs:
            # only count weights if not the same point and not deleted
            if nb_idx == pidx0:
                continue
            if deleted[nb_idx]:
                continue
            weight += weight_fcn(nb_dist)
        return weight

    # init weights and priority queue (priority shares the QueueEntry objects
    # with queue, so updating queue[i].weight re-prioritizes both)
    queue = []
    for idx in range(pcl_size):
        weight = compute_point_weight(idx)
        queue.append(QueueEntry(idx, weight))
    priority = copy.copy(queue)
    current_number_of_points = pcl_size
    if approximate:
        # Bulk phase: drop the `step` highest-weight samples per pass until only
        # a safety margin above the target remains, then finish exactly below.
        first_slice = number_of_points + number_of_points * int(
            init_factor / 2)
        step = init_factor * 2
        while current_number_of_points > first_slice:
            priority.sort(key=lambda q: q.weight)
            for p in priority[-step:]:
                deleted[p.idx] = True
            for p in priority[-step:]:
                # Recompute weights of every neighbour of the removed samples.
                nbs = kd.find_range(all_points[p.idx], r_max)
                for nb, nb_idx, nb_dist in nbs:
                    queue[nb_idx].weight = compute_point_weight(nb_idx)
            priority = priority[:-step]
            current_number_of_points -= step
    # Exact phase: remove the single highest-weight sample until the target count.
    while current_number_of_points > number_of_points:
        priority.sort(key=lambda q: q.weight)
        last = priority.pop()
        weight, pidx = last.weight, last.idx
        deleted[pidx] = True
        current_number_of_points -= 1
        # update weights of the removed sample's neighbours
        nbs = kd.find_range(all_points[pidx], r_max)
        for nb, nb_idx, nb_dist in nbs:
            queue[nb_idx].weight = compute_point_weight(nb_idx)
    for i, point in enumerate(all_points):
        if deleted[i]:
            continue
        yield point, normals[i]