Example #1
    def filter_point_cloud(self, target_pc_kdtree: KDTree,
                           initial_alignment: Matrix,
                           distance_threshold: float) -> np.array:
        """Get a filtered version of the point cloud. The filtered cloud is also stored for later use.
        Optionally apply an initial alignment.

        Arguments:
            target_pc_kdtree {KDTree} -- KDTree of the point cloud to align to
            initial_alignment {Matrix} -- initial manual alignment, usually from the UI control empty
            distance_threshold {float} -- maximum allowed distance from ground truth

        Returns:
            np.array -- the filtered point cloud
        """
        logger.info("Starting reconstructed point cloud filtering")
        src = np.copy(self.vertices)
        #
        # initial alignment
        src = PointCloud.transform(src, initial_alignment)
        #
        # filter distant points
        self._discard_vertices.clear()
        self._filter_distance = distance_threshold
        to_delete = []
        for i, v in enumerate(src):
            if target_pc_kdtree.find(v)[2] > distance_threshold:
                to_delete.append(i)
        src = np.delete(src, to_delete, axis=0)
        self._discard_vertices = to_delete
        logger.info("Reconstructed points filtered. Discarded %i points!",
                    len(to_delete))
        if src.shape[0] == 0:
            logger.warning("Point cloud contains 0 points!")
        return self.vertices_filtered
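
A minimal sketch (not part of the original) of how the `target_pc_kdtree` argument used above could be built with Blender's `mathutils.kdtree` API; the name `target_points` is an illustrative assumption:

    from mathutils.kdtree import KDTree

    def build_target_kdtree(target_points):
        # Hypothetical helper: build a balanced KDTree from an iterable of 3D points.
        kdtree = KDTree(len(target_points))
        for i, co in enumerate(target_points):
            kdtree.insert(co, i)   # store each point with its index
        kdtree.balance()           # required before find()/find_range()
        # find() returns a (co, index, distance) tuple, as used in the filter above.
        return kdtree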
Example #2
    def init_guess(surface, points_from, samples=50):
        u_min = surface.get_u_min()
        u_max = surface.get_u_max()
        v_min = surface.get_v_min()
        v_max = surface.get_v_max()
        us = np.linspace(u_min, u_max, num=samples)
        vs = np.linspace(v_min, v_max, num=samples)
        us, vs = np.meshgrid(us, vs)
        us = us.flatten()
        vs = vs.flatten()

        points = surface.evaluate_array(us, vs).tolist()

        kdt = KDTree(len(us))
        for i, v in enumerate(points):
            kdt.insert(v, i)
        kdt.balance()

        us_out = []
        vs_out = []
        nearest_out = []
        for point_from in points_from:
            nearest, i, distance = kdt.find(point_from)
            us_out.append(us[i])
            vs_out.append(vs[i])
            nearest_out.append(tuple(nearest))

        return us_out, vs_out, nearest_out
Example #3
def _check_min_distance(v_new, vs_old, min_r):
    kdt = KDTree(len(vs_old))
    for i, v in enumerate(vs_old):
        kdt.insert(v, i)
    kdt.balance()
    nearest, idx, dist = kdt.find(v_new)
    if dist is None:
        return True
    return (dist >= min_r)
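
A hedged usage sketch of `_check_min_distance` in a rejection-sampling loop; `random_point` is a hypothetical callable producing candidate 3D coordinates and is not part of the original code:

    def sample_with_min_distance(random_point, count, min_r, max_attempts=10000):
        # Sketch only: keep candidates that stay at least min_r away from accepted points.
        accepted = []
        attempts = 0
        while len(accepted) < count and attempts < max_attempts:
            candidate = random_point()
            if _check_min_distance(candidate, accepted, min_r):
                accepted.append(candidate)
            attempts += 1
        return accepted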
Example #4
    def init_guess(curve, points_from, samples=50):
        u_min, u_max = curve.get_u_bounds()
        us = np.linspace(u_min, u_max, num=samples)

        points = curve.evaluate_array(us).tolist()
        #print("P:", points)

        kdt = KDTree(len(us))
        for i, v in enumerate(points):
            kdt.insert(v, i)
        kdt.balance()

        us_out = []
        nearest_out = []
        for point_from in points_from:
            nearest, i, distance = kdt.find(point_from)
            us_out.append(us[i])
            nearest_out.append(tuple(nearest))

        return us_out, nearest_out
Example #5
    def to_point(self, amplitude, coefficient, vertex, centers, direction):
        vertex = Vector(vertex)
        n = len(centers)
        if self.point_mode == 'AVG' or n <= 1:
            vectors = []
            for center in centers:
                vector = Vector(center) - vertex
                vector = self.falloff(amplitude, coefficient,
                                      vector.length) * vector.normalized()
                vectors.append(vector)
            result = get_avg_vector(vectors)
            return result.length, result.normalized()
        else:
            kdt = KDTree(n)
            for i, center in enumerate(centers):
                kdt.insert(Vector(center), i)
            kdt.balance()
            nearest_co, nearest_idx, nearest_distance = kdt.find(vertex)
            vector = nearest_co - vertex
            coeff = self.falloff(amplitude, coefficient, nearest_distance)
            return coeff, vector.normalized()
Example #6
    def invoke(self, context, event):
        self.ob = context.active_object.data
        self.bm = bmesh.new()
        self.bm.from_mesh(self.ob)
        self.bm.verts.ensure_lookup_table()

        links = []
        for vert in self.bm.verts:
            l = []
            links.append(l)
            for v in n_ring(vert, 100):
                l.append(v.index)

        immediate_edges = [len(vert.link_edges) for vert in self.bm.verts]

        bmesh.ops.triangulate(self.bm, faces=self.bm.faces)
        self.bm.verts.ensure_lookup_table()

        co = [tuple(v.co) for v in self.bm.verts]
        t = [tuple(v.index for v in f.verts) for f in self.bm.faces]

        kd = KDTree(len(self.bm.verts))
        for vert in self.bm.verts:
            kd.insert(vert.co, vert.index)
        kd.balance()

        x_mirr_table = [kd.find((vert.co[0] * -1, vert.co[1], vert.co[2]))[1] for vert in self.bm.verts]

        self.engine = softwrap_core.ShapeEngine(co, t, links, co, t, immediate_edges, x_mirr_table)
        self.engine.random_co()


        self.engine.add_pin(co=(10, 0, 0), vert_index=0, stiffness=50, twisty=False, x_mirr=True)

        context.window_manager.modal_handler_add(self)
        return {"RUNNING_MODAL"}
Example #7
class BoundaryAlignedRemesher:
    def __init__(self, obj):
        self.obj = obj
        mode = obj.mode
        self.edit_mode = mode == 'EDIT'

        # hack to update the mesh data
        bpy.ops.object.mode_set(mode='OBJECT')
        bpy.ops.object.mode_set(mode=mode)

        self.bm = bmesh.new()

        self.bm.from_mesh(obj.data)
        self.bm1 = None

        if self.edit_mode:
            self.bm1 = self.bm.copy()
            remove, remove1 = [], []

            for vert in self.bm.verts:
                if all(not f.select for f in vert.link_faces):
                    remove.append(vert)
            for vert in remove:
                self.bm.verts.remove(vert)

            for vert in self.bm1.verts:
                if all(f.select for f in vert.link_faces):
                    remove1.append(vert)
            for vert in remove1:
                self.bm1.verts.remove(vert)

            remove1 = [f for f in self.bm1.faces if f.select]
            for face in remove1:
                self.bm1.faces.remove(face)

        self.bvh = BVHTree.FromBMesh(self.bm)

        # Boundary_data is a list of directions and locations of boundaries.
        # This data will serve as guidance for the alignment
        self.boundary_data = []

        # Fill the data using boundary edges as source of directional data.
        for edge in self.bm.edges:
            if edge.is_boundary:
                vec = (edge.verts[0].co - edge.verts[1].co).normalized()
                center = (edge.verts[0].co + edge.verts[1].co) / 2

                self.boundary_data.append((center, vec))

        # Create a Kd Tree to easily locate the nearest boundary point
        self.boundary_kd_tree = KDTree(len(self.boundary_data))

        for index, (center, vec) in enumerate(self.boundary_data):
            self.boundary_kd_tree.insert(center, index)

        self.boundary_kd_tree.balance()

    def nearest_boundary_vector(self, location):
        """ Gets the nearest boundary direction """
        location, index, dist = self.boundary_kd_tree.find(location)
        location, vec = self.boundary_data[index]
        return vec

    def enforce_edge_length(self, edge_length=0.05, bias=0.333):
        """ Replicates dyntopo behavior """
        upper_length = edge_length + edge_length * bias
        lower_length = edge_length - edge_length * bias

        # Subdivide Long edges
        subdivide = []
        for edge in self.bm.edges:
            if edge.calc_length() > upper_length:
                subdivide.append(edge)

        bmesh.ops.subdivide_edges(self.bm, edges=subdivide, cuts=1)
        bmesh.ops.triangulate(self.bm, faces=self.bm.faces)

        if self.edit_mode:
            subdivide = []
            for edge in self.bm1.edges:
                if edge.select and edge.calc_length() > upper_length:
                    subdivide.append(edge)

            bmesh.ops.subdivide_edges(self.bm1, edges=subdivide, cuts=1)

        # Remove verts with fewer than 5 edges; this helps improve mesh quality
        dissolve_verts = []
        for vert in self.bm.verts:
            if len(vert.link_edges) < 5:
                if not vert.is_boundary:
                    dissolve_verts.append(vert)

        bmesh.ops.dissolve_verts(self.bm, verts=dissolve_verts)
        bmesh.ops.triangulate(self.bm, faces=self.bm.faces)

        # Collapse short edges but ignore boundaries and never collapse two chained edges
        lock_verts = set(vert for vert in self.bm.verts if vert.is_boundary)
        collapse = []

        for edge in self.bm.edges:
            if edge.calc_length() < lower_length and not edge.is_boundary:
                verts = set(edge.verts)
                if verts & lock_verts:
                    continue
                collapse.append(edge)
                lock_verts |= verts

        bmesh.ops.collapse(self.bm, edges=collapse)
        bmesh.ops.beautify_fill(self.bm, faces=self.bm.faces, method="ANGLE")

    def align_verts(self, rule=(-1, -2, -3, -4)):
        # Align verts to the nearest boundary by averaging neighbor vert locations
        # selected by a specific rule.

        # Rules work by sorting edges by angle relative to the boundary.
        # E.g. (0, 1) averages the biggest-angle and second-biggest-angle edges.
        # E.g. (-1, -2, -3, -4) averages the four smallest-angle edges.
        for vert in self.bm.verts:
            if not vert.is_boundary:

                # min_edge = min(vert.link_edges, key=lambda e: e.calc_length())
                # other = min_edge.other_vert(vert)
                # vec = other.co - vert.co
                # vert.co -= vec * 0.1
                #

                vec = self.nearest_boundary_vector(vert.co)
                neighbor_locations = [
                    edge.other_vert(vert).co for edge in vert.link_edges
                ]
                best_locations = sorted(
                    neighbor_locations,
                    key=lambda n_loc: abs(
                        (n_loc - vert.co).normalized().dot(vec)))
                co = vert.co.copy()
                le = len(vert.link_edges)
                for i in rule:
                    co += best_locations[i % le]
                co /= len(rule) + 1
                co -= vert.co
                co -= co.dot(vert.normal) * vert.normal
                vert.co += co

        self.reproject()

    def reproject(self):
        """ Recovers original shape """
        for vert in self.bm.verts:
            if vert.is_boundary:
                continue
            location, normal, index, dist = self.bvh.find_nearest(vert.co)
            if location:
                vert.co = location

    def remesh(self, edge_length=0.05, iterations=30, quads=True):
        """ Coordenates remeshing """

        if self.edit_mode:
            bpy.ops.object.mode_set(mode='OBJECT')

        if quads:
            rule = (-1, -2, 0, 1)
        else:
            rule = (0, 1, 2, 3)

        for _ in range(iterations):
            self.enforce_edge_length(edge_length=edge_length)
            self.align_verts(rule=rule)
            self.reproject()

        if quads:
            bmesh.ops.join_triangles(self.bm,
                                     faces=self.bm.faces,
                                     angle_face_threshold=3.14,
                                     angle_shape_threshold=3.14)

        for vert in self.bm.verts:
            vert.select = True
        for face in self.bm.faces:
            face.select = True

        if self.bm1:
            self.bm1.to_mesh(self.obj.data)
            self.bm.from_mesh(self.obj.data)

            bmesh.ops.remove_doubles(
                self.bm,
                verts=[v for v in self.bm.verts if v.select],
                dist=0.00001)

        self.bm.to_mesh(self.obj.data)

        if self.edit_mode:
            bpy.ops.object.mode_set(mode='EDIT')
Example #8
class BoundaryAlignedRemesher:
    def get_hold_edges(self, obj):
        sc = bpy.context.scene
        props = sc.ba_remesh

        split_edge_l = []

        # create layer
        if props.use_edge_bevel_weight:
            if self.bm.edges.layers.bevel_weight:
                bevelweight_Layer = self.bm.edges.layers.bevel_weight.verify()

        if props.use_edge_crease:
            if self.bm.edges.layers.crease:
                crease_Layer = self.bm.edges.layers.crease.verify()

        if props.use_edge_freestyle:
            if self.bm.edges.layers.freestyle:
                freestyle_Layer = self.bm.edges.layers.freestyle.verify()

        # find edge
        for edge in self.bm.edges:
            # Selection
            if props.use_edge_select:
                if edge.select:
                    split_edge_l.append(edge)

            # Angle
            if props.use_edge_angle:
                try:
                    if math.degrees(
                            edge.calc_face_angle()) >= props.edge_angle:
                        split_edge_l.append(edge)
                except:
                    pass

            # Seam
            if props.use_edge_seam:
                if edge.seam:
                    split_edge_l.append(edge)

            # Sharp
            if props.use_edge_sharp:
                if not edge.smooth:  # sharp
                    split_edge_l.append(edge)

            # Bevel weight
            if props.use_edge_bevel_weight:
                if self.bm.edges.layers.bevel_weight:
                    if edge[bevelweight_Layer]:
                        split_edge_l.append(edge)

            # Crease
            if props.use_edge_crease:
                if self.bm.edges.layers.crease:
                    if edge[crease_Layer]:
                        split_edge_l.append(edge)

            # Freestyle
            if props.use_edge_freestyle:
                if self.bm.edges.layers.freestyle:
                    if edge[freestyle_Layer]:
                        split_edge_l.append(edge)

        # Remove duplicates
        new_split_edge_l = []
        for i in split_edge_l:
            if i not in new_split_edge_l:
                new_split_edge_l.append(i)

        return new_split_edge_l

    def split_feature_edges(self, obj):
        new_split_edge_l = self.get_hold_edges(obj)

        if new_split_edge_l:
            bmesh.ops.split_edges(self.bm, edges=new_split_edge_l)

    def __init__(self, obj):
        self.obj = obj

        self.bm = bmesh.new()
        self.bm.from_mesh(obj.data)
        self.bvh = BVHTree.FromBMesh(self.bm)

        # Hold edges
        self.split_feature_edges(obj)

        # Guides from open (boundary) edges
        # Boundary_data is a list of directions and locations of boundaries.
        # This data will serve as guidance for the alignment
        self.boundary_data = []

        # Fill the data using boundary edges as source of directional data.
        for edge in self.bm.edges:
            if edge.is_boundary:
                vec = (edge.verts[0].co - edge.verts[1].co).normalized()
                center = (edge.verts[0].co + edge.verts[1].co) / 2

                self.boundary_data.append((center, vec))

        # Create a Kd Tree to easily locate the nearest boundary point
        self.boundary_kd_tree = KDTree(len(self.boundary_data))

        for index, (center, vec) in enumerate(self.boundary_data):
            self.boundary_kd_tree.insert(center, index)

        self.boundary_kd_tree.balance()

    def nearest_boundary_vector(self, location):
        """ Gets the nearest boundary direction """
        location, index, dist = self.boundary_kd_tree.find(location)
        location, vec = self.boundary_data[index]
        return vec

    def enforce_edge_length(self, edge_length=0.05, bias=0.333):
        """ Replicates dyntopo behaviour """
        upper_length = edge_length + edge_length * bias
        lower_length = edge_length - edge_length * bias

        # Subdivide Long edges
        subdivide = []
        for edge in self.bm.edges:
            if edge.calc_length() > upper_length:
                subdivide.append(edge)

        bmesh.ops.subdivide_edges(self.bm, edges=subdivide, cuts=1)
        bmesh.ops.triangulate(self.bm, faces=self.bm.faces)

        # Remove verts with fewer than 5 edges; this helps improve mesh quality
        dissolve_verts = []
        for vert in self.bm.verts:
            if len(vert.link_edges) < 5:
                if not vert.is_boundary:
                    dissolve_verts.append(vert)

        bmesh.ops.dissolve_verts(self.bm, verts=dissolve_verts)
        bmesh.ops.triangulate(self.bm, faces=self.bm.faces)

        # Lock the outer (boundary) edges
        # Collapse short edges but ignore boundaries and never collapse two chained edges
        lock_verts = set(vert for vert in self.bm.verts if vert.is_boundary)
        collapse = []

        for edge in self.bm.edges:
            if edge.calc_length() < lower_length and not edge.is_boundary:
                verts = set(edge.verts)
                if verts & lock_verts:
                    continue
                collapse.append(edge)
                lock_verts |= verts

        bmesh.ops.collapse(self.bm, edges=collapse)
        bmesh.ops.beautify_fill(self.bm, faces=self.bm.faces, method="ANGLE")

    def align_verts(self, rule=(-1, -2, -3, -4)):
        # Align verts to the nearest boundary by averaging neighbor vert locations
        # selected by a specific rule.

        # Rules work by sorting edges by angle relative to the boundary.
        # E.g. (0, 1) averages the biggest-angle and second-biggest-angle edges.
        # E.g. (-1, -2, -3, -4) averages the four smallest-angle edges.
        for vert in self.bm.verts:
            if not vert.is_boundary:
                vec = self.nearest_boundary_vector(vert.co)
                neighbor_locations = [
                    edge.other_vert(vert).co for edge in vert.link_edges
                ]
                best_locations = sorted(
                    neighbor_locations,
                    key=lambda n_loc: abs(
                        (n_loc - vert.co).normalized().dot(vec)))
                co = vert.co.copy()
                le = len(vert.link_edges)
                for i in rule:
                    co += best_locations[i % le]
                co /= len(rule) + 1
                co -= vert.co
                co -= co.dot(vert.normal) * vert.normal
                vert.co += co

    def reproject(self):
        """ Recovers original shape """
        for vert in self.bm.verts:
            location, normal, index, dist = self.bvh.find_nearest(vert.co)
            if location:
                vert.co = location

    def remesh(self, edge_length=0.05, iterations=30, quads=True):
        """ Coordenates remeshing """
        if quads:
            rule = (-1, -2, 0, 1)
        else:
            rule = (0, 1, 2, 3)

        for _ in range(iterations):
            self.enforce_edge_length(edge_length=edge_length)
            try:
                self.align_verts(rule=rule)
            except:
                pass
            self.reproject()

        if quads:
            bmesh.ops.join_triangles(self.bm,
                                     faces=self.bm.faces,
                                     angle_face_threshold=3.14,
                                     angle_shape_threshold=3.14)

        bmesh.ops.remove_doubles(self.bm, verts=self.bm.verts, dist=0.001)

        return self.bm
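
A possible driver for the remesher above (a sketch under the assumption that it runs inside Blender with a mesh object active; this variant of `remesh` returns the working BMesh, so the result is written back explicitly):

    import bpy

    def run_boundary_aligned_remesh(edge_length=0.05, iterations=10):
        # Sketch only: remesh the active object and write the result back to its mesh.
        obj = bpy.context.active_object
        remesher = BoundaryAlignedRemesher(obj)
        bm = remesher.remesh(edge_length=edge_length, iterations=iterations, quads=True)
        bm.to_mesh(obj.data)
        obj.data.update()
        bm.free()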
Example #9
    def execute(self, context):
        source = context.object
        try:
            sdata = source.data
            sgeom = sdata.polygons
        except AttributeError:
            self.report(
                {'ERROR_INVALID_INPUT'},
                "The active object needs to have a mesh data block.",
            )
            return {'CANCELLED'}

        # get comparison values for source
        scvals = get_vecs(sgeom, 'center')
        if self.in_wrld_crds:
            # transform source to world coordinates
            mat = np.array(source.matrix_world)
            scvals = transf_pts(mat, scvals)

        # build KD-Tree from comparison values
        kd = KDTree(len(sgeom))
        for i, v in enumerate(scvals):
            kd.insert(v, i)
        kd.balance()

        # get values to transfer from source
        stvals = get_scalars(sgeom, 'material_index', np.int8)

        all_meshless = True  # for error-reporting
        for target in context.selected_objects:
            if target is source:
                continue

            try:
                tdata = target.data
                tgeom = tdata.polygons
                all_meshless = False
            except AttributeError:
                continue

            # get comparison values for target
            tcvals = get_vecs(tgeom, 'center')

            if self.in_wrld_crds:
                # transform target to world coordinates
                mat = np.array(target.matrix_world)
                tcvals = transf_pts(mat, tcvals)

            ttvals = np.empty(len(tgeom), dtype=np.int32)

            # for every comparison point in target, find closest in
            # source and copy over its transfer value
            for ti, tv in enumerate(tcvals):
                _, si, _ = kd.find(tv)
                ttvals[ti] = stvals[si]

            # set values to transfer to target
            set_vals(tgeom, ttvals, 'material_index')

            tmats = tdata.materials
            if self.assign_mat:
                # transfer assigned materials
                for i, m in enumerate(sdata.materials):
                    if i < len(tmats):
                        tmats[i] = m
                    else:
                        tmats.append(m)

        if all_meshless:
            self.report(
                {'ERROR_INVALID_INPUT'},
                "No selected target object has a mesh data block.",
            )
            return {'CANCELLED'}
        return {'FINISHED'}
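
The helpers `get_vecs`, `get_scalars`, `set_vals`, and `transf_pts` used above are not shown in this example. A minimal sketch of what they might look like follows; these bodies are assumptions, not the original implementations:

    import numpy as np

    def get_vecs(geom, attr):
        # Sketch: collect a per-item vector attribute (e.g. 'center') as an (N, 3) array.
        return np.array([tuple(getattr(item, attr)) for item in geom], dtype=np.float64)

    def get_scalars(geom, attr, dtype):
        # Sketch: collect a per-item scalar attribute (e.g. 'material_index').
        return np.array([getattr(item, attr) for item in geom], dtype=dtype)

    def set_vals(geom, vals, attr):
        # Sketch: write a per-item scalar attribute back to the geometry.
        for item, val in zip(geom, vals):
            setattr(item, attr, int(val))

    def transf_pts(mat, pts):
        # Sketch: apply a 4x4 transform matrix to an (N, 3) array of points.
        homo = np.column_stack((pts, np.ones(len(pts))))
        return (homo @ mat.T)[:, :3]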
Example #10
class Particle_system:
    
    
    def __init__(self, guide, ground, scale):
        
        self.GUIDE_STRENGTH = 1.0 * scale

        self.TURBULENCE_FREQUENCY = 10 * scale
        self.TURBULENCE_STRENGTH = 1.0 * scale

        self.AVOID_THRESHOLD = 0.01 * scale
        self.AVOID_STRENGTH = 0.2 * scale
        
        self.frame = 0
        
        self.particles = []
        self.guide = guide
#        self.vertex_distance = (self.guide.data.vertices[0].co - self.guide.data.vertices[1].co).length_squared
        
        self.guide_tree = KDTree(len(self.guide.data.vertices))
        for v in self.guide.data.vertices:
            self.guide_tree.insert(v.co, v.index)
        self.guide_tree.balance()
        
        self.ground = ground
        self.scale = scale
        
#        bpy.ops.mesh.primitive_ico_sphere_add(location=(0,0,0), size=0.01)
#        self.instance_obj = bpy.context.object
#        self.instance_obj = bpy.data.objects['Fleche']
        self.instance_obj = bpy.data.objects[bpy.context.scene.ant_instance]
        self.instance_mesh = self.instance_obj.data
#        self.instance_mesh.materials.append(bpy.data.materials['noir'])
        
        
    def add_particles(self, particles_number):
        '''Add a new particle to the system'''
        for p in range(particles_number):
            ind = randint(1, len(self.guide.data.vertices)-2)
            self.particles.append(Particle(ind, self.scale, self.guide.data.vertices[ind].co))
    
    def kill_particle(self, part):
        self.particles.remove(part)
    
    def create_tree(self):
        self.parts_tree = KDTree(len(self.particles))
        for i, p in enumerate(self.particles):
            self.parts_tree.insert(p.location, i)
        self.parts_tree.balance()
        
    
    def step(self):
        '''Simulate next frame'''
        self.frame += 1
        self.create_tree()
        
        for part in self.particles:
            if part.active:
                
                previous_velocity = part.velocity.copy()
                
                #guide vector
                guide_vector = self.guide.data.vertices[part.guide_index].co - part.location
                guide_vector = guide_vector.normalized() * self.GUIDE_STRENGTH

                #turbulence vector
                turbulence = noise.turbulence_vector(part.noise_seed+part.location, 2, False, 1, self.TURBULENCE_STRENGTH, self.TURBULENCE_FREQUENCY)
#                part.noise_seed += turbulence / 50
#                if part.velocity.length_squared < 0.0001:
#                    part.noise_seed = noise.random_unit_vector()
                part.noise_seed.z += 0.01
                
                #boid-like vector
                too_close = self.parts_tree.find_range(part.location, self.AVOID_THRESHOLD)
                avoid_vector = Vector()
                for p in too_close:
                    
                    other_vec = part.location - p[0]
                    if other_vec.length_squared < 0.0001:
                        continue
                    other_vec /= other_vec.length
                    avoid_vector += other_vec
                    
#                avoid_vector.normalize()
#                avoid_vector -= part.velocity
                avoid_vector *= self.AVOID_STRENGTH
                
                #velocity change
                
                part.velocity += avoid_vector
                
                part.velocity += turbulence * (1.0-part.behaviour)
                part.velocity += guide_vector * part.behaviour
                    
                # limit velocity (drag)
                if part.velocity.length > part.MAX_VEL:
                    part.velocity.length = part.MAX_VEL
                
                # limit rotation
                rotation_scalar = previous_velocity.dot(part.velocity) * 0.5 + 0.5 # normalized 0-1
#                rotation_scalar **= 3
                if rotation_scalar > 0.1:
                    rotation_scalar = 0.1
#                rotation_scalar = 0
                part.velocity *= (rotation_scalar)
                part.velocity += previous_velocity * (1-rotation_scalar)
                
                # project the particle onto the ground
                closest = self.ground.closest_point_on_mesh(part.location)
                part.location = closest[0]
                # velocity parallel to the ground
                vel_norm = part.velocity.length
                inter = part.velocity.cross(closest[1])
                part.velocity = closest[1].cross(inter)
                part.velocity.length = vel_norm
#                print(part.velocity)
                
                # SET NEW LOCATION
                part.location += part.velocity
                    
                # behaviour change
                part.behaviour += random()*0.1-0.05
                if part.behaviour < 0.8:
                    part.behaviour = 0.8
                if part.behaviour > 0.9:
                    part.behaviour = 0.9
                    
#                # set goal to next vertex if close enough
                pt, ind, dist = self.guide_tree.find(part.location)
                if fabs(ind - part.guide_index) < 2:
                    part.guide_index += part.direction
#                if self.frame % 20 == 0:
#                    part.guide_index += part.direction
                
#                if next_point_distance.length_squared < self.vertex_distance:
#                    part.guide_index += 1
                    
                # switch direction if end reached
                if part.guide_index >= len(self.guide.data.vertices)-1 or part.guide_index == 1:
#                    part.active = False
#                    self.kill_particle(part)
                    part.direction = -part.direction
                    part.guide_index += part.direction

        self.create_frame(self.frame)
    
    def create_frame(self, frame):
        '''
        For each frame:
            - create a new instance of the object to duplicate (eg. a sphere)
            - get a list of vertices from particles' positions
            - create a new generator objects, use the vertex list to generate mesh
                - this object will be used for duplication
            - parent the object to duplicate to the generator object
            - animate the visibility of both objects
            '''
        
        instance_obj_frame = bpy.data.objects.new('instance_{:05}'.format(frame), self.instance_mesh)
        bpy.context.scene.objects.link(instance_obj_frame)
        
    
        vertices = [(p.location, p.velocity) for p in self.particles]
        generator_mesh = bpy.data.meshes.new('generator_{:05}'.format(frame))
        
#        generator_mesh.from_pydata(vertices, [], [])
        
        ## Track to camera
#        cam = bpy.context.scene.camera
        for v in vertices:
            generator_mesh.vertices.add(1)
            generator_mesh.vertices[-1].co = v[0]
            generator_mesh.vertices[-1].normal = v[1]
#            generator_mesh.vertices[-1].normal = cam.location - v
        
        generator_obj = bpy.data.objects.new('generator_{:05}'.format(frame), generator_mesh)
        bpy.context.scene.objects.link(generator_obj)
        
        instance_obj_frame.parent = generator_obj
        generator_obj.dupli_type = "VERTS"
        generator_obj.use_dupli_vertices_rotation = True
        
        #anim
        generator_obj.keyframe_insert('hide', frame=frame)
        generator_obj.keyframe_insert('hide_render', frame=frame)
        generator_obj.hide = True
        generator_obj.hide_render = True
        generator_obj.keyframe_insert('hide', frame=frame+1)
        generator_obj.keyframe_insert('hide_render', frame=frame+1)
        generator_obj.keyframe_insert('hide', frame=frame-1)
        generator_obj.keyframe_insert('hide_render', frame=frame-1)
Example #11
    def simplify_mesh(self, bm):
        class Ownership:
            def __init__(self, particle, dist):
                self.particle = particle
                self.distance = dist
                self.valid = False

        bmesh.ops.triangulate(bm, faces=bm.faces)
        last_edges = float("+inf")
        while True:
            edges = set()
            for edge in bm.edges:
                le = (edge.verts[0].co - edge.verts[1].co).length_squared
                center = edge.verts[0].co + edge.verts[1].co
                center /= 2
                for p, dist in self.get_nearest(center, 1):
                    if p.radius**2 < le:
                        edges.add(edge)
            if not len(edges) < last_edges:
                break
            last_edges = len(edges)
            bmesh.ops.subdivide_edges(bm, edges=list(edges), cuts=1)
            bmesh.ops.triangulate(bm, faces=bm.faces)

        bm.faces.ensure_lookup_table()
        bm.verts.ensure_lookup_table()
        tree = KDTree(len(bm.verts))
        for vert in bm.verts:
            tree.insert(vert.co, vert.index)
        tree.balance()

        ownership_mapping = {}
        ownership_validation_front = set()

        for vert in bm.verts:
            for p, dist in self.get_nearest(vert.co, 1):
                ownership_mapping[vert] = Ownership(p, dist)

        for particle in self.particles:
            location, index, dist = tree.find(particle.location)
            vert = bm.verts[index]
            if vert in ownership_mapping:
                if ownership_mapping[vert].particle == particle:
                    ownership_mapping[vert].valid = True
                    ownership_validation_front.add(vert)

        while True:
            new_front = set()
            for vert in ownership_validation_front:
                for edge in vert.link_edges:
                    other_vert = edge.other_vert(vert)
                    if other_vert not in ownership_mapping:
                        continue
                    if ownership_mapping[other_vert].valid:
                        continue
                    if other_vert in ownership_mapping:
                        if ownership_mapping[
                                vert].particle is ownership_mapping[
                                    other_vert].particle:
                            new_front.add(other_vert)
                            ownership_mapping[other_vert].valid = True
            ownership_validation_front = new_front
            if not new_front:
                break

        new_bm = bmesh.new()
        for particle in self.particles:
            particle.vert = new_bm.verts.new(particle.location)

        for face in bm.faces:
            connections = set()
            for vert in face.verts:
                if vert in ownership_mapping:
                    if ownership_mapping[vert].valid:
                        p = ownership_mapping[vert].particle
                        connections.add(p)
            if len(connections) == 3:
                try:
                    new_bm.faces.new(
                        [particle.vert for particle in connections])
                except ValueError:
                    pass
        while True:
            stop = True
            for vert in new_bm.verts:
                if len(vert.link_edges) < 3:
                    new_bm.verts.remove(vert)
                    stop = False
            if stop:
                break

        bmesh.ops.holes_fill(new_bm, edges=new_bm.edges)
        bmesh.ops.triangulate(new_bm, faces=new_bm.faces)
        bmesh.ops.recalc_face_normals(new_bm, faces=new_bm.faces)
        if not self.triangle_mode:
            bmesh.ops.join_triangles(new_bm,
                                     faces=new_bm.faces,
                                     angle_face_threshold=1.0,
                                     angle_shape_threshold=3.14)

        return new_bm
Example #12
    def __init__(self,
                 source_bm,
                 target_bm=None,
                 max_springs=300,
                 x_mirror=False,
                 immediate_edges_max=6):
        self.max_springs = max_springs
        self.immediate_edges_max = immediate_edges_max
        self.bm = source_bm
        self.target_bm = target_bm
        self.n = len(source_bm.verts)
        self.co = np.array(list(tuple(v.co) for v in source_bm.verts),
                           dtype=np.float64)
        self.last_co = self.co.copy()
        self.springs = np.zeros((self.n, max_springs), dtype=np.int64)
        self.immediate_edges = np.full((self.n, immediate_edges_max),
                                       -1,
                                       dtype=np.int64)
        self.lengths = np.zeros((self.n, max_springs), dtype=np.float64)
        self.sizing = 1

        self.pins = []
        self.out_cache = DummyObj()

        if target_bm:
            target_bm.faces.ensure_lookup_table()
            self.bvh = BVHTree.FromBMesh(target_bm)
        else:
            self.bvh = None

        source_bm.verts.ensure_lookup_table()
        source_bm.faces.ensure_lookup_table()

        if x_mirror:
            self.mirror_table = np.full((self.n, ), -1, dtype=np.int64)
            self.x_mirr = True
            kd = KDTree(self.n)
            for vert in source_bm.verts:
                kd.insert(vert.co, vert.index)
            kd.balance()
        else:
            self.x_mirr = False
            self.mirror_table = None

        for vert in source_bm.verts:
            for j, edge in enumerate(vert.link_edges):
                if not j < immediate_edges_max:
                    break
                other = edge.other_vert(vert)
                if not vert.is_boundary or other.is_boundary == vert.is_boundary:
                    self.immediate_edges[vert.index, j] = other.index

            for j, other in enumerate(n_ring(vert, self.max_springs)):
                self.springs[vert.index, j] = other.index
                self.lengths[vert.index, j] = (other.co - vert.co).length

            if self.x_mirr:
                co = vert.co.copy()
                co.x *= -1
                mirrco, mirri, dist = kd.find(co)
                self.mirror_table[vert.index] = mirri

        self.immediate_edges_invalid_places = self.immediate_edges == -1
        self.immediate_edges_number = (
            immediate_edges_max -
            self.immediate_edges_invalid_places.sum(axis=1))
Example #14
class Converter(object):

    TARGET_NUM_FACET = 2000
    DEFAULT_OCTREE = 3

    @elapsed
    def __init__(self, src):
        self.src = src
        self.decimated = None
        self.src_kd = None
        self.voxel_list = Manager().list()
        self.mesh_list = Manager().list()
        self.color_dict = {}
        self.parent = None
        self.block_map = Manager().list()
        self.unit = None
        self.join = True

        # Initial procedure
        self.__calc_decimated()
        self.__build_src_kd()
        self.__create_color_dict()
        bpy.ops.object.select_all(action="DESELECT")

    @elapsed
    def __calc_decimated(self):
        num_facet = len(self.src.data.polygons)
        ratio = float(Converter.TARGET_NUM_FACET) / float(num_facet)

        mesh = bpy.data.meshes.new("Decimated")
        self.decimated = bpy.data.objects.new("Decimated", mesh)
        self.decimated.data = self.src.data.copy()
        self.decimated.scale = self.src.scale
        self.decimated.location = self.src.location

        bpy.context.scene.objects.link(self.decimated)
        self.decimated.select = True

        self.decimated.modifiers.new("Decimate", "DECIMATE")
        self.decimated.modifiers["Decimate"].ratio = ratio
        # make the decimated copy active so the modifier can be applied by name
        bpy.context.scene.objects.active = self.decimated
        bpy.ops.object.modifier_apply(apply_as="DATA", modifier="Decimate")

    @elapsed
    def __build_src_kd(self):
        mesh = self.decimated.data
        size = len(mesh.vertices)
        self.src_kd = KDTree(size)

        for i, v in enumerate(mesh.vertices):
            self.src_kd.insert(v.co, i)
        self.src_kd.balance()

    @elapsed
    def __create_color_dict(self):
        for i, loop in enumerate(self.decimated.data.loops):
            vi = loop.vertex_index
            if vi not in self.color_dict:
                self.color_dict[vi] = i

    @elapsed
    def apply_join(self):
        if self.join:
            bpy.ops.object.join()

    @elapsed
    def cleanup(self):
        bpy.context.scene.objects.unlink(self.decimated)

    @staticmethod
    def create_new_octree(box):
        box0 = (
            box[0],
            (box[0] + box[1])/2.0,
            (box[0] + box[2])/2.0,
            (box[0] + box[3])/2.0,
            (box[0] + box[4])/2.0,
            (box[0] + box[5])/2.0,
            (box[0] + box[6])/2.0,
            (box[0] + box[7])/2.0,
        )

        box1 = (
            # Left side
            (box[0] + box[1])/2.0,
            box[1],
            (box[1] + box[2])/2.0,
            (box[0] + box[2])/2.0,
            # Right side
            (box[0] + box[5])/2.0,
            (box[1] + box[5])/2.0,
            (box[1] + box[6])/2.0,
            (box[0] + box[6])/2.0,
        )

        box2 = (
            # Left side
            (box[0] + box[2])/2.0,
            (box[1] + box[2])/2.0,
            box[2],
            (box[2] + box[3])/2.0,
            # Right side
            (box[0] + box[6])/2.0,
            (box[1] + box[6])/2.0,
            (box[2] + box[6])/2.0,
            (box[3] + box[6])/2.0
        )

        box3 = (
            # Left side
            (box[0] + box[3])/2.0,
            (box[0] + box[2])/2.0,
            (box[2] + box[3])/2.0,
            box[3],
            # Right side
            (box[0] + box[7])/2.0,
            (box[0] + box[6])/2.0,
            (box[3] + box[6])/2.0,
            (box[3] + box[7])/2.0,
        )

        box4 = (
            # Left side
            (box[0] + box[4])/2.0,
            (box[0] + box[5])/2.0,
            (box[0] + box[6])/2.0,
            (box[0] + box[7])/2.0,
            # Right side
            box[4],
            (box[4] + box[5])/2.0,
            (box[4] + box[6])/2.0,
            (box[4] + box[7])/2.0,
        )

        box5 = (
            # Left side
            (box[0] + box[5])/2.0,
            (box[1] + box[5])/2.0,
            (box[1] + box[6])/2.0,
            (box[0] + box[6])/2.0,
            # Right side
            (box[4] + box[5])/2.0,
            box[5],
            (box[5] + box[6])/2.0,
            (box[4] + box[6])/2.0,
        )

        box6 = (
            # Left side
            (box[0] + box[6])/2.0,
            (box[1] + box[6])/2.0,
            (box[2] + box[6])/2.0,
            (box[3] + box[6])/2.0,
            # Right side
            (box[4] + box[6])/2.0,
            (box[5] + box[6])/2.0,
            box[6],
            (box[6] + box[7])/2.0,
        )

        box7 = (
            # Left side
            (box[0] + box[7])/2.0,
            (box[0] + box[6])/2.0,
            (box[3] + box[6])/2.0,
            (box[3] + box[7])/2.0,
            # Right side
            (box[4] + box[7])/2.0,
            (box[4] + box[6])/2.0,
            (box[6] + box[7])/2.0,
            box[7],
        )
        return box0, box1, box2, box3, box4, box5, box6, box7

    @staticmethod
    def get_bvhtree_from_box(box):
        mesh_data = bpy.data.meshes.new("cube_mesh_data")
        faces = [(0, 1, 2, 3),
                 (4, 7, 6, 5),
                 (0, 4, 5, 1),
                 (1, 5, 6, 2),
                 (2, 3, 7, 6),
                 (4, 0, 3, 7)]
        mesh_data.from_pydata([x.to_tuple() for x in box], [], faces)
        mesh_data.update()
        bm = bmesh.new()
        bm.from_mesh(mesh_data)
        return bvh.BVHTree.FromBMesh(bm)

    @staticmethod
    def check_if_overlap(obj, box):
        bvh_tree1 = bvh.BVHTree.FromObject(obj, bpy.context.scene)
        bvh_tree2 = Converter.get_bvhtree_from_box(box)
        return bvh_tree1.overlap(bvh_tree2)

    @elapsed
    def invoke(self, obj, box, max_depth):
        try:
            self.invoke_create_voxel(obj, box, max_depth)
            self.draw_voxel(origin=box[0])
        finally:
            # Post procedure
            self.apply_join()
            self.cleanup()
            return list(self.block_map)

    @elapsed
    def invoke_create_voxel(self, obj, box, max_depth):
        # Calc unit length
        self.unit = (box[1].z - box[0].z) / float(2 ** max_depth)

        overlap = Converter.check_if_overlap(obj, box)
        if overlap:
            boxes = Converter.create_new_octree(box)
            jobs = []
            for child in boxes:
                p = Process(
                    target=self.create_voxel,
                    args=(obj, child, 1, self.voxel_list, max_depth)
                )
                jobs.append(p)
                p.start()

            [job.join() for job in jobs]

    def create_voxel(self, obj, box, depth, queue, max_depth=3):
        """For multiprocessing
        :param obj:
        :param box:
        :param depth:
        :param queue:
        :param max_depth:
        :return:
        """
        depth += 1

        overlap = Converter.check_if_overlap(obj, box)
        if overlap:
            if depth == max_depth:
                queue.append([x.to_tuple() for x in box])
            else:
                boxes = Converter.create_new_octree(box)
                for _child in boxes:
                    self.create_voxel(obj, _child, depth, queue, max_depth)

    def calc_mesh_and_color(self, voxel_list, mesh_list, block_list, origin):
        """For multiprocessing
        :param list voxel_list:
        :param list mesh_list:
        :param list block_list:
        :param mathutils.Vector origin:
        """
        faces = ((0, 1, 2, 3), (4, 7, 6, 5), (0, 4, 5, 1),
                 (1, 5, 6, 2), (2, 3, 7, 6), (4, 0, 3, 7))

        for i, voxel in enumerate(voxel_list):
            mesh = bpy.data.meshes.new("cube_mesh_data")
            mesh.from_pydata(voxel, [], faces)
            mesh.update()

            # Find closest color
            co, index, dist = self.src_kd.find(voxel[0])
            if self.decimated.data.vertex_colors:
                rgb = self.decimated.data.vertex_colors["Col"].data[self.color_dict[index]].color
            else:
                rgb = (1.0, 1.0, 1.0)  # White

            mesh_list.append((voxel, tuple(rgb)))

            ix = int(round((voxel[0][0] - origin.x) / self.unit))
            iy = int(round((voxel[0][1] - origin.y) / self.unit))
            iz = int(round((voxel[0][2] - origin.z) / self.unit))
            col_def = BlockDef.find_nearest_color_block(Vector(rgb))

            block_list.append(BlockInfo(
                has_block=True,
                block_type=col_def.block_def[0],
                color=col_def.block_def[1],
                pos=(ix, iy, iz)
            ))

    @elapsed
    def draw_voxel(self, origin):
        # Add null object
        self.parent = bpy.data.objects.new("Voxcel", bpy.data.meshes.new("Voxcel"))
        bpy.context.scene.objects.link(self.parent)
        bpy.context.scene.objects.active = self.parent
        self.parent.select = True

        def chunks(l, n):
            """Yield successive n-sized chunks from l."""
            for i in range(0, len(l), n):
                yield l[i:i+n]

        parallels = 8
        chunk_list = chunks(
            self.voxel_list,
            len(self.voxel_list)//parallels
        )

        jobs = []
        for chunk in chunk_list:
            job = Process(
                target=self.calc_mesh_and_color,
                args=(chunk, self.mesh_list, self.block_map, origin)
            )
            jobs.append(job)
            job.start()

        [job.join() for job in jobs]

        @elapsed
        def add_voxels():
            for i, item in enumerate(self.mesh_list):
                vertices = item[0]
                color = item[1]
                name = "Cube.%010d" % i

                voxel.Voxel(name, vertices, color).create().add(
                    scene=bpy.context.scene,
                    parent=self.parent
                )

        add_voxels()
Example #15
    def evaluate(self, target_pc_kdtree: KDTree,
                 use_filtered_cloud: bool) -> Dict:
        """Evaluate the point cloud w.r.t. the target point cloud.
        The evaluation is done in terms of euclidean distance between the clouds' points.

        Arguments:
            target_pc_kdtree {KDTree} -- target (ground truth) point cloud KDTree
            use_filtered_cloud {bool} -- if {True} the filtered cloud is used for evaluation, the full one otherwise

        Returns:
            Dict -- evaluation result dictionary containing:
                        'dist_mean' {float}: mean distance
                        'dist_std' {float}: standard deviation
                        'dist_min' {float}: minimum distance
                        'dist_max' {float}: maximum distance
                        'used_filtered_cloud' {bool}: if the evaluation used only the filtered cloud
                        'filter_threshold' {float}: the distance threshold used to filter the point cloud
                        'full_cloud_size' {int}: size of the whole reconstructed cloud
                        'used_cloud_size' {int}: size of the cloud used for the evaluation
                        'used_cloud_size_percent' {float}: percentage of cloud used for the evaluation (in range [0-1])
                        'discarded_points' {int}: number of points not used in the evaluation
                        'elapsed_time' {float}: elapsed time in seconds
                    note that the measurement unit depends on the unit set in the scene.
        """
        src_pc = self.vertices_filtered if use_filtered_cloud else self.vertices
        #
        # initial alignment
        src = PointCloud.transform(
            src_pc, self._object_matrix @ self._initial_centroid_matrix)
        #
        # get distances
        d = [euclidean_distance(v,
                                target_pc_kdtree.find(v)[0])
             for v in src]  # no need to normalize; points are 3D
        # d = [target_pc_kdtree.find(v)[2] for v in src]
        #
        # compute statistics
        d_mean = mean(d)
        d_std = stdev(d, d_mean) if len(d) > 1 else 0.
        d_min = min(d)
        d_max = max(d)
        #
        results = {
            "dist_mean": d_mean,
            "dist_std": d_std,
            "dist_min": d_min,
            "dist_max": d_max,
            "used_filtered_cloud": use_filtered_cloud,
            "filter_threshold":
                self._filter_distance if use_filtered_cloud else float('inf'),
            "full_cloud_size": len(self.vertices),
            "used_cloud_size": len(src),
            "used_cloud_size_percent": len(src) / len(self.vertices),
            "discarded_points": len(self.vertices) - len(src)
        }
        logger.debug(
            "Point cloud eval end. mean=%.3f, std=%.3f, min=%.3f, max=%.3f.",
            d_mean, d_std, d_min, d_max)
        return results
Example #16
    def get_regsitration_to_target(
            self,
            target_pc: List[Vector],
            initial_alignment: Matrix,
            target_pc_kdtree: KDTree = None,
            max_iterations: int = 100,
            samples: int = 0,
            use_filtered_cloud: bool = True) -> Tuple[Matrix, float]:
        """Get the registration matrix to a target point cloud. Optionally apply an initial alignment.
        Implements a variant of the Iterative Closest Point algorithm.

        Arguments:
            target_pc {List[Vector]} -- the point cloud to align to
            initial_alignment {Matrix} -- initial manual alignment, usually from the UI control empty

        Keyword Arguments:
            target_pc_kdtree {KDTree} -- KDTree of the point cloud to align to, if {None} will be
                                         created internally starting from `target_pc` (default: {None})
            max_iterations {int} -- maximum iterations allowed to the algorithm (default: {100})
            samples {int} -- number of random vertices to be used for alignment,
                             if <= 0 use the whole cloud (default: {0})
            use_filtered_cloud {bool} -- if {True} the filtered point cloud is used to run the alignment,
                                         otherwise the full cloud is used (default: {True})

        Returns:
            Matrix -- the combined transformation matrix to align the point cloud
            float  -- registration error
        """
        logger.info("Starting ICP, samples=%i, max_iterations=%i", samples,
                    max_iterations)
        src_pc = self.vertices_filtered if use_filtered_cloud else self.vertices
        #
        target_pc = np.array(target_pc)
        src = np.ones((src_pc.shape[0], 4))
        target = np.ones((len(target_pc), 4))
        src[:, :3] = np.copy(src_pc)
        target[:, :3] = np.copy(target_pc)
        #
        # initial alignment
        src = PointCloud.transform(src, initial_alignment)
        #
        # build KDTree for target point cloud
        kdtree = target_pc_kdtree
        if kdtree is None:
            size = len(target_pc)
            kdtree = KDTree(size)
            for i, v in enumerate(target_pc):
                kdtree.insert(v, i)
            kdtree.balance()
        #
        # define samples
        if samples <= 0 or samples > src[:].shape[0]:
            logger.warning("Using %i points but were required %i!",
                           src[:].shape[0], samples)
            samples = src[:].shape[0]
        #
        # randomize points
        indices = list(range(0, src[:].shape[0]))
        #
        current_iter = 0
        previous_error = float('inf')
        transforms = []
        while current_iter < max_iterations:
            shuffle(indices)
            s = list(
                zip(*[kdtree.find(src[i][0:3]) for i in indices[:samples]]))
            # s_vertices = s[0]
            s_indices = s[1]
            s_distances = s[2]
            #
            # get error
            mean_error = np.mean(s_distances)
            logger.info("ICP iteration %i, mean error: %f", current_iter,
                        mean_error)
            if (previous_error -
                    mean_error) < 0.0001:  # best alignment reached
                break
            previous_error = mean_error
            #
            # find fit transform
            T = self.find_fit_transform(src[indices[:len(s_indices)]],
                                        target[s_indices, :])
            transforms.append(T)
            #
            # update the current source cloud
            src = PointCloud.transform(src, T)
            #
            current_iter += 1
        #
        # self._show_as_vertices_mesh(src)
        align_matrix = Matrix(
            reduce(lambda am, t: t @ am,
                   transforms).tolist())  # aggregate transformations
        return align_matrix, previous_error
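
A hedged usage sketch of the registration entry point above; `pc` (an instance of the surrounding point-cloud class), `target_points`, and `manual_alignment` are assumed names, not part of the original:

    target_kdtree = KDTree(len(target_points))
    for i, co in enumerate(target_points):
        target_kdtree.insert(co, i)
    target_kdtree.balance()

    align_matrix, error = pc.get_regsitration_to_target(
        target_pc=target_points,
        initial_alignment=manual_alignment,
        target_pc_kdtree=target_kdtree,
        max_iterations=100,
        samples=5000,
        use_filtered_cloud=True)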