def optimize_vertices(self, data):
    """Construct a vertex deduplication map for *data*.

    Hashes every vertex of *data* (position, normal, uv, and vertex
    color, each rounded to the spell's configured precision) and feeds
    the hashes to :func:`unique_map`, whose result is returned
    unchanged (a forward map and an inverse map of vertex indices).
    """
    self.toaster.msg("removing duplicate vertices")
    # generator of per-vertex hashes, rounded per the spell's settings
    vertex_hashes = data.get_vertex_hash_generator(
        vertexprecision=self.VERTEXPRECISION,
        normalprecision=self.NORMALPRECISION,
        uvprecision=self.UVPRECISION,
        vcolprecision=self.VCOLPRECISION)
    return unique_map(vertex_hashes)
def optimize_vertices(self, data):
    """Construct a vertex deduplication map for *data*.

    Hashes every vertex of *data* (position, normal, uv, and vertex
    color, each rounded to the spell's configured precision) and
    returns the result of :func:`unique_map` over those hashes.
    """
    self.toaster.msg("removing duplicate vertices")
    # get map, deleting unused vertices
    # (fix: the original wrapped the hash generator in
    # "vhash for i, vhash in enumerate(...)"; the index i was never
    # used, so the enumerate + generator expression was pure overhead
    # — pass the hash generator to unique_map directly)
    return unique_map(data.get_vertex_hash_generator(
        vertexprecision=self.VERTEXPRECISION,
        normalprecision=self.NORMALPRECISION,
        uvprecision=self.UVPRECISION,
        vcolprecision=self.VCOLPRECISION))
def optimize_mopp(self, mopp):
    """Optimize a bhkMoppBvTreeShape.

    Removes duplicate vertices and duplicate triangles from the
    packed triangle data of ``mopp.shape``, remaps triangle vertex
    indices accordingly, fixes the per-subshape vertex counts, and
    finally regenerates the mopp data and welding info.

    Fix applied (review): in the multi-subshape branch the running
    boundary ``old_max_index`` was initialized to -1, so after adding
    a subshape's vertex count it held the *maximal old index* rather
    than "maximal index + 1" as the comment claimed; combined with the
    strict ``<`` comparison this dropped the last vertex of every
    subshape into the next one. It now starts at 0 so the strict
    comparison is correct.
    """
    shape = mopp.shape
    data = shape.data

    # --- remove duplicate vertices ---
    self.toaster.msg(_("removing duplicate vertices"))
    # v_map[old_index] -> new_index; v_map_inverse[new_index] -> old_index
    v_map, v_map_inverse = unique_map(
        shape.get_vertex_hash_generator(self.VERTEXPRECISION))
    new_numvertices = len(v_map_inverse)
    self.toaster.msg(
        _("(num vertices in collision shape was %i and is now %i)")
        % (len(v_map), new_numvertices))
    # copy old data
    oldverts = [[v.x, v.y, v.z] for v in data.vertices]
    # set new data
    data.num_vertices = new_numvertices
    data.vertices.update_size()
    for old_i, v in zip(v_map_inverse, data.vertices):
        v.x = oldverts[old_i][0]
        v.y = oldverts[old_i][1]
        v.z = oldverts[old_i][2]
    del oldverts
    # update vertex indices in triangles
    for tri in data.triangles:
        tri.triangle.v_1 = v_map[tri.triangle.v_1]
        tri.triangle.v_2 = v_map[tri.triangle.v_2]
        tri.triangle.v_3 = v_map[tri.triangle.v_3]

    # --- remove duplicate triangles ---
    self.toaster.msg(_("removing duplicate triangles"))
    t_map, t_map_inverse = unique_map(shape.get_triangle_hash_generator())
    new_numtriangles = len(t_map_inverse)
    self.toaster.msg(
        _("(num triangles in collision shape was %i and is now %i)")
        % (len(t_map), new_numtriangles))
    # copy old data
    oldtris = [[tri.triangle.v_1, tri.triangle.v_2, tri.triangle.v_3,
                tri.normal.x, tri.normal.y, tri.normal.z]
               for tri in data.triangles]
    # set new data
    data.num_triangles = new_numtriangles
    data.triangles.update_size()
    for old_i, tri in zip(t_map_inverse, data.triangles):
        # degenerate triangles are mapped to None by the hash
        # generator; skip them
        if old_i is None:
            continue
        tri.triangle.v_1 = oldtris[old_i][0]
        tri.triangle.v_2 = oldtris[old_i][1]
        tri.triangle.v_3 = oldtris[old_i][2]
        tri.normal.x = oldtris[old_i][3]
        tri.normal.y = oldtris[old_i][4]
        tri.normal.z = oldtris[old_i][5]
    # note: welding updated later when calling the mopper
    del oldtris

    # --- fix subshape counts ---
    if shape.num_sub_shapes == 1:
        # quick way
        shape.sub_shapes[0].num_vertices = shape.data.num_vertices
    else:
        # slow way if there are two or more subshapes
        # XXX check that this algorithm actually works and find
        # XXX possibly a faster method
        # old_max_index is maximal old index + 1 of the current
        # subshape's vertex range (fix: was initialized to -1, which
        # made it the maximal index itself and, with the strict "<"
        # below, shifted one vertex per subshape boundary)
        old_max_index = 0
        new_i = 0
        for sub_shape in shape.sub_shapes:
            num_vertices = 0
            # calculate maximal index + 1 in old vertex array
            old_max_index += sub_shape.num_vertices
            # count the surviving (new) vertices whose old index is
            # strictly less than old_max_index; v_map_inverse is
            # increasing, so a single forward scan suffices
            try:
                while v_map_inverse[new_i] < old_max_index:
                    # ok, new_i has admissible old index so
                    # include it: increase number of vertices in
                    # this subshape
                    num_vertices += 1
                    # and increment new index to check next vertex
                    new_i += 1
            except IndexError:
                # new_i overflow, so we're done
                pass
            sub_shape.num_vertices = num_vertices

    # update mopp data and welding info
    mopp.update_mopp_welding()