def testPickle(self):
    """Round-trip transforms and grids through pickle and verify equality."""
    import pickle

    # Transforms of each supported kind should survive a pickle round trip.
    transforms = (
        openvdb.createLinearTransform(voxelSize=0.1),
        openvdb.createLinearTransform(
            matrix=[[1, 0, 0, 0], [0, 2, 0, 0], [0, 0, 3, 0], [4, 3, 2, 1]]),
        openvdb.createFrustumTransform(
            (0, 0, 0), (10, 10, 10), taper=0.8, depth=10.0),
    )
    for original in transforms:
        self.assertEqual(pickle.loads(pickle.dumps(original)), original)

    # Grids of every registered type should also survive a round trip.
    for factory in openvdb.GridTypes:
        grid = factory()

        # Attach metadata so we can check that it is preserved.
        meta = {'name': 'test', 'saveFloatAsHalf': True, 'xyz': (-1, 0, 1)}
        grid.metadata = meta

        # Fill nested boxes with alternating active/inactive values.
        isActive = True
        for width in range(63, 0, -10):
            fillValue = valueFactory(grid.zeroValue, width)
            grid.fill((0, 0, 0), (width,) * 3, fillValue, isActive)
            isActive = not isActive

        restored = pickle.loads(pickle.dumps(grid))

        # Metadata must match exactly.
        self.assertEqual(restored.metadata, meta)

        # Active voxel values must match pairwise.
        for lhs, rhs in zip(restored.iterOnValues(), grid.iterOnValues()):
            self.assertEqual(lhs, rhs)

        # Inactive voxel values must match pairwise.
        for lhs, rhs in zip(restored.iterOffValues(), grid.iterOffValues()):
            self.assertEqual(lhs, rhs)
def convert_vdb_with_yt(datafilename, outfilename, level, variable_out,
                        log_the_variable=False, variable_tol=None,
                        renorm=True, renorm_box=True, renorm_box_size=10.0):
    """Sample one AMR level of a yt-readable dataset and write it as OpenVDB.

    Parameters
    ----------
    datafilename : str
        Path of the dataset to load with ``yt.load``.
    outfilename : str
        Prefix for the output ``.vdb`` file name.
    level : int
        AMR level to sample onto a uniform covering grid.
    variable_out : str
        Name of the field to export.
    log_the_variable : bool
        If True, export ``log10`` of the field (``variable_tol`` is
        transformed the same way so the threshold stays meaningful).
    variable_tol : float or None
        Values below this threshold (after any log/renorm) are zeroed.
    renorm : bool
        If True, rescale the data to the [0, 1] range for plotting.
    renorm_box : bool
        If True, set the voxel size so the box spans ``renorm_box_size``.
    renorm_box_size : float
        Target box extent used when ``renorm_box`` is True.

    Returns
    -------
    str
        Path of the VDB file that was written.
    """
    # Load the dataset and sample the requested level onto a uniform grid.
    ds = yt.load(datafilename)
    all_data = ds.covering_grid(
        level=level,
        left_edge=ds.domain_left_edge,
        dims=ds.domain_dimensions * ds.refine_by**level)

    # To take the log or to not take the log, that is the question.
    if log_the_variable:
        pointdata = np.log10(all_data[variable_out].v)
        if variable_tol is not None:
            variable_tol = np.log10(variable_tol)
    else:
        pointdata = all_data[variable_out].v

    # Rescale from 0->1 for plotting.  Skip the division when the field is
    # constant (max == min) to avoid a divide-by-zero producing NaNs.
    if renorm:
        minp = pointdata.min()
        maxp = pointdata.max()
        spread = maxp - minp
        if spread > 0:
            pointdata = (pointdata - minp) / spread
            if variable_tol is not None:
                variable_tol = (variable_tol - minp) / spread
        else:
            pointdata = pointdata - minp

    # Take out threshold data -> set to 0.
    if variable_tol is not None:
        pointdata[pointdata < variable_tol] = 0.0

    # Copy the dense array into a VDB grid.
    domain_box = vdb.FloatGrid()
    domain_box.background = 0.0
    domain_box.copyFromArray(pointdata, ijk=(0, 0, 0), tolerance=0)

    # Rescale to voxel size (assumes square box / shifting to x-axis units!).
    if renorm_box:
        vsize = renorm_box_size / float(pointdata.shape[0])
        domain_box.transform = vdb.createLinearTransform(voxelSize=vsize)

    outvdbname = (outfilename + '_' + variable_out + '_one_level_is_'
                  + str(level).zfill(3) + '.vdb')
    vdb.write(outvdbname, grids=domain_box)
    return outvdbname
def testPickle(self):
    """Verify that transforms and grids serialize losslessly via pickle."""
    import pickle

    def roundTrip(obj):
        # Serialize then immediately deserialize.
        return pickle.loads(pickle.dumps(obj))

    # Test pickling of transforms of various types.
    matrix = [[1, 0, 0, 0], [0, 2, 0, 0], [0, 0, 3, 0], [4, 3, 2, 1]]
    for xform in (
            openvdb.createLinearTransform(voxelSize=0.1),
            openvdb.createLinearTransform(matrix=matrix),
            openvdb.createFrustumTransform((0, 0, 0), (10, 10, 10),
                                           taper=0.8, depth=10.0)):
        self.assertEqual(roundTrip(xform), xform)

    # Test pickling of grids of various types.
    for factory in openvdb.GridTypes:
        grid = factory()

        # Attach metadata that the round trip must preserve.
        meta = {'name': 'test', 'saveFloatAsHalf': True, 'xyz': (-1, 0, 1)}
        grid.metadata = meta

        # Populate concentric boxes with alternating active state.
        state = True
        for width in range(63, 0, -10):
            grid.fill((0, 0, 0), (width,) * 3,
                      valueFactory(grid.zeroValue, width), state)
            state = not state

        copy = roundTrip(grid)

        # Metadata survives the round trip.
        self.assertEqual(copy.metadata, meta)
        # Active voxel values survive.
        for a, b in zip(copy.iterOnValues(), grid.iterOnValues()):
            self.assertEqual(a, b)
        # Inactive voxel values survive.
        for a, b in zip(copy.iterOffValues(), grid.iterOffValues()):
            self.assertEqual(a, b)
def testTransform(self):
    """Exercise transform construction, comparison, and grid assignment."""
    matrix = [[.5, 0, 0, 0], [0, 1, 0, 0], [0, 0, 2, 0], [1, 2, 3, 1]]
    linear = openvdb.createLinearTransform(matrix)
    # The transform must report a non-empty type name.
    self.assertTrue(linear.typeName != '')
    # Index (1,1,1) maps through the scale + translate matrix above.
    self.assertEqual(linear.indexToWorld((1, 1, 1)), (1.5, 3, 5))

    # An alias compares equal, and so does a deep copy.
    alias = linear
    self.assertEqual(alias, linear)
    duplicate = linear.deepCopy()
    self.assertEqual(duplicate, linear)

    # A frustum transform must compare unequal to the linear one.
    frustum = openvdb.createFrustumTransform(
        taper=0.5, depth=100, xyzMin=(0, 0, 0),
        xyzMax=(100, 100, 100), voxelSize=0.25)
    self.assertNotEqual(frustum, linear)
    # Compare world coordinates as micro-unit integers to dodge float noise.
    world = [int(round(c * 1000000)) for c in frustum.indexToWorld((10, 10, 10))]
    self.assertEqual(world, [-110000, -110000, 2500000])

    # A fresh grid carries the default linear transform and accepts a new one.
    grid = openvdb.FloatGrid()
    self.assertEqual(grid.transform, openvdb.createLinearTransform())
    grid.transform = openvdb.createLinearTransform(2.0)
    self.assertEqual(grid.transform, openvdb.createLinearTransform(2.0))
def _read(verts, tris, quads, vxsize):
    """Voxelize mesh geometry into a level-set FloatGrid at voxel size vxsize."""
    print("vdb: read voxels from mesh")
    xform = vdb.createLinearTransform(voxelSize=vxsize)
    make = vdb.FloatGrid.createLevelSetFromPolygons
    # Forward only the face arrays that are non-empty.
    if len(tris) == 0:
        grid = make(verts, quads=quads, transform=xform)
    elif len(quads) == 0:
        grid = make(verts, triangles=tris, transform=xform)
    else:
        grid = make(verts, tris, quads, transform=xform)
    # Report the extent of the active (narrow-band) region.
    lo, hi = grid.evalActiveVoxelBoundingBox()
    dims = (hi[0] - lo[0], hi[1] - lo[1], hi[2] - lo[2])
    print("vdb_remesh: new grid {} voxels".format(dims))
    return grid
def convert_to_grid(vertices, triangles, quads, grid_size):
    """Convert a polygon mesh into a fog-volume FloatGrid.

    Builds a narrow-band level set from the mesh, then remaps it so the
    interior is 1.0, the exterior background is 0.0, and the narrow band
    ramps linearly between the two.

    Parameters
    ----------
    vertices : array of mesh points.
    triangles : array of triangle indices (may be empty).
    quads : array of quad indices (may be empty).
    grid_size : int
        Controls the voxel size via ``1.0 / (grid_size - 14)``; must be
        greater than 14.  (The 14 presumably compensates for padding —
        TODO confirm against the caller.)

    Returns
    -------
    vdb.FloatGrid with values in [0, 1] and background 0.
    """
    # Treat empty index arrays as absent.
    if len(triangles) == 0:
        triangles = None
    if len(quads) == 0:
        quads = None
    transform = vdb.createLinearTransform(voxelSize=(1.0 / (grid_size - 14)))
    grid = vdb.FloatGrid.createLevelSetFromPolygons(
        vertices, triangles=triangles, quads=quads,
        transform=transform, halfWidth=0.6)

    outside = grid.background
    width = 2.0 * outside

    # Visit and update all of the grid's active values, which correspond to
    # voxels in the narrow band: map signed distance to a 0..1 density ramp.
    # (Loop variable renamed so the builtin 'iter' is no longer shadowed.)
    for it in grid.iterOnValues():
        dist = it.value
        it.value = (outside - dist) / width

    # Visit all of the grid's inactive tile and voxel values and update
    # the values that correspond to the interior region (negative distance).
    for it in grid.iterOffValues():
        if it.value < 0.0:
            it.value = 1.0

    # Outside the narrow band the fog density is zero.
    grid.background = 0.0
    return grid
def isosurf(context):
    """Rebuild the mesh of every 'IsoSurfer' object from its particle systems.

    Scans the scene for objects tagged with an 'IsoSurfer' property, gathers
    the locations of their live particles, builds an OpenVDB level set from
    those points, polygonizes it, and writes the result back into the
    object's existing mesh datablock via bmesh.

    NOTE(review): uses time.clock() (removed in Python 3.8) and
    scene.update() / calc_normals() (pre-2.80 Blender API) — presumably
    targets an older Blender; confirm before porting.
    """
    scn = bpy.context.scene
    stime = time.clock()
    SurfList = []
    # Collect each tagged object followed by its (object, particle-system) pairs.
    for i, obj in enumerate(bpy.context.scene.objects):
        if 'IsoSurfer' in obj:
            obsurf = obj
            mesurf = obj.data
            res = obj.IsoSurf_res
            SurfList.append([(obsurf, mesurf, res)])
            for item in obj.IsoSurf:
                if item.active == True:
                    if item.obj != '':
                        if item.psys != '':
                            SurfList[-1].append((item.obj, item.psys))
    for surfobj in SurfList:
        print("Calculating isosurface, for frame:", bpy.context.scene.frame_current)
        # First entry is the surface object itself; the rest are particle sources.
        for obj, psys in surfobj[1:]:
            psys = bpy.data.objects[obj].particle_systems[psys]
            ploc = []
            stime = time.clock()
            palive = False
            # Gather locations of particles that are currently alive.
            for par in range(len(psys.particles)):
                if psys.particles[par].alive_state == 'ALIVE':
                    ploc.append(psys.particles[par].location)
                    palive = True
            if palive:
                print(' pack particles:', time.clock() - stime, 'sec')
                vxsize = scn.isosurface_voxelsize
                sradius = scn.isosurface_sphereradius
                ssteps = scn.isosurface_smoothsteps
                # Build a level set from the particle points, then polygonize.
                vtransform = vdb.createLinearTransform(voxelSize=vxsize)
                grid = vdb.FloatGrid.createLevelSetFromPoints(np.array(ploc), transform=vtransform, radius=sradius)
                # iso, adaptivity, gaussian iterations, gaussian kernel size, gaussian sigma
                verts, tris, quads = grid.convertToComplex(0.0, 0.01, ssteps, 4, 0.8)
                print(' vdb remesh:', time.clock() - stime, 'sec')
                stime = time.clock()
                # TODO: eats all memory & resets materials
                # obsurf.data = write_fast(verts, tris, quads)
                # bpy.ops.object.shade_smooth()
                # scn.update()
                # Rebuild the existing mesh in place with bmesh so the object's
                # bindings (materials, modifiers) are preserved.
                bm = bmesh.new()
                bm.from_mesh(mesurf)
                bm.clear()
                for co in verts.tolist():
                    bm.verts.new(co)
                bm.verts.ensure_lookup_table()
                bm.faces.ensure_lookup_table()
                # Faces are built with reversed index order ([::-1]),
                # presumably to flip the winding/normals — TODO confirm.
                for face_indices in tris.tolist() + quads.tolist():
                    bm.faces.new(tuple(bm.verts[index] for index in face_indices[::-1]))
                for f in bm.faces:
                    f.smooth = True
                bm.to_mesh(mesurf)
                bm.free()
                mesurf.calc_normals()
                scn.update()
                print(' write:', time.clock() - stime, 'sec')
def testMeshConversion(self): import time # Skip this test if NumPy is not available. try: import numpy as np except ImportError: return # Test mesh to volume conversion. # Generate the vertices of a cube. cubeVertices = [(x, y, z) for x in (0, 100) for y in (0, 100) for z in (0, 100)] cubePoints = np.array(cubeVertices, float) # Generate the faces of a cube. cubeQuads = np.array([ (0, 1, 3, 2), # left (0, 2, 6, 4), # front (4, 6, 7, 5), # right (5, 7, 3, 1), # back (2, 3, 7, 6), # top (0, 4, 5, 1), # bottom ], float) voxelSize = 2.0 halfWidth = 3.0 xform = openvdb.createLinearTransform(voxelSize) # Only scalar, floating-point grids support createLevelSetFromPolygons() # (and the OpenVDB module might have been compiled without DoubleGrid support). grids = [] for gridType in [n for n in openvdb.GridTypes if n.__name__ in ('FloatGrid', 'DoubleGrid')]: # Skip this test if the OpenVDB module was built without NumPy support. try: grid = gridType.createLevelSetFromPolygons( cubePoints, quads=cubeQuads, transform=xform, halfWidth=halfWidth) except NotImplementedError: return #openvdb.write('/tmp/testMeshConversion.vdb', grid) self.assertEqual(grid.transform, xform) self.assertEqual(grid.background, halfWidth * voxelSize) dim = grid.evalActiveVoxelDim() self.assertTrue(50 < dim[0] < 58) self.assertTrue(50 < dim[1] < 58) self.assertTrue(50 < dim[2] < 58) grids.append(grid) # Boolean-valued grids can't be used to store level sets. self.assertRaises(TypeError, lambda: openvdb.BoolGrid.createLevelSetFromPolygons( cubePoints, quads=cubeQuads, transform=xform, halfWidth=halfWidth)) # Vector-valued grids can't be used to store level sets. self.assertRaises(TypeError, lambda: openvdb.Vec3SGrid.createLevelSetFromPolygons( cubePoints, quads=cubeQuads, transform=xform, halfWidth=halfWidth)) # The "points" argument to createLevelSetFromPolygons() must be a NumPy array. 
self.assertRaises(TypeError, lambda: openvdb.FloatGrid.createLevelSetFromPolygons( cubeVertices, quads=cubeQuads, transform=xform, halfWidth=halfWidth)) # The "points" argument to createLevelSetFromPolygons() must be a NumPy float or int array. self.assertRaises(TypeError, lambda: openvdb.FloatGrid.createLevelSetFromPolygons( np.array(cubeVertices, bool), quads=cubeQuads, transform=xform, halfWidth=halfWidth)) # The "triangles" argument to createLevelSetFromPolygons() must be an N x 3 NumPy array. self.assertRaises(TypeError, lambda: openvdb.FloatGrid.createLevelSetFromPolygons( cubePoints, triangles=cubeQuads, transform=xform, halfWidth=halfWidth)) # Test volume to mesh conversion. # Vector-valued grids can't be meshed. self.assertRaises(TypeError, lambda: openvdb.Vec3SGrid().convertToQuads()) for grid in grids: points, quads = grid.convertToQuads() # These checks are intended mainly to test the Python/C++ bindings, # not the OpenVDB volume to mesh converter. self.assertTrue(len(points) > 8) self.assertTrue(len(quads) > 6) pmin, pmax = points.min(0), points.max(0) self.assertTrue(-2 < pmin[0] < 2) self.assertTrue(-2 < pmin[1] < 2) self.assertTrue(-2 < pmin[2] < 2) self.assertTrue(98 < pmax[0] < 102) self.assertTrue(98 < pmax[1] < 102) self.assertTrue(98 < pmax[2] < 102) points, triangles, quads = grid.convertToPolygons(adaptivity=1) self.assertTrue(len(points) > 8) pmin, pmax = points.min(0), points.max(0) self.assertTrue(-2 < pmin[0] < 2) self.assertTrue(-2 < pmin[1] < 2) self.assertTrue(-2 < pmin[2] < 2) self.assertTrue(98 < pmax[0] < 102) self.assertTrue(98 < pmax[1] < 102) self.assertTrue(98 < pmax[2] < 102)
vSize = 1 / float(resolution[0]) # Keep track of level 0 voxel size if level == minLevel: largestVSize = vSize # Scale and translate dataMatrix = [[vSize, 0, 0, 0], [0, vSize, 0, 0], [0, 0, vSize, 0], [ -vSize / 2 - largestVSize, -vSize / 2 - largestVSize, -vSize / 2 - largestVSize, 1 ]] maskMatrix = [[vSize, 0, 0, 0], [0, vSize, 0, 0], [0, 0, vSize, 0], [ vSize / 2 - largestVSize, vSize / 2 - largestVSize, vSize / 2 - largestVSize, 1 ]] dataCube.transform = vdb.createLinearTransform(dataMatrix) maskCube.transform = vdb.createLinearTransform(maskMatrix) # Write out the generated VDB output = [] dataCube.name = "density" maskCube.name = "mask" output.append(maskCube) output.append(dataCube) outFilePath = "%s/%s_level%d.vdb" % (outFileDir, variable, level) vdb.write(outFilePath, grids=output) # Give feedback to see progress print "Finished level " + str(level)
def isosurf(context):
    """Rebuild the mesh of every 'IsoSurfer' object from its particle systems.

    For each scene object tagged with an 'IsoSurfer' property, collects the
    locations of live particles from the configured sources, builds an
    OpenVDB level set from those points, polygonizes it, and writes the
    result back into the object's existing mesh datablock via bmesh.

    NOTE(review): uses time.clock() (removed in Python 3.8) and
    scene.update() / calc_normals() (pre-2.80 Blender API) — presumably
    targets an older Blender; confirm before porting.
    """
    scn = bpy.context.scene
    stime = time.clock()
    SurfList = []
    # Collect each tagged object followed by its (object, particle-system) pairs.
    for i, obj in enumerate(bpy.context.scene.objects):
        if 'IsoSurfer' in obj:
            obsurf = obj
            mesurf = obj.data
            res = obj.IsoSurf_res
            SurfList.append([(obsurf, mesurf, res)])
            for item in obj.IsoSurf:
                if item.active == True:
                    if item.obj != '':
                        if item.psys != '':
                            SurfList[-1].append((item.obj, item.psys))
    for surfobj in SurfList:
        print("Calculating isosurface, for frame:", bpy.context.scene.frame_current)
        # First entry is the surface object itself; the rest are particle sources.
        for obj, psys in surfobj[1:]:
            psys = bpy.data.objects[obj].particle_systems[psys]
            ploc = []
            stime = time.clock()
            palive = False
            # Gather locations of particles that are currently alive.
            for par in range(len(psys.particles)):
                if psys.particles[par].alive_state == 'ALIVE':
                    ploc.append(psys.particles[par].location)
                    palive = True
            if palive:
                print(' pack particles:', time.clock() - stime, 'sec')
                vxsize = scn.isosurface_voxelsize
                sradius = scn.isosurface_sphereradius
                ssteps = scn.isosurface_smoothsteps
                # Build a level set from the particle points, then polygonize.
                vtransform = vdb.createLinearTransform(voxelSize=vxsize)
                grid = vdb.FloatGrid.createLevelSetFromPoints(
                    np.array(ploc), transform=vtransform, radius=sradius)
                # iso, adaptivity, gaussian iterations, gaussian kernel size, gaussian sigma
                verts, tris, quads = grid.convertToComplex(
                    0.0, 0.01, ssteps, 4, 0.8)
                print(' vdb remesh:', time.clock() - stime, 'sec')
                stime = time.clock()
                # TODO: eats all memory & resets materials
                # obsurf.data = write_fast(verts, tris, quads)
                # bpy.ops.object.shade_smooth()
                # scn.update()
                # Rebuild the existing mesh in place with bmesh so the object's
                # bindings (materials, modifiers) are preserved.
                bm = bmesh.new()
                bm.from_mesh(mesurf)
                bm.clear()
                for co in verts.tolist():
                    bm.verts.new(co)
                bm.verts.ensure_lookup_table()
                bm.faces.ensure_lookup_table()
                # Faces are built with reversed index order ([::-1]),
                # presumably to flip the winding/normals — TODO confirm.
                for face_indices in tris.tolist() + quads.tolist():
                    bm.faces.new(
                        tuple(bm.verts[index] for index in face_indices[::-1]))
                for f in bm.faces:
                    f.smooth = True
                bm.to_mesh(mesurf)
                bm.free()
                mesurf.calc_normals()
                scn.update()
                print(' write:', time.clock() - stime, 'sec')