def toVTK(self, fileName, pointData=None, cellData=None, format='binary'):
    """Save to a VTK file.

    Parameters
    ----------
    fileName : str
        Filename to save to.
    pointData : geobipy.StatArray or list of geobipy.StatArray, optional
        Data at each node in the mesh. Each entry is saved as a separate vtk attribute.
    cellData : geobipy.StatArray or list of geobipy.StatArray, optional
        Data at each cell in the mesh. Each entry is saved as a separate vtk attribute.
    format : str, optional
        "ascii" or "binary" format. Ascii is readable, binary is not but results in smaller files.

    Raises
    ------
    TypeError
        If pointData or cellData is not a geobipy.StatArray or list of them.
    ValueError
        If any pointData (cellData) entry does not have size equal to the number of points (cells).
    ValueError
        If any StatArray does not have a name or units. This is needed for the vtk attribute.

    """
    vtk = self.vtkStructure()

    if pointData is not None:
        assert isinstance(pointData, (StatArray, list)), TypeError("pointData must be a geobipy.StatArray or a list of them.")
        nodeShape = (self.z.nEdges, self.x.nEdges)
        if isinstance(pointData, list):
            for p in pointData:
                assert isinstance(p, StatArray), TypeError("pointData entries must be a geobipy.StatArray")
                assert p.shape == nodeShape, ValueError("pointData entries must have shape {}".format(nodeShape))
                assert p.hasLabels(), ValueError("StatArray needs a name")
                vtk.point_data.append(Scalars(p.reshape(self.nNodes), p.getNameUnits()))
        else:
            assert pointData.shape == nodeShape, ValueError("pointData entries must have shape {}".format(nodeShape))
            assert pointData.hasLabels(), ValueError("StatArray needs a name")
            vtk.point_data.append(Scalars(pointData.reshape(self.nNodes), pointData.getNameUnits()))

    if cellData is not None:
        assert isinstance(cellData, (StatArray, list)), TypeError("cellData must be a geobipy.StatArray or a list of them.")
        if isinstance(cellData, list):
            for p in cellData:
                assert isinstance(p, StatArray), TypeError("cellData entries must be a geobipy.StatArray")
                assert tuple(p.shape) == tuple(self.dims), ValueError("cellData entries must have shape {}".format(self.dims))
                assert p.hasLabels(), ValueError("StatArray needs a name")
                vtk.cell_data.append(Scalars(p.reshape(self.nCells), p.getNameUnits()))
        else:
            assert tuple(cellData.shape) == tuple(self.dims), ValueError("cellData entries must have shape {}".format(self.dims))
            assert cellData.hasLabels(), ValueError("StatArray needs a name")
            vtk.cell_data.append(Scalars(cellData.reshape(self.nCells), cellData.getNameUnits()))

    vtk.tofile(fileName, format)

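# Hedged usage sketch for the mesh toVTK() above. It assumes an existing
# geobipy 2D rectilinear mesh instance named `mesh`, and that StatArray can be
# built as StatArray(values, name, units); both are assumptions, not verified
# geobipy API.
import numpy as np
from geobipy import StatArray

conductivity = StatArray(np.random.rand(*mesh.dims), "Conductivity", "S/m")
mesh.toVTK("mesh_with_cells.vtk", cellData=conductivity, format="ascii")
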
def toVTK(self, fileName, pointData=None, format='binary'):
    """Save the PointCloud3D to a VTK file.

    Parameters
    ----------
    fileName : str
        Filename to save to.
    pointData : geobipy.StatArray or list of geobipy.StatArray, optional
        Data at each point in the point cloud. Each entry is saved as a separate vtk attribute.
    format : str, optional
        "ascii" or "binary" format. Ascii is readable, binary is not but results in smaller files.

    Raises
    ------
    TypeError
        If pointData is not a geobipy.StatArray or list of them.
    ValueError
        If any pointData entry does not have size equal to the number of points.
    ValueError
        If any StatArray does not have a name or units. This is needed for the vtk attribute.

    """
    vtk = self.vtkStructure()

    if pointData is not None:
        assert isinstance(pointData, (StatArray.StatArray, list)), TypeError("pointData must be a geobipy.StatArray or a list of them.")
        if isinstance(pointData, list):
            for p in pointData:
                assert isinstance(p, StatArray.StatArray), TypeError("pointData entries must be a geobipy.StatArray")
                assert p.size == self.nPoints, ValueError("pointData entries must have size {}".format(self.nPoints))
                assert p.hasLabels(), ValueError("StatArray needs a name")
                vtk.point_data.append(Scalars(p, p.getNameUnits()))
        else:
            assert pointData.size == self.nPoints, ValueError("pointData entries must have size {}".format(self.nPoints))
            assert pointData.hasLabels(), ValueError("StatArray needs a name")
            vtk.point_data.append(Scalars(pointData, pointData.getNameUnits()))

    vtk.tofile(fileName, format=format)

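# Hedged usage sketch for the point-cloud toVTK() above. It assumes an
# existing geobipy PointCloud3D instance named `cloud`; the StatArray call
# signature (values, name, units) is likewise an assumption.
import numpy as np
from geobipy import StatArray

elevation = StatArray(np.random.rand(cloud.nPoints), "Elevation", "m")
cloud.toVTK("cloud_with_data.vtk", pointData=elevation, format="binary")
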
import sys
from datetime import datetime

from numpy import arange, asarray
from pyvtk import CellData, Scalars, UnstructuredGrid, VtkData


def _write_legacy_vtu(x, fname):
    """
    Write a legacy VTK unstructured grid file.

    """
    # Voxel local points relative to its centre of geometry:
    voxel_local_points = asarray([[-1, -1, -1], [1, -1, -1], [-1, 1, -1], [1, 1, -1],
                                  [-1, -1, 1], [1, -1, 1], [-1, 1, 1], [1, 1, 1]]) \
        * 0.5  # scaling
    # Voxel world points:
    points = []
    # Culled input array -- as list:
    xculled = []
    try:
        depth, rows, columns = x.shape
    except ValueError:
        sys.exit('Array dimensions not equal to 3, possibly 2-dimensional.\n')
    # Keep only voxels whose value exceeds the module-level THRESHOLD:
    for i in range(depth):
        for j in range(rows):
            for k in range(columns):
                if x[i, j, k] > THRESHOLD:
                    xculled.append(x[i, j, k])
                    points += (voxel_local_points + [k, j, i]).tolist()
    voxels = arange(len(points)).reshape(len(xculled), 8).tolist()
    topology = UnstructuredGrid(points, voxel=voxels)
    file_header = 'ToPy data, created ' + str(datetime.now()).rsplit('.')[0]
    scalars = CellData(Scalars(xculled, name='Densities', lookup_table='default'))
    vtk = VtkData(topology, file_header, scalars)
    vtk.tofile(fname, 'binary')

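# Hedged usage sketch for _write_legacy_vtu() above: a small random density
# volume, with THRESHOLD assumed to be defined at module level (e.g. 0.5).
import numpy as np

densities = np.random.rand(4, 5, 6)  # (depth, rows, columns)
_write_legacy_vtu(densities, 'densities.vtk')
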
def save_vtk(fname, points, colors):
    """N.B.: Paraview is a good VTK viewer, which supports ray-tracing."""
    # Treat every point as a VTK vertex cell:
    structure = PolyData(points=points, vertices=np.arange(len(points)))
    values = PointData(Scalars(colors, name="colors"))
    vtk = VtkData(structure, values)
    # `folder` is a module-level output directory prefix.
    vtk.tofile(folder + fname, "binary")

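# Hedged usage sketch for save_vtk() above; `folder` is assumed to already be
# set to an existing output directory ending in a path separator.
import numpy as np

pts = np.random.rand(50, 3)   # 50 random 3D points
vals = np.random.rand(50)     # one scalar "color" per point
save_vtk("colored_cloud.vtk", pts, vals)
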
def add_data(self, vtkfile, scalars=[], vectors=[], scale_factor=1):
    from pyvtk import Scalars, Vectors
    vtkfile.pointdata.extend(
        Scalars(np.array(scale_factor * field), name=name, lookup_table="default")
        for name, field in scalars)
    vtkfile.pointdata.extend(
        Vectors([_three_vector(scale_factor * v) for v in zip(field)], name=name)
        for name, field in vectors)

def vtkStructure(self):
    """Generates a vtk mesh structure that can be used in a vtk file.

    Returns
    -------
    out : pyvtk.VtkData
        Vtk data structure.

    """
    nodes = np.vstack([self.x, self.y, self.z]).T
    vtk = VtkData(UnstructuredGrid(nodes, vertex=np.arange(self._nPoints)))
    vtk.point_data.append(Scalars(self.z, self.z.getNameUnits()))
    return vtk

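# Hedged usage sketch for vtkStructure() above, assuming `cloud` is an
# instance of the class this method belongs to (a geobipy point cloud).
vtk = cloud.vtkStructure()
vtk.tofile("cloud_structure.vtk", "ascii")
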
def patched_scalars_fromfile(f, n, sl):
    # Header line tokens: SCALARS <dataName> <dataType> [numComp]
    dataname = sl[0]
    datatype = sl[1].lower()
    assert datatype in [
        'bit', 'unsigned_char', 'char', 'unsigned_short', 'short',
        'unsigned_int', 'int', 'unsigned_long', 'long', 'float', 'double'
    ], repr(datatype)
    if len(sl) > 2:
        numcomp = eval(sl[2])
    else:
        numcomp = 1
    # Next line must be: LOOKUP_TABLE <tableName>
    l = common._getline(f)
    l = l.split()
    assert len(l) == 2 and l[0].lower().decode('UTF-8') == 'lookup_table'
    tablename = l[1].decode('UTF-8')
    # Read lines of values until n scalars have been collected:
    scalars = []
    while len(scalars) < n:
        scalars += list(map(eval, common._getline(f).split()))
    assert len(scalars) == n
    return Scalars(scalars, dataname, tablename)

def addToVTK(self, vtk, prop=['data', 'predicted', 'std'], system=None):
    """Adds a member to a VTK handle.

    Parameters
    ----------
    vtk : pyvtk.VtkData
        vtk handle returned from self.vtkStructure()
    prop : str or list of str, optional
        List of the members to add to the VTK handle, either "data", "predicted", or "std".
    system : int, optional
        The system for which to add the data.

    """
    if isinstance(prop, str):
        prop = [prop]

    for p in prop:
        assert p in ['data', 'predicted', 'std'], ValueError("prop must be either 'data', 'predicted' or 'std'.")
        if p == "data":
            tmp = self.data
        elif p == "predicted":
            tmp = self.predictedData
        elif p == "std":
            tmp = self.std

        if system is None:
            r = range(self.nChannels)
        else:
            assert system < self.nSystems, ValueError("system must be < nSystems {}".format(self.nSystems))
            r = range(self._systemOffset[system], self._systemOffset[system + 1])

        for i in r:
            vtk.point_data.append(Scalars(tmp[:, i], "{} {}".format(self.channelNames[i], tmp.getNameUnits())))

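# Hedged usage sketch for addToVTK() above, assuming `dataset` is an instance
# of the geobipy data class that owns both vtkStructure() and addToVTK().
vtk = dataset.vtkStructure()
dataset.addToVTK(vtk, prop=['data', 'std'], system=0)
vtk.tofile("dataset.vtk", "binary")
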
def write_vtk(filename, points, tetra, vals, name=None):
    """Writes a vtk file from the given set of grid locations, values and connectivity.

    :param filename: path of the output .vtk file
    :param points: an ndarray of all the grid locations in cartesian coordinates,
        one row per point: [[x0, y0, z0], ..., [xn, yn, zn]]
    :param tetra: connectivity of the mesh (one row of four point indices per tetrahedron)
    :param vals: an array containing the values to be specified on the mesh
    :param name: name of the scalar attribute written to the file
    """
    import pyvtk
    from pyvtk import PointData, Scalars

    vtkElements = pyvtk.VtkData(
        pyvtk.UnstructuredGrid(points, tetra=tetra),
        PointData(Scalars(vals, name)),
        "Mesh")
    vtkElements.tofile(filename)

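# Hedged usage sketch for write_vtk() above: a single tetrahedron with one
# scalar value per vertex.
import numpy as np

pts = np.array([[0.0, 0.0, 0.0],
                [1.0, 0.0, 0.0],
                [0.0, 1.0, 0.0],
                [0.0, 0.0, 1.0]])
connectivity = [[0, 1, 2, 3]]
values = [0.0, 1.0, 2.0, 3.0]
write_vtk("single_tet.vtk", pts, connectivity, values, name="example_values")
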
def runner(parser, options, args):
    if not hasattr(parser, 'runner'):
        options.output_path = None

    if args:
        if len(args) == 1:
            if options.input_path:
                print >> sys.stderr, "WARNING: overwriting input path %r with %r" % (options.input_path, args[0])
            options.input_path = args[0]
        elif len(args) == 2:
            if options.input_path:
                print >> sys.stderr, "WARNING: overwriting input path %r with %r" % (options.input_path, args[0])
            options.input_path = args[0]
            if options.output_path:
                print >> sys.stderr, "WARNING: overwriting output path %r with %r" % (options.output_path, args[1])
            options.output_path = args[1]
        else:
            parser.error("Incorrect number of arguments (expected up to 2 but got %s)" % (len(args)))

    if options.input_path is None:
        parser.error('Expected --input-path but got nothing')

    options.input_path = fix_path(options.input_path)
    stack = ImageStack.load(options.input_path, options=options)
    numpy_types = numpy.typeDict.values()

    if options.output_type in ['<detect>', None]:
        if str(stack.images.dtype).startswith('float'):
            output_type_name = 'float32'
        elif str(stack.images.dtype).startswith('int'):
            output_type_name = 'int32'
        elif str(stack.images.dtype).startswith('uint'):
            output_type_name = 'uint32'
        else:
            output_type_name = 'int32'
    else:
        output_type_name = options.output_type.lower()
    output_type = getattr(numpy, output_type_name, None)

    mn, mx = stack.images.min(), stack.images.max()
    print 'Input minimum and maximum: %s, %s' % (mn, mx)

    if options.scale and 'int' in output_type_name:
        tmn, tmx = get_dtype_min_max(output_type)
        new_images = (tmn + float(tmx - tmn) * (stack.images - float(mn)) / (mx - mn)).astype(output_type)
    else:
        new_images = stack.images.astype(output_type)
    print 'Output minimum and maximum: %s, %s' % (new_images.min(), new_images.max())

    output_path = options.output_path
    output_ext = options.output_ext

    if output_path is None:
        dn = os.path.dirname(options.input_path)
        bn = os.path.basename(options.input_path)
        if os.path.isfile(options.input_path):
            fn, ext = os.path.splitext(bn)
            type_part = None
            for t in numpy_types:
                if fn.endswith('_' + t.__name__):
                    type_part = t.__name__
                    break
            if type_part is None:
                output_path = os.path.join(dn, fn + '_' + output_type_name + '.' + output_ext)
            else:
                output_path = os.path.join(dn, fn[:-len(type_part)] + output_type_name + '.' + output_ext)
        elif os.path.isdir(options.input_path):
            output_path = os.path.join(dn, bn + '_' + output_type_name + '.' + output_ext)
        else:
            raise NotImplementedError('%s is not file nor directory' % (options.input_path))

    output_path = fix_path(output_path)
    print 'Saving new stack to', output_path

    if output_ext == 'tif':
        ImageStack(new_images, stack.pathinfo, options=options).save(output_path)
    elif output_ext == 'data':
        from iocbio.microscope.psf import normalize_unit_volume, discretize
        value_resolution = stack.pathinfo.get_value_resolution()
        normal_images = normalize_unit_volume(new_images, stack.get_voxel_sizes())
        discrete = discretize(new_images / value_resolution)
        signal_indices = numpy.where(discrete > 0)
        new_value_resolution = value_resolution * normal_images.max() / new_images.max()
        ImageStack(normal_images, stack.pathinfo,
                   value_resolution=new_value_resolution).save(output_path, zip(*signal_indices))
    elif output_ext == 'vtk':
        from pyvtk import VtkData, StructuredPoints, PointData, Scalars
        vtk = VtkData(StructuredPoints(new_images.shape),
                      PointData(Scalars(new_images.T.ravel())))
        vtk.tofile(output_path, 'binary')
    else:
        raise NotImplementedError(`output_ext`)

class PointData(Data):
    """
    Usage:
      PointData(<DataSetAttr instances>)
    Attributes:
      data - list of DataSetAttr instances
    Public methods:
      get_size()
      to_string(format = 'ascii')
      append(<DataSetAttr instance>)
    """
    data_type = 'POINT_DATA'


class CellData(Data):
    """
    Usage:
      CellData(<DataSetAttr instances>)
    Attributes:
      data - list of DataSetAttr instances
    Public methods:
      get_size()
      to_string(format = 'ascii')
      append(<DataSetAttr instance>)
    """
    data_type = 'CELL_DATA'


def is_pointdata(obj):
    return isinstance(obj, PointData)


def is_celldata(obj):
    return isinstance(obj, CellData)


if __name__ == "__main__":
    print(PointData(Scalars.Scalars([2, 3])))

def Scalars(self, func, name=None, lookup_table=None):
    # Evaluate `func` at every grid point and wrap the result as a Scalars attribute.
    return Scalars.Scalars([func(*p) for p in self.get_points()], name, lookup_table)

#============================================================
#- Triangulation.
#============================================================

for i in range(0, m.shape[0]):

    S = m[i, :]
    outfile = outfilename + '.' + str(i) + '.vtk'

    print('Compute Delaunay triangulation ...')

    x = 6371.0 * np.cos(lat * np.pi / 180.0) * np.cos(lon * np.pi / 180.0)
    y = 6371.0 * np.cos(lat * np.pi / 180.0) * np.sin(lon * np.pi / 180.0)
    z = 6371.0 * np.sin(lat * np.pi / 180.0)

    pts = np.array((x, y, z)).T

    mesh_info = MeshInfo()
    mesh_info.set_points(pts)
    opts = Options("Q")
    mesh = build(mesh_info, options=opts)
    elements = mesh.elements

    #============================================================
    #- Write vtk file.
    #============================================================

    print('Write vtk file ...')

    vtkElements = pyvtk.VtkData(
        pyvtk.UnstructuredGrid(pts, tetra=elements),
        PointData(Scalars(S, 'grad_PSD_ZZ')),
        "Mesh")
    vtkElements.tofile(outfile)

def _data_item(is_cell_data, data, step):
    # Select the values for this time step (one row per step).
    sd = data[step, :]
    if is_cell_data:
        return CellData(Scalars(sd, 'cell_data', lookup_table='default'))
    return PointData(Scalars(sd, 'vertex_data'))

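# Hedged usage sketch for _data_item() above: values stored as one row per
# time step.
import numpy as np

steps = np.random.rand(3, 10)              # 3 time steps, 10 values each
vertex_attr = _data_item(False, steps, 1)  # PointData for step 1
cell_attr = _data_item(True, steps, 1)     # CellData for step 1
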
def runner(parser, options, args):
    if not hasattr(parser, 'runner'):
        options.output_path = None

    if args:
        if len(args) == 1:
            if options.input_path:
                print >> sys.stderr, "WARNING: overwriting input path %r with %r" % (options.input_path, args[0])
            options.input_path = args[0]
        elif len(args) == 2:
            if options.input_path:
                print >> sys.stderr, "WARNING: overwriting input path %r with %r" % (options.input_path, args[0])
            options.input_path = args[0]
            if options.output_path:
                print >> sys.stderr, "WARNING: overwriting output path %r with %r" % (options.output_path, args[1])
            options.output_path = args[1]
        else:
            parser.error("Incorrect number of arguments (expected up to 2 but got %s)" % (len(args)))

    if options.input_path is None:
        parser.error('Expected --input-path but got nothing')

    options.input_path = fix_path(options.input_path)
    stack = ImageStack.load(options.input_path, options=options)
    numpy_types = numpy.typeDict.values()

    if options.output_type in ['<detect>', None]:
        output_type_name = stack.images.dtype.name
    else:
        output_type_name = options.output_type.lower()
    output_type = getattr(numpy, output_type_name, None)

    nof_stacks = stack.get_nof_stacks()
    old_shape = stack.images.shape
    new_shape = (nof_stacks, old_shape[0] // nof_stacks) + old_shape[1:]
    new_images = numpy.zeros(new_shape[1:], dtype=output_type_name)

    first_stack = None
    last_stack = None
    for i, stacki in enumerate(stack.images.reshape(new_shape)):
        if i == 0:
            first_stack = stacki.astype(float)
            new_images[:] = stacki
        else:
            err_first = abs(stacki - first_stack).mean()
            err_last = abs(stacki - last_stack).mean()
            print ('Stack %i: mean abs difference from first and last stack: %.3f, %.3f' % (i + 1, err_first, err_last))
            new_images += stacki
        last_stack = stacki.astype(float)

    output_path = options.output_path
    output_ext = options.output_ext

    if output_path is None:
        dn = os.path.dirname(options.input_path)
        bn = os.path.basename(options.input_path)
        if os.path.isfile(options.input_path):
            fn, ext = os.path.splitext(bn)
            fn += '_sumstacks%s' % (nof_stacks)
            type_part = None
            for t in numpy_types:
                if fn.endswith('_' + t.__name__):
                    type_part = t.__name__
                    break
            if type_part is None:
                output_path = os.path.join(dn, fn + '_' + output_type_name + '.' + output_ext)
            else:
                output_path = os.path.join(dn, fn[:-len(type_part)] + output_type_name + '.' + output_ext)
        elif os.path.isdir(options.input_path):
            bn += '_sumstacks%s' % (nof_stacks)
            output_path = os.path.join(dn, bn + '_' + output_type_name + '.' + output_ext)
        else:
            raise NotImplementedError('%s is not file nor directory' % (options.input_path))

    output_path = fix_path(output_path)
    print 'Saving new stack to', output_path

    if output_ext == 'tif':
        ImageStack(new_images, stack.pathinfo, options=options).save(output_path)
    elif output_ext == 'data':
        from iocbio.microscope.psf import normalize_unit_volume, discretize
        value_resolution = stack.pathinfo.get_value_resolution()
        normal_images = normalize_unit_volume(new_images, stack.get_voxel_sizes())
        discrete = discretize(new_images / value_resolution)
        signal_indices = numpy.where(discrete > 0)
        new_value_resolution = value_resolution * normal_images.max() / new_images.max()
        ImageStack(normal_images, stack.pathinfo,
                   value_resolution=new_value_resolution).save(output_path, zip(*signal_indices))
    elif output_ext == 'vtk':
        from pyvtk import VtkData, StructuredPoints, PointData, Scalars
        vtk = VtkData(StructuredPoints(new_images.shape),
                      PointData(Scalars(new_images.T.ravel())))
        vtk.tofile(output_path, 'binary')
    else:
        raise NotImplementedError(`output_ext`)

def toVTK(self, fName, dx, dy, mask=False, clip=False, force=False, method='ct'):
    """Convert a 3D volume of interpolated values to vtk for visualization in Paraview."""

    print('toVTK')
    self.getAttribute(xy=True, elevation=True, force=force)
    self.getMean3D(dx=dx, dy=dy, mask=mask, clip=clip, force=force, method=method)
    self.getZGrid()
    self.points.getBounds()

    x, y, intPoints = interpolation.getGridLocations2D(self.points.bounds, dx, dy)
    z = self.zGrid

    from pyvtk import VtkData, UnstructuredGrid, PointData, CellData, Scalars

    # Get the 3D dimensions
    mx = x.size
    my = y.size
    mz = z.size

    nPoints = mx * my * mz
    nCells = (mx - 1) * (my - 1) * (mz - 1)

    # Interpolate the elevation to the grid nodes
    if (method == 'ct'):
        tx, ty, vals = self.points.interpCloughTocher(self.elevation, dx=dx, dy=dy, mask=mask, clip=clip, extrapolate='nearest')
    elif (method == 'mc'):
        tx, ty, vals = self.points.interpMinimumCurvature(self.elevation, dx=dx, dy=dy, mask=mask, clip=clip)

    vals = vals[:my, :mx]
    vals = vals.reshape(mx * my)

    # Set up the nodes and voxel indices
    points = np.zeros([nPoints, 3], order='F')
    points[:, 0] = np.tile(x, my * mz)
    points[:, 1] = np.tile(y.repeat(mx), mz)
    points[:, 2] = np.tile(vals, mz) - z.repeat(mx * my)

    # Create the cell indices into the points
    p = np.arange(nPoints).reshape((mz, my, mx))
    voxels = np.zeros([nCells, 8], dtype=int)
    iCell = 0
    for k in range(mz - 1):
        k1 = k + 1
        for j in range(my - 1):
            j1 = j + 1
            for i in range(mx - 1):
                i1 = i + 1
                voxels[iCell, :] = [p[k1, j, i], p[k1, j, i1], p[k1, j1, i1], p[k1, j1, i],
                                    p[k, j, i], p[k, j, i1], p[k, j1, i1], p[k, j1, i]]
                iCell += 1

    # Create the various point data
    pointID = Scalars(np.arange(nPoints), name='Point iD')
    pointElev = Scalars(points[:, 2], name='Point Elevation (m)')

    tmp = self.mean3D.reshape(np.size(self.mean3D))
    tmp1 = np.log10(1.0 / tmp)
    pointRes = Scalars(tmp1, name='log10(Resistivity) (Ohm m)')
    tmp1 = np.log10(tmp)
    pointCon = Scalars(tmp1, name='log10(Conductivity) (S/m)')

    PData = PointData(pointID, pointElev, pointRes, pointCon)
    CData = CellData(Scalars(np.arange(nCells), name='Cell iD'))

    vtk = VtkData(
        UnstructuredGrid(points, hexahedron=voxels),
        PData,
        CData,
        'Some Name')

    vtk.tofile(fName, 'ascii')

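# Hedged usage sketch for the volume toVTK() above, assuming `results` is the
# geobipy inversion-results object this method belongs to; cell spacings dx,
# dy are in the data's distance units.
results.toVTK('inverted_volume', dx=10.0, dy=10.0, mask=True, clip=True, method='ct')
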
# -2, -2, -2, -2, +2, -2, +2, +2, -2, +2, -2, -2, -2, -2, +2, +2, -2,
# +2, +2, +2, +2, -2, +2, +2;
#
# specfem = Specfem(interp_method='trilinear_interpolation')
# first_element = specfem.connectivity[0,:]
# element_vtcs = specfem.nodes[first_element]
#
# permutation = [0,3,2,1,4,5,6,7]  # looks like this is the correct one
# i = np.argsort(permutation)
# element_perturbed = first_element[i]
# element_vtcs = specfem.nodes[element_perturbed]
#
# #pnt = np.mean(element_vtcs, axis=0)
# pnt = (element_vtcs[4,:] + element_vtcs[6,:]) / 2
# solution = tlp.check_hull(pnt, element_vtcs)
# print(solution)
# print(tlp.interpolate_at_point(solution[1]))
#

import pyvtk
from pyvtk import PointData, Scalars

vtkElements = pyvtk.VtkData(
    pyvtk.UnstructuredGrid(specfem.nodes, hexahedron=specfem.connectivity),
    PointData(Scalars(grid_data.get_component('vsv'), 'node_number')),
    "Mesh")
vtkElements.tofile('specfem_mesh.vtk')

def save_vtk(
    fname,
    xyz,
    triangles=None,
    values=None,
    vectors=None,
    triangle_values=None,
):
    """Saves a point cloud or triangle mesh as a .vtk file.

    Files can be opened with Paraview or displayed using the PyVista library.

    Args:
        fname (string): filename.
        xyz (Tensor): (N,3) point cloud or vertices.
        triangles (integer Tensor, optional): (T,3) mesh connectivity. Defaults to None.
        values (Tensor, optional): (N,D) values, supported by the vertices. Defaults to None.
        vectors (Tensor, optional): (N,3) vectors, supported by the vertices. Defaults to None.
        triangle_values (Tensor, optional): (T,D) values, supported by the triangles. Defaults to None.
    """

    # Encode the points/vertices as a VTK structure:
    if triangles is None:  # Point cloud
        structure = PolyData(points=numpy(xyz), vertices=np.arange(len(xyz)))
    else:  # Surface mesh
        structure = PolyData(points=numpy(xyz), polygons=numpy(triangles))

    data = [structure]
    pointdata, celldata = [], []

    # Point values - one channel per column of the `values` array:
    if values is not None:
        values = numpy(values)
        if len(values.shape) == 1:
            values = values[:, None]
        features = values.T
        pointdata += [
            Scalars(f, name=f"features_{i:02d}") for i, f in enumerate(features)
        ]

    # Point vectors - one vector per point:
    if vectors is not None:
        pointdata += [Vectors(numpy(vectors), name="vectors")]

    # Store in the VTK object:
    if pointdata != []:
        pointdata = PointData(*pointdata)
        data.append(pointdata)

    # Triangle values - one channel per column of the `triangle_values` array:
    if triangle_values is not None:
        triangle_values = numpy(triangle_values)
        if len(triangle_values.shape) == 1:
            triangle_values = triangle_values[:, None]
        features = triangle_values.T
        celldata += [
            Scalars(f, name=f"features_{i:02d}") for i, f in enumerate(features)
        ]
        celldata = CellData(*celldata)
        data.append(celldata)

    # Write to hard drive:
    vtk = VtkData(*data)
    os.makedirs(os.path.dirname(fname), exist_ok=True)
    vtk.tofile(fname)

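# Hedged usage sketch for save_vtk() above; it assumes torch is available and
# that `numpy` inside that function is a helper converting Tensors to NumPy
# arrays (e.g. lambda t: t.detach().cpu().numpy()).
import torch

xyz = torch.rand(100, 3)    # random point cloud
feats = torch.rand(100, 2)  # two feature channels per point
save_vtk("output/cloud.vtk", xyz, values=feats)
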