def _test(self, fname):
    reader = vtk.vtkXMLUnstructuredGridReader()
    reader.SetFileName(VTK_DATA_ROOT + fname)

    elev = vtk.vtkElevationFilter()
    elev.SetInputConnection(reader.GetOutputPort())
    elev.SetLowPoint(-0.05, 0.05, 0)
    elev.SetHighPoint(0.05, 0.05, 0)

    grad = vtk.vtkGradientFilter()
    grad.SetInputConnection(elev.GetOutputPort())
    grad.SetInputArrayToProcess(0, 0, 0,
                                vtk.vtkDataObject.FIELD_ASSOCIATION_POINTS,
                                "Elevation")
    grad.Update()

    vals = (10, 0, 0)
    for i in range(3):
        r = grad.GetOutput().GetPointData().GetArray("Gradients").GetRange(i)
        self.assertTrue(abs(r[0] - vals[i]) < 1E-4)
        self.assertTrue(abs(r[1] - vals[i]) < 1E-4)

    elev.SetLowPoint(0.05, -0.05, 0)
    elev.SetHighPoint(0.05, 0.05, 0)
    grad.Update()

    vals = (0, 10, 0)
    for i in range(3):
        r = grad.GetOutput().GetPointData().GetArray("Gradients").GetRange(i)
        self.assertTrue(abs(r[0] - vals[i]) < 1E-4)
        self.assertTrue(abs(r[1] - vals[i]) < 1E-4)
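# Note on the call used throughout these snippets: the five positional
# arguments of SetInputArrayToProcess are (array index, input port, input
# connection, field association, array name). Below is a minimal,
# self-contained sketch of the same elevation-then-gradient pipeline on a
# synthetic source; the sphere and the printed component are illustrative
# additions, not part of the test above.
import vtk

sphere = vtk.vtkSphereSource()

elev = vtk.vtkElevationFilter()
elev.SetInputConnection(sphere.GetOutputPort())
elev.SetLowPoint(0, 0, -0.5)
elev.SetHighPoint(0, 0, 0.5)

grad = vtk.vtkGradientFilter()
grad.SetInputConnection(elev.GetOutputPort())
# "Elevation" is vtkElevationFilter's default output array name
grad.SetInputArrayToProcess(0, 0, 0,
                            vtk.vtkDataObject.FIELD_ASSOCIATION_POINTS,
                            "Elevation")
grad.Update()

# z-component range of the computed point-data "Gradients" array
print(grad.GetOutput().GetPointData().GetArray("Gradients").GetRange(2))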
def __init__(self, module_manager):
    SimpleVTKClassModuleBase.__init__(
        self, module_manager, vtk.vtkGradientFilter(),
        'Processing.',
        ('vtkDataSet',), ('vtkDataSet',),
        replaceDoc=True,
        inputFunctions=None, outputFunctions=None)
def test_contours(self):
    cell = vtk.vtkUnstructuredGrid()
    cell.ShallowCopy(self.Cell)

    np = self.Cell.GetNumberOfPoints()
    ncomb = pow(2, np)

    scalar = vtk.vtkDoubleArray()
    scalar.SetName("scalar")
    scalar.SetNumberOfTuples(np)
    cell.GetPointData().SetScalars(scalar)

    incorrectCases = []
    for i in range(1, ncomb - 1):
        c = Combination(np, i)
        for p in range(np):
            scalar.SetTuple1(p, c[p])

        gradientFilter = vtk.vtkGradientFilter()
        gradientFilter.SetInputData(cell)
        gradientFilter.SetInputArrayToProcess(0, 0, 0, 0, 'scalar')
        gradientFilter.SetResultArrayName('grad')
        gradientFilter.Update()

        contourFilter = vtk.vtkContourFilter()
        contourFilter.SetInputConnection(gradientFilter.GetOutputPort())
        contourFilter.SetNumberOfContours(1)
        contourFilter.SetValue(0, 0.5)
        contourFilter.Update()

        normalsFilter = vtk.vtkPolyDataNormals()
        normalsFilter.SetInputConnection(contourFilter.GetOutputPort())
        normalsFilter.SetConsistency(0)
        normalsFilter.SetFlipNormals(0)
        normalsFilter.SetSplitting(0)

        calcFilter = vtk.vtkArrayCalculator()
        calcFilter.SetInputConnection(normalsFilter.GetOutputPort())
        calcFilter.SetAttributeTypeToPointData()
        calcFilter.AddVectorArrayName('grad')
        calcFilter.AddVectorArrayName('Normals')
        calcFilter.SetResultArrayName('dir')
        calcFilter.SetFunction('dot(grad,Normals)')
        calcFilter.Update()

        out = vtk.vtkUnstructuredGrid()
        out.ShallowCopy(calcFilter.GetOutput())

        numPts = out.GetNumberOfPoints()
        if numPts > 0:
            dirArray = out.GetPointData().GetArray('dir')
            for p in range(numPts):
                if dirArray.GetTuple1(p) > 0.0:  # all normals are reversed
                    incorrectCases.append(i)
                    break

    self.assertEqual(','.join([str(i) for i in incorrectCases]), '')
def gradient(polydata, inputarray, outputarray, iscelldata=False):
    """Compute the gradient of a scalar or vector field."""
    gradients = vtk.vtkGradientFilter()
    gradients.SetInput(polydata)  # VTK 5.x pipeline API
    fieldassociation = 1 if iscelldata else 0  # 0 = point data, 1 = cell data
    gradients.SetInputScalars(fieldassociation, inputarray)
    gradients.SetResultArrayName(outputarray)
    gradients.Update()
    return gradients.GetOutput()
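# The gradient() helper above targets the VTK 5 pipeline API (SetInput plus the
# SetInputScalars convenience call). A rough sketch of a VTK 6+ equivalent,
# assuming nothing beyond standard VTK; the function name is illustrative.
import vtk

def gradient_modern(dataset, inputarray, outputarray, iscelldata=False):
    """Return a copy of dataset with the gradient of inputarray attached."""
    assoc = (vtk.vtkDataObject.FIELD_ASSOCIATION_CELLS if iscelldata
             else vtk.vtkDataObject.FIELD_ASSOCIATION_POINTS)
    grad = vtk.vtkGradientFilter()
    grad.SetInputData(dataset)  # replaces the removed SetInput()
    grad.SetInputArrayToProcess(0, 0, 0, assoc, inputarray)
    grad.SetResultArrayName(outputarray)
    grad.Update()
    return grad.GetOutput()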
def computeVarGradient(data_outVTK, var):
    # test if the field "var" is present
    if data_outVTK.GetPointData().HasArray(var) != 1:
        raise ValueError("Error: field %s not present" % var)

    gradientFilter = vtk.vtkGradientFilter()
    gradientFilter.SetInputData(data_outVTK)
    gradientFilter.SetInputArrayToProcess(0, 0, 0, 0, var)
    gradientFilter.SetResultArrayName('GRAD_%s' % var)
    gradientFilter.Update()

    data_outVTK = gradientFilter.GetOutput()
    return data_outVTK
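# Illustrative call of the helper above; the file name and the array name are
# assumptions, not taken from the original script.
reader = vtk.vtkXMLUnstructuredGridReader()
reader.SetFileName("solution.vtu")
reader.Update()

with_grad = computeVarGradient(reader.GetOutput(), "PRESSURE")
# the gradient of a scalar field has 3 components
print(with_grad.GetPointData().GetArray("GRAD_PRESSURE").GetNumberOfComponents())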
def vtk_compute_normal_gradients(cell_normals, use_faster_approximation=False):
    """
    Compute gradients of the normals

    Args:
        cell_normals (vtkPolyData): Surface to compute normals on
        use_faster_approximation (bool): Use a less accurate algorithm that
            performs fewer calculations, but is faster.
    """
    gradient_filter = vtk.vtkGradientFilter()
    gradient_filter.SetInputData(cell_normals)
    # 1 = FIELD_ASSOCIATION_CELLS: "Normals" lives on the cell data
    gradient_filter.SetInputArrayToProcess(0, 0, 0, 1, "Normals")
    if use_faster_approximation:
        gradient_filter.FasterApproximationOn()

    gradient_filter.Update()
    gradients = gradient_filter.GetOutput()

    return gradients
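# A minimal way to exercise the helper above, assuming only standard VTK; the
# sphere source and variable names are illustrative, not part of the original.
sphere = vtk.vtkSphereSource()

normals = vtk.vtkPolyDataNormals()
normals.SetInputConnection(sphere.GetOutputPort())
normals.ComputePointNormalsOff()
normals.ComputeCellNormalsOn()  # the helper selects a cell-data array named "Normals"
normals.Update()

gradients = vtk_compute_normal_gradients(normals.GetOutput())
print(gradients.GetCellData().GetArray("Gradients"))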
def buildPipeline(self):
    """
    execute() -> None
    Dispatch the vtkRenderer to the actual rendering widget
    """
    print(" Gradient.execute, input Arrays: ")
    pointData = self.input().GetPointData()
    for iA in range(pointData.GetNumberOfArrays()):
        array_name = pointData.GetArrayName(iA)
        array = pointData.GetArray(iA)
        print(" Array %s: ntup = %d, ncomp = %d, type = %s, range = %s " % (
            array_name, array.GetNumberOfTuples(), array.GetNumberOfComponents(),
            array.GetDataTypeAsString(), str(array.GetRange())))

    computeVorticity = wmod.forceGetInputFromPort("computeVorticity", 1)
    self.gradient = vtk.vtkGradientFilter()
    self.gradient.SetComputeVorticity(computeVorticity)
    self.inputModule().inputToAlgorithm(self.gradient)
    if computeVorticity:
        self.gradient.SetResultArrayName('vorticity')
    self.set3DOutput(output=self.gradient.GetOutput())
def compute_vorticity(dataset, scalars, vorticity_name='vorticity'):
    """(DEPRECATED) Compute vorticity, only needed till my PR gets merged

    .. deprecated::
        Use the `compute_derivative` method in pyvista's `UnstructuredGrid` class
    """
    warnings.warn(
        "This function is deprecated. Use the 'compute_derivative'"
        " method in pyvista's 'UnstructuredGrid' class", FutureWarning)
    alg = vtk.vtkGradientFilter()
    alg.SetComputeVorticity(True)
    alg.SetVorticityArrayName(vorticity_name)
    _, field = dataset.get_array(scalars, preference='point', info=True)
    # args: (idx, port, connection, field, name)
    alg.SetInputArrayToProcess(0, 0, 0, field.value, scalars)
    alg.SetInputData(dataset)
    alg.Update()
    return pv.filters._get_output(alg)
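# Hedged sketch of the non-deprecated path named in the warning above; the
# keyword names are assumptions and may differ across pyvista versions.
import numpy as np
import pyvista as pv

mesh = pv.Sphere()
mesh["velocity"] = np.random.rand(mesh.n_points, 3)  # illustrative vector field
derived = mesh.compute_derivative(scalars="velocity", vorticity=True)
print(derived.array_names)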
mesh_streeter = rd1.GetOutput()

print("Reading another mesh")
rd2 = vtk.vtkDataSetReader()
rd2.SetFileName(mesh_temp_longitudinal)
rd2.Update()

print("Reading yet another mesh")
rd3 = vtk.vtkDataSetReader()
rd3.SetFileName(mesh_complete)
rd3.Update()
mesh_full = rd3.GetOutput()

print("Gradient filter")
grad_filter = vtk.vtkGradientFilter()
grad_filter.SetInputData(rd2.GetOutput())
grad_filter.SetInputArrayToProcess(0, 0, 0,
                                   vtk.vtkDataObject.FIELD_ASSOCIATION_POINTS,
                                   'temperature')
grad_filter.Update()
grad_mesh = grad_filter.GetOutput()
grad = grad_mesh.GetPointData().GetArray('Gradients')

fibers_full = vtk.vtkFloatArray()
fibers_full.SetName('Fibers')
fibers_full.SetNumberOfComponents(3)
fibers_full.SetNumberOfTuples(mesh_full.GetNumberOfPoints())
fibers_mask = numpy.zeros(mesh_full.GetNumberOfPoints(), dtype=numpy.int8)
def vis2hdf5_nosurface(fname, step_start, step_stop, step_step, mesh_fname): # Command line arguments # surface_fname = sys.argv[6] compute_lambdatwo = 1 comm = mpi4py.MPI.COMM_WORLD rank = comm.Get_rank() def printRankZero(message): if rank == 0: print(message) nprocs = comm.Get_size() nsteps = old_div((step_stop-step_start+step_step),step_step) nstepsperproc = old_div(nsteps,nprocs) rest = nsteps % nprocs if rest : nstepsperproc += 1 curstep_start = step_start mesh_reader = vtk.vtkXMLUnstructuredGridReader() mesh_reader.SetFileName(mesh_fname) mesh_reader.Update() # surface_reader = vtk.vtkXMLPolyDataReader() # surface_reader.SetFileName(surface_fname) # surface_reader.Update() # surface_npts = surface_reader.GetOutput().GetNumberOfPoints() npts = mesh_reader.GetOutput().GetNumberOfPoints() ncells = mesh_reader.GetOutput().GetNumberOfCells() # Open results file and in dictionary declare computed fields (name:n_of_components) printRankZero("Preparing hdf5 file...") fout = h5py.File("%s.h5" % fname,'w',driver='mpio',comm=comm) groups = {"pressure":1, "velocity":3, "wall shear stress":3, "traction":3, "displacement":3, "frobenius":1, "lambda two":1, } # Create groups and datasets # Creating group and datasets are collective operations, all processes need to do it for group in groups: fout.create_group(group) for i in range(step_start,step_stop+step_step,step_step): fout[group].create_dataset("%d" % i, (npts,groups[group]),dtype='float64') # Create Mesh topology and coordinates printRankZero("Adding mesh data to hdf5 file...") fout.create_group("Mesh") fout["Mesh"].create_dataset("topology", (ncells,4), dtype='int64') fout["Mesh"].create_dataset("coordinates", (npts,3),dtype='float64') curnsteps=nstepsperproc if rank >= rest and rest: curnsteps -= 1 curstep_start += rest*(curnsteps+1)*step_step+(rank-rest)*curnsteps*step_step else: curstep_start += rank*curnsteps*step_step tmp_tawss = np.zeros(npts) tmp_wssint = np.zeros((npts,3)) tmp_lambdatwo = np.zeros((npts,1)) tmp_frobenius = np.zeros((npts,1)) ######################################################### ####### PARSE VIS FILES ################################# ######################################################### for i in tqdm.trange(curstep_start,curstep_start+curnsteps*step_step,step_step): currentVisFileName = "%s-%d.vis" % (fname,i) tqdm.tqdm.write("Rank {} Reading file {}".format(rank, currentVisFileName)) fin = open(currentVisFileName,'r') while True: # Get Group Name for line in fin: line_split = line.split('"') if (" analysis results " in line_split) : break if line == '\n' : break # Reached end of file" group_name = line_split[-2] if group_name not in fout: tqdm.tqdm.write("ERROR: GROUP '%s' IS NOT ON YOUR LIST" % group_name) # Determine size of vector and initialize line_split = (next(fin)).split() npts = int(line_split[-1]) for line in fin: line_split = line.split() if "length" in line_split: break ncols = int(line_split[-1]) tmp_vec = np.zeros((npts,ncols)) # Get to the data for line in fin: line_split = line.split() if "data" in line_split: break #for j in xrange(npts): #tmp_vec[j] = [float(x) for x in fin.next().split()] tmp_vec = np.genfromtxt(islice(fin,None,npts)) fout["/%s/%d" % (group_name, i)][:] = np.reshape(tmp_vec,(npts,ncols)) if group_name == "wall shear stress": tmp_tawss += np.linalg.norm(tmp_vec,axis=1) tmp_wssint += tmp_vec if group_name == "velocity" and compute_lambdatwo==1: # Compute gradient of velocity tmp_vec_vtk = numpy_support.numpy_to_vtk(tmp_vec) tmp_vec_vtk.SetName("tmp_velocity") 
mesh_reader.GetOutput().GetPointData().AddArray(tmp_vec_vtk) gradient_filter = vtk.vtkGradientFilter() gradient_filter.SetInputConnection(mesh_reader.GetOutputPort()) gradient_filter.SetInputScalars(0,"tmp_velocity") gradient_filter.SetResultArrayName("tmp_velocity_gradient") gradient_filter.Update() tmp_vel_gradient = numpy_support.vtk_to_numpy(gradient_filter.GetOutput().GetPointData().GetArray("tmp_velocity_gradient")) tmp_vel_gradient = np.reshape(tmp_vel_gradient, (npts,3,3)) tmp_vel_gradient_t = np.transpose(tmp_vel_gradient, (0,2,1)) tmp_S = 0.5*(tmp_vel_gradient+tmp_vel_gradient_t) tmp_W = 0.5*(tmp_vel_gradient-tmp_vel_gradient_t) tmp_frobenius[:] = np.reshape(np.linalg.norm(tmp_S, axis=(1,2)),(npts,1)) tmp_S2 = np.einsum('abj,ajc->abc',tmp_S,tmp_S) tmp_W2 = np.einsum('abj,ajc->abc',tmp_W,tmp_W) eigvals = np.linalg.eigvals(tmp_S2+tmp_W2) eigvals = np.sort(eigvals,axis=1) tmp_lambdatwo = eigvals[:,1] fout["/lambda two/%d" % (i,)][:] = np.reshape(tmp_lambdatwo,(npts,1)) fout["/frobenius/%d" % (i,)][:] = tmp_frobenius fin.close() ####### END OF PARSING ################################# printRankZero("Finished reading vis files.") # Average indices TAWSS and OSI can be conveniently computed through reduction operations if rank == 0: total_tawss = np.zeros(npts) total_wssint = np.zeros((npts,3)) else: total_tawss = None total_wssint = None comm.Reduce([tmp_tawss, mpi4py.MPI.DOUBLE], [total_tawss, mpi4py.MPI.DOUBLE], op = mpi4py.MPI.SUM, root = 0) comm.Reduce([tmp_wssint, mpi4py.MPI.DOUBLE], [total_wssint, mpi4py.MPI.DOUBLE], op = mpi4py.MPI.SUM, root = 0) printRankZero("Adding TAWSS and OSI data to hdf5 file...") fout.create_group("TAWSS") fout["TAWSS"].create_dataset("0", (npts,),dtype='float64') fout.create_group("WSSINT") fout["WSSINT"].create_dataset("0",(npts,3),dtype='float64') fout.create_group("OSI") fout["OSI"].create_dataset("0",(npts,),dtype='float64') if rank == 0: # Also add TAWSS and OSI to HDF5 file total_tawss /= nsteps total_wssint /= nsteps idx_ok = np.where(total_tawss>1e-6) total_wssint_norm = np.linalg.norm(total_wssint,axis=1) total_osi = np.zeros(npts) total_osi[idx_ok] = 0.5*(1.0-old_div(total_wssint_norm[idx_ok],total_tawss[idx_ok])) fout["/TAWSS/0"][:] = total_tawss fout["/OSI/0"][:] = total_osi fout["/WSSINT/0"][:] = total_wssint pts = numpy_support.vtk_to_numpy(mesh_reader.GetOutput().GetPoints().GetData()) cells = numpy_support.vtk_to_numpy(mesh_reader.GetOutput().GetCells().GetData()) cells = np.reshape(cells, (ncells,5)) cells = cells[:,1:] fout["/Mesh/coordinates"][:] = pts fout["/Mesh/topology"][:] = cells mesh_locator = vtk.vtkPointLocator() mesh_locator.SetDataSet(mesh_reader.GetOutput()) mesh_locator.BuildLocator() # map_surface_to_mesh = np.zeros(surface_npts, dtype=np.int64) # for i in xrange(surface_npts): # pt = surface_reader.GetOutput().GetPoint(i) # closept = mesh_locator.FindClosestPoint(pt) # map_surface_to_mesh[i] = closept # tawss_surface = total_tawss[map_surface_to_mesh] # osi_surface = total_osi[map_surface_to_mesh] # wssint_surface = total_wssint[map_surface_to_mesh] # tawss_surface_vtk = numpy_support.numpy_to_vtk(tawss_surface) # tawss_surface_vtk.SetName("TAWSS") # osi_surface_vtk = numpy_support.numpy_to_vtk(osi_surface) # osi_surface_vtk.SetName("OSI") # wssint_surface_vtk = numpy_support.numpy_to_vtk(wssint_surface) # wssint_surface_vtk.SetName("WSSINT") # Save the WSS based indices in VTK polydata format # surface_reader.GetOutput().GetPointData().AddArray(tawss_surface_vtk) # 
surface_reader.GetOutput().GetPointData().AddArray(osi_surface_vtk) # surface_reader.GetOutput().GetPointData().AddArray(wssint_surface_vtk) # surface_writer = vtk.vtkXMLPolyDataWriter() # surface_writer.SetInputConnection(surface_reader.GetOutputPort()) # surface_writer.SetFileName("%s-wss.vtp" %fname ) # surface_writer.Update() # Write XDMF File for visualization in Paraview xdmf_out = open("%s.xdmf" % fname, 'w') # Header xdmf_out.write("""<?xml version="1.0"?> <Xdmf Version="2.0" xmlns:xi="http://www.w3.org/2001/XInclude"> <Domain> <Grid Name="TimeSeries" GridType="Collection" CollectionType="Temporal"> <Time TimeType="List">\n""") # Line of timesteps timesteps_str = ' '.join(str(i) for i in range(step_start,step_stop+step_step,step_step)) xdmf_out.write('<DataItem Format="XML" Dimensions="%d">%s</DataItem>\n</Time>' %(nsteps,timesteps_str) ) # For each timestep point to grid topology and geometry, and attributes for i in range(step_start,step_stop+step_step,step_step): xdmf_out.write('<Grid Name="grid_%d" GridType="Uniform">\n' % i) xdmf_out.write('<Topology NumberOfElements="%d" TopologyType="Tetrahedron">\n' % ncells) xdmf_out.write('<DataItem Format="HDF" Dimensions="%d 4">%s.h5:/Mesh/topology</DataItem>\n' % (ncells,fname)) xdmf_out.write('</Topology>\n<Geometry GeometryType="XYZ">\n') xdmf_out.write('<DataItem Format="HDF" Dimensions="%d 3">%s.h5:/Mesh/coordinates</DataItem>\n' % (npts, fname)) xdmf_out.write('</Geometry>\n') for group in groups: if groups[group] == 1: xdmf_out.write('<Attribute Name="%s" AttributeType="Scalar" Center="Node">\n' % group) if groups[group] == 3: xdmf_out.write('<Attribute Name="%s" AttributeType="Vector" Center="Node">\n' % group) xdmf_out.write('<DataItem Format="HDF" Dimensions="%d %d">%s.h5:/%s/%d</DataItem>\n' % (npts,groups[group],fname,group,i)) xdmf_out.write('</Attribute>\n') xdmf_out.write('</Grid>\n') xdmf_out.write('</Grid>\n</Domain>\n</Xdmf>') xdmf_out.close() fout.close()
def main(filename): print("Loading", filename) reader = vtk.vtkUnstructuredGridReader() reader.SetFileName(filename) edges = vtk.vtkExtractEdges() edges.SetInputConnection(reader.GetOutputPort()) tubes = vtk.vtkTubeFilter() tubes.SetInputConnection(edges.GetOutputPort()) tubes.SetRadius(0.0625) tubes.SetVaryRadiusToVaryRadiusOff() tubes.SetNumberOfSides(32) tubesMapper = vtk.vtkPolyDataMapper() tubesMapper.SetInputConnection(tubes.GetOutputPort()) tubesMapper.SetScalarRange(0.0, 26.0) tubesActor = vtk.vtkActor() tubesActor.SetMapper(tubesMapper) gradients = vtk.vtkGradientFilter() gradients.SetInputConnection(reader.GetOutputPort()) vectors = vtk.vtkAssignAttribute() vectors.SetInputConnection(gradients.GetOutputPort()) vectors.Assign("Gradients", vtk.vtkDataSetAttributes.VECTORS, \ vtk.vtkAssignAttribute.POINT_DATA) arrow = vtk.vtkArrowSource() glyphs = vtk.vtkGlyph3D() glyphs.SetInputConnection(0, vectors.GetOutputPort()) glyphs.SetInputConnection(1, arrow.GetOutputPort()) glyphs.ScalingOn() glyphs.SetScaleModeToScaleByVector() glyphs.SetScaleFactor(0.25) glyphs.OrientOn() glyphs.ClampingOff() glyphs.SetVectorModeToUseVector() glyphs.SetIndexModeToOff() glyphMapper = vtk.vtkPolyDataMapper() glyphMapper.SetInputConnection(glyphs.GetOutputPort()) glyphMapper.ScalarVisibilityOff() glyphActor = vtk.vtkActor() glyphActor.SetMapper(glyphMapper) renderer = vtk.vtkRenderer() renderer.AddActor(tubesActor) renderer.AddActor(glyphActor) renderer.SetBackground(0.328125, 0.347656, 0.425781) renwin = vtk.vtkRenderWindow() renwin.AddRenderer(renderer) renwin.SetSize(350, 500) renderer.ResetCamera() camera = renderer.GetActiveCamera() camera.Elevation(-80.0) camera.OrthogonalizeViewUp() camera.Azimuth(135.0) iren = vtk.vtkRenderWindowInteractor() iren.SetRenderWindow(renwin) iren.Initialize() iren.Start() return 1
def main(argv): #Just get something working for testing... try: opts, args = getopt.getopt(argv, "hi:", ["ifile="]) except getopt.GetoptError as err: print 'tviewer.py -i <inputfile.vtk>' print(str(err)) for opt, arg in opts: if opt == '-h': print 'tviewer.py -i <inputfile.vtk>' sys.exit() elif opt in ("-i", "--ifile"): inputfile = arg print("Going to load and view ", inputfile) #Read data reader = vtk.vtkXMLImageDataReader() reader.SetFileName(inputfile) reader.Update() #lut = vtk.vtkLookupTable() #lut.SetNumberOfColors(65535) #lut.SetHueRange(0.0, 2.667) #lut.SetVectorMode(vtk.vtkScalarsToColors.MAGNITUDE) #lut.Build() #Setup offscreen rendering graphics_factory = vtk.vtkGraphicsFactory() graphics_factory.SetOffScreenOnlyMode(1) graphics_factory.SetUseMesaClasses(1) #imaging_factory = vtk.vtkImagingFactory() #imaging_factory.SetUseMesaClasses(1) #Get image from reader image = reader.GetOutput() #image.SetSpacing(1,1,1) #image.GetPointData().SetScalars(image.GetPointData().GetVectors()) #Compute Q Criterion for texture mapping vorticity = vtk.vtkGradientFilter() vorticity.SetInputData(image) vorticity.SetInputScalars(image.FIELD_ASSOCIATION_POINTS, "Velocity") vorticity.ComputeQCriterionOn() #vorticity.SetComputeGradient(0) vorticity.Update() #Generate contour for comparison c = vtk.vtkContourFilter() #c.SetValue(0,1128) c.SetValue(0, 450) image.GetPointData().SetScalars( vorticity.GetOutput().GetPointData().GetVectors("Q-criterion")) c.SetInputData(image) c.Update() contour = c.GetOutput() #contour.GetCellData().SetScalars(image.GetPointData().GetVectors("Velocity")) normals = vtk.vtkPolyDataNormals() normals.SetInputData(contour) normals.SetFeatureAngle(45) #? normals.Update() #print normals.GetOutput() mapper = vtk.vtkPolyDataMapper() mapper.SetInputData(normals.GetOutput()) mapper.ScalarVisibilityOn() mapper.SetScalarRange(-1, 1) mapper.SetScalarModeToUsePointFieldData() mapper.ColorByArrayComponent("Velocity", 0) #print image #print contour #mapper.SelectColorArray("Q-criterion") #mapper.SetLookupTable(lut) #print mapper actor = vtk.vtkActor() actor.SetMapper(mapper) ren = vtk.vtkRenderer() ren.AddActor(actor) ren.SetBackground(1, 1, 1) ren.ResetCamera() renWin = vtk.vtkRenderWindow() renWin.SetSize(400, 400) renWin.SetOffScreenRendering(1) renWin.AddRenderer(ren) renWin.Render() windowToImageFilter = vtk.vtkWindowToImageFilter() windowToImageFilter.SetInput(renWin) windowToImageFilter.Update() w = vtk.vtkPNGWriter() w.SetFileName("cube.png") w.SetInputConnection(windowToImageFilter.GetOutputPort()) w.Write()
windowToImageFilter.Update() writer = vtk.vtkPNGWriter() writer.SetWriteToMemory(1) writer.SetInputConnection(windowToImageFilter.GetOutputPort()) writer.Write() data = str(buffer(writer.GetResult())) return Image(data) reader = vtk.vtkXMLImageDataReader() reader.SetFileName("/home/stephenh/turbulence/sc/datasets/vtkscripts/iso64.vti") reader.Update() image = reader.GetOutput() vorticity = vtk.vtkGradientFilter() vorticity.SetInputData(image) vorticity.SetInputScalars(image.FIELD_ASSOCIATION_POINTS,"Velocity") vorticity.ComputeQCriterionOn() vorticity.Update() #Generate contour for comparison c = vtk.vtkContourFilter() c.SetValue(0,1128) image.GetPointData().SetScalars(vorticity.GetOutput().GetPointData().GetVectors("Q-criterion")) c.SetInputData(image) c.Update() contour = c.GetOutput() normals = vtk.vtkPolyDataNormals() normals.SetInputData(contour) normals.SetFeatureAngle(35) #?
def getthresh(args): inputfile = args[0] outputfile = args[1] sx = args[2] ex = args[3] sy = args[4] ey = args[5] sz = args[6] ez = args[7] dataset = args[8] comptype = args[9] print ("Loading file, %s" % inputfile) #Determine if file is h5 or numpy if (inputfile.split(".")[1] == "npy"): rs = timeit.default_timer() vel = np.load(inputfile) re = timeit.default_timer() else: #read in file rs = timeit.default_timer() data_file = h5py.File(inputfile, 'r') vel = np.array(data_file[dataset]) data_file.close() re = timeit.default_timer() cs = timeit.default_timer() #convert numpy array to vtk vtkdata = numpy_support.numpy_to_vtk(vel.flat, deep=True, array_type=vtk.VTK_FLOAT) vtkdata.SetNumberOfComponents(3) vtkdata.SetName("Velocity") image = vtk.vtkImageData() image.GetPointData().SetVectors(vtkdata) image.SetExtent(sx,ex,sy,ey,sz,ez) #NOTE: Hardcoding Spacing image.SetSpacing(.006135923, .006135923, .006135923) print ("Doing computation") ce = timeit.default_timer() vs = timeit.default_timer() if (comptype == "v"): vorticity = vtk.vtkCellDerivatives() vorticity.SetVectorModeToComputeVorticity() vorticity.SetTensorModeToPassTensors() vorticity.SetInputData(image) vorticity.Update() elif (comptype == "q"): vorticity = vtk.vtkGradientFilter() vorticity.SetInputData(image) vorticity.SetInputScalars(image.FIELD_ASSOCIATION_POINTS,"Velocity") vorticity.ComputeQCriterionOn() vorticity.SetComputeGradient(0) vorticity.Update() ve = timeit.default_timer() #Generate contour for comparison c = vtk.vtkContourFilter() c.SetValue(0,1128) if (comptype == "q"): image.GetPointData().SetScalars(vorticity.GetOutput().GetPointData().GetVectors("Q-criterion")) else: image.GetPointData().SetScalars(vorticity.GetOutput().GetPointData().GetVectors()) c.SetInputData(image) c.Update() w = vtk.vtkXMLPolyDataWriter() w.SetEncodeAppendedData(0) #turn of base 64 encoding for fast write w.SetFileName("contour.vtp") w.SetInputData(c.GetOutput()) ws = timeit.default_timer() w.Write() ms = timeit.default_timer() if (comptype == "v"): mag = vtk.vtkImageMagnitude() cp = vtk.vtkCellDataToPointData() cp.SetInputData(vorticity.GetOutput()) cp.Update() image.GetPointData().SetScalars(cp.GetOutput().GetPointData().GetVectors()) mag.SetInputData(image) mag.Update() m = mag.GetOutput() m.GetPointData().RemoveArray("Velocity") else: image.GetPointData().SetScalars(vorticity.GetOutput().GetPointData().GetVectors("Q-criterion")) me = timeit.default_timer() print ("Thresholding.") ts = timeit.default_timer() t = vtk.vtkImageThreshold() #t = vtk.vtkThreshold() #sparse representation if (comptype == "q"): t.SetInputData(image) t.ThresholdByUpper(783.3) #.25*67.17^2 = 1127 #t.SetInputArrayToProcess(0,0,0, vorticity.GetOutput().FIELD_ASSOCIATION_POINTS, "Q-criterion") print("q criterion") else: t.SetInputData(m) t.SetInputArrayToProcess(0,0,0, mag.GetOutput().FIELD_ASSOCIATION_POINTS, "Magnitude") t.ThresholdByUpper(44.79) #44.79) #Set values in range to 1 and values out of range to 0 t.SetInValue(1) t.SetOutValue(0) #t.ReplaceInOn() #t.ReplaceOutOn() print("Update thresh") t.Update() #wt = vtk.vtkXMLImageDataWriter() #wt.SetInputData(t.GetOutput()) #wt.SetFileName("thresh.vti") #wt.Write() d = vtk.vtkImageDilateErode3D() d.SetInputData(t.GetOutput()) d.SetKernelSize(3,3,3) d.SetDilateValue(1) d.SetErodeValue(0) print ("Update dilate") d.Update() iis = vtk.vtkImageToImageStencil() iis.SetInputData(d.GetOutput()) iis.ThresholdByUpper(1) stencil = vtk.vtkImageStencil() stencil.SetInputConnection(2, iis.GetOutputPort()) 
stencil.SetBackgroundValue(0) #image.GetPointData().RemoveArray("Vorticity") #Set scalars to velocity so it can be cut by the stencil image.GetPointData().SetScalars(image.GetPointData().GetVectors()) #if (comptype == "q"): #Use this to get just q-criterion data instead of velocity data. Do we need both? # image.GetPointData().SetScalars(vorticity.GetOutput().GetPointData().GetScalars("Q-criterion")) stencil.SetInputData(image) print ("Update stencil") stencil.Update() te = timeit.default_timer() print("Setting up write") ws = timeit.default_timer() #Make velocity a vector again velarray = stencil.GetOutput().GetPointData().GetScalars() image.GetPointData().RemoveArray("Velocity") image.GetPointData().SetVectors(velarray) w = vtk.vtkXMLImageDataWriter() w.SetCompressorTypeToZLib() #w.SetCompressorTypeToNone() Need to figure out why this fails. w.SetEncodeAppendedData(0) #turn of base 64 encoding for fast write w.SetFileName(outputfile) w.SetInputData(image) if (0): w.SetCompressorTypeToZfp() w.GetCompressor().SetNx(ex-sx+1) w.GetCompressor().SetNy(ey-sy+1) w.GetCompressor().SetNz(ez-sz+1) w.GetCompressor().SetTolerance(1e-1) w.GetCompressor().SetNumComponents(3) w.Write() we = timeit.default_timer() print("Results:") print("Read time: %s" % str(re-rs)) print ("Convert to vtk: %s" % str(ce-cs)) if (comptype == "q"): print ("Q Computation: %s" % str(ve-vs)) print ("Q Magnitude: %s" % str(me-ms)) else: print ("Vorticity Computation: %s" % str(ve-vs)) print ("Vorticity Magnitude: %s" % str(me-ms)) print ("Threshold: %s" % str(te-ts)) print ("Write %s" % str(we-ws)) print ("Total time: %s" % str(we-rs))
def vortmesh(args): p = args[0] cubenum = args[1] print("Cube", cubenum) #Check for additonal parameters if (p["param1"] != ''): comptype = p["param1"] else: comptype = "q" #Default to q criterion if (p["param2"] != ''): thresh = float(p["param2"]) else: thresh = 783.3 #Default for q threshold on isotropic data inputfile = p["inputfile"] +str(cubenum) + ".npy" #Used so we can set either npy input or h5 input outputfile = p["outputfile"] + str(cubenum) + ".vtp" #always VTK Image Data for this. sx = p["sx"] sy = p["sy"] sz = p["sz"] ex = p["ex"] ey = p["ey"] ez = p["ez"] print ("Loading file, %s" % inputfile) #Determine if file is h5 or numpy rs = timeit.default_timer() #In case file isn't here, catch this. try: vel = np.load(inputfile) except: p["message"] = "Failed to find file" return p re = timeit.default_timer() #convert numpy array to vtk cs = timeit.default_timer() #convert numpy array to vtk vtkdata = numpy_support.numpy_to_vtk(vel.flat, deep=True, array_type=vtk.VTK_FLOAT) vtkdata.SetNumberOfComponents(3) vtkdata.SetName("Velocity") image = vtk.vtkImageData() image.GetPointData().SetVectors(vtkdata) image.SetExtent(sx,ex,sy,ey,sz,ez) #NOTE: Hardcoding Spacing TODO: Add to parameters image.SetSpacing(.006135923, .006135923, .006135923) ce = timeit.default_timer() vs = timeit.default_timer() if (comptype == "v"): vorticity = vtk.vtkCellDerivatives() vorticity.SetVectorModeToComputeVorticity() vorticity.SetTensorModeToPassTensors() vorticity.SetInputData(image) vorticity.Update() elif (comptype == "q"): vorticity = vtk.vtkGradientFilter() vorticity.SetInputData(image) vorticity.SetInputScalars(image.FIELD_ASSOCIATION_POINTS,"Velocity") vorticity.ComputeQCriterionOn() vorticity.SetComputeGradient(0) vorticity.Update() ve = timeit.default_timer() #Generate contour for comparison c = vtk.vtkContourFilter() c.SetValue(0,thresh) if (comptype == "q"): image.GetPointData().SetScalars(vorticity.GetOutput().GetPointData().GetVectors("Q-criterion")) else: image.GetPointData().SetScalars(vorticity.GetOutput().GetPointData().GetVectors()) c.SetInputData(image) c.Update() w = vtk.vtkXMLPolyDataWriter() w.SetEncodeAppendedData(0) #turn of base 64 encoding for fast write w.SetFileName(outputfile) w.SetInputData(c.GetOutput()) ws = timeit.default_timer() result = w.Write() we = timeit.default_timer() print("Read time: %s" % str(re-rs)) print ("Convert to vtk: %s" % str(ce-cs)) if (comptype == "q"): print ("Q Computation: %s" % str(ve-vs)) else: print ("Vorticity Computation: %s" % str(ve-vs)) print ("Write %s" % str(we-ws)) print ("Total time: %s" % str(we-rs)) if (result): p["message"] = "Success" p["computetime"] = str(we-rs) return p #return the packet
def vortmesh(args): p = args[0] cubenum = args[1] print("Cube", cubenum) #Check for additonal parameters if (p["param1"] != ''): comptype = p["param1"] else: comptype = "q" #Default to q criterion if (p["param2"] != ''): thresh = float(p["param2"]) else: thresh = 783.3 #Default for q threshold on isotropic data inputfile = p["inputfile"] + str( cubenum) + ".npy" #Used so we can set either npy input or h5 input outputfile = p["outputfile"] + str( cubenum) + ".vtp" #always VTK Image Data for this. sx = p["sx"] sy = p["sy"] sz = p["sz"] ex = p["ex"] ey = p["ey"] ez = p["ez"] print("Loading file, %s" % inputfile) #Determine if file is h5 or numpy rs = timeit.default_timer() #In case file isn't here, catch this. try: vel = np.load(inputfile) except: p["message"] = "Failed to find file" return p re = timeit.default_timer() #convert numpy array to vtk cs = timeit.default_timer() #convert numpy array to vtk vtkdata = numpy_support.numpy_to_vtk(vel.flat, deep=True, array_type=vtk.VTK_FLOAT) vtkdata.SetNumberOfComponents(3) vtkdata.SetName("Velocity") image = vtk.vtkImageData() image.GetPointData().SetVectors(vtkdata) image.SetExtent(sx, ex, sy, ey, sz, ez) #NOTE: Hardcoding Spacing TODO: Add to parameters image.SetSpacing(.006135923, .006135923, .006135923) ce = timeit.default_timer() vs = timeit.default_timer() if (comptype == "v"): vorticity = vtk.vtkCellDerivatives() vorticity.SetVectorModeToComputeVorticity() vorticity.SetTensorModeToPassTensors() vorticity.SetInputData(image) vorticity.Update() elif (comptype == "q"): vorticity = vtk.vtkGradientFilter() vorticity.SetInputData(image) vorticity.SetInputScalars(image.FIELD_ASSOCIATION_POINTS, "Velocity") vorticity.ComputeQCriterionOn() vorticity.SetComputeGradient(0) vorticity.Update() ve = timeit.default_timer() #Generate contour for comparison c = vtk.vtkContourFilter() c.SetValue(0, thresh) if (comptype == "q"): image.GetPointData().SetScalars( vorticity.GetOutput().GetPointData().GetVectors("Q-criterion")) else: image.GetPointData().SetScalars( vorticity.GetOutput().GetPointData().GetVectors()) c.SetInputData(image) c.Update() w = vtk.vtkXMLPolyDataWriter() w.SetEncodeAppendedData(0) #turn of base 64 encoding for fast write w.SetFileName(outputfile) w.SetInputData(c.GetOutput()) ws = timeit.default_timer() result = w.Write() we = timeit.default_timer() print("Read time: %s" % str(re - rs)) print("Convert to vtk: %s" % str(ce - cs)) if (comptype == "q"): print("Q Computation: %s" % str(ve - vs)) else: print("Vorticity Computation: %s" % str(ve - vs)) print("Write %s" % str(we - ws)) print("Total time: %s" % str(we - rs)) if (result): p["message"] = "Success" p["computetime"] = str(we - rs) return p #return the packet
def getvtkimage(self, webargs, timestep): #Setup query DBSTRING = os.environ['db_connection_string'] conn = pyodbc.connect(DBSTRING, autocommit=True) cursor = conn.cursor() #url = "http://localhost:8000/cutout/getcutout/"+ token + "/" + dataset + "/" + datafield + "/" + ts + "," +te + "/" + xs + "," + xe +"/" + ys + "," + ye +"/" + zs + "," + ze w = webargs.split("/") ts = int(w[3].split(',')[0]) te = int(w[3].split(',')[1]) xs = int(w[4].split(',')[0]) xe = int(w[4].split(',')[1]) ys = int(w[5].split(',')[0]) ye = int(w[5].split(',')[1]) zs = int(w[6].split(',')[0]) ze = int(w[6].split(',')[1]) extent = (xs, ys, zs, xe, ye, ze) overlap = 2 #Used only on contours--vorticity and Q-criterion #Look for step parameters if (len(w) > 9): step = True; s = w[8].split(",") tstep = s[0] xstep = float(s[1]) ystep = float(s[2]) zstep = float(s[3]) filterwidth = w[9] else: step = False; xstep = 1 ystep = 1 zstep = 1 filterwidth = 1 cfieldlist = w[2].split(",") firstval = cfieldlist[0] maxrange = self.getmaxrange(w[1]) if ((firstval == 'vo') or (firstval == 'qc') or (firstval == 'cvo') or (firstval == 'qcc')): component = 'u' computation = firstval #We are doing a computation, so we need to know which one. #check to see if we have a threshold (only for contours) if (len(cfieldlist) > 1): threshold = float(cfieldlist[1]) else: threshold = .6 #New: We need an expanded cutout if contouring. Push the cutout out by 2 in all directions (unless at boundary). if ((firstval == 'cvo') or (firstval == 'qcc')): newextent = self.expandcutout(extent, maxrange[0], maxrange[1], maxrange[2], overlap) contour = True else: component = w[2] #There could be multiple components, so we will have to loop computation = '' #Split component into list and add them to the image #Check to see if we have a value for vorticity or q contour fieldlist = list(component) for field in fieldlist: print("Field = %s" % field) cursor.execute("{CALL turbdev.dbo.GetAnyCutout(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)}",w[1], field, timestep, extent[0], extent[1], extent[2], xstep, ystep, zstep, 1,1,extent[3], extent[4], extent[5],filterwidth,1) #If data spans across multiple servers, we get multiple sets, so concatenate them. row = cursor.fetchone() raw = row[0] part = 0 print ("First part size is %d" % len(row[0])) while(cursor.nextset()): row = cursor.fetchone() raw = raw + row[0] part = part +1 print ("added part %d" % part) print ("Part size is %d" % len(row[0])) print ("Raw size is %d" % len(raw)) data = np.frombuffer(raw, dtype=np.float32) conn.close() vtkdata = numpy_support.numpy_to_vtk(data, deep=True, array_type=vtk.VTK_FLOAT) components = self.numcomponents(field) vtkdata.SetNumberOfComponents(components) vtkdata.SetName(self.componentname(field)) image = vtk.vtkImageData() if (step): xes = int(extent[3])/int(xstep)-1 yes = int(extent[4])/int(ystep)-1 zes = int(extent[5])/int(zstep)-1 image.SetExtent(extent[0], extent[0]+extent[3], extent[1], extent[1]+extent[4], extent[2], extenet[2]+extenet[5]) print("Step extent=" +str(xes)) print("xs=" + str(xstep) + " ys = "+ str(ystep) +" zs = " + str(zstep)) else: image.SetExtent(extent[0], extent[0]+extent[3]-1, extent[1], extent[1]+extent[4]-1, extent[2], extent[2]+extent[5]-1) image.GetPointData().SetVectors(vtkdata) if (step): #Magnify to original size image.SetSpacing(xstep,ystep,zstep) #Check if we need a rectilinear grid, and set it up if so. 
if (w[1] == 'channel'): ygrid = self.getygrid() #print("Ygrid: ") #print (ygrid) rg = vtk.vtkRectilinearGrid() #Not sure about contouring channel yet, so we are going back to original variables at this point. rg.SetExtent(xs, xs+xe-1, ys, ys+ye-1, zs, zs+ze-1) rg.GetPointData().SetVectors(vtkdata) xg = np.arange(float(xs),float(xe)) zg = np.arange(float(zs),float(ze)) for x in xg: xg[x] = 8*3.141592654/2048*x for z in zg: zg[z] = 3*3.141592654/2048*z vtkxgrid=numpy_support.numpy_to_vtk(xg, deep=True, array_type=vtk.VTK_FLOAT) vtkzgrid=numpy_support.numpy_to_vtk(zg, deep=True, array_type=vtk.VTK_FLOAT) vtkygrid=numpy_support.numpy_to_vtk(ygrid, deep=True, array_type=vtk.VTK_FLOAT) rg.SetXCoordinates(vtkxgrid) rg.SetZCoordinates(vtkzgrid) rg.SetYCoordinates(vtkygrid) image = rg #we rewrite the image since we may be doing a #computation below #See if we are doing a computation if (computation == 'vo'): vorticity = vtk.vtkCellDerivatives() vorticity.SetVectorModeToComputeVorticity() vorticity.SetTensorModeToPassTensors() vorticity.SetInputData(image) print("Computing Vorticity") vorticity.Update() elif (computation == 'cvo'): vorticity = vtk.vtkCellDerivatives() vorticity.SetVectorModeToComputeVorticity() vorticity.SetTensorModeToPassTensors() vorticity.SetInputData(image) print("Computing Voricity") vorticity.Update() mag = vtk.vtkImageMagnitude() cp = vtk.vtkCellDataToPointData() cp.SetInputData(vorticity.GetOutput()) print("Computing magnitude") cp.Update() image.GetPointData().SetScalars(cp.GetOutput().GetPointData().GetVectors()) mag.SetInputData(image) mag.Update() c = vtk.vtkContourFilter() c.SetValue(0,threshold) c.SetInputData(mag.GetOutput()) print("Computing Contour") c.Update() #Now we need to clip out the overlap box = vtk.vtkBox() #set box to requested size box.SetBounds(xs, xs+xe-1, ys, ys+ye-1, zs,zs+ze-1) clip = vtk.vtkClipPolyData() clip.SetClipFunction(box) clip.GenerateClippedOutputOn() clip.SetInputData(c.GetOutput()) clip.InsideOutOn() clip.Update() cropdata = clip.GetOutput() return cropdata elif (computation == 'qcc'): q = vtk.vtkGradientFilter() q.SetInputData(image) q.SetInputScalars(image.FIELD_ASSOCIATION_POINTS,"Velocity") q.ComputeQCriterionOn() q.Update() #newimage = vtk.vtkImageData() image.GetPointData().SetScalars(q.GetOutput().GetPointData().GetVectors("Q-criterion")) mag = vtk.vtkImageMagnitude() mag.SetInputData(image) mag.Update() c = vtk.vtkContourFilter() c.SetValue(0,threshold) c.SetInputData(mag.GetOutput()) c.Update() #clip out the overlap here box = vtk.vtkBox() #set box to requested size box.SetBounds(xs, xs+xe-1, ys, ys+ye-1, zs,zs+ze-1) clip = vtk.vtkClipPolyData() clip.SetClipFunction(box) clip.GenerateClippedOutputOn() clip.SetInputData(c.GetOutput()) clip.InsideOutOn() clip.Update() cropdata = clip.GetOutput() return cropdata else: return image
def main(argv): try: opts, args = getopt.getopt(argv,"hi:o:x:a:y:b:z:c:d:u:", ["ifile=","ofile=","sx=","ex=","sy=","ez=","dataset=","comp="]) except getopt.GetoptError as err: print 'getmultithresh.py -i <inputfile.h5> -o <outputfile.vti> -sx -ex -sy -ey -sz -ez -dataset -comptype' print (str(err)) for opt, arg in opts: if opt == '-h': print 'getmultithresh.py -i <inputfile.h5> -o <outputfile.vti> -sx -ex -sy -ey -sz -ez -dataset' sys.exit() elif opt in ("-i", "--ifile"): inputfile = arg elif opt in ("-o", "--ofile"): outputfile = arg elif opt in ("-x", "--sx"): sx = int(arg) elif opt in ("-a", "--ex"): ex = int(arg) elif opt in ("-y", "--sy"): sy = int(arg) elif opt in ("-b", "--ey"): ey = int(arg) elif opt in ("-z", "--sz"): sz = int(arg) elif opt in ("-c", "--ez"): ez = int(arg) elif opt in ("-d", "--dataset"): dataset = str(arg) elif opt in ("-u", "--du"): comptype = str(arg) print ("Loading file, %s" % inputfile) #Determine if file is h5 or numpy if (inputfile.split(".")[1] == "npy"): rs = timeit.default_timer() vel = np.load(inputfile) re = timeit.default_timer() else: #read in file rs = timeit.default_timer() data_file = h5py.File(inputfile, 'r') vel = np.array(data_file[dataset]) data_file.close() re = timeit.default_timer() cs = timeit.default_timer() #convert numpy array to vtk vtkdata = numpy_support.numpy_to_vtk(vel.flat, deep=True, array_type=vtk.VTK_FLOAT) vtkdata.SetNumberOfComponents(3) vtkdata.SetName("Velocity") image = vtk.vtkImageData() image.GetPointData().SetVectors(vtkdata) image.SetExtent(sx,ex,sy,ey,sz,ez) #NOTE: Hardcoding Spacing image.SetSpacing(.006135923, .006135923, .006135923) print ("Doing computation") ce = timeit.default_timer() vs = timeit.default_timer() ve = timeit.default_timer() print ("Generating contour") ms = timeit.default_timer() mag = vtk.vtkImageMagnitude() for x in range (0,1): start = timeit.default_timer() threshold = (22.39 * (2.5)) if (comptype == "q"): threshold= (783.3) vort = vtk.vtkGradientFilter() vort.SetInputData(image) vort.SetInputScalars(image.FIELD_ASSOCIATION_POINTS,"Velocity") vort.ComputeQCriterionOn() vort.Update() image.GetPointData().SetScalars(vort.GetOutput().GetPointData().GetVectors("Q-criterion")) else: v = vtk.vtkCellDerivatives() v.SetVectorModeToComputeVorticity() v.SetTensorModeToPassTensors() v.SetInputData(image) v.Update() vort = vtk.vtkImageMagnitude() cp = vtk.vtkCellDataToPointData() cp.SetInputData(v.GetOutput()) cp.Update() image.GetPointData().SetScalars(cp.GetOutput().GetPointData().GetVectors()) vort.SetInputData(image) vort.Update() #ni = vtk.vtkImageData() #ni.SetSpacing(.006135923, .006135923, .006135923) #ni.SetExtent(sx,ex,sy,ey,sz,ez) #ni.GetPointData().SetScalars(q.GetOutput().GetPointData().GetVectors("Q-criterion")) mend = timeit.default_timer() me = mend comptime = mend-start print("Magnitude Computation time: " + str(comptime) + "s") c = vtk.vtkContourFilter() c.SetValue(0,threshold) if (comptype == "q"): c.SetInputData(image) else: c.SetInputData(vort.GetOutput()) print("Computing Contour with threshold", threshold) c.Update() w = vtk.vtkXMLPolyDataWriter() w.SetEncodeAppendedData(0) #turn of base 64 encoding for fast write w.SetFileName(outputfile + str(x) + ".vtp ") w.SetInputData(c.GetOutput()) ws = timeit.default_timer() w.Write() we = timeit.default_timer() print("Results:") print("Read time: %s" % str(re-rs)) print ("Convert to vtk: %s" % str(ce-cs)) if (comptype == "q"): print ("Q Computation: %s" % str(ve-vs)) print ("Q Magnitude: %s" % str(me-ms)) else: print ("Vorticity 
Computation: %s" % str(ve-vs)) print ("Vorticity Magnitude: %s" % str(me-ms)) #print ("Threshold: %s" % str(te-ts)) print ("Write %s" % str(we-ws)) print ("Total time: %s" % str(we-rs))
def getthresh(args): inputfile = args[0] outputfile = args[1] sx = args[2] ex = args[3] sy = args[4] ey = args[5] sz = args[6] ez = args[7] dataset = args[8] comptype = args[9] print("Loading file, %s" % inputfile) #Determine if file is h5 or numpy if (inputfile.split(".")[1] == "npy"): rs = timeit.default_timer() vel = np.load(inputfile) re = timeit.default_timer() else: #read in file rs = timeit.default_timer() data_file = h5py.File(inputfile, 'r') vel = np.array(data_file[dataset]) data_file.close() re = timeit.default_timer() cs = timeit.default_timer() #convert numpy array to vtk vtkdata = numpy_support.numpy_to_vtk(vel.flat, deep=True, array_type=vtk.VTK_FLOAT) vtkdata.SetNumberOfComponents(3) vtkdata.SetName("Velocity") image = vtk.vtkImageData() image.GetPointData().SetVectors(vtkdata) image.SetExtent(sx, ex, sy, ey, sz, ez) #NOTE: Hardcoding Spacing image.SetSpacing(.006135923, .006135923, .006135923) print("Doing computation") ce = timeit.default_timer() vs = timeit.default_timer() if (comptype == "v"): vorticity = vtk.vtkCellDerivatives() vorticity.SetVectorModeToComputeVorticity() vorticity.SetTensorModeToPassTensors() vorticity.SetInputData(image) vorticity.Update() elif (comptype == "q"): vorticity = vtk.vtkGradientFilter() vorticity.SetInputData(image) vorticity.SetInputScalars(image.FIELD_ASSOCIATION_POINTS, "Velocity") vorticity.ComputeQCriterionOn() vorticity.SetComputeGradient(0) vorticity.Update() ve = timeit.default_timer() #Generate contour for comparison c = vtk.vtkContourFilter() c.SetValue(0, 1128) if (comptype == "q"): image.GetPointData().SetScalars( vorticity.GetOutput().GetPointData().GetVectors("Q-criterion")) else: image.GetPointData().SetScalars( vorticity.GetOutput().GetPointData().GetVectors()) c.SetInputData(image) c.Update() w = vtk.vtkXMLPolyDataWriter() w.SetEncodeAppendedData(0) #turn of base 64 encoding for fast write w.SetFileName("contour.vtp") w.SetInputData(c.GetOutput()) ws = timeit.default_timer() w.Write() ms = timeit.default_timer() if (comptype == "v"): mag = vtk.vtkImageMagnitude() cp = vtk.vtkCellDataToPointData() cp.SetInputData(vorticity.GetOutput()) cp.Update() image.GetPointData().SetScalars( cp.GetOutput().GetPointData().GetVectors()) mag.SetInputData(image) mag.Update() m = mag.GetOutput() m.GetPointData().RemoveArray("Velocity") else: image.GetPointData().SetScalars( vorticity.GetOutput().GetPointData().GetVectors("Q-criterion")) me = timeit.default_timer() print("Thresholding.") ts = timeit.default_timer() t = vtk.vtkImageThreshold() #t = vtk.vtkThreshold() #sparse representation if (comptype == "q"): t.SetInputData(image) t.ThresholdByUpper(783.3) #.25*67.17^2 = 1127 #t.SetInputArrayToProcess(0,0,0, vorticity.GetOutput().FIELD_ASSOCIATION_POINTS, "Q-criterion") print("q criterion") else: t.SetInputData(m) t.SetInputArrayToProcess(0, 0, 0, mag.GetOutput().FIELD_ASSOCIATION_POINTS, "Magnitude") t.ThresholdByUpper(44.79) #44.79) #Set values in range to 1 and values out of range to 0 t.SetInValue(1) t.SetOutValue(0) #t.ReplaceInOn() #t.ReplaceOutOn() print("Update thresh") t.Update() #wt = vtk.vtkXMLImageDataWriter() #wt.SetInputData(t.GetOutput()) #wt.SetFileName("thresh.vti") #wt.Write() d = vtk.vtkImageDilateErode3D() d.SetInputData(t.GetOutput()) d.SetKernelSize(3, 3, 3) d.SetDilateValue(1) d.SetErodeValue(0) print("Update dilate") d.Update() iis = vtk.vtkImageToImageStencil() iis.SetInputData(d.GetOutput()) iis.ThresholdByUpper(1) stencil = vtk.vtkImageStencil() stencil.SetInputConnection(2, iis.GetOutputPort()) 
stencil.SetBackgroundValue(0) #image.GetPointData().RemoveArray("Vorticity") #Set scalars to velocity so it can be cut by the stencil image.GetPointData().SetScalars(image.GetPointData().GetVectors()) #if (comptype == "q"): #Use this to get just q-criterion data instead of velocity data. Do we need both? # image.GetPointData().SetScalars(vorticity.GetOutput().GetPointData().GetScalars("Q-criterion")) stencil.SetInputData(image) print("Update stencil") stencil.Update() te = timeit.default_timer() print("Setting up write") ws = timeit.default_timer() #Make velocity a vector again velarray = stencil.GetOutput().GetPointData().GetScalars() image.GetPointData().RemoveArray("Velocity") image.GetPointData().SetVectors(velarray) w = vtk.vtkXMLImageDataWriter() w.SetCompressorTypeToZLib() #w.SetCompressorTypeToNone() Need to figure out why this fails. w.SetEncodeAppendedData(0) #turn of base 64 encoding for fast write w.SetFileName(outputfile) w.SetInputData(image) if (0): w.SetCompressorTypeToZfp() w.GetCompressor().SetNx(ex - sx + 1) w.GetCompressor().SetNy(ey - sy + 1) w.GetCompressor().SetNz(ez - sz + 1) w.GetCompressor().SetTolerance(1e-1) w.GetCompressor().SetNumComponents(3) w.Write() we = timeit.default_timer() print("Results:") print("Read time: %s" % str(re - rs)) print("Convert to vtk: %s" % str(ce - cs)) if (comptype == "q"): print("Q Computation: %s" % str(ve - vs)) print("Q Magnitude: %s" % str(me - ms)) else: print("Vorticity Computation: %s" % str(ve - vs)) print("Vorticity Magnitude: %s" % str(me - ms)) print("Threshold: %s" % str(te - ts)) print("Write %s" % str(we - ws)) print("Total time: %s" % str(we - rs))
print("Reading another mesh") rd2 = vtk.vtkDataSetReader() rd2.SetFileName(mesh_temp_longitudinal) rd2.Update() print("Reading yet another mesh") rd3 = vtk.vtkDataSetReader() rd3.SetFileName(mesh_complete) rd3.Update() mesh_full = rd3.GetOutput() print("Gradient filter") grad_filter = vtk.vtkGradientFilter() grad_filter.SetInputData(rd2.GetOutput()) grad_filter.SetInputArrayToProcess(0,0,0,vtk.vtkDataObject.FIELD_ASSOCIATION_POINTS,'temperature') grad_filter.Update() grad_mesh = grad_filter.GetOutput() grad = grad_mesh.GetPointData().GetArray('Gradients') fibers_full = vtk.vtkFloatArray() fibers_full.SetName('Fibers') fibers_full.SetNumberOfComponents(3) fibers_full.SetNumberOfTuples(mesh_full.GetNumberOfPoints()) fibers_mask = numpy.zeros(mesh_full.GetNumberOfPoints(), dtype=numpy.int8) fibers_mask.fill(0)
def vortvelvolume(args): #inputfile, outputfile, sx, ex, sy, ey, sz, ez, dataset): p = args[0] cubenum = args[1] print("Cube", cubenum) #Check for additonal parameters if (p["param1"] != ''): comptype = p["param1"] else: comptype = "q" #Default to q criterion if (p["param2"] != ''): thresh = float(p["param2"]) else: thresh = 783.3 #Default for q threshold on isotropic data #We use 0 for structured grid (vti) and 1 for unstructured grid (vtu) if (p["param3"] != ''): grid = 0 else: grid = 1 if (p["param4"] != ''): kernelsize = int(p["param4"]) else: kernelsize = 3 inputfile = p["inputfile"] +str(cubenum) + ".npy" outputfile = p["outputfile"] + str(cubenum) + ".vti" #always VTK Image Data for this. sx = p["sx"] sy = p["sy"] sz = p["sz"] ex = p["ex"] ey = p["ey"] ez = p["ez"] print ("Loading file, %s" % inputfile) #Determine if file is h5 or numpy rs = timeit.default_timer() vel = np.load(inputfile) re = timeit.default_timer() #convert numpy array to vtk cs = timeit.default_timer() #convert numpy array to vtk vtkdata = numpy_support.numpy_to_vtk(vel.flat, deep=True, array_type=vtk.VTK_FLOAT) vtkdata.SetNumberOfComponents(3) vtkdata.SetName("Velocity") image = vtk.vtkImageData() image.GetPointData().SetVectors(vtkdata) image.SetExtent(sx,ex,sy,ey,sz,ez) #NOTE: Hardcoding Spacing image.SetSpacing(.006135923, .006135923, .006135923) ce = timeit.default_timer() vs = timeit.default_timer() if (comptype == "v"): vorticity = vtk.vtkCellDerivatives() vorticity.SetVectorModeToComputeVorticity() vorticity.SetTensorModeToPassTensors() vorticity.SetInputData(image) vorticity.Update() elif (comptype == "q"): vorticity = vtk.vtkGradientFilter() vorticity.SetInputData(image) vorticity.SetInputScalars(image.FIELD_ASSOCIATION_POINTS,"Velocity") vorticity.ComputeQCriterionOn() vorticity.SetComputeGradient(0) vorticity.Update() ve = timeit.default_timer() ms = timeit.default_timer() if (comptype == "v"): mag = vtk.vtkImageMagnitude() cp = vtk.vtkCellDataToPointData() cp.SetInputData(vorticity.GetOutput()) cp.Update() image.GetPointData().SetScalars(cp.GetOutput().GetPointData().GetVectors()) mag.SetInputData(image) mag.Update() m = mag.GetOutput() m.GetPointData().RemoveArray("Velocity") else: image.GetPointData().SetScalars(vorticity.GetOutput().GetPointData().GetVectors("Q-criterion")) me = timeit.default_timer() print ("Thresholding.") ts = timeit.default_timer() t = vtk.vtkImageThreshold() #t = vtk.vtkThreshold() #sparse representation if (comptype == "q"): t.SetInputData(image) t.ThresholdByUpper(thresh) #.25*67.17^2 = 1127 #t.SetInputArrayToProcess(0,0,0, vorticity.GetOutput().FIELD_ASSOCIATION_POINTS, "Q-criterion") print("q criterion") else: t.SetInputData(m) t.SetInputArrayToProcess(0,0,0, mag.GetOutput().FIELD_ASSOCIATION_POINTS, "Magnitude") t.ThresholdByUpper(thresh) #44.79) #Set values in range to 1 and values out of range to 0 t.SetInValue(1) t.SetOutValue(0) #t.ReplaceInOn() #t.ReplaceOutOn() print("Update thresh") t.Update() #wt = vtk.vtkXMLImageDataWriter() #wt.SetInputData(t.GetOutput()) #wt.SetFileName("thresh.vti") #wt.Write() d = vtk.vtkImageDilateErode3D() d.SetInputData(t.GetOutput()) d.SetKernelSize(kernelsize,kernelsize,kernelsize) d.SetDilateValue(1) d.SetErodeValue(0) print ("Update dilate") d.Update() iis = vtk.vtkImageToImageStencil() iis.SetInputData(d.GetOutput()) iis.ThresholdByUpper(1) stencil = vtk.vtkImageStencil() stencil.SetInputConnection(2, iis.GetOutputPort()) stencil.SetBackgroundValue(0) #image.GetPointData().RemoveArray("Vorticity") #Set scalars to velocity so it can be 
cut by the stencil image.GetPointData().SetScalars(image.GetPointData().GetVectors()) #if (comptype == "q"): #Use this to get just q-criterion data instead of velocity data. Do we need both? # image.GetPointData().SetScalars(vorticity.GetOutput().GetPointData().GetScalars("Q-criterion")) stencil.SetInputData(image) print ("Update stencil") stencil.Update() te = timeit.default_timer() print("Setting up write") ws = timeit.default_timer() #Make velocity a vector again velarray = stencil.GetOutput().GetPointData().GetScalars() image.GetPointData().RemoveArray("Velocity") image.GetPointData().SetVectors(velarray) if (grid == 0): w = vtk.vtkXMLImageDataWriter() else: w = vtk.vtkXMLUnstructuredGridWriter() w.SetCompressorTypeToZLib() #w.SetCompressorTypeToNone() Need to figure out why this fails. w.SetEncodeAppendedData(0) #turn of base 64 encoding for fast write w.SetFileName(outputfile) w.SetInputData(image) if (0): w.SetCompressorTypeToZfp() w.GetCompressor().SetNx(ex-sx+1) w.GetCompressor().SetNy(ey-sy+1) w.GetCompressor().SetNz(ez-sz+1) w.GetCompressor().SetTolerance(1e-2) w.GetCompressor().SetNumComponents(3) result = w.Write() result = 1 #don't write for benchmarking we = timeit.default_timer() print("Results:") print("Read time: %s" % str(re-rs)) print ("Convert to vtk: %s" % str(ce-cs)) if (comptype == "q"): print ("Q Computation: %s" % str(ve-vs)) print ("Q Magnitude: %s" % str(me-ms)) else: print ("Vorticity Computation: %s" % str(ve-vs)) print ("Vorticity Magnitude: %s" % str(me-ms)) print ("Threshold: %s" % str(te-ts)) print ("Write %s" % str(we-ws)) print ("Total time: %s" % str(we-rs)) if (result): p["message"] = "Success" p["computetime"] = str(we-rs) return p #return the packet
#grid = reader.GetOutput()
#wallshear = grid.GetCellData().GetArray("x_wall_shear")
#print(wallshear)

calc1 = vtk.vtkArrayCalculator()
# wall-shear-stress magnitude from its three components
calc1.SetFunction("sqrt(x_wall_shear^2+y_wall_shear^2+z_wall_shear^2)")
calc1.AddScalarVariable("x_wall_shear", "x_wall_shear", 0)
calc1.AddScalarVariable("y_wall_shear", "y_wall_shear", 0)
calc1.AddScalarVariable("z_wall_shear", "z_wall_shear", 0)
calc1.SetResultArrayName("WSS")
calc1.SetInputConnection(reader.GetOutputPort())
calc1.SetAttributeModeToUsePointData()
#calc1.SetAttributeModeToUseCellData()
calc1.SetResultArrayType(vtk.VTK_FLOAT)

x_WSS_grad = vtk.vtkGradientFilter()
x_WSS_grad.SetInputConnection(calc1.GetOutputPort())
x_WSS_grad.ComputeGradientOn()
x_WSS_grad.FasterApproximationOff()
x_WSS_grad.SetResultArrayName("x_WSS_grad")
x_WSS_grad.SetInputArrayToProcess(0, 0, 0,
                                  vtk.vtkDataObject.FIELD_ASSOCIATION_POINTS,
                                  "x_wall_shear")

y_WSS_grad = vtk.vtkGradientFilter()
y_WSS_grad.SetInputConnection(x_WSS_grad.GetOutputPort())
y_WSS_grad.ComputeGradientOn()
y_WSS_grad.FasterApproximationOff()
y_WSS_grad.SetResultArrayName("y_WSS_grad")
# gradient of the y wall-shear component
y_WSS_grad.SetInputArrayToProcess(0, 0, 0,
                                  vtk.vtkDataObject.FIELD_ASSOCIATION_POINTS,
                                  "y_wall_shear")
def post_proc_cfd(dir_path, vtu_input, cell_type="point", vtu_output_1="calc_test_node.vtu", vtu_output_2="calc_test_node_stats.vtu", N_peak=3): reader = vtk.vtkXMLUnstructuredGridReader() reader.SetFileName(os.path.join(dir_path, vtu_input)) reader.Update() N = reader.GetNumberOfTimeSteps() print(N) #N = test.GetNumberOfBlocks()ls #block = test.GetBlock(0) #for i in range(N): # print(i, test.GetMetaData(i).Get(vtk.vtkCompositeDataSet.NAME())) #grid = reader.GetOutput() #wallshear = grid.GetCellData().GetArray("x_wall_shear") #print(wallshear) calc1 = vtk.vtkArrayCalculator() calc1.SetFunction("sqrt(x_wall_shear^2+y_wall_shear^2+z_wall_shear^2)") calc1.AddScalarVariable("x_wall_shear", "x_wall_shear",0) calc1.AddScalarVariable("y_wall_shear", "y_wall_shear",0) calc1.AddScalarVariable("z_wall_shear", "z_wall_shear",0) calc1.SetResultArrayName("WSS") calc1.SetInputConnection(reader.GetOutputPort()) if(cell_type == "cell"): calc1.SetAttributeModeToUseCellData() vtk_process = vtk.vtkDataObject.FIELD_ASSOCIATION_CELLS vtk_data_type = vtk.vtkDataObject.CELL else: calc1.SetAttributeModeToUsePointData() vtk_process = vtk.vtkDataObject.FIELD_ASSOCIATION_POINTS vtk_data_type = vtk.vtkDataObject.POINT calc1.SetResultArrayType(vtk.VTK_DOUBLE) x_WSS_grad = vtk.vtkGradientFilter() x_WSS_grad.SetInputConnection(calc1.GetOutputPort()) x_WSS_grad.ComputeGradientOn() x_WSS_grad.FasterApproximationOff() x_WSS_grad.SetResultArrayName("x_WSS_grad") x_WSS_grad.SetInputArrayToProcess(0, 0, 0, vtk_process, "x_wall_shear") y_WSS_grad = vtk.vtkGradientFilter() y_WSS_grad.SetInputConnection(x_WSS_grad.GetOutputPort()) y_WSS_grad.ComputeGradientOn() y_WSS_grad.FasterApproximationOff() y_WSS_grad.SetResultArrayName("y_WSS_grad") x_WSS_grad.SetInputArrayToProcess(0, 0, 0, vtk_process, "y_wall_shear") z_WSS_grad = vtk.vtkGradientFilter() z_WSS_grad.SetInputConnection(y_WSS_grad.GetOutputPort()) z_WSS_grad.ComputeGradientOn() z_WSS_grad.FasterApproximationOff() z_WSS_grad.SetResultArrayName("z_WSS_grad") z_WSS_grad.SetInputArrayToProcess(0, 0, 0, vtk_process, "z_wall_shear") calc2 = vtk.vtkArrayCalculator() calc2.AddScalarVariable("x_component", "x_WSS_grad",0) calc2.AddScalarVariable("y_component", "y_WSS_grad",1) calc2.AddScalarVariable("z_component", "z_WSS_grad",2) calc2.SetFunction("sqrt(x_component^2+y_component^2+z_component^2)") calc2.SetResultArrayName("WSSG") calc2.SetInputConnection(z_WSS_grad.GetOutputPort()) if(cell_type == "cell"): calc2.SetAttributeModeToUseCellData() else: calc2.SetAttributeModeToUsePointData() calc2.SetResultArrayType(vtk.VTK_DOUBLE) # initialize the output to include the peak values grid = vtk.vtkUnstructuredGrid() #N_peak = 3 reader.SetTimeStep(N_peak) print("loading {0}th timestep to copy data".format(N_peak)) calc2.Update() grid.DeepCopy(calc2.GetOutput()) #grid.SetNumberOfTimeSteps(1) #grid.SetTimeStep(0) #grid.Update() #sqrt((ddx({Wall shear-1}))**2 + (ddy({Wall shear-2}))**2 + (ddz({Wall shear-3}))**2)' def init_zero(in_array, sz_array): for i in range(sz_array): in_array.SetValue(i,0.0) def array_sum(out_array, in_array, sz_array): for i in range(sz_array): out_array.SetValue(i, out_array.GetValue(i) + in_array.GetValue(i)) def array_division(out_array, in_array, sz_array): for i in range(sz_array): out_array.SetValue(i, out_array.GetValue(i) / in_array.GetValue(i)) def array_avg(out_array, N): float_N = float(N) for i in range(N): out_array.SetValue(i, out_array.GetValue(i) / float_N) reader.SetTimeStep(0) print("loading {0}th timestep for averaging initialization".format(0)) 
reader.Update() calc2.Update() if(cell_type == "cell"): calc_data = calc2.GetOutput().GetCellData() grid_data = grid.GetCellData() n_sz = grid.GetNumberOfCells() else: calc_data = calc2.GetOutput().GetPointData() grid_data = grid.GetPointData() n_sz = grid.GetNumberOfPoints() TAWSS = vtk.vtkDoubleArray() TAWSS.DeepCopy(calc_data.GetArray("WSS")) TAWSS.SetName("TAWSS") TAWSSG = vtk.vtkDoubleArray() TAWSSG.DeepCopy(calc_data.GetArray("WSSG")) TAWSSG.SetName("TAWSSG") x_shear_avg = vtk.vtkDoubleArray() x_shear_avg.DeepCopy(calc_data.GetArray("x_wall_shear")) x_shear_avg.SetName("x_shear_avg") y_shear_avg = vtk.vtkDoubleArray() y_shear_avg.DeepCopy(calc_data.GetArray("y_wall_shear")) y_shear_avg.SetName("y_shear_avg") z_shear_avg = vtk.vtkDoubleArray() z_shear_avg.DeepCopy(calc_data.GetArray("z_wall_shear")) z_shear_avg.SetName("z_shear_avg") #TAWSSVector = vtk.vtkDoubleArray() #TAWSSVector.DeepCopy(calc_data.GetArray("z_wall_shear")) #TAWSSVector.SetName("TAWSSVector") #grid_data.AddArray(TAWSSVector) # def get_array_names(input): # N_point_array = input.GetOutput().GetPointData().GetNumberOfArrays() # N_WSS = 9999999 # for i in range(N_point_array): # name_WSS = input.GetOutput().GetPointData().GetArrayName(i) # if (name_WSS == "WSS"): # N_WSS = i # print(name_WSS) # # def array_sum(output, input_calc, N): # for i in range(N): # calc = output.GetValue(i) + input_calc.GetValue(i) # output.SetValue(i, calc) writer = vtk.vtkXMLUnstructuredGridWriter() #writer.SetFileName(os.path.join(out_dir,'test_outfile.vtu')) writer.SetFileName(os.path.join(dir_path, vtu_output_1)) writer.SetNumberOfTimeSteps(N) #writer.SetTimeStepRange(0,len(filelist)-1) writer.SetInputConnection(calc2.GetOutputPort()) writer.Start() #avg_map = {"TAWSS":"WSS", "TAWSSG": "WSSG", "x_shear_avg":"x_wall_shear", # "y_shear_avg":"y_wall_shear" , "z_shear_avg":"z_wall_shear"} for i in range(1,N): reader.SetTimeStep(i) print("Time step {0} for average calc".format(i)) reader.Update() calc2.Update() if(cell_type == "cell"): calc_data = calc2.GetOutput().GetCellData() else: calc_data = calc2.GetOutput().GetPointData() #get_array_names(calc2) array_sum(TAWSS, calc_data.GetArray("WSS"), n_sz) array_sum(TAWSSG, calc_data.GetArray("WSSG"), n_sz) array_sum(x_shear_avg, calc_data.GetArray("x_wall_shear"), n_sz) array_sum(y_shear_avg, calc_data.GetArray("y_wall_shear"), n_sz) array_sum(z_shear_avg, calc_data.GetArray("z_wall_shear"), n_sz) writer.WriteNextTime(reader.GetTimeStep()) writer.Stop() array_avg(TAWSS, N) array_avg(TAWSSG, N) array_avg(x_shear_avg, N) array_avg(y_shear_avg, N) array_avg(z_shear_avg, N) WSS_peak2mean = vtk.vtkDoubleArray() WSS_peak2mean.DeepCopy(grid_data.GetArray("WSS")) WSS_peak2mean.SetName("WSS_peak2mean") array_division(WSS_peak2mean, TAWSS, n_sz) WSSG_peak2mean = vtk.vtkDoubleArray() WSSG_peak2mean.DeepCopy(grid_data.GetArray("WSSG")) WSSG_peak2mean.SetName("WSSG_peak2mean") array_division(WSSG_peak2mean, TAWSSG, n_sz) grid_data.AddArray(TAWSS) grid_data.AddArray(TAWSSG) grid_data.AddArray(x_shear_avg) grid_data.AddArray(y_shear_avg) grid_data.AddArray(z_shear_avg) grid_data.AddArray(WSS_peak2mean) grid_data.AddArray(WSSG_peak2mean) print("got here") calc3 = vtk.vtkArrayCalculator() calc3.AddScalarVariable("x_shear_avg", "x_shear_avg",0) calc3.AddScalarVariable("y_shear_avg", "y_shear_avg",0) calc3.AddScalarVariable("z_shear_avg", "z_shear_avg",0) calc3.SetFunction("sqrt(x_shear_avg^2+y_shear_avg^2+z_shear_avg^2)") calc3.SetResultArrayName("TAWSSVector") calc3.SetInputData(grid) if(cell_type == "cell"): 
calc3.SetAttributeModeToUseCellData() else: calc3.SetAttributeModeToUsePointData() calc3.SetResultArrayType(vtk.VTK_DOUBLE) calc3.Update() calc4 = vtk.vtkArrayCalculator() calc4.AddScalarVariable("TAWSSVector", "TAWSSVector",0) calc4.AddScalarVariable("TAWSS", "TAWSS",0) calc4.SetFunction("0.5*(1.0-(TAWSSVector/(TAWSS)))") calc4.SetResultArrayName("OSI") calc4.SetInputConnection(calc3.GetOutputPort()) if(cell_type == "cell"): calc4.SetAttributeModeToUseCellData() else: calc4.SetAttributeModeToUsePointData() calc4.SetResultArrayType(vtk.VTK_DOUBLE) calc4.Update() pass_filt = vtk.vtkPassArrays() pass_filt.SetInputConnection(calc4.GetOutputPort()) pass_filt.AddArray(vtk_data_type, "WSS") pass_filt.AddArray(vtk_data_type, "WSSG") pass_filt.AddArray(vtk_data_type, "absolute_pressure") pass_filt.AddArray(vtk_data_type, "TAWSS") pass_filt.AddArray(vtk_data_type, "TAWSSG") pass_filt.AddArray(vtk_data_type, "OSI") pass_filt.AddArray(vtk_data_type, "WSS_peak2mean") pass_filt.AddArray(vtk_data_type, "WSSG_peak2mean") pass_filt.Update() #if(cell_type == "cell"): # print(pass_filt.GetOutput().GetCellData().GetArray("OSI").GetValue(0)) #else: # print(pass_filt.GetOutput().GetPointData().GetArray("OSI").GetValue(0)) writer2 = vtk.vtkXMLUnstructuredGridWriter() writer2.SetFileName(os.path.join(dir_path, vtu_output_2)) writer2.SetInputConnection(pass_filt.GetOutputPort()) writer2.Update()
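A minimal driver for post_proc_cfd() above; the directory, file names, and peak time-step index are placeholders for illustration, not values taken from any particular case.

# Hypothetical call; adjust paths and N_peak for the actual solver output.
post_proc_cfd("/path/to/case",                 # directory holding the transient .vtu file
              "wall_transient.vtu",            # multi-time-step input (placeholder name)
              cell_type="point",               # or "cell" to work on cell-centred arrays
              vtu_output_1="calc_test_node.vtu",        # per-step WSS/WSSG output
              vtu_output_2="calc_test_node_stats.vtu",  # time-averaged statistics output
              N_peak=3)                        # time step treated as the peak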
def vortvelvolumei(args): #inputfile, outputfile, sx, ex, sy, ey, sz, ez, dataset): p = args[0] cubenum = args[1] print("Cube", cubenum) #Check for additonal parameters if (p["param1"] != ''): comptype = p["param1"] else: comptype = "q" #Default to q criterion if (p["param2"] != ''): thresh = float(p["param2"]) else: thresh = 783.3 #Default for q threshold on isotropic data inputfile = p["inputfile"] +str(cubenum) + ".npy" outputfile = p["outputfile"] + str(cubenum) + ".vti" #always VTK Image Data for this. sx = p["sx"] sy = p["sy"] sz = p["sz"] ex = p["ex"] ey = p["ey"] ez = p["ez"] print ("Loading file, %s" % inputfile) #Determine if file is h5 or numpy rs = timeit.default_timer() vel = np.load(inputfile) print ("File Loaded") re = timeit.default_timer() #convert numpy array to vtk cs = timeit.default_timer() #convert numpy array to vtk vtkdata = numpy_support.numpy_to_vtk(vel.flat, deep=True, array_type=vtk.VTK_FLOAT) vtkdata.SetNumberOfComponents(3) vtkdata.SetName("Velocity") image = vtk.vtkImageData() image.GetPointData().SetVectors(vtkdata) image.SetExtent(sx,ex,sy,ey,sz,ez) #NOTE: Hardcoding Spacing image.SetSpacing(.006135923, .006135923, .006135923) ce = timeit.default_timer() vs = timeit.default_timer() print ("Beginning computation: " + comptype) if (comptype == "v"): vorticity = vtk.vtkCellDerivatives() vorticity.SetVectorModeToComputeVorticity() vorticity.SetTensorModeToPassTensors() vorticity.SetInputData(image) vorticity.Update() elif (comptype == "q"): vorticity = vtk.vtkGradientFilter() vorticity.SetInputData(image) vorticity.SetInputScalars(image.FIELD_ASSOCIATION_POINTS,"Velocity") vorticity.ComputeQCriterionOn() vorticity.SetComputeGradient(0) vorticity.Update() ve = timeit.default_timer() print("Initial calculation done") ms = timeit.default_timer() if (comptype == "v"): mag = vtk.vtkImageMagnitude() cp = vtk.vtkCellDataToPointData() cp.SetInputData(vorticity.GetOutput()) cp.Update() image.GetPointData().SetScalars(cp.GetOutput().GetPointData().GetVectors()) mag.SetInputData(image) mag.Update() m = mag.GetOutput() m.GetPointData().RemoveArray("Velocity") else: image.GetPointData().SetScalars(vorticity.GetOutput().GetPointData().GetVectors("Q-criterion")) me = timeit.default_timer() print("Generating screenshot") c = vtk.vtkContourFilter() c.SetValue(0,thresh) c.SetInputData(image) c.Update() contour = c.GetOutput() mapper = vtk.vtkPolyDataMapper() mapper.SetInputData(contour) mapper.ScalarVisibilityOn() mapper.SetScalarRange(-1,1) mapper.SetScalarModeToUsePointFieldData() mapper.ColorByArrayComponent("Velocity", 0) actor = vtk.vtkActor() actor.SetMapper(mapper) ren = vtk.vtkRenderer() ren.AddActor(actor) ren.SetBackground(1,1,1) camera = vtk.vtkCamera() ren.SetActiveCamera(camera) ren.ResetCamera() camera.Zoom(1.5) #This reduces the whitespace around the image renWin = vtk.vtkRenderWindow() renWin.SetSize(1024,1024) renWin.AddRenderer(ren) renWin.SetOffScreenRendering(1) windowToImageFilter = vtk.vtkWindowToImageFilter() windowToImageFilter.SetInput(renWin) windowToImageFilter.Update() w = vtk.vtkPNGWriter() pngfilename = p["outputfile"] + str(cubenum) + ".png" w.SetFileName(pngfilename) w.SetInputConnection(windowToImageFilter.GetOutputPort()) w.Write() #Shift camera angle and take snapshots around the cube. 
for aznum in range(4): camera.Azimuth(90) windowToImageFilter = vtk.vtkWindowToImageFilter() windowToImageFilter.SetInput(renWin) windowToImageFilter.Update() pngfilename = p["outputfile"] + str(cubenum) + "-r" + str(aznum)+ ".png" w.SetFileName(pngfilename) w.SetInputConnection(windowToImageFilter.GetOutputPort()) w.Write() camera.Elevation(90) #Rotate camera to top windowToImageFilter = vtk.vtkWindowToImageFilter() windowToImageFilter.SetInput(renWin) windowToImageFilter.Update() pngfilename = p["outputfile"] + str(cubenum) + "-t1.png" w.SetFileName(pngfilename) w.SetInputConnection(windowToImageFilter.GetOutputPort()) w.Write() camera.Elevation(180) #Rotate camera to bottom windowToImageFilter = vtk.vtkWindowToImageFilter() windowToImageFilter.SetInput(renWin) windowToImageFilter.Update() pngfilename = p["outputfile"] + str(cubenum) + "-b1.png" w.SetFileName(pngfilename) w.SetInputConnection(windowToImageFilter.GetOutputPort()) w.Write() print ("Thresholding.") ts = timeit.default_timer() t = vtk.vtkImageThreshold() #Dense represenation (0's included, structured grid) #t = vtk.vtkThreshold() #sparse representation if (comptype == "q"): t.SetInputData(image) t.ThresholdByUpper(thresh) #.25*67.17^2 = 1127 #t.SetInputArrayToProcess(0,0,0, vorticity.GetOutput().FIELD_ASSOCIATION_POINTS, "Q-criterion") print("q criterion") else: t.SetInputData(m) t.SetInputArrayToProcess(0,0,0, mag.GetOutput().FIELD_ASSOCIATION_POINTS, "Magnitude") t.ThresholdByUpper(thresh) #44.79) #Set values in range to 1 and values out of range to 0 t.SetInValue(1) t.SetOutValue(0) #t.ReplaceInOn() #t.ReplaceOutOn() print("Update thresh") t.Update() #wt = vtk.vtkXMLImageDataWriter() #wt.SetInputData(t.GetOutput()) #wt.SetFileName("thresh.vti") #wt.Write() d = vtk.vtkImageDilateErode3D() d.SetInputData(t.GetOutput()) d.SetKernelSize(4,4,4) d.SetDilateValue(1) d.SetErodeValue(0) print ("Update dilate") d.Update() iis = vtk.vtkImageToImageStencil() iis.SetInputData(d.GetOutput()) iis.ThresholdByUpper(1) stencil = vtk.vtkImageStencil() stencil.SetInputConnection(2, iis.GetOutputPort()) stencil.SetBackgroundValue(0) #image.GetPointData().RemoveArray("Vorticity") #Set scalars to velocity so it can be cut by the stencil image.GetPointData().SetScalars(image.GetPointData().GetVectors()) #if (comptype == "q"): #Use this to get just q-criterion data instead of velocity data. Do we need both? # image.GetPointData().SetScalars(vorticity.GetOutput().GetPointData().GetScalars("Q-criterion")) stencil.SetInputData(image) print ("Update stencil") stencil.Update() te = timeit.default_timer() print("Setting up write") ws = timeit.default_timer() #Make velocity a vector again velarray = stencil.GetOutput().GetPointData().GetScalars() image.GetPointData().RemoveArray("Velocity") image.GetPointData().SetVectors(velarray) w = vtk.vtkXMLImageDataWriter() w.SetCompressorTypeToZLib() #w.SetCompressorTypeToNone() Need to figure out why this fails. 
w.SetEncodeAppendedData(0) #turn off base 64 encoding for fast write w.SetFileName(outputfile) w.SetInputData(image) if (0): w.SetCompressorTypeToZfp() w.GetCompressor().SetNx(ex-sx+1) w.GetCompressor().SetNy(ey-sy+1) w.GetCompressor().SetNz(ez-sz+1) w.GetCompressor().SetTolerance(1e-2) w.GetCompressor().SetNumComponents(3) #result = w.Write() result = 1 #don't write for benchmarking we = timeit.default_timer() print("Results:") print("Read time: %s" % str(re-rs)) print ("Convert to vtk: %s" % str(ce-cs)) if (comptype == "q"): print ("Q Computation: %s" % str(ve-vs)) print ("Q Magnitude: %s" % str(me-ms)) else: print ("Vorticity Computation: %s" % str(ve-vs)) print ("Vorticity Magnitude: %s" % str(me-ms)) print ("Threshold: %s" % str(te-ts)) print ("Write %s" % str(we-ws)) print ("Total time: %s" % str(we-rs)) if (result): p["message"] = "Success" p["computetime"] = str(we-rs) return p #return the packet
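A sketch of the argument packet vortvelvolumei() expects; the keys mirror the ones read inside the function, while the paths, extents, and threshold are placeholder values.

# Hypothetical parameter packet for one 64^3 cube; paths are placeholders.
packet = {
    "param1": "q",                       # "q" for Q-criterion, "v" for vorticity magnitude
    "param2": "783.3",                   # isosurface threshold, parsed with float()
    "inputfile": "/path/to/vel_cube_",   # .npy prefix; the cube number is appended
    "outputfile": "/path/to/out_cube_",  # .vti/.png prefix; the cube number is appended
    "sx": 0, "ex": 63, "sy": 0, "ey": 63, "sz": 0, "ez": 63,
}
result = vortvelvolumei((packet, 0))     # process cube 0
print(result["message"], result["computetime"])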
def main(argv): #Just get something working for testing... try: opts, args = getopt.getopt(argv,"hi:", ["ifile="]) except getopt.GetoptError as err: print 'tviewer.py -i <inputfile.vtk>' print (str(err)) for opt, arg in opts: if opt == '-h': print 'tviewer.py -i <inputfile.vtk>' sys.exit() elif opt in ("-i", "--ifile"): inputfile = arg print("Going to load and view ", inputfile) #Read data reader = vtk.vtkXMLImageDataReader() reader.SetFileName(inputfile) reader.Update() #lut = vtk.vtkLookupTable() #lut.SetNumberOfColors(65535) #lut.SetHueRange(0.0, 2.667) #lut.SetVectorMode(vtk.vtkScalarsToColors.MAGNITUDE) #lut.Build() image = reader.GetOutput() #image.SetSpacing(1,1,1) #image.GetPointData().SetScalars(image.GetPointData().GetVectors()) #Compute Q Criterion for texture mapping vorticity = vtk.vtkGradientFilter() vorticity.SetInputData(image) vorticity.SetInputScalars(image.FIELD_ASSOCIATION_POINTS,"Velocity") vorticity.ComputeQCriterionOn() #vorticity.SetComputeGradient(0) vorticity.Update() #Generate contour for comparison c = vtk.vtkContourFilter() #c.SetValue(0,1128) c.SetValue(0,450) image.GetPointData().SetScalars(vorticity.GetOutput().GetPointData().GetVectors("Q-criterion")) c.SetInputData(image) c.Update() contour = c.GetOutput() #contour.GetCellData().SetScalars(image.GetPointData().GetVectors("Velocity")) normals = vtk.vtkPolyDataNormals() normals.SetInputData(contour) normals.SetFeatureAngle(45) #? normals.Update() print normals.GetOutput() mapper = vtk.vtkPolyDataMapper() mapper.SetInputData(normals.GetOutput()) mapper.ScalarVisibilityOn() mapper.SetScalarRange(-1,1) mapper.SetScalarModeToUsePointFieldData() mapper.ColorByArrayComponent("Velocity", 0) #print image #print contour #mapper.SelectColorArray("Q-criterion") #mapper.SetLookupTable(lut) print mapper actor = vtk.vtkActor() actor.SetMapper(mapper) ren = vtk.vtkRenderer() ren.AddActor(actor) ren.SetBackground(1,1,1) ren.ResetCamera() renWin = vtk.vtkRenderWindow() renWin.SetSize(600,600) renWin.AddRenderer(ren) iren = vtk.vtkRenderWindowInteractor() def MouseMove(self, data): print("Load Cache %s" % data ) print ("Iren data") #print iren #addcube #print ren print ren.GetViewPoint() print ren.GetDisplayPoint() print ren.WorldToView() print ren.ComputeVisiblePropBounds() ysize = renWin.GetSize()[1] c.SetValue(0,ysize) c.Update() normals = vtk.vtkPolyDataNormals() normals.SetInputData(c.GetOutput()) normals.SetFeatureAngle(25) #? normals.Update() normals.SetFeatureAngle(45) #? normals.Update() mapper2 = vtk.vtkPolyDataMapper() mapper2.SetInputData(normals.GetOutput()) mapper2.ScalarVisibilityOn() mapper2.SetScalarRange(-.5,1) mapper2.SetScalarModeToUsePointFieldData() mapper2.ColorByArrayComponent("Velocity", 0) actor2 = vtk.vtkActor() actor2.SetMapper(mapper2) ren.AddActor(actor2) iren.AddObserver("MiddleButtonPressEvent", MouseMove) iren.SetRenderWindow(renWin) iren.Initialize() iren.Start() #time.sleep(2) print("adding another cube")
#iso64p2 iso64p2k1 #.006135923 import sys import vtk from vtk.util import numpy_support r = vtk.vtkXMLImageDataReader() r.SetFileName("iso128thresh.vti") r.Update() image = r.GetOutput() #Copy velocity into vectors... #image.GetPointData().SetCopyVectors(image.GetPointData().GetScalars()) threshold= (783.3) vort = vtk.vtkGradientFilter() vort.SetInputData(image) vort.SetInputScalars(image.FIELD_ASSOCIATION_POINTS,"Velocity") vort.ComputeQCriterionOn() vort.Update() image.GetPointData().SetScalars(vort.GetOutput().GetPointData().GetArray("Q-criterion")) c = vtk.vtkContourFilter() c.SetValue(0,threshold) c.SetInputData(image) c.Update() box = vtk.vtkBox() #box.SetBounds(0,0.380427226,0,63,0,63) box.SetBounds(0,64,0,64,0,64)
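The fragment above stops after defining the clip box; a sketch of how the Q-criterion contour could be clipped against it and written out, following the vtkClipPolyData pattern used later in this collection (the output file name is a placeholder).

clip = vtk.vtkClipPolyData()
clip.SetClipFunction(box)            # the vtkBox built above acts as the implicit clip function
clip.InsideOutOn()                   # keep the geometry inside the box bounds
clip.SetInputData(c.GetOutput())
clip.Update()
writer = vtk.vtkXMLPolyDataWriter()
writer.SetFileName("iso128thresh_clipped.vtp")   # placeholder output name
writer.SetInputConnection(clip.GetOutputPort())
writer.Write()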
writer = vtk.vtkPNGWriter() writer.SetWriteToMemory(1) writer.SetInputConnection(windowToImageFilter.GetOutputPort()) writer.Write() data = str(buffer(writer.GetResult())) return Image(data) reader = vtk.vtkXMLImageDataReader() reader.SetFileName( "/home/stephenh/turbulence/sc/datasets/vtkscripts/iso64.vti") reader.Update() image = reader.GetOutput() vorticity = vtk.vtkGradientFilter() vorticity.SetInputData(image) vorticity.SetInputScalars(image.FIELD_ASSOCIATION_POINTS, "Velocity") vorticity.ComputeQCriterionOn() vorticity.Update() #Generate contour for comparison c = vtk.vtkContourFilter() c.SetValue(0, 1128) image.GetPointData().SetScalars( vorticity.GetOutput().GetPointData().GetVectors("Q-criterion")) c.SetInputData(image) c.Update() contour = c.GetOutput() normals = vtk.vtkPolyDataNormals() normals.SetInputData(contour)
def main(argv): try: opts, args = getopt.getopt(argv, "hi:o:x:a:y:b:z:c:d:u:", [ "ifile=", "ofile=", "sx=", "ex=", "sy=", "ey=", "sz=", "ez=", "dataset=", "comp=" ]) except getopt.GetoptError as err: print 'getmultithresh.py -i <inputfile.h5> -o <outputfile.vti> -sx -ex -sy -ey -sz -ez -dataset -comptype' print(str(err)) for opt, arg in opts: if opt == '-h': print 'getmultithresh.py -i <inputfile.h5> -o <outputfile.vti> -sx -ex -sy -ey -sz -ez -dataset' sys.exit() elif opt in ("-i", "--ifile"): inputfile = arg elif opt in ("-o", "--ofile"): outputfile = arg elif opt in ("-x", "--sx"): sx = int(arg) elif opt in ("-a", "--ex"): ex = int(arg) elif opt in ("-y", "--sy"): sy = int(arg) elif opt in ("-b", "--ey"): ey = int(arg) elif opt in ("-z", "--sz"): sz = int(arg) elif opt in ("-c", "--ez"): ez = int(arg) elif opt in ("-d", "--dataset"): dataset = str(arg) elif opt in ("-u", "--comp"): comptype = str(arg) print("Loading file, %s" % inputfile) #Determine if file is h5 or numpy if (inputfile.split(".")[1] == "npy"): rs = timeit.default_timer() vel = np.load(inputfile) re = timeit.default_timer() else: #read in file rs = timeit.default_timer() data_file = h5py.File(inputfile, 'r') vel = np.array(data_file[dataset]) data_file.close() re = timeit.default_timer() cs = timeit.default_timer() #convert numpy array to vtk vtkdata = numpy_support.numpy_to_vtk(vel.flat, deep=True, array_type=vtk.VTK_FLOAT) vtkdata.SetNumberOfComponents(3) vtkdata.SetName("Velocity") image = vtk.vtkImageData() image.GetPointData().SetVectors(vtkdata) image.SetExtent(sx, ex, sy, ey, sz, ez) #NOTE: Hardcoding Spacing image.SetSpacing(.006135923, .006135923, .006135923) print("Doing computation") ce = timeit.default_timer() vs = timeit.default_timer() ve = timeit.default_timer() print("Generating contour") ms = timeit.default_timer() mag = vtk.vtkImageMagnitude() for x in range(0, 1): start = timeit.default_timer() threshold = (22.39 * (2.5)) if (comptype == "q"): threshold = (783.3) vort = vtk.vtkGradientFilter() vort.SetInputData(image) vort.SetInputScalars(image.FIELD_ASSOCIATION_POINTS, "Velocity") vort.ComputeQCriterionOn() vort.Update() image.GetPointData().SetScalars( vort.GetOutput().GetPointData().GetVectors("Q-criterion")) else: v = vtk.vtkCellDerivatives() v.SetVectorModeToComputeVorticity() v.SetTensorModeToPassTensors() v.SetInputData(image) v.Update() vort = vtk.vtkImageMagnitude() cp = vtk.vtkCellDataToPointData() cp.SetInputData(v.GetOutput()) cp.Update() image.GetPointData().SetScalars( cp.GetOutput().GetPointData().GetVectors()) vort.SetInputData(image) vort.Update() #ni = vtk.vtkImageData() #ni.SetSpacing(.006135923, .006135923, .006135923) #ni.SetExtent(sx,ex,sy,ey,sz,ez) #ni.GetPointData().SetScalars(q.GetOutput().GetPointData().GetVectors("Q-criterion")) mend = timeit.default_timer() me = mend comptime = mend - start print("Magnitude Computation time: " + str(comptime) + "s") c = vtk.vtkContourFilter() c.SetValue(0, threshold) if (comptype == "q"): c.SetInputData(image) else: c.SetInputData(vort.GetOutput()) print("Computing Contour with threshold", threshold) c.Update() w = vtk.vtkXMLPolyDataWriter() w.SetEncodeAppendedData(0) #turn off base 64 encoding for fast write w.SetFileName(outputfile + str(x) + ".vtp") w.SetInputData(c.GetOutput()) ws = timeit.default_timer() w.Write() we = timeit.default_timer() print("Results:") print("Read time: %s" % str(re - rs)) print("Convert to vtk: %s" % str(ce - cs)) if (comptype == "q"): print("Q Computation: %s" % str(ve - vs)) print("Q Magnitude: %s" % str(me - ms)) else:
print("Vorticity Computation: %s" % str(ve - vs)) print("Vorticity Magnitude: %s" % str(me - ms)) #print ("Threshold: %s" % str(te-ts)) print("Write %s" % str(we - ws)) print("Total time: %s" % str(we - rs))
def main(argv): #Just get something working for testing... try: opts, args = getopt.getopt(argv, "hi:", ["ifile="]) except getopt.GetoptError as err: print 'tviewer.py -i <inputfile.vtk>' print(str(err)) for opt, arg in opts: if opt == '-h': print 'tviewer.py -i <inputfile.vtk>' sys.exit() elif opt in ("-i", "--ifile"): inputfile = arg print("Going to load and view ", inputfile) #Read data reader = vtk.vtkXMLImageDataReader() reader.SetFileName(inputfile) reader.Update() #Setup offscreen rendering graphics_factory = vtk.vtkGraphicsFactory() graphics_factory.SetOffScreenOnlyMode(1) graphics_factory.SetUseMesaClasses(1) #Get image from reader image = reader.GetOutput() print("Image read in") #compute q-criterion vorticity = vtk.vtkGradientFilter() vorticity.SetInputData(image) vorticity.SetInputScalars(image.FIELD_ASSOCIATION_POINTS, "Velocity") vorticity.ComputeQCriterionOn() vorticity.SetComputeGradient(0) vorticity.Update() print("Vorticity done") #Get magnitude not sure we need it now. lets see. #mag = vtk.vtkImageMagnitude() #cp = vtk.vtkCellDataToPointData() #cp.SetInputData(vorticity.GetOutput()) #cp.Update() #image.GetPointData().SetScalars(cp.GetOutput().GetPointData().GetVectors()) #mag.SetInputData(image) #mag.Update() #m = mag.GetOutput() image.GetPointData().SetScalars( vorticity.GetOutput().GetPointData().GetVectors("Q-criterion")) print image #image.GetPointData().SetScalars(image.GetPointData().GetVectors("Velocity")) c = vtk.vtkContourFilter() #c.SetValue(0,1128) c.SetValue(0, 600) c.SetInputData(image) c.Update() #import pdb; pdb.set_trace() contour = c.GetOutput() #contour.GetCellData().SetScalars(image.GetPointData().GetVectors("Velocity")) print "Contour done" #normals = vtk.vtkPolyDataNormals() #normals.SetInputData(contour) #normals.SetFeatureAngle(45) #? #normals.Update() #print normals.GetOutput() mapper = vtk.vtkPolyDataMapper() mapper.SetInputData(contour) mapper.ScalarVisibilityOn() mapper.SetScalarRange(-1, 1) mapper.SetScalarModeToUsePointFieldData() mapper.ColorByArrayComponent("Velocity", 0) #import pdb; pdb.set_trace() print("mapped") #print mapper actor = vtk.vtkActor() actor.SetMapper(mapper) ren = vtk.vtkRenderer() ren.AddActor(actor) ren.SetBackground(1, 1, 1) ren.ResetCamera() #camera = vtk.vtkCamera() #camera.SetPosition(0,0,0) #camera.SetFocalPoint(0,0,0) #ren.SetActiveCamera(camera) renWin = vtk.vtkRenderWindow() renWin.SetSize(600, 600) renWin.AddRenderer(ren) renWin.SetOffScreenRendering(1) #import pdb; pdb.set_trace() #iren = vtk.vtkRenderWindowInteractor() #iren.SetRenderWindow(renWin) #iren.Initialize() #iren.Start() windowToImageFilter = vtk.vtkWindowToImageFilter() windowToImageFilter.SetInput(renWin) windowToImageFilter.Update() w = vtk.vtkPNGWriter() w.SetFileName("cube.png") w.SetInputConnection(windowToImageFilter.GetOutputPort()) w.Write()
sorted_plap = final_plap[sort_indices] output["PLAP/Final/%d" % (nprocs+1,)][:,1] = sorted_plap output["PRT/Final/%d" % (nprocs+1,)][:,0] = final_indices[sort_indices] sorted_prt = final_prt[sort_indices] output["PRT/Final/%d" % (nprocs+1,)][:,1] = sorted_prt coordinates_vtu_vtk = numpy_support.numpy_to_vtk(sorted_coordinates) coordinates_vtu_vtk.SetName("Coordinates") plap_vtu_vtk = numpy_support.numpy_to_vtk(sorted_plap) plap_vtu_vtk.SetName("PLAP") prt_vtu_vtk = numpy_support.numpy_to_vtk(sorted_prt) prt_vtu_vtk.SetName("PRT") domain_particles.GetOutput().GetPointData().AddArray(coordinates_vtu_vtk) domain_particles.GetOutput().GetPointData().AddArray(plap_vtu_vtk) domain_particles.GetOutput().GetPointData().AddArray(prt_vtu_vtk) # Compute Right Cauchy Green tensor for FTLE deformation_gradient = vtk.vtkGradientFilter() deformation_gradient.SetInputConnection(domain_particles.GetOutputPort()) deformation_gradient.SetInputArrayToProcess(0,0,0,vtk.vtkDataObject.FIELD_ASSOCIATION_POINTS,"Coordinates") deformation_gradient.Update() deformation_gradient_tensor = numpy_support.vtk_to_numpy(deformation_gradient.GetOutput().GetPointData().GetArray("Gradients")) deformation_gradient_tensor = np.reshape(deformation_gradient_tensor,(-1,3,3)) right_cauchy_green_tensor = np.einsum('abj,acj->abc',deformation_gradient_tensor,deformation_gradient_tensor) eigvals = np.linalg.eigvals(right_cauchy_green_tensor) eigvals = np.sort(eigvals,axis=1) ##### ADDED BY KEVIN ############################################################################################### # Compute real part of eigenvalues, as VTK cannot accept complex numbers # Occasionally the imaginary part is present, but equal to 0 real_eigvals = np.real(eigvals[:,2]) ftle = np.ascontiguousarray(np.log(real_eigvals)) ##### END ########################################################################################################## ftle_vtu_vtk = numpy_support.numpy_to_vtk(ftle)
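A NumPy-only restatement of the eigenvalue step above, useful for checking the tensor algebra in isolation; grad is assumed to be the flattened 3x3 gradient array returned by vtkGradientFilter, and the integration time T is a hypothetical parameter (the snippet above applies no 1/(2|T|) normalisation and simply takes the log of the largest eigenvalue).

import numpy as np

def ftle_from_gradients(grad, T=None):
    # grad: shape (n_points, 9) deformation gradients, as produced above
    F = grad.reshape(-1, 3, 3)
    # Cauchy-Green tensor per point, matching the einsum used above
    C = np.einsum('abj,acj->abc', F, F)
    lam_max = np.sort(np.real(np.linalg.eigvals(C)), axis=1)[:, 2]
    ftle = np.log(lam_max)
    if T is not None:                 # optional finite-time scaling (assumption)
        ftle = ftle / (2.0 * abs(T))
    return np.ascontiguousarray(ftle)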
def post_proc_cfd_diff(parameter_list): dir_path = parameter_list[0] vtu_input = parameter_list[1] cell_type = parameter_list[2] vtu_output_1 = parameter_list[3] vtu_output_2 = parameter_list[4] N_peak = parameter_list[5] reader = vtk.vtkXMLUnstructuredGridReader() reader.SetFileName(os.path.join(dir_path, vtu_input)) reader.Update() N = reader.GetNumberOfTimeSteps() print(N) if (cell_type == "cell"): vtk_process = vtk.vtkDataObject.FIELD_ASSOCIATION_CELLS vtk_data_type = vtk.vtkDataObject.CELL else: vtk_process = vtk.vtkDataObject.FIELD_ASSOCIATION_POINTS vtk_data_type = vtk.vtkDataObject.POINT pass_arr = vtk.vtkPassArrays() pass_arr.SetInputConnection(reader.GetOutputPort()) pass_arr.AddArray(vtk_data_type, "absolute_pressure") pass_arr.AddArray(vtk_data_type, "x_wall_shear") pass_arr.AddArray(vtk_data_type, "y_wall_shear") pass_arr.AddArray(vtk_data_type, "z_wall_shear") calc1 = vtk.vtkArrayCalculator() calc1.SetFunction("sqrt(x_wall_shear^2+y_wall_shear^2+z_wall_shear^2)") calc1.AddScalarVariable("x_wall_shear", "x_wall_shear", 0) calc1.AddScalarVariable("y_wall_shear", "y_wall_shear", 0) calc1.AddScalarVariable("z_wall_shear", "z_wall_shear", 0) calc1.SetResultArrayName("WSS") calc1.SetInputConnection(pass_arr.GetOutputPort()) if (cell_type == "cell"): calc1.SetAttributeModeToUseCellData() else: calc1.SetAttributeModeToUsePointData() calc1.SetResultArrayType(vtk.VTK_DOUBLE) x_WSS_grad = vtk.vtkGradientFilter() x_WSS_grad.SetInputConnection(calc1.GetOutputPort()) x_WSS_grad.ComputeGradientOn() x_WSS_grad.FasterApproximationOff() x_WSS_grad.ComputeDivergenceOff() x_WSS_grad.ComputeVorticityOff() x_WSS_grad.ComputeQCriterionOff() x_WSS_grad.SetResultArrayName("x_WSS_grad") x_WSS_grad.SetInputArrayToProcess(0, 0, 0, vtk_process, "x_wall_shear") y_WSS_grad = vtk.vtkGradientFilter() y_WSS_grad.SetInputConnection(x_WSS_grad.GetOutputPort()) y_WSS_grad.ComputeGradientOn() y_WSS_grad.FasterApproximationOff() y_WSS_grad.ComputeDivergenceOff() y_WSS_grad.ComputeVorticityOff() y_WSS_grad.ComputeQCriterionOff() y_WSS_grad.SetResultArrayName("y_WSS_grad") y_WSS_grad.SetInputArrayToProcess(0, 0, 0, vtk_process, "y_wall_shear") z_WSS_grad = vtk.vtkGradientFilter() z_WSS_grad.SetInputConnection(y_WSS_grad.GetOutputPort()) z_WSS_grad.ComputeGradientOn() z_WSS_grad.FasterApproximationOff() z_WSS_grad.ComputeDivergenceOff() z_WSS_grad.ComputeVorticityOff() z_WSS_grad.ComputeQCriterionOff() z_WSS_grad.SetResultArrayName("z_WSS_grad") z_WSS_grad.SetInputArrayToProcess(0, 0, 0, vtk_process, "z_wall_shear") calc2 = vtk.vtkArrayCalculator() calc2.AddScalarVariable("x_component", "x_WSS_grad", 0) calc2.AddScalarVariable("y_component", "y_WSS_grad", 1) calc2.AddScalarVariable("z_component", "z_WSS_grad", 2) calc2.SetFunction("sqrt(x_component^2+y_component^2+z_component^2)") calc2.SetResultArrayName("WSSG") calc2.SetInputConnection(z_WSS_grad.GetOutputPort()) if (cell_type == "cell"): calc2.SetAttributeModeToUseCellData() else: calc2.SetAttributeModeToUsePointData() calc2.SetResultArrayType(vtk.VTK_DOUBLE) # initialize the output to include the peak values grid = vtk.vtkUnstructuredGrid() #N_peak = 3 reader.SetTimeStep(N_peak) calc2.Update() print("loading peak: {0} timestep to copy data".format( reader.GetTimeStep())) grid.DeepCopy(calc2.GetOutput()) reader.SetTimeStep(0) #reader.Update() calc2.Update() print("loading {0}th timestep for averaging initialization".format( reader.GetTimeStep())) stats = vtk.vtkTemporalStatistics() stats.SetInputConnection(calc2.GetOutputPort()) stats.ComputeMaximumOff()
stats.ComputeMinimumOff() stats.ComputeStandardDeviationOff() stats.ComputeAverageOn() stats.Update() print("what's the time step after stats :{0}".format(reader.GetTimeStep())) grid_out = vtk.vtkUnstructuredGrid() grid_out.DeepCopy(stats.GetOutput()) if (cell_type == "cell"): out_data = grid_out.GetCellData() grid_data = grid.GetCellData() else: out_data = grid_out.GetPointData() grid_data = grid.GetPointData() print("update names") out_data.AddArray(grid_data.GetArray("WSS")) out_data.GetArray("WSS").SetName("WSS_peak") out_data.AddArray(grid_data.GetArray("WSSG")) out_data.GetArray("WSSG").SetName("WSSG_peak") out_data.AddArray(grid_data.GetArray("absolute_pressure")) out_data.GetArray("absolute_pressure").SetName("pressure_peak") out_data.GetArray("WSS_average").SetName("TAWSS") out_data.GetArray("WSSG_average").SetName("TAWSSG") out_data.GetArray("absolute_pressure_average").SetName("pressure_average") print("TAWSSVector") calc3 = vtk.vtkArrayCalculator() calc3.AddScalarVariable("x_wall_shear_average", "x_wall_shear_average", 0) calc3.AddScalarVariable("y_wall_shear_average", "y_wall_shear_average", 0) calc3.AddScalarVariable("z_wall_shear_average", "z_wall_shear_average", 0) calc3.SetFunction( "sqrt(x_wall_shear_average^2+y_wall_shear_average^2+z_wall_shear_average^2)" ) calc3.SetResultArrayName("TAWSSVector") calc3.SetInputData(grid_out) if (cell_type == "cell"): calc3.SetAttributeModeToUseCellData() else: calc3.SetAttributeModeToUsePointData() calc3.SetResultArrayType(vtk.VTK_DOUBLE) calc3.Update() print("OSI") calc4 = vtk.vtkArrayCalculator() calc4.AddScalarVariable("TAWSSVector", "TAWSSVector", 0) calc4.AddScalarVariable("TAWSS", "TAWSS", 0) calc4.SetFunction("0.5*(1.0-(TAWSSVector/(TAWSS)))") calc4.SetResultArrayName("OSI") calc4.SetInputConnection(calc3.GetOutputPort()) if (cell_type == "cell"): calc4.SetAttributeModeToUseCellData() else: calc4.SetAttributeModeToUsePointData() calc4.SetResultArrayType(vtk.VTK_DOUBLE) #calc4.Update() # peak ratios calc5 = vtk.vtkArrayCalculator() calc5.AddScalarVariable("WSS_peak", "WSS_peak", 0) calc5.AddScalarVariable("TAWSS", "TAWSS", 0) calc5.SetFunction("WSS_peak/TAWSS") calc5.SetResultArrayName("WSS_peak_q_TAWSS") calc5.SetInputConnection(calc4.GetOutputPort()) if (cell_type == "cell"): calc5.SetAttributeModeToUseCellData() else: calc5.SetAttributeModeToUsePointData() calc5.SetResultArrayType(vtk.VTK_DOUBLE) #calc5.Update() calc6 = vtk.vtkArrayCalculator() calc6.AddScalarVariable("WSSG_peak", "WSSG_peak", 0) calc6.AddScalarVariable("TAWSSG", "TAWSSG", 0) calc6.SetFunction("WSSG_peak/TAWSSG") calc6.SetResultArrayName("WSSG_peak_q_TAWSSG") calc6.SetInputConnection(calc5.GetOutputPort()) if (cell_type == "cell"): calc6.SetAttributeModeToUseCellData() else: calc6.SetAttributeModeToUsePointData() calc6.SetResultArrayType(vtk.VTK_DOUBLE) calc7 = vtk.vtkArrayCalculator() calc7.AddScalarVariable("pressure_peak", "pressure_peak", 0) calc7.AddScalarVariable("pressure_average", "pressure_average", 0) calc7.SetFunction("pressure_peak/pressure_average") calc7.SetResultArrayName("pressure_peak_q_pressure_average") calc7.SetInputConnection(calc6.GetOutputPort()) if (cell_type == "cell"): calc7.SetAttributeModeToUseCellData() else: calc7.SetAttributeModeToUsePointData() calc7.SetResultArrayType(vtk.VTK_DOUBLE) pass_filt = vtk.vtkPassArrays() pass_filt.SetInputConnection(calc7.GetOutputPort()) pass_filt.AddArray(vtk_data_type, "WSS_peak") pass_filt.AddArray(vtk_data_type, "WSSG_peak") pass_filt.AddArray(vtk_data_type, "pressure_peak") 
pass_filt.AddArray(vtk_data_type, "pressure_average") pass_filt.AddArray(vtk_data_type, "TAWSS") pass_filt.AddArray(vtk_data_type, "TAWSSG") pass_filt.AddArray(vtk_data_type, "OSI") pass_filt.AddArray(vtk_data_type, "WSS_peak_q_TAWSS") pass_filt.AddArray(vtk_data_type, "WSSG_peak_q_TAWSSG") pass_filt.AddArray(vtk_data_type, "pressure_peak_q_pressure_average") pass_filt.Update() #if(cell_type == "cell"): # print(pass_filt.GetOutput().GetCellData().GetArray("OSI").GetValue(0)) #else: # print(pass_filt.GetOutput().GetPointData().GetArray("OSI").GetValue(0)) writer2 = vtk.vtkXMLUnstructuredGridWriter() writer2.SetFileName(os.path.join(dir_path, vtu_output_2)) writer2.SetInputConnection(pass_filt.GetOutputPort()) writer2.Update()
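A hedged example of driving post_proc_cfd_diff(); the paths and peak index are placeholders, and packing the arguments into a single list is presumably what makes the function easy to map over a worker pool, though that is an assumption rather than something stated in the code.

params = ["/path/to/case",              # dir_path
          "wall_transient.vtu",         # vtu_input (multi-time-step file, placeholder)
          "point",                      # cell_type: "point" or "cell"
          "calc_test_node.vtu",         # vtu_output_1 (not written by this variant)
          "calc_test_node_stats.vtu",   # vtu_output_2 (statistics written by writer2)
          3]                            # N_peak: time step treated as the peak
post_proc_cfd_diff(params)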
def main(argv): #Just get something working for testing... try: opts, args = getopt.getopt(argv,"hi:", ["ifile="]) except getopt.GetoptError as err: print 'tviewer.py -i <inputfile.vtk>' print (str(err)) for opt, arg in opts: if opt == '-h': print 'tviewer.py -i <inputfile.vtk>' sys.exit() elif opt in ("-i", "--ifile"): inputfile = arg print("Going to load and view ", inputfile) #Read data reader = vtk.vtkXMLImageDataReader() reader.SetFileName(inputfile) reader.Update() #lut = vtk.vtkLookupTable() #lut.SetNumberOfColors(65535) #lut.SetHueRange(0.0, 2.667) #lut.SetVectorMode(vtk.vtkScalarsToColors.MAGNITUDE) #lut.Build() #Setup offscreen rendering graphics_factory = vtk.vtkGraphicsFactory() graphics_factory.SetOffScreenOnlyMode(1) graphics_factory.SetUseMesaClasses(1) #imaging_factory = vtk.vtkImagingFactory() #imaging_factory.SetUseMesaClasses(1) #Get image from reader image = reader.GetOutput() #image.SetSpacing(1,1,1) #image.GetPointData().SetScalars(image.GetPointData().GetVectors()) #Compute Q Criterion for texture mapping vorticity = vtk.vtkGradientFilter() vorticity.SetInputData(image) vorticity.SetInputScalars(image.FIELD_ASSOCIATION_POINTS,"Velocity") vorticity.ComputeQCriterionOn() #vorticity.SetComputeGradient(0) vorticity.Update() #Generate contour for comparison c = vtk.vtkContourFilter() #c.SetValue(0,1128) c.SetValue(0,450) image.GetPointData().SetScalars(vorticity.GetOutput().GetPointData().GetVectors("Q-criterion")) c.SetInputData(image) c.Update() contour = c.GetOutput() #contour.GetCellData().SetScalars(image.GetPointData().GetVectors("Velocity")) normals = vtk.vtkPolyDataNormals() normals.SetInputData(contour) normals.SetFeatureAngle(45) #? normals.Update() #print normals.GetOutput() mapper = vtk.vtkPolyDataMapper() mapper.SetInputData(normals.GetOutput()) mapper.ScalarVisibilityOn() mapper.SetScalarRange(-1,1) mapper.SetScalarModeToUsePointFieldData() mapper.ColorByArrayComponent("Velocity", 0) #print image #print contour #mapper.SelectColorArray("Q-criterion") #mapper.SetLookupTable(lut) #print mapper actor = vtk.vtkActor() actor.SetMapper(mapper) ren = vtk.vtkRenderer() ren.AddActor(actor) ren.SetBackground(1,1,1) ren.ResetCamera() renWin = vtk.vtkRenderWindow() renWin.SetSize(400,400) renWin.SetOffScreenRendering(1) renWin.AddRenderer(ren) renWin.Render() windowToImageFilter = vtk.vtkWindowToImageFilter() windowToImageFilter.SetInput(renWin) windowToImageFilter.Update() w = vtk.vtkPNGWriter() w.SetFileName("cube.png") w.SetInputConnection(windowToImageFilter.GetOutputPort()) w.Write()
def main(argv): #Just get something working for testing... try: opts, args = getopt.getopt(argv,"hi:", ["ifile="]) except getopt.GetoptError as err: print 'tviewer.py -i <inputfile.vtk>' print (str(err)) for opt, arg in opts: if opt == '-h': print 'tviewer.py -i <inputfile.vtk>' sys.exit() elif opt in ("-i", "--ifile"): inputfile = arg print("Going to load and view ", inputfile) #Read data reader = vtk.vtkXMLImageDataReader() reader.SetFileName(inputfile) reader.Update() #Setup offscreen rendering graphics_factory = vtk.vtkGraphicsFactory() graphics_factory.SetOffScreenOnlyMode(1) graphics_factory.SetUseMesaClasses(1) #Get image from reader image = reader.GetOutput() print ("Image read in") #compute q-criterion vorticity = vtk.vtkGradientFilter() vorticity.SetInputData(image) vorticity.SetInputScalars(image.FIELD_ASSOCIATION_POINTS,"Velocity") vorticity.ComputeQCriterionOn() vorticity.SetComputeGradient(0) vorticity.Update() print ("Vorticity done") #Get magnitude not sure we need it now. lets see. #mag = vtk.vtkImageMagnitude() #cp = vtk.vtkCellDataToPointData() #cp.SetInputData(vorticity.GetOutput()) #cp.Update() #image.GetPointData().SetScalars(cp.GetOutput().GetPointData().GetVectors()) #mag.SetInputData(image) #mag.Update() #m = mag.GetOutput() image.GetPointData().SetScalars(vorticity.GetOutput().GetPointData().GetVectors("Q-criterion")) print image #image.GetPointData().SetScalars(image.GetPointData().GetVectors("Velocity")) c = vtk.vtkContourFilter() #c.SetValue(0,1128) c.SetValue(0,600) c.SetInputData(image) c.Update() #import pdb; pdb.set_trace() contour = c.GetOutput() #contour.GetCellData().SetScalars(image.GetPointData().GetVectors("Velocity")) print "Contour done" #normals = vtk.vtkPolyDataNormals() #normals.SetInputData(contour) #normals.SetFeatureAngle(45) #? #normals.Update() #print normals.GetOutput() mapper = vtk.vtkPolyDataMapper() mapper.SetInputData(contour) mapper.ScalarVisibilityOn() mapper.SetScalarRange(-1,1) mapper.SetScalarModeToUsePointFieldData() mapper.ColorByArrayComponent("Velocity", 0) #import pdb; pdb.set_trace() print ("mapped") #print mapper actor = vtk.vtkActor() actor.SetMapper(mapper) ren = vtk.vtkRenderer() ren.AddActor(actor) ren.SetBackground(1,1,1) ren.ResetCamera() #camera = vtk.vtkCamera() #camera.SetPosition(0,0,0) #camera.SetFocalPoint(0,0,0) #ren.SetActiveCamera(camera) renWin = vtk.vtkRenderWindow() renWin.SetSize(600,600) renWin.AddRenderer(ren) renWin.SetOffScreenRendering(1) #import pdb; pdb.set_trace() #iren = vtk.vtkRenderWindowInteractor() #iren.SetRenderWindow(renWin) #iren.Initialize() #iren.Start() windowToImageFilter = vtk.vtkWindowToImageFilter() windowToImageFilter.SetInput(renWin) windowToImageFilter.Update() w = vtk.vtkPNGWriter() w.SetFileName("cube.png") w.SetInputConnection(windowToImageFilter.GetOutputPort()) w.Write()
def getvtkdata(self, ci, timestep): PI= 3.141592654 contour=False firstval = ci.datafields.split(',')[0] #print ("First: ", firstval) if ((firstval == 'vo') or (firstval == 'qc') or (firstval == 'cvo') or (firstval == 'pcvo') or (firstval == 'qcc')): datafields = 'u' computation = firstval #We are doing a computation, so we need to know which one. if ((firstval == 'cvo') or (firstval == 'qcc') or (firstval == 'pcvo')): overlap = 3 #This was 2, but due to rounding because of the spacing, 3 is required. #Save a copy of the original request oci = jhtdblib.CutoutInfo() oci.xstart = ci.xstart oci.ystart = ci.ystart oci.zstart = ci.zstart oci.xlen = ci.xlen oci.ylen = ci.ylen oci.zlen = ci.zlen ci = self.expandcutout(ci, overlap) #Expand the cutout by the overlap contour = True else: datafields = ci.datafields.split(',') #There could be multiple components, so we will have to loop computation = '' #Split component into list and add them to the image #Check to see if we have a value for vorticity or q contour fieldlist = list(datafields) image = vtk.vtkImageData() rg = vtk.vtkRectilinearGrid() for field in fieldlist: if (ci.xlen > 61 and ci.ylen > +61 and ci.zlen > 61 and ci.xstep ==1 and ci.ystep ==1 and ci.zstep ==1 and not contour): #Do this if cutout is too large #Note: we don't want to get cubed data if we are doing cubes for contouring. data=GetData().getcubedrawdata(ci, timestep, field) else: data=GetData().getrawdata(ci, timestep, field) vtkdata = numpy_support.numpy_to_vtk(data.flat, deep=True, array_type=vtk.VTK_FLOAT) components = Datafield.objects.get(shortname=field).components vtkdata.SetNumberOfComponents(components) vtkdata.SetName(Datafield.objects.get(shortname=field).longname) #We need to see if we need to subtract one on end of extent edges. image.SetExtent(ci.xstart, ci.xstart+((ci.xlen+ci.xstep-1)/ci.xstep)-1, ci.ystart, ci.ystart+((ci.ylen+ci.ystep-1)/ci.ystep)-1, ci.zstart, ci.zstart+((ci.zlen+ci.zstep-1)/ci.zstep)-1) #image.SetExtent(ci.xstart, ci.xstart+int(ci.xlen)-1, ci.ystart, ci.ystart+int(ci.ylen)-1, ci.zstart, ci.zstart+int(ci.zlen)-1) image.GetPointData().AddArray(vtkdata) if (Datafield.objects.get(shortname=field).longname == "Velocity"): #Set the Velocity Array as vectors in the image. image.GetPointData().SetVectors(image.GetPointData().GetArray("Velocity")) #Get spacing from database and multiply it by the step. Don't do this on the contour--it is performed later on. #if (contour): #We need to scale the threshold to the spacing of the dataset. This is because we build the cubes #on a 1 spacing cube in order to get proper overlap on the contours. #ci.threshold = ci.threshold*Dataset.objects.get(dbname_text=ci.dataset).xspacing #else: xspacing = Dataset.objects.get(dbname_text=ci.dataset).xspacing yspacing = Dataset.objects.get(dbname_text=ci.dataset).yspacing zspacing = Dataset.objects.get(dbname_text=ci.dataset).zspacing #Check if we need a rectilinear grid, and set it up if so. if (ci.dataset == 'channel'): ygrid = jhtdblib.JHTDBLib().getygrid() #print("Ygrid: ") #print (ygrid) #Not sure about contouring channel yet, so we are going back to original variables at this point. 
rg.SetExtent(ci.xstart, ci.xstart+((ci.xlen+ci.xstep-1)/ci.xstep)-1, ci.ystart, ci.ystart+((ci.ylen+ci.ystep-1)/ci.ystep)-1, ci.zstart, ci.zstart+((ci.zlen+ci.zstep-1)/ci.zstep)-1) #components = Datafield.objects.get(shortname=field).components #vtkdata.SetNumberOfComponents(components) #vtkdata.SetName(Datafield.objects.get(shortname=field).longname) rg.GetPointData().AddArray(vtkdata) #import pdb;pdb.set_trace() #This isn't possible--we will have to do something about this in the future. #rg.SetSpacing(ci.xstep,ci.ystep,ci.zstep) xg = 8*PI/2048*np.arange(0,2047.0) zg = 3*PI/2048*np.arange(0,1535.0) vtkxgrid=numpy_support.numpy_to_vtk(xg, deep=True, array_type=vtk.VTK_FLOAT) vtkzgrid=numpy_support.numpy_to_vtk(zg, deep=True, array_type=vtk.VTK_FLOAT) vtkygrid=numpy_support.numpy_to_vtk(ygrid, deep=True, array_type=vtk.VTK_FLOAT) rg.SetXCoordinates(vtkxgrid) rg.SetZCoordinates(vtkzgrid) rg.SetYCoordinates(vtkygrid) image = rg #we rewrite the image since we may be doing a #computation below else: image.SetSpacing(xspacing*ci.xstep,yspacing*ci.ystep,zspacing*ci.zstep) #See if we are doing a computation if (computation == 'vo'): start = time.time() vorticity = vtk.vtkCellDerivatives() vorticity.SetVectorModeToComputeVorticity() vorticity.SetTensorModeToPassTensors() vorticity.SetInputData(image) #print("Computing Vorticity") vorticity.Update() end = time.time() comptime = end-start print("Vorticity Computation time: " + str(comptime) + "s") return image elif (computation == 'cvo' or computation == 'pcvo'): start = time.time() vorticity = vtk.vtkCellDerivatives() vorticity.SetVectorModeToComputeVorticity() vorticity.SetTensorModeToPassTensors() vorticity.SetInputData(image) #print("Computing Vorticity") vorticity.Update() vend = time.time() comptime = vend-start print("Vorticity Computation time: " + str(comptime) + "s") mag = vtk.vtkImageMagnitude() cp = vtk.vtkCellDataToPointData() cp.SetInputData(vorticity.GetOutput()) #print("Computing magnitude") cp.Update() mend = time.time() image.GetPointData().SetScalars(cp.GetOutput().GetPointData().GetVectors()) mag.SetInputData(image) mag.Update() comptime = mend-vend print("Magnitude Computation time: " + str(comptime) + "s") c = vtk.vtkContourFilter() c.SetValue(0,ci.threshold) c.SetInputData(mag.GetOutput()) print("Computing Contour with threshold", ci.threshold) c.Update() cend = time.time() comptime = cend-mend print("Contour Computation time: " + str(comptime) + "s") #Now we need to clip out the overlap box = vtk.vtkBox() #set box to requested size #The OCI deepcopy didn't seem to work. Manually taking the overlap again.
box.SetBounds(oci.xstart*xspacing, (oci.xstart+oci.xlen)*xspacing, oci.ystart*yspacing, (oci.ystart+oci.ylen)*yspacing, oci.zstart*zspacing,(oci.zstart+oci.zlen)*zspacing) clip = vtk.vtkClipPolyData() clip.SetClipFunction(box) clip.GenerateClippedOutputOn() clip.SetInputData(c.GetOutput()) clip.InsideOutOn() clip.Update() #import pdb;pdb.set_trace() cropdata = clip.GetOutput() #Cleanup image.ReleaseData() #mag.ReleaseData() #box.ReleaseData() #clip.ReleaseData() #image.Delete() #box.Delete() #vorticity.Delete() end = time.time() comptime = end-start print("Total Computation time: " + str(comptime) + "s") #return cropdata #We need the output port for appending, so return the clip instead return clip elif (computation == 'qcc'): start = time.time() q = vtk.vtkGradientFilter() q.SetInputData(image) q.SetInputScalars(image.FIELD_ASSOCIATION_POINTS,"Velocity") q.ComputeQCriterionOn() q.Update() image.GetPointData().SetScalars(q.GetOutput().GetPointData().GetVectors("Q-criterion")) #mag = vtk.vtkImageMagnitude() #mag.SetInputData(image) #mag.Update() mend = time.time() comptime = mend-start #print("Magnitude Computation time: " + str(comptime) + "s") c = vtk.vtkContourFilter() c.SetValue(0,ci.threshold) c.SetInputData(image) print("Computing Contour with threshold", ci.threshold) c.Update() cend = time.time() comptime = cend-mend print("Q Contour Computation time: " + str(comptime) + "s") #clip out the overlap here box = vtk.vtkBox() #set box to requested size box.SetBounds(oci.xstart, oci.xstart+oci.xlen-1, oci.ystart, oci.ystart+oci.ylen-1, oci.zstart,oci.zstart+oci.zlen-1) clip = vtk.vtkClipPolyData() clip.SetClipFunction(box) clip.GenerateClippedOutputOn() clip.SetInputData(c.GetOutput()) clip.InsideOutOn() clip.Update() cropdata = clip.GetOutput() end = time.time() comptime = end-start print("Computation time: " + str(comptime) + "s") #return cropdata return clip else: return image
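A sketch of the appending step hinted at by the "return clip" comments above, assuming the contouring branches (which return a filter rather than a plain image) and a hypothetical handler object and request list; the output file name is a placeholder.

append = vtk.vtkAppendPolyData()
for req in cutout_requests:                      # hypothetical list of CutoutInfo requests
    clip = handler.getvtkdata(req, timestep)     # handler: instance providing getvtkdata()
    append.AddInputConnection(clip.GetOutputPort())
append.Update()
writer = vtk.vtkXMLPolyDataWriter()
writer.SetFileName("appended_contours.vtp")      # placeholder
writer.SetInputConnection(append.GetOutputPort())
writer.Write()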