def loadScene(objFilePath, mtlFilePath):
    """Load an OBJ scene with its MTL materials into ParaView.

    Meshes are written under a directory named after the OBJ file (extension
    stripped), each mesh piece is opened as its own named source, materials
    are applied to the representations, and the material parameters are
    dumped to representations.json before the final render.
    """
    mtlReader = MTLParser(mtlFilePath)
    mtlReader.reduceMaterialDefinitions()
    objReader = OBJParser(objFilePath, 'usemtl')
    # '<objdir>/<objname minus 4-char extension>' is the output directory
    meshBaseDirectory = os.path.join(os.path.dirname(objFilePath),
                                     os.path.basename(objFilePath)[:-4])

    # custom remap
    # NOTE(review): nameRemap() is applied to the lookup key while the
    # original key is stored — confirm this is not meant to be
    # mapToSha[nameRemap(key)] = mtlReader.reducedMaterialMap[key].
    mapToSha = {}
    for key in mtlReader.reducedMaterialMap:
        mapToSha[key] = mtlReader.reducedMaterialMap[nameRemap(key)]

    meshMapping = writeMeshes(meshBaseDirectory, objReader, mapToSha)

    # One pipeline source + representation per mesh piece
    for name in meshMapping:
        source = simple.OpenDataFile(meshMapping[name], guiName=name)
        rep = simple.Show(source)
        mtlReader.applyMaterialToRepresentation(name, rep)

    # Persist the representation parameters next to the mesh files
    with open('%s/representations.json' % meshBaseDirectory, "w",
              encoding="utf-8") as text_file:
        text_file.write(
            json.dumps(mtlReader.representationsParameters,
                       indent=2,
                       sort_keys=True))

    simple.Render()
def initialize(self):
    """Register the web protocols used by this application, install the
    shared authentication key, and build the default render pipeline
    (loading _FileOpener.fileToLoad when one was supplied)."""
    # Bring used components
    self.registerVtkWebProtocol(
        pv_protocols.ParaViewWebStartupRemoteConnection(
            _FileOpener.dsHost, _FileOpener.dsPort, _FileOpener.rsHost,
            _FileOpener.rsPort))
    self.registerVtkWebProtocol(
        pv_protocols.ParaViewWebFileListing(_FileOpener.pathToList, "Home"))
    self.registerVtkWebProtocol(pv_protocols.ParaViewWebMouseHandler())
    self.registerVtkWebProtocol(pv_protocols.ParaViewWebViewPort())
    self.registerVtkWebProtocol(
        pv_protocols.ParaViewWebViewPortImageDelivery())
    self.registerVtkWebProtocol(
        pv_protocols.ParaViewWebViewPortGeometryDelivery())
    self.registerVtkWebProtocol(pv_protocols.ParaViewWebTimeHandler())

    # Update authentication key to use
    self.updateSecret(_FileOpener.authKey)

    # Create default pipeline
    if _FileOpener.fileToLoad:
        _FileOpener.reader = simple.OpenDataFile(_FileOpener.fileToLoad)
        simple.Show()

        _FileOpener.view = simple.Render()
        _FileOpener.view.ViewSize = [800, 800]
        # If this is running on a Mac DO NOT use Offscreen Rendering
        #view.UseOffscreenRendering = 1
        simple.ResetCamera()
    else:
        # No file requested: fall back to an empty render view
        _FileOpener.view = simple.GetRenderView()
        simple.Render()
        _FileOpener.view.ViewSize = [800, 800]
    simple.SetActiveView(_FileOpener.view)
def processFile(self):
    """Load the downloaded mesh, show its outline, and create one threshold
    representation per model face found in the 'modelfaceids' cell array.

    Side effects: sets self.outline, self.view and _MeshViewer.faces.
    """
    self._download_mesh_file()
    reader = simple.OpenDataFile(_MeshViewer.meshFile)
    reader.UpdatePipeline()
    self.outline = simple.Show(reader)
    self.outline.Representation = 'Outline'

    # Get information about cell data arrays.
    # 'range' replaces the Python-2-only 'xrange' so this also runs on
    # Python 3; behavior is identical for these loop sizes.
    nbFaces = 0
    cdInfo = reader.GetCellDataInformation()
    numberOfCellArrays = cdInfo.GetNumberOfArrays()
    for idx in range(numberOfCellArrays):
        array = cdInfo.GetArray(idx)
        if array.GetName() != 'modelfaceids':
            continue
        # Face count comes from the maximum id in the array
        nbFaces = int(array.GetRange(-1)[1])

    # Extract each face and keep representation around
    _MeshViewer.faces = []
    for idx in range(nbFaces):
        threshold = simple.Threshold(Scalars=['CELLS', 'modelfaceids'],
                                     Input=reader,
                                     ThresholdRange=[idx, idx])
        rep = simple.Show(threshold)
        _MeshViewer.faces.append(rep)

    self.view = simple.Render()
    self.view.Background = [0, 0, 0]
def testUserInput():
    """Ask the user for a file through a dialog; when one is chosen, open it
    in the pipeline (named after its basename), show it, and render with a
    reset camera."""
    chosen = QtGui.QFileDialog.getOpenFileName(getMainWindow(), 'Open file',)
    if not chosen:
        return
    smp.OpenDataFile(chosen, guiName=os.path.basename(chosen))
    smp.Show()
    smp.ResetCamera()
    smp.Render()
def test_grid_vs_anal_L_lfm_grid(res=9):
    """Compare grid-traced field-line latitude crossings against the analytic
    dipole solution over a range of L* values and save a semilog error plot.

    :param res: unused in this body; kept for interface compatibility —
        TODO confirm whether any caller relies on it.
    """
    test_re = np.linspace(start=4.0, stop=10.0, num=20)
    med_error = col.OrderedDict()
    filename = "test_data/lfm_dipole_test.vts"
    data = pv.OpenDataFile(filename=filename)
    fl1 = fl.fieldLine(data=data, start=[2.5, 0.0, 0.0])
    # loop variable renamed from 're' to avoid shadowing the 're' module name
    for l_star in test_re:
        # print() call form so this also runs under Python 3
        print("Running on L*={}".format(l_star))
        fl1.recompute_field_line(new_start_location=[l_star, 0.0, 0.0])
        int_grid = ih.get_lat(fl1.get_location_for_RE(RE=2.5))
        int_anal = ih.analytic_dipole_line_lat_crossing(L=l_star, r=2.5)
        int_err = np.abs(int_anal - int_grid)
        med_error[l_star] = int_err

    X3, Y3 = ih.dict_to_x_y(med_error)
    fig1 = plt.figure()
    ax1 = fig1.add_subplot(111)
    ax1.semilogy(X3, Y3, 'k.-', label="Error")
    # raw string for the LaTeX part: '\l' is an invalid escape sequence in a
    # plain literal; the '\n' stays in a normal literal so it remains a newline
    ax1.set_title(
        r"$\lambda$ Intersection Error Rates for Field Line Tracing (by $L^*$)"
        "\nLFM True Double grid Dipole Field"
    )
    ax1.set_xlabel("$L^*$")
    ax1.set_ylabel("Absolute Error")
    # ax1.legend(handles=[line1, line2, line3], loc='best')
    fig1.tight_layout()
    fig1.savefig("out/error_analysis/Lambda_error_rates_by_re_lfm.pdf")
def getFields(self):
    """Return a {columnName: {'range': [min, max]}} dict for numeric columns.

    Lazily loads self.inputFile into self.dataTable on first call: CSV goes
    through vtkDelimitedTextReader, anything else through a ParaView reader
    (non-table outputs are flattened with fillTableWithDataSet).
    Side effects: populates self.fields, self.columnNames and self.numRows.
    """
    if not self.dataTable:
        # read a data file
        # NOTE(review): substring test — any path merely containing '.csv'
        # is treated as CSV; confirm an endswith() test isn't intended.
        if '.csv' in self.inputFile:
            r = vtk.vtkDelimitedTextReader()
            r.DetectNumericColumnsOn()
            r.SetFileName(self.inputFile)
            r.SetHaveHeaders(True)
            r.Update()
            self.dataTable = r.GetOutput()
        else:
            reader = simple.OpenDataFile(self.inputFile)
            reader.UpdatePipeline()
            ds = reader.GetClientSideObject().GetOutputDataObject(0)
            if ds.IsA('vtkTable'):
                self.dataTable = ds
            else:
                # Flatten a non-table dataset into a table of its arrays
                self.dataTable = vtk.vtkTable()
                fillTableWithDataSet(self.dataTable, ds)

    self.fields = {}
    self.columnNames = []
    self.numRows = self.dataTable.GetNumberOfRows()
    for i in range(self.dataTable.GetNumberOfColumns()):
        # Add a range for any numeric fields
        self.columnNames.append(self.dataTable.GetColumnName(i))
        arr = self.dataTable.GetColumn(i)
        if arr.GetDataTypeAsString() in NUMERIC_TYPES:
            self.fields[self.columnNames[i]] = {
                'range': list(arr.GetRange())
            }
    return self.fields
def create_reader(files):
    """Create a ParaView reader suitable for *files*.

    :param files: a file path or list of paths handed to OpenDataFile.
    :return: the reader proxy.
    :raises RuntimeError: when no suitable reader could be created.
    """
    from paraview import simple
    reader = simple.OpenDataFile(files)
    if not reader:
        # BUG FIX: the format string and argument were previously passed as
        # two separate RuntimeError arguments, so '%s' was never interpolated
        # into the message.
        raise RuntimeError("Failed to create a suitable reader for files: %s"
                           % str(files))
    return reader
def openRelativeFile(self, relativePath):
    """Open one or more files given as paths relative to self.baseDir,
    register the source in the pipeline browser and LUT manager, and return
    its pipeline-node description.

    :param relativePath: a single relative path or a list of them
        (a list is opened as one time-series source).
    """
    fileToLoad = []
    # isinstance is the idiomatic type test (also accepts list subclasses)
    if isinstance(relativePath, list):
        for file in relativePath:
            fileToLoad.append(os.path.join(self.baseDir, file))
    else:
        fileToLoad.append(os.path.join(self.baseDir, relativePath))

    reader = simple.OpenDataFile(fileToLoad)
    # os.path.basename is platform-portable; splitting on '/' breaks on
    # Windows-style separators
    name = os.path.basename(fileToLoad[0])
    if len(name) > 15:
        # keep the GUI name short; '*' marks the truncation
        name = name[:15] + '*'
    simple.RenameSource(name, reader)
    simple.Show()
    simple.Render()
    simple.ResetCamera()

    # Add node to pipeline
    self.pipeline.addNode('0', reader.GetGlobalIDAsString())

    # Create LUT if need be
    self.lutManager.registerFieldData(reader.GetPointDataInformation())
    self.lutManager.registerFieldData(reader.GetCellDataInformation())

    return helper.getProxyAsPipelineNode(reader.GetGlobalIDAsString(),
                                         self.lutManager)
def sample(dataDir, outputDir):
    """Generate the sample datasets used downstream.

    Converts several ParaView test datasets (including merge/extract
    variants of can.ex2 and disk_out_ref.ex2), writes a Wavelet image
    volume, and builds a vtkTable from the disk_out_ref point arrays.
    """
    convert(os.path.join(dataDir, 'Data/bot2.wrl'), outputDir, True, True)
    convert(os.path.join(dataDir, 'Data/can.ex2'), outputDir)
    # can.ex2 variants: merged+surface (MS), merged only (M), surface only (S)
    convert(os.path.join(dataDir, 'Data/can.ex2'), outputDir, True, True,
            'can_MS.ex2')
    convert(os.path.join(dataDir, 'Data/can.ex2'), outputDir, True, False,
            'can_M.ex2')
    convert(os.path.join(dataDir, 'Data/can.ex2'), outputDir, False, True,
            'can_S.ex2')
    convert(os.path.join(dataDir, 'Data/disk_out_ref.ex2'), outputDir, True,
            False, 'disk_out_ref_M.ex2')
    convert(os.path.join(dataDir, 'Data/disk_out_ref.ex2'), outputDir)
    convert(os.path.join(dataDir, 'Data/RectGrid2.vtk'), outputDir)

    # Create image data based on the Wavelet source
    wavelet = simple.Wavelet()
    wavelet.UpdatePipeline()
    imageData = wavelet.GetClientSideObject().GetOutputDataObject(0)
    writeDataSet('Wavelet.vti', imageData, outputDir)

    # Create a table based on the disk_out_ref
    diskout = simple.ExtractSurface(
        simple.MergeBlocks(
            simple.OpenDataFile(
                os.path.join(dataDir, 'Data/disk_out_ref.ex2'))))
    diskout.UpdatePipeline()
    unstructuredGrid = diskout.GetClientSideObject().GetOutputDataObject(0)
    table = vtkTable()
    # One table column per point-data array
    _nbFields = unstructuredGrid.GetPointData().GetNumberOfArrays()
    for i in range(_nbFields):
        table.AddColumn(unstructuredGrid.GetPointData().GetArray(i))
    writeDataSet('table', table, outputDir)
def loadData(self):
    """Load the 'main' and 'surfaces' datasets found under dataPath.

    Every file in <dataPath>/main is opened (hidden) and becomes the active
    source; every file in <dataPath>/surfaces is loaded together with its
    optional '.properties' sidecar.

    :raises Exception: when <dataPath>/main does not exist.
    """
    global dataPath
    mainpath = os.path.join(dataPath, "main")
    if os.path.isdir(mainpath):
        files = os.listdir(mainpath)
        for file in files:
            fullpath = os.path.join(mainpath, file)
            if os.path.isfile(fullpath):
                self.srcObj = simple.OpenDataFile(fullpath)
                simple.SetActiveSource(self.srcObj)
                self.rep = simple.GetDisplayProperties()
                simple.Hide()
                # print() call form keeps this working on Python 2 and 3
                print('Loaded %s into scene' % fullpath)
    else:
        print('Error: ' + mainpath + ' does not exist\n')
        raise Exception("The main directory does not exist")

    surfacespath = os.path.join(dataPath, "surfaces")
    files = os.listdir(surfacespath)
    for file in files:
        fullpath = os.path.join(surfacespath, file)
        if os.path.isfile(fullpath):
            self._loadSurfaceWithProperties(fullpath)

    simple.SetActiveSource(self.srcObj)
    simple.ResetCamera()
    simple.Render()
def loadData(self, datafile):
    """Load a data file.

    The argument is a path relative to the DataPath pointing to the
    dataset to load. Returns True if the dataset was loaded successfully,
    otherwise returns False.

    If the dataset is loaded, this methods setups the visualization
    pipelines for interactive probing all loaded datasets.
    """
    datafile = os.path.join(_DataProber.DataPath, datafile)
    log.msg("Loading data-file", datafile, logLevel=logging.DEBUG)
    reader = simple.OpenDataFile(datafile)
    if not reader:
        return False
    # Show the dataset as a wireframe and attach a line probe to it
    rep = simple.Show(reader, Representation="Wireframe")
    probe = simple.PlotOverLine(Source="High Resolution Line Source")
    # Keep everything needed for later probing in one record
    item = {}
    item["Reader"] = reader
    item["ReaderRepresentation"] = rep
    item["Probe"] = probe
    item["name"] = os.path.split(datafile)[1]
    _DataProber.PipelineObjects.append(item)
    # BUG FIX: the docstring promises True on success, but the function
    # previously fell off the end and returned None (falsy).
    return True
def loadFiles(directoryPath):
    """Walk directoryPath, collecting '.json' material files into the
    materialPaths list (assumed to be module-level — TODO confirm) and
    opening every other file as a named ParaView source.
    """
    for (dirpath, dirnames, filenames) in os.walk(directoryPath):
        for filename in filenames:
            # BUG FIX: the original tested filename[:-5] == '.json' (i.e.
            # everything EXCEPT the last five characters), which is never
            # true for a real file name; the intent is to match the
            # '.json' extension.
            if filename.endswith('.json'):
                materialPaths.append(os.path.join(dirpath, filename))
            else:
                simple.OpenDataFile(os.path.join(dirpath, filename),
                                    registrationName=filename)
def processFile(self):
    """Open the Exodus mesh, build an outline box around its bounds, and
    show its side sets colored categorically by ObjectId.

    Side effects: populates self.sideNames / self.sideVisibility /
    self.sideObjectValue, a categorical LUT with one gray entry per side,
    and sets self.outline and self.view.
    """
    self._download_mesh_file()
    self.sideVisibility = []
    self.sideNames = []
    self.sideObjectValue = []
    self.reader = simple.OpenDataFile(_MeshViewer.fileName)
    domain = self.reader.GetProperty('SideSetArrayStatus').GetDomain(
        'array_list')
    sides = []

    for i in range(domain.GetNumberOfStrings()):
        sideName = domain.GetString(i)
        self.sideVisibility.append(True)
        # side names look like 'Label: <id>' — keep the numeric id
        self.sideObjectValue.append(int(sideName.split(': ')[1]))
        self.sideNames.append(sideName)
        sides.append(sideName)

    # Load all side sets and no element blocks
    self.reader.SideSetArrayStatus = sides
    self.reader.ElementBlocks = []
    self.reader.UpdatePipeline()

    # Outline box matching the dataset bounds
    bounds = self.reader.GetDataInformation().GetBounds()
    box = simple.Box(XLength=(bounds[1] - bounds[0]),
                     YLength=(bounds[3] - bounds[2]),
                     ZLength=(bounds[5] - bounds[4]),
                     Center=[
                         0.5 * (bounds[0] + bounds[1]),
                         0.5 * (bounds[2] + bounds[3]),
                         0.5 * (bounds[4] + bounds[5])
                     ])
    self.outline = simple.Show(box)
    self.outline.Representation = 'Outline'

    # Color/Annotation management
    annotations = []
    self.colors = []
    for i in range(domain.GetNumberOfStrings()):
        annotations.append(str(self.sideObjectValue[i]))
        annotations.append(self.sideNames[i])
        # default every side to mid-gray (r, g, b)
        self.colors.append(0.5)
        self.colors.append(0.5)
        self.colors.append(0.5)

    # Color management
    self.lut = simple.GetColorTransferFunction('ObjectId')
    self.lut.InterpretValuesAsCategories = 1
    self.lut.Annotations = annotations
    self.lut.IndexedColors = self.colors

    mainRep = simple.Show(self.reader)
    vtkSMPVRepresentationProxy.SetScalarColoring(mainRep.SMProxy, 'ObjectId',
                                                 vtkDataObject.CELL)

    self.view = simple.Render()
    self.view.Background = [0, 0, 0]
def processFile(self):
    """Open the Exodus mesh, show its side sets by default, and set up
    visibility/color bookkeeping for both side sets and element blocks.

    Side effects: populates the side*/block* attributes, a categorical LUT
    keyed on ObjectId, and sets self.outline and self.view.
    """
    self.sideVisibility = []
    self.sideNames = []
    self.sideObjectValue = []
    self.blockVisibility = []
    self.blockNames = []
    self.blockObjectValue = []
    self.reader = simple.OpenDataFile(_MeshViewer.fileName)

    # Get information about faces and blocks
    self.sideNames, self.sideValues = self.extractSubset(
        'SideSetArrayStatus')
    self.blockNames, self.blockValues = self.extractSubset('ElementBlocks')

    # Show faces to start
    self.reader.SideSetArrayStatus = self.sideNames
    self.reader.ElementBlocks = []
    self.reader.UpdatePipeline()

    # Set up initial visibilities. List multiplication replaces the
    # Python-2-only 'xrange' comprehensions with identical results.
    self.sideVisibility = [True] * len(self.sideNames)
    self.blockVisibility = [True] * len(self.blockNames)
    self.showingFaces = True

    # Outline box matching the dataset bounds
    bounds = self.reader.GetDataInformation().GetBounds()
    box = simple.Box(XLength=(bounds[1] - bounds[0]),
                     YLength=(bounds[3] - bounds[2]),
                     ZLength=(bounds[5] - bounds[4]),
                     Center=[
                         0.5 * (bounds[0] + bounds[1]),
                         0.5 * (bounds[2] + bounds[3]),
                         0.5 * (bounds[4] + bounds[5])
                     ])
    self.outline = simple.Show(box)
    self.outline.Representation = 'Outline'

    # Color/Annotation management
    self.faceAnnotations, self.faceColors = self.setupInitialColors(
        self.sideNames, self.sideValues)
    self.blockAnnotations, self.blockColors = self.setupInitialColors(
        self.blockNames, self.blockValues)

    # Color management, start with faces
    self.lut = simple.GetColorTransferFunction('ObjectId')
    self.lut.InterpretValuesAsCategories = 1
    self.lut.Annotations = self.faceAnnotations
    self.lut.IndexedColors = self.faceColors

    mainRep = simple.Show(self.reader)
    vtkSMPVRepresentationProxy.SetScalarColoring(mainRep.SMProxy, 'ObjectId',
                                                 vtkDataObject.CELL)

    self.view = simple.Render()
    self.view.Background = [0, 0, 0]
def loadScene(objFilePath, mtlFilePath):
    """Parse an OBJ/MTL pair, write the meshes into a 'pv' directory next to
    the OBJ file, open each piece as a named source, and apply its material
    before rendering."""
    geometry = OBJParser(objFilePath, 'usemtl')
    materials = MTLParser(mtlFilePath)
    outputDirectory = os.path.join(os.path.dirname(objFilePath), 'pv')

    # One source + representation per written mesh piece
    pieces = writeMeshes(outputDirectory, geometry)
    for pieceName in pieces:
        src = simple.OpenDataFile(pieces[pieceName], guiName=pieceName)
        materials.applyMaterialToRepresentation(pieceName, simple.Show(src))

    simple.Render()
def importDataset(dataDir, datafile, description, autoApply=True):
    """Copy *datafile* into dataDir/<basename>/, probe its arrays, bounds
    and time steps with ParaView, save a thumbnail, and write an index.json
    manifest describing the dataset.
    """
    if not os.path.exists(datafile):
        # print() call form works under Python 2 and 3
        print("Data file \"%s\" does not exist" % datafile)
        return

    basename = os.path.basename(datafile)
    filedir = os.path.join(dataDir, basename)
    os.mkdir(filedir)
    shutil.copyfile(datafile, os.path.join(filedir, basename))

    # Manifest skeleton; bounds/arrays/time are filled in below
    result = {
        'name': basename,
        'size': humanReadableSize(os.path.getsize(datafile)),
        'description': description,
        'thumbnails': [],
        'autoApply': autoApply,
        'data': {
            'file': basename,
            'bounds': None,
            'arrays': [],
            'time': [],
        },
    }

    reader = simple.OpenDataFile(datafile)
    rep = simple.Show(reader)
    rep.Visibility = 1
    view = simple.Render()
    view.ViewSize = [400, 400]

    ds = reader.GetClientSideObject().GetOutputDataObject(0)
    pointArrayMap = {}
    cellArrayMap = {}
    loadArrayDataMultiBlock(ds, pointArrayMap, cellArrayMap)
    bounds = getBounds(ds)

    # For time-dependent sources, accumulate arrays/bounds over every step
    if 'TimestepValues' in reader.ListProperties() and len(reader.TimestepValues) > 0:
        for idx in range(len(reader.TimestepValues)):
            t = reader.TimestepValues[idx]
            result['data']['time'].append({'idx': idx, 'value': t})
            reader.UpdatePipeline(t)
            ds = reader.GetClientSideObject().GetOutputDataObject(0)
            loadArrayDataMultiBlock(ds, pointArrayMap, cellArrayMap)
            newBounds = getBounds(ds)
            bounds = unionBounds(bounds, newBounds)

    # list(...) so this also works on Python 3, where dict.values() returns
    # a view that cannot be concatenated with '+'
    result['data']['arrays'] = (list(pointArrayMap.values()) +
                                list(cellArrayMap.values()))
    result['data']['bounds'] = bounds

    tnpath = os.path.join(filedir, 'thumbnail0.png')
    simple.SaveScreenshot(tnpath, view)
    result['thumbnails'].append('thumbnail0.png')

    with open(os.path.join(filedir, 'index.json'), 'w') as fp:
        json.dump(result, fp)
def loadFiles(directoryPath):
    """Walk directoryPath, opening every data file as a named ParaView
    source; 'representations.json' files are collected and applied as
    materials once all sources exist."""
    materialPaths = []
    for dirpath, dirnames, filenames in os.walk(directoryPath):
        for name in filenames:
            fullPath = os.path.join(dirpath, name)
            if name == 'representations.json':
                materialPaths.append(fullPath)
            else:
                simple.OpenDataFile(fullPath, registrationName=name)

    # Sources must all be registered before materials are resolved
    updateSourceMapping()
    for matFile in materialPaths:
        applyMaterialToRepresentation(matFile)
def calc_surf_to_vol(filename, arr_name, sample_rate):
    """Compute the surface-to-volume ratio of an isosurface in a VTK volume.

    Opens *filename*, downsamples it by *sample_rate*, contours the point
    array *arr_name* at 0.5 and integrates the resulting surface area.

    :return: tuple (surf_to_vol, surf_area, volume).
    """
    import paraview.simple as ps
    import numpy as np
    import paraview as pv
    from vtk.util.numpy_support import vtk_to_numpy

    # have paraview open the vtk data file
    reader = ps.OpenDataFile(filename)
    sys_data = pv.servermanager.Fetch(reader)
    nx, ny, nz = sys_data.GetDimensions()
    dx, dy, dz = sys_data.GetSpacing()

    # downsample the data (makes for a smoother contour surface)
    ds = ps.ExtractSubset()
    ds.SampleRateI = sample_rate
    ds.SampleRateJ = sample_rate
    ds.SampleRateK = sample_rate
    ds.VOI[1] = nx - 1
    ds.VOI[3] = ny - 1
    ds.VOI[4] = 1  # leave off bottom layer for CHBDThinFilm
    ds.VOI[5] = nz - 2  # leave off top layer for CHBDThinFilm
    ds.IncludeBoundary = 1
    ds.UpdatePipeline()

    # have paraview apply a contour surface at a concentration value of cont_val
    contour = ps.Contour()
    cont_val = 0.5  # this might change depending on order parameter
    contour.ContourBy = ['POINTS', arr_name]  # Viz is the name of the vtk array
    contour.Isosurfaces = [cont_val]
    contour.SetPropertyWithName('ComputeNormals', 0)
    contour.UpdatePipeline()

    # integrate the surface area and curvature
    summed_curv = ps.IntegrateVariables()
    summed_curv_data = pv.servermanager.Fetch(summed_curv)
    surf_area = summed_curv_data.GetCellData().GetArray(0).GetValue(0)

    # calculate the surface area to volume ratio
    # NOTE(review): volume is the FULL grid volume (nx*dx * ny*dy * nz*dz),
    # not the downsampled/trimmed region used for the contour — confirm.
    volume = nx * dx * ny * dy * nz * dz
    surf_to_vol = surf_area / volume

    # delete paraview sources and filters
    ps.Delete(reader)
    ps.Delete(ds)
    ps.Delete(contour)
    ps.Delete(summed_curv)
    del (sys_data)
    del (summed_curv_data)
    return surf_to_vol, surf_area, volume
def initialize(self):
    """Register the web protocols this application uses, install the shared
    authentication key, and build the default render pipeline (loading
    _FileOpener.fileToLoad when one was supplied)."""
    # Bring used components
    self.registerVtkWebProtocol(
        pv_protocols.ParaViewWebStartupRemoteConnection(
            _FileOpener.dsHost, _FileOpener.dsPort, _FileOpener.rsHost,
            _FileOpener.rsPort))
    self.registerVtkWebProtocol(
        pv_protocols.ParaViewWebFileListing(_FileOpener.pathToList, "Home"))
    self.registerVtkWebProtocol(pv_protocols.ParaViewWebMouseHandler())
    self.registerVtkWebProtocol(pv_protocols.ParaViewWebViewPort())
    self.registerVtkWebProtocol(
        pv_protocols.ParaViewWebViewPortImageDelivery())
    self.registerVtkWebProtocol(
        pv_protocols.ParaViewWebViewPortGeometryDelivery())
    self.registerVtkWebProtocol(pv_protocols.ParaViewWebTimeHandler())

    # Update authentication key to use
    self.updateSecret(_FileOpener.authKey)

    # Create default pipeline
    if _FileOpener.fileToLoad:
        _FileOpener.reader = simple.OpenDataFile(_FileOpener.fileToLoad)
        simple.Show()

        _FileOpener.view = simple.Render()
        _FileOpener.view.ViewSize = [800, 800]
        # If this is running on a Mac DO NOT use Offscreen Rendering
        #view.UseOffscreenRendering = 1
        simple.ResetCamera()
    else:
        # No file requested: fall back to an empty render view
        _FileOpener.view = simple.GetRenderView()
        simple.Render()
        _FileOpener.view.ViewSize = [800, 800]

    # test to prove server's working
    # independently from the web components
    #c = simple.Cone()
    #c.Resolution = 128
    #simple.Hide(c)
    #p = simple.ProcessIdScalars()
    #r = simple.Show(p)
    #a = p.PointData.GetArray('ProcessId')
    #r.ColorArrayName = 'ProcessId'
    #simple.AssignLookupTable(a,'Cool to Warm')
    #simple.Render()
    #simple.SaveScreenshot('/usr/common/graphics/ParaView/4.2.0-PDACS/data/test.png')
    simple.SetActiveView(_FileOpener.view)
def openFileFromPath(self, files):
    """Open one or more files (paths relative to the data directory) and,
    when a pipeline handler is configured, apply the pipeline over the
    resulting time steps.

    :param files: a single relative path or a list of them; a list is
        treated as one time series with one step per file.
    """
    fileToLoad = []
    number_of_time_steps = 1
    # isinstance is the idiomatic type test (also accepts list subclasses)
    if isinstance(files, list):
        number_of_time_steps = len(files)
        for file in files:
            fileToLoad.append(
                os.path.join(_PVCatalystManager.dataDir, file))
    else:
        fileToLoad.append(os.path.join(_PVCatalystManager.dataDir, files))

    # list(range(n)) replaces the redundant identity comprehension
    self.time_steps = list(range(number_of_time_steps))
    reader = simple.OpenDataFile(fileToLoad)
    if _PVCatalystManager.pipeline_handler:
        _PVCatalystManager.pipeline_handler.apply_pipeline(
            reader, self.time_steps)
def openFile(self, path):
    """Open a data file, show and render it, register it with the pipeline
    browser and LUT manager, and return its pipeline-node description."""
    reader = simple.OpenDataFile(path)
    simple.RenameSource(path.split("/")[-1], reader)
    simple.Show()
    simple.Render()
    simple.ResetCamera()

    # Register the new source with the pipeline browser model
    proxyId = reader.GetGlobalIDAsString()
    self.pipeline.addNode('0', proxyId)

    # Make sure lookup tables exist for all point/cell arrays
    for fieldInfo in (reader.GetPointDataInformation(),
                      reader.GetCellDataInformation()):
        self.lutManager.registerFieldData(fieldInfo)

    return helper.getProxyAsPipelineNode(proxyId, self.lutManager)
def render_vtk(vtkfilename, outputfilename, processing=None):
    """
    It receives the name of a vtk file, it renders it and saves a png image
    of it. Give it a function `processing` to apply any processing to the
    paraview view.
    """
    ext = ".png"
    reader = prvs.OpenDataFile(vtkfilename)
    renderView1 = prvs.GetActiveViewOrCreate('RenderView')
    testvtkDisplay = prvs.Show(reader, renderView1)
    # identity comparison is the idiomatic None test
    if processing is not None:
        processing(renderView1)
    # Append '.png' only when the name does not already END with it; the
    # previous "ext not in outputfilename" test was fooled by names merely
    # containing '.png' in the middle.
    if not outputfilename.endswith(ext):
        outputfilename += ext
    prvs.WriteImage(outputfilename)
def openFile(self, files):
    """Replace the current reader with one for *files*.

    Any previous reader is deleted first. Returns the new reader's global
    proxy id as a string, or '' when opening failed.
    """
    # local renamed from 'id' to avoid shadowing the builtin
    proxyId = ""
    if _FileOpener.reader:
        try:
            simple.Delete(_FileOpener.reader)
        # 'except Exception' (not a bare except) lets SystemExit and
        # KeyboardInterrupt propagate instead of being swallowed
        except Exception:
            _FileOpener.reader = None
    try:
        _FileOpener.reader = simple.OpenDataFile(files)
        simple.Show()
        simple.Render()
        simple.ResetCamera()
        proxyId = _FileOpener.reader.GetGlobalIDAsString()
    except Exception:
        # best-effort open: failure leaves no active reader and returns ''
        _FileOpener.reader = None
    return proxyId
def _loadSurfaceWithProperties(self, fullpath):
    """Load a surface file (skipping '.properties' sidecars themselves) and
    apply any representation attributes found in its '<file>.properties'
    sidecar, one 'name value' pair per line with JSON-encoded values.
    """
    if not fullpath.endswith('.properties'):
        surfaceObj = simple.OpenDataFile(fullpath)
        self.surfaces.append(surfaceObj)
        rep = simple.Show()
        rep.Representation = 'Surface'

        # If a corresponding properties file exists, load it in
        # and apply the properties to the surface
        if os.path.isfile(fullpath + '.properties'):
            with open(fullpath + '.properties') as f:
                lines = f.readlines()
                for line in lines:
                    # local renamed from 'property' to avoid shadowing
                    # the builtin
                    (propName, value) = line.split(' ', 1)
                    if hasattr(rep, propName):
                        value = json.loads(value.strip())
                        setattr(rep, propName, value)
                    else:
                        # print() call form runs on Python 2 and 3
                        print('Skipping invalid property %s' % propName)

        print('Loaded surface %s into scene' % fullpath)
def convert(inputFile, outputDir, merge=False, extract=False, newName=None):
    """Convert *inputFile* into the output dataset format under *outputDir*.

    :param merge: run MergeBlocks on the reader output first.
    :param extract: run ExtractSurface on the (possibly merged) output.
    :param newName: optional output name overriding the input file name.

    Time-series sources are written with writeTimeDataSource, static ones
    with writeDataSet.
    """
    print(inputFile, outputDir)
    reader = simple.OpenDataFile(inputFile)
    activeSource = reader
    if merge:
        activeSource = simple.MergeBlocks(activeSource)
    if extract:
        activeSource = simple.ExtractSurface(activeSource)
    activeSource.UpdatePipeline()
    dataObject = activeSource.GetClientSideObject().GetOutputDataObject(0)
    # A 'TimestepValues' property with actual steps marks a time series
    if 'TimestepValues' in reader.ListProperties():
        if len(reader.TimestepValues) == 0:
            writeDataSet(inputFile, dataObject, outputDir, newName)
        else:
            writeTimeDataSource(inputFile, reader, activeSource, outputDir,
                                newName)
    else:
        writeDataSet(inputFile, dataObject, outputDir, newName)
def __init__(self, filename): """ Load the file and display it with default properties. """ # Load the file self.vtk_data = pv.OpenDataFile(filename) # Save applied filters to remove them when not needed anymore to release memory self._filters = [self.vtk_data] # Save a unaltered version of the data self.vtk_data_local = pv.servermanager.Fetch(self.vtk_data) # Init variables # ... for clipping function self.clip_origin = None self.clip_normal = None # ... for point cloud model self.cloud_point_size = 2 # ... for switching between point cloud and surface model self._cycle_view_mode_buffer = False # ... for positioning the models self.origin_node = viz.addGroup() self.surface_node = viz.addGroup(parent=self.origin_node) self.cloud_node = viz.addGroup(parent=self.origin_node) # ... for registering the model parts self.surface_materials = {} self.cloud_materials = {} # Calculate the center of the data to move the model later to origin # bounding_box indices are found out by reading test data and compare it to ParaView standalone bounding_box = self.vtk_data_local.GetBounds() self._original_center = ((bounding_box[0] + bounding_box[1]) / 2, (bounding_box[2] + bounding_box[3]) / 2, (bounding_box[4] + bounding_box[5]) / 2) # Generate the models form the vtk data self.reload_models()
# Validate the command line: we need at least one glob pattern of files.
if len(sys.argv) < 2:
    if rank == 0:
        print("ERROR: must pass in a set of files to read in")
    sys.exit(1)

files = glob.glob(sys.argv[1])
# In case the filenames aren't padded we sort first by shorter length and then
# alphabetically. This is a slight modification based on the question by Adrian and answer by
# Jochen Ritzel at:
# https://stackoverflow.com/questions/4659524/how-to-sort-by-length-of-string-followed-by-alphabetical-order
files.sort(key=lambda item: (len(item), item))
if rank == 0:
    print("Reading in ", files)
reader = pvsimple.OpenDataFile(files)

# Symmetric MPI mode is required so this pvbatch script behaves like
# Catalyst embedded in a simulation code (see the comment below).
if pm.GetSymmetricMPIMode() == False and nranks > 1:
    if rank == 0:
        print(
            "ERROR: must run pvbatch with -sym when running with more than a single MPI process"
        )
    sys.exit(1)

catalyst = vtkPVCatalyst.vtkCPProcessor()
# We don't need to initialize Catalyst since we run from pvbatch
# with the -sym argument which acts exactly like we're running
# Catalyst from a simulation code.
#catalyst.Initialize()
# Remaining CLI arguments are Catalyst pipeline scripts; the loop body
# continues past this excerpt.
for script in sys.argv[2:]:
# Purpose: # Extracts test information from a noon and midnight field line, plots the data, and exports values # as ASCII import matplotlib.pylab as pl import numpy as np import paraview.simple as pv from astropy.io import ascii from prototype import fieldLine as fl from ghostpy.prototype import inv_common as ih pv._DisableFirstRenderCameraReset() # Load Test Data t96_128 = pv.OpenDataFile('test_data/lfm_dipole_test.vts') start_positions = [(-5.5, 0, 0), (-5.25, 0, 0), (-5.00, 0.0, 0.0), (-4.50, 0.0, 0.0), (-4.0, 0.0, 0.0), (-3.5, 0.0, 0.0), (-3.0, 0.0, 0.0), (3.0, 0.0, 0.0), (3.5, 0.0, 0.0), (4.0, 0.0, 0.0), (4.5, 0.0, 0.0), (5.0, 0.0, 0.0), (5.5, 0.0, 0.0), (6.0, 0.0, 0.0), (6.5, 0.0, 0.0), (6.7, 0.0, 0.0), (6.9, 0.0, 0.0), (7.0, 0.0, 0.0)] flines = {} i_integral = {} dsp = {} fwd = {} bkwd = {} min_sphr = {} min_sphr_disp = {} contours = {}
        # tail of a color-configuration mapping started earlier in the file
        # (preset name + scalar range — presumably for salinity; confirm)
        "preset": "yellow2brown",
        "range": [34, 38]
    }
}
}

# -----------------------------------------------------------------------------

from paraview import simple
from paraview.web.dataset_builder import *

# -----------------------------------------------------------------------------
# Pipeline creation
# -----------------------------------------------------------------------------

# Earth core geometry, converted to a surface with normals for shading
core = simple.OpenDataFile(earthCore)
coreSurface = simple.ExtractSurface(Input=core)
coreWithNormals = simple.GenerateSurfaceNormals(Input=coreSurface)

# Ocean data for the first time step; load only the arrays we color by
reader = simple.OpenDataFile(inputFile % time[0])
reader.CellArrayStatus = ['temperature', 'salinity']
# Threshold keeps cells with temperature in [-1000, 50] — presumably to
# drop no-data/padding cells; confirm against the data format.
dataCleanUp = simple.Threshold(Input=reader,
                               Scalars=['CELLS', 'temperature'],
                               ThresholdRange=[-1000.0, 50.0])
dataToPoints = simple.CellDatatoPointData(Input=dataCleanUp)

sceneDescription = {
    'size': [500, 500],
    'light': ['intensity', 'normal'],
    'camera': {
from paraview import simple
from numpy import genfromtxt
import numpy as np
import os
import datetime

# Debug helper: dump two solutions to CSV (cell data, 16-digit precision,
# single time step) so they can be diffed numerically.
path_debug = os.path.dirname(
    os.path.abspath(__file__))  # the file should be in /debug/!

# NOTE(review): 'reader' opens REFERENCE_SOLUTION.50.vtk but is written to
# CURRENT_SOLUTION.csv, while 'readerRef' opens project_combined.50.pvts and
# is written to REF_SOLUTION.csv — confirm the naming isn't swapped.
reader = simple.OpenDataFile(
    os.path.join(path_debug, "../../srcLB/debug/REFERENCE_SOLUTION.50.vtk"))
readerRef = simple.OpenDataFile(
    os.path.join(path_debug, "../pv_files/project_combined.50.pvts"))

fileREF = "REF_SOLUTION.csv"
fileCUR = "CURRENT_SOLUTION.csv"

# Write readerRef's cell data to REF_SOLUTION.csv
writer = simple.CreateWriter(os.path.join(path_debug, fileREF),
                             readerRef,
                             Precision=16)
writer.WriteAllTimeSteps = 0
writer.FieldAssociation = "Cells"
writer.UpdatePipeline()

# Write reader's cell data to CURRENT_SOLUTION.csv
writer = simple.CreateWriter(os.path.join(path_debug, fileCUR),
                             reader,
                             Precision=16)
writer.WriteAllTimeSteps = 0
writer.FieldAssociation = "Cells"
writer.UpdatePipeline()