def deleteDownstream(input=None):
    """Delete downstream filters for a given input. If no input is provided,
    all filters on the pipeline will be deleted.

    Args:
        input (str): The name of the object on the pipeline to preserve.
    """
    import paraview.simple as pvs
    if input is None:
        # The snippet below deletes all filters on the pipeline,
        # i.e. anything that has an input,
        # preserving readers and sources.
        for f in pvs.GetSources().values():
            if f.GetProperty("Input") is not None:
                pvs.Delete(f)
    else:
        # Be able to specify an upstream source
        src = pvs.FindSource(input)
        #print('src: ', src)
        # Delete ALL things downstream of input
        for f in pvs.GetSources().values():
            #print('f: ', f)
            #print('f.Input: ', f.GetProperty("Input"))
            if f.GetPropertyValue("Input") is src:
                #print('Deleting: ', f)
                pvs.Delete(f)
    # Done
    return None
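# A minimal usage sketch for deleteDownstream() above, assuming an interactive
# ParaView Python session. The registration name 'Sphere1' is an assumption
# (ParaView's usual auto-naming), not part of the original snippet.
import paraview.simple as pvs

source = pvs.Sphere()              # any reader or source works here
shrunk = pvs.Shrink(Input=source)  # a downstream filter to be removed
deleteDownstream('Sphere1')        # intended to delete filters fed by 'Sphere1', keeping the source
deleteDownstream()                 # deletes every filter on the pipeline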
def deleteAllTemporalTransformsAxes():
    namesToDelete = [
        getCalculatorName("RX"),
        getCalculatorName("RY"),
        getCalculatorName("RZ")
    ]
    sToDelete = []
    for s in smp.GetSources():
        if s[0] in namesToDelete:
            smp.Delete(smp.GetSources()[s])
    smp.Render()
def export_contours_geometry(destinationPath, **kwargs):
    view = simple.GetRenderView()
    sceneDescription = {'scene': []}
    for key, value in py2to3.iteritems(simple.GetSources()):
        if key[0] == 'Contour':
            add_scene_item(sceneDescription, key[0], value, view)

    count = 1
    for item in sceneDescription['scene']:
        item['name'] += ' (%d)' % count
        count += 1

    if count > 1:
        contour = sceneDescription['scene'][0]['source']
        sections = {'LookupTables': get_source_lookuptable_section(contour)}

        # Create geometry builder
        dsb = VTKGeometryDataSetBuilder(destinationPath, sceneDescription, {}, sections)
        dsb.start()
        dsb.writeData(0)
        dsb.stop()

        # Patch data range
        patch_data_range(destinationPath)
    else:
        print('Can not export Contour(s) geometry without at least a Contour.')
def hideAll():
    """This hides all sources/filters on the pipeline from the current view."""
    import paraview.simple as pvs
    for f in pvs.GetSources().values():
        pvs.Hide(f)
    return None
def export_layers(destinationPath, camera):
    view = simple.GetRenderView()
    fp = tuple(view.CameraFocalPoint)
    cp = tuple(view.CameraPosition)
    vu = tuple(view.CameraViewUp)
    sceneDescription = {
        'size': tuple(view.ViewSize),
        'light': ['intensity'],  # 'normal', intensity
        'camera': {
            'CameraViewUp': vu,
            'CameraPosition': cp,
            'CameraFocalPoint': fp
        },
        'scene': []
    }

    for key, value in py2to3.iteritems(simple.GetSources()):
        add_scene_item(sceneDescription, key[0], value, view)

    # Generate export
    dsb = CompositeDataSetBuilder(destinationPath, sceneDescription, camera, {}, {}, view)
    dsb.start()
    dsb.writeData()
    dsb.stop(compress=False)
def getComponentName(actor):
    global componentIndex
    srcs = simple.GetSources()
    duplicates = {}
    for key, val in srcs.items():
        # Prevent name duplication
        nameToUse = key[0]
        if nameToUse in duplicates:
            count = 1
            newName = '%s (%d)' % (nameToUse, count)
            while newName in duplicates:
                count += 1
                newName = '%s (%d)' % (nameToUse, count)
            nameToUse = newName
        duplicates[nameToUse] = True

        actorRep = simple.GetRepresentation(val).GetClientSideObject().GetActiveRepresentation().GetActor()
        if actor == actorRep:
            return nameToUse

    nameToUse = '%d' % componentIndex
    componentIndex += 1
    return nameToUse
def getComponentName(actor):
    global componentIndex
    srcs = simple.GetSources()
    duplicates = {}
    errs = []
    for key, val in srcs.items():
        # Prevent name duplication
        nameToUse = key[0]
        if nameToUse in duplicates:
            count = 1
            newName = '%s (%d)' % (nameToUse, count)
            while newName in duplicates:
                count += 1
                newName = '%s (%d)' % (nameToUse, count)
            nameToUse = newName
        duplicates[nameToUse] = True

        try:
            actorRep = simple.GetRepresentation(val).GetClientSideObject().GetActiveRepresentation().GetActor()
            if actor == actorRep:
                return nameToUse
        except AttributeError as err:
            errs.append(err)
            #print("Handling error: ", err)

    nameToUse = '%d' % componentIndex
    componentIndex += 1
    return nameToUse
def get_contour():
    for key, value in py2to3.iteritems(simple.GetSources()):
        if 'FlyingEdges' in key[0]:
            return value
        if 'Contour' in key[0]:
            return value
    return None
def deleteFilters(input=None):
    import paraview.simple as pvs
    """if input is not None: src = pvs.FindSource(dataNm)"""
    #TODO: be able to specify upstream source
    for f in pvs.GetSources().values():
        if f.GetProperty("Input") is not None:
            pvs.Delete(f)
    return None
def DoLiveVisualization(self, datadescription, hostname, port):
    """This method executes the code-stub needed to communicate with ParaView
    for live-visualization. Call this method only if you want to support
    live-visualization with your co-processing module."""
    if not self.__EnableLiveVisualization:
        return

    if not self.__LiveVisualizationLink and self.__EnableLiveVisualization:
        # Create the vtkLiveInsituLink i.e. the "link" to the visualization processes.
        self.__LiveVisualizationLink = servermanager.vtkLiveInsituLink()

        # Tell vtkLiveInsituLink what host/port it must connect to
        # for the visualization process.
        self.__LiveVisualizationLink.SetHostname(hostname)
        self.__LiveVisualizationLink.SetInsituPort(int(port))

        # Initialize the "link"
        self.__LiveVisualizationLink.Initialize(
            servermanager.ActiveConnection.Session.GetSessionProxyManager())

    if self.__EnableLiveVisualization and self.NeedToOutput(
            datadescription, self.__LiveVisualizationFrequency):
        if not self.__LiveVisualizationLink.Initialize(
                servermanager.ActiveConnection.Session.GetSessionProxyManager()):
            return

        time = datadescription.GetTime()
        timeStep = datadescription.GetTimeStep()

        # stay in the loop while the simulation is paused
        while True:
            # Update the simulation state, extracts and simulationPaused
            # from ParaView Live
            self.__LiveVisualizationLink.InsituUpdate(time, timeStep)

            # Sources need to be updated by insitu code.
            # vtkLiveInsituLink never updates the pipeline, it simply uses
            # the data available at the end of the pipeline, if any.
            for source in simple.GetSources().values():
                source.UpdatePipeline(time)

            # Push extracts to the visualization process.
            self.__LiveVisualizationLink.InsituPostProcess(time, timeStep)

            if (self.__LiveVisualizationLink.GetSimulationPaused()):
                # This blocks until something changes on ParaView Live and
                # then it continues the loop. Returns != 0 if the LIVE side
                # disconnects.
                if (self.__LiveVisualizationLink.WaitForLiveChange()):
                    break
            else:
                break
def runCode(self, text):
    log.warn(text)
    for source in simple.GetSources().values():
        simple.Hide(source)
    simple.ResetSession()
    # temporary, highly insecure
    krak.object_registry = {}
    exec(text)
def updateSourceMapping():
    for source in simple.GetSources():
        name = source[0]
        if '.vtp' in name:
            name = name[:-4]
        name = str(name)
        if name not in proxyMapping:
            proxyMapping[name] = []

        proxy = simple.servermanager._getPyProxy(
            simple.servermanager.ActiveConnection.Session.GetRemoteObject(int(source[1])))
        proxyMapping[name].append(proxy)
def export_contour_exploration_geometry(destinationPath, **kwargs):
    values = [int(v) for v in kwargs['multiValue'].split(',')]

    contour = None
    for key, value in py2to3.iteritems(simple.GetSources()):
        if key[0] == 'Contour':
            contour = value

    if contour:
        sections = {'LookupTables': get_source_lookuptable_section(contour)}
        scalarName = simple.GetRepresentation(contour).ColorArrayName[1]
        originalValue = [v for v in contour.Value]
        sceneDescription = {
            'scene': [{
                'name': 'Contour',
                'source': contour,
                'colors': {
                    scalarName: {
                        'constant': 0,
                        'location': 'POINT_DATA'
                    }
                }
            }]
        }

        dsb = VTKGeometryDataSetBuilder(destinationPath, sceneDescription, {}, sections)
        dsb.getDataHandler().registerArgument(priority=1, name='contour', values=values,
                                              ui='slider', loop='modulo')
        dsb.start()
        scalarContainer = sceneDescription['scene'][0]['colors'][scalarName]
        for contourValue in dsb.getDataHandler().contour:
            contour.Value = [contourValue]
            scalarContainer['constant'] = contourValue
            dsb.writeData()
        dsb.stop()

        # Patch data range
        patch_data_range(destinationPath)

        # Reset to original state
        contour.Value = originalValue
    else:
        print('Can not export Contour geometry without Contour(s)')
def getAllNames():
    actorNameMapping = {}
    srcs = simple.GetSources()
    duplicates = {}
    for key, val in srcs.items():
        # Prevent name duplication
        nameToUse = key[0]
        if nameToUse in duplicates:
            count = 1
            newName = '%s (%d)' % (nameToUse, count)
            while newName in duplicates:
                count += 1
                newName = '%s (%d)' % (nameToUse, count)
            nameToUse = newName
        duplicates[nameToUse] = True

        actorRep = simple.GetRepresentation(val).GetClientSideObject().GetActiveRepresentation().GetActor()
        actorNameMapping[nameToUse] = actorRep
    return actorNameMapping
def scaleAxis(axis, scale):
    """Visually scale one axis of every source's display."""
    import paraview.simple as pvs
    sc = [1, 1, 1]  # Default scale
    sc[axis] = scale
    for f in pvs.GetSources().values():
        # Get the active view
        rv = pvs.GetActiveViewOrCreate('RenderView')
        # Get the display properties
        disp = pvs.GetDisplayProperties(f, view=rv)
        # Set the scale for the data axis
        disp.Scale = sc
        disp.DataAxesGrid.Scale = sc
        disp.PolarAxes.Scale = sc
    # Update the view
    pvs.RenderAllViews()
    pvs.ResetCamera()
    return None
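# A minimal usage sketch for scaleAxis() above: exaggerate the Z axis of every
# displayed source. The axis index (0=X, 1=Y, 2=Z) and the factor of 10 are
# illustrative values only.
import paraview.simple as pvs

pvs.Wavelet()       # any source will do
pvs.Show()
scaleAxis(2, 10.0)  # stretch the Z axis of each source's display tenfold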
def generateSceneName():
    srcs = simple.GetSources()
    nameParts = []
    for key, val in srcs.items():
        proxyGroup = val.SMProxy.GetXMLGroup()
        if 'sources' in proxyGroup:
            nameParts.append(key[0])
    fileName = '-'.join(nameParts)

    sceneName = '%s' % fileName
    counter = 0
    while os.path.isfile(os.path.join(ROOT_OUTPUT_DIRECTORY,
                                      '%s%s' % (sceneName, FILENAME_EXTENSION))):
        counter += 1
        sceneName = '%s (%d)' % (fileName, counter)
    return sceneName
def generateSceneName():
    srcs = simple.GetSources()
    nameParts = []
    for key, val in srcs.items():
        proxyGroup = val.SMProxy.GetXMLGroup()
        if 'sources' in proxyGroup:
            nameParts.append(key[0])
    fileName = '-'.join(nameParts)

    # Limit the file name to a reasonable number of characters
    fileName = fileName[:12] if len(fileName) > 15 else fileName
    if len(fileName) == 0:
        fileName = 'SceneExport'

    sceneName = '%s' % fileName
    counter = 0
    while os.path.isfile(os.path.join(ROOT_OUTPUT_DIRECTORY,
                                      '%s%s' % (sceneName, FILENAME_EXTENSION))):
        counter += 1
        sceneName = '%s (%d)' % (fileName, counter)
    return sceneName
def getAllNames():
    actorNameMapping = {}
    srcs = simple.GetSources()
    duplicates = {}
    for key, val in srcs.items():
        # Prevent name duplication
        nameToUse = key[0]
        if nameToUse in duplicates:
            count = 1
            newName = "%s (%d)" % (nameToUse, count)
            while newName in duplicates:
                count += 1
                newName = "%s (%d)" % (nameToUse, count)
            nameToUse = newName
        duplicates[nameToUse] = True

        representation = simple.GetRepresentation(val)
        if representation:
            vtkRepInstance = representation.GetClientSideObject()
            if "GetActiveRepresentation" in dir(vtkRepInstance):
                actorRep = vtkRepInstance.GetActiveRepresentation().GetActor()
                actorNameMapping[nameToUse] = actorRep
    return actorNameMapping
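# A minimal usage sketch for getAllNames() above, assuming the module-level
# `simple` import the function relies on. It prints each pipeline name with
# the class of its client-side actor.
from paraview import simple

simple.Sphere()
simple.Show()
simple.Render()
for name, actor in getAllNames().items():
    print(name, actor.GetClassName() if actor else None)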
def DoLiveVisualization(self, datadescription, hostname, port):
    """This method executes the code-stub needed to communicate with ParaView
    for live-visualization. Call this method only if you want to support
    live-visualization with your co-processing module."""
    if not self.__EnableLiveVisualization:
        return

    # make sure the live insitu link is initialized
    if not self.__LiveVisualizationLink:
        # Create the vtkLiveInsituLink i.e. the "link" to the visualization processes.
        from paraview import servermanager
        self.__LiveVisualizationLink = servermanager.vtkLiveInsituLink()

        # Tell vtkLiveInsituLink what host/port it must connect to for the
        # visualization process.
        self.__LiveVisualizationLink.SetHostname(hostname)
        self.__LiveVisualizationLink.SetInsituPort(int(port))

        # Initialize the "link"
        self.__LiveVisualizationLink.SimulationInitialize(
            servermanager.ActiveConnection.Session.GetSessionProxyManager())

    time = datadescription.GetTime()

    # For every new timestep, update the simulation state before proceeding.
    self.__LiveVisualizationLink.SimulationUpdate(time)

    # Sources need to be updated by insitu code. vtkLiveInsituLink never updates
    # the pipeline, it simply uses the data available at the end of the pipeline,
    # if any.
    from paraview import simple
    for source in simple.GetSources().values():
        source.UpdatePipeline(time)

    # Push extracts to the visualization process.
    self.__LiveVisualizationLink.SimulationPostProcess(time)
def get_state(propertiesToTraceOnCreate=1,  # sm.vtkSMTrace.RECORD_MODIFIED_PROPERTIES,
              skipHiddenRepresentations=True, source_set=[], filter=None, raw=False):
    """Returns the state string"""
    if sm.vtkSMTrace.GetActiveTracer():
        raise RuntimeError("Cannot generate Python state when tracing is active.")

    if filter is None:
        filter = visible_representations() if skipHiddenRepresentations else supported_proxies()

    # build a set of proxies of interest
    if source_set:
        start_set = source_set
    else:
        # if nothing is specified, we save all views and sources.
        start_set = [x for x in simple.GetSources().values()] + simple.GetViews()
    start_set = [x for x in start_set if filter(x)]

    # now, locate dependencies for the start_set, pruning irrelevant branches
    consumers = set(start_set)
    for proxy in start_set:
        get_consumers(proxy, filter, consumers)

    producers = set()
    for proxy in consumers:
        get_producers(proxy, filter, producers)

    # proxies_of_interest is the set of all proxies that we should trace.
    proxies_of_interest = producers.union(consumers)
    #print ("proxies_of_interest", proxies_of_interest)

    trace_config = smtrace.start_trace()
    # this ensures that lookup tables/scalar bars etc. are fully traced.
    trace_config.SetFullyTraceSupplementalProxies(True)

    trace = smtrace.TraceOutput()
    trace.append("# state file generated using %s" % simple.GetParaViewSourceVersion())

    #--------------------------------------------------------------------------
    # First, we trace the views and layouts, if any.
    # TODO: add support for layouts.
    views = [x for x in proxies_of_interest
             if smtrace.Trace.get_registered_name(x, "views")]
    if views:
        # sort views by their names, so the state has some structure to it.
        views = sorted(views, key=lambda x:\
                smtrace.Trace.get_registered_name(x, "views"))
        trace.append_separated([\
            "# ----------------------------------------------------------------",
            "# setup views used in the visualization",
            "# ----------------------------------------------------------------"])
        for view in views:
            # FIXME: save view camera positions and size.
            traceitem = smtrace.RegisterViewProxy(view)
            traceitem.finalize()
            del traceitem
        trace.append_separated(smtrace.get_current_trace_output_and_reset(raw=True))
        trace.append_separated([\
            "# ----------------------------------------------------------------",
            "# restore active view",
            "SetActiveView(%s)" % smtrace.Trace.get_accessor(simple.GetActiveView()),
            "# ----------------------------------------------------------------"])

    #--------------------------------------------------------------------------
    # Next, trace data processing pipelines.
    sorted_proxies_of_interest = __toposort(proxies_of_interest)
    sorted_sources = [x for x in sorted_proxies_of_interest \
        if smtrace.Trace.get_registered_name(x, "sources")]
    if sorted_sources:
        trace.append_separated([\
            "# ----------------------------------------------------------------",
            "# setup the data processing pipelines",
            "# ----------------------------------------------------------------"])
        for source in sorted_sources:
            traceitem = smtrace.RegisterPipelineProxy(source)
            traceitem.finalize()
            del traceitem
        trace.append_separated(smtrace.get_current_trace_output_and_reset(raw=True))

    #--------------------------------------------------------------------------
    # Can't decide if the representations should be saved with the pipeline
    # objects or afterwards, opting for afterwards for now since the topological
    # sort doesn't guarantee that the representations will follow their sources
    # anyway.
    sorted_representations = [x for x in sorted_proxies_of_interest \
        if smtrace.Trace.get_registered_name(x, "representations")]
    scalarbar_representations = [x for x in sorted_proxies_of_interest \
        if smtrace.Trace.get_registered_name(x, "scalar_bars")]
    # print ("sorted_representations", sorted_representations)
    # print ("scalarbar_representations", scalarbar_representations)
    if sorted_representations or scalarbar_representations:
        for view in views:
            view_representations = [x for x in view.Representations if x in sorted_representations]
            view_scalarbars = [x for x in view.Representations if x in scalarbar_representations]
            if view_representations or view_scalarbars:
                trace.append_separated([\
                    "# ----------------------------------------------------------------",
                    "# setup the visualization in view '%s'" % smtrace.Trace.get_accessor(view),
                    "# ----------------------------------------------------------------"])
            for rep in view_representations:
                try:
                    producer = rep.Input
                    port = rep.Input.Port
                    traceitem = smtrace.Show(producer, port, view, rep,
                        comment="show data from %s" % smtrace.Trace.get_accessor(producer))
                    traceitem.finalize()
                    del traceitem
                    trace.append_separated(smtrace.get_current_trace_output_and_reset(raw=True))

                    if rep.UseSeparateColorMap:
                        trace.append_separated([\
                            "# set separate color map",
                            "%s.UseSeparateColorMap = True" % (\
                                smtrace.Trace.get_accessor(rep))])
                except AttributeError:
                    pass

            # save the scalar bar properties themselves.
            if view_scalarbars:
                trace.append_separated("# setup the color legend parameters for each legend in this view")
                for rep in view_scalarbars:
                    smtrace.Trace.get_accessor(rep)
                    trace.append_separated(smtrace.get_current_trace_output_and_reset(raw=True))
                    trace.append_separated([\
                        "# set color bar visibility",
                        "%s.Visibility = %s" % (\
                            smtrace.Trace.get_accessor(rep), rep.Visibility)])

            for rep in view_representations:
                try:
                    producer = rep.Input
                    port = rep.Input.Port

                    if rep.IsScalarBarVisible(view):
                        # FIXME: this will save this multiple times, right now,
                        # if two representations use the same LUT.
                        trace.append_separated([\
                            "# show color legend",
                            "%s.SetScalarBarVisibility(%s, True)" % (\
                                smtrace.Trace.get_accessor(rep),
                                smtrace.Trace.get_accessor(view))])

                    if not rep.Visibility:
                        traceitem = smtrace.Hide(producer, port, view)
                        traceitem.finalize()
                        del traceitem
                        trace.append_separated(smtrace.get_current_trace_output_and_reset(raw=True))
                except AttributeError:
                    pass

    #--------------------------------------------------------------------------
    # Now, trace the transfer functions (color maps and opacity maps) used.
    ctfs = set([x for x in proxies_of_interest \
        if smtrace.Trace.get_registered_name(x, "lookup_tables")])
    if ctfs:
        trace.append_separated([\
            "# ----------------------------------------------------------------",
            "# setup color maps and opacity maps used in the visualization",
            "# note: the Get..() functions create a new object, if needed",
            "# ----------------------------------------------------------------"])
        for ctf in ctfs:
            smtrace.Trace.get_accessor(ctf)
            if ctf.ScalarOpacityFunction in proxies_of_interest:
                smtrace.Trace.get_accessor(ctf.ScalarOpacityFunction)
        trace.append_separated(smtrace.get_current_trace_output_and_reset(raw=True))

    # restore the active source since the order in which the pipeline is created
    # in the state file can end up changing the active source to be different
    # than what it was when the state is being saved.
    trace.append_separated([\
        "# ----------------------------------------------------------------",
        "# finally, restore active source",
        "SetActiveSource(%s)" % smtrace.Trace.get_accessor(simple.GetActiveSource()),
        "# ----------------------------------------------------------------"])

    del trace_config
    smtrace.stop_trace()
    #print (trace)
    return str(trace) if not raw else trace.raw_data()
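# A minimal usage sketch for get_state() above: serialize the current pipeline
# to a Python state script. The output path is a placeholder.
state = get_state()                       # trace all sources and views
with open('pipeline_state.py', 'w') as f:
    f.write(state)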
def RequestData():
    # R.1.2018.354
    import sys
    sys.path.insert(0, "EMC_SRC_PATH")
    from operator import itemgetter
    from datetime import datetime
    import numpy as np
    from vtk.numpy_interface import dataset_adapter as dsa
    from vtk.util import numpy_support
    import IrisEMC_Paraview_Lib as lib
    import paraview.simple as simple

    views = simple.GetViews(viewtype="SpreadSheetView")
    if len(views) > 0:
        # set active view
        view = simple.SetActiveView(views[0])
    else:
        view = simple.GetActiveView()
        layout = simple.GetLayout(view)
        location_id = layout.SplitViewVertical(view=view, fraction=0.7)

    myId = simple.GetActiveSource().Input.GetGlobalIDAsString()
    proxies = simple.GetSources()
    proxyList = []
    for key in proxies:
        list_elt = dict()
        list_elt['name'] = key[0]
        list_elt['id'] = key[1]
        proxy = proxies[key]
        parent_id = '0'
        if hasattr(proxy, 'Input'):
            parent_id = proxy.Input.GetGlobalIDAsString()
        list_elt['parent'] = parent_id
        proxyList.append(list_elt)

    pdi = self.GetInput()  # VTK PolyData Type
    try:
        np = pdi.GetNumberOfPoints()
    except Exception:
        raise Exception('Invalid input!')

    na = pdi.GetPointData().GetNumberOfArrays()
    val_arrays = []
    for i in range(na):
        val_arrays.append(pdi.GetPointData().GetArray(i))

    latitude = {}
    longitude = {}
    value = {}
    depth = {}

    pdo = self.GetOutput()  # VTK Table Type
    poly_data = vtk.vtkPolyData()
    data_points = vtk.vtkPoints()

    if len(Label.strip()) <= 0:
        pid = simple.GetActiveSource().Input.GetGlobalIDAsString()
        proxies = simple.GetSources()
        for key in proxies:
            if key[1] == pid:
                Label = " ".join(["Coordinates:", key[0]])
                break

    for i in range(np):
        point = pdi.GetPoints().GetPoint(i)
        (lat, lon, this_depth) = lib.xyz2llz(point[0], point[1], point[2])
        data_points.InsertNextPoint((lat, lon, this_depth))
        key = "%0.2f" % this_depth
        if key not in list(latitude.keys()):
            latitude[key] = []
            longitude[key] = []
            value[key] = []
        # need to control precision to have a reasonable sort order
        # note that these coordinates are recomputed
        if key not in list(depth.keys()):
            depth[key] = float('%0.4f' % this_depth)
        latitude[key].append(float('%0.4f' % lat))
        longitude[key].append(float('%0.4f' % lon))
        value_array = []
        for j in range(na):
            value_array.append(float(val_arrays[j].GetTuple1(i)))
        value[key].append(value_array)

    # store boundary metadata
    field_data = poly_data.GetFieldData()
    field_data.AllocateArrays(5)  # number of fields

    depth_data = vtk.vtkFloatArray()
    depth_data.SetName('depth')

    lat_data = vtk.vtkFloatArray()
    lat_data.SetName('latitude')

    lon_data = vtk.vtkFloatArray()
    lon_data.SetName('longitude')

    val_data = []
    for j in range(na):
        val_data.append(vtk.vtkFloatArray())
        val_data[j].SetName('value(%s)' % pdi.GetPointData().GetArray(j).GetName())

    depth_keys = list(latitude.keys())

    for i in range(len(depth_keys)):
        depth_key = depth_keys[i]
        lon_list = longitude[depth_key]
        lat_list = latitude[depth_key]
        val_list = value[depth_key]
        point_list = list(zip(lat_list, lon_list, val_list))
        point_list.sort(key=itemgetter(0, 1))

        for index, data in enumerate(point_list):
            depth_data.InsertNextValue(float(depth[depth_key]))
            lat_data.InsertNextValue(float(data[0]))
            lon_data.InsertNextValue(float(data[1]))
            for k in range(na):
                point_data = data[2]
                val_data[k].InsertNextValue(point_data[k])

    field_data.AddArray(lat_data)
    field_data.AddArray(lon_data)
    field_data.AddArray(depth_data)
    for j in range(na):
        field_data.AddArray(val_data[j])

    if len(Label.strip()) > 0:
        simple.RenameSource(Label)

    pdo.SetFieldData(field_data)
def RequestData():
    # R.0.2018.080
    import sys
    sys.path.insert(0, "EMC_SRC_PATH")
    from datetime import datetime
    import numpy as np
    from vtk.numpy_interface import dataset_adapter as dsa
    from vtk.util import numpy_support
    import IrisEMC_Paraview_Lib as lib
    import paraview.simple as simple

    views = simple.GetViews(viewtype="SpreadSheetView")
    if len(views) > 0:
        simple.Delete(views[0])
    else:
        view = simple.GetActiveView()
        layout = simple.GetLayout(view)
        locationId = layout.SplitViewVertical(view=view, fraction=0.7)

    myId = simple.GetActiveSource().Input.GetGlobalIDAsString()
    proxies = simple.GetSources()
    proxyList = []
    for key in proxies:
        listElt = {}
        listElt['name'] = key[0]
        listElt['id'] = key[1]
        proxy = proxies[key]
        parentId = '0'
        if hasattr(proxy, 'Input'):
            parentId = proxy.Input.GetGlobalIDAsString()
        listElt['parent'] = parentId
        proxyList.append(listElt)

    pdi = self.GetInput()  # VTK PolyData Type
    np = pdi.GetNumberOfPoints()
    depthMin = 9999999999999.0
    depthMax = -9999999999999.0
    latitude = {}
    longitude = {}

    pdo = self.GetOutput()  # VTK Table Type
    polyData = vtk.vtkPolyData()
    dataPoints = vtk.vtkPoints()

    if len(Label.strip()) <= 0:
        pid = simple.GetActiveSource().Input.GetGlobalIDAsString()
        proxies = simple.GetSources()
        for key in proxies:
            if key[1] == pid:
                Label = " ".join(["Coordinates View:", key[0]])
                break

    for i in range(np):
        point = pdi.GetPoints().GetPoint(i)
        (lat, lon, depth) = lib.xyz2llz(point[0], point[1], point[2])
        dataPoints.InsertNextPoint((lat, lon, depth))
        key = "%0.1f" % depth
        if depthMin >= float(key):
            depthMin = float(key)
            depthMinKey = key
        if depthMax <= float(key):
            depthMax = float(key)
            depthMaxKey = key
        if key not in latitude.keys():
            latitude[key] = []
            longitude[key] = []
        latitude[key].append(float("%0.1f" % lat))
        longitude[key].append(float("%0.1f" % lon))

    # store boundary metadata
    fieldData = polyData.GetFieldData()
    fieldData.AllocateArrays(3)  # number of fields

    depthData = vtk.vtkStringArray()
    depthData.SetName('Depth\n(km)')

    data = vtk.vtkStringArray()
    data.SetName('Corners (lat,lon)\n(degrees)')

    depthKeys = [depthMinKey, depthMaxKey]
    if depthMinKey == depthMaxKey:
        depthKeys = [depthMinKey]
    for i in range(len(depthKeys)):
        depthKey = depthKeys[i]
        borderLat = []
        borderLon = []
        oldMin = 999999999.0
        oldMax = -99999999.0
        lonList = list(set(sorted(longitude[depthKey])))
        for j in range(len(lonList)):
            lon = lonList[j]
            minVal = 999999999.0
            maxVal = -99999999.0
            for i in range(len(longitude[depthKey])):
                if longitude[depthKey][i] == lon:
                    if latitude[depthKey][i] > maxVal:
                        maxVal = latitude[depthKey][i]
                    if latitude[depthKey][i] < minVal:
                        minVal = latitude[depthKey][i]
            if oldMin != minVal or j == len(lonList) - 1:
                if abs(oldMin) < 9999.0:
                    borderLat.append(oldMin)
                    borderLon.append(lon)
                borderLat.append(minVal)
                borderLon.append(lon)
                oldMin = minVal
            if oldMax != maxVal or j == len(lonList) - 1:
                if abs(oldMax) < 9999.0:
                    borderLat.append(oldMax)
                    borderLon.append(lon)
                borderLat.append(maxVal)
                borderLon.append(lon)
                oldMax = maxVal
        borderList = list(zip(borderLat, borderLon))  # list() so .sort() also works under Python 3
        borderList.sort()
        borderList = list(set(borderList))
        min1 = borderList[0][0]
        max1 = borderList[0][0]
        for i in range(len(borderList)):
            if borderList[i][0] < min1:
                min1 = borderList[i][0]
            if borderList[i][0] > max1:
                max1 = borderList[i][0]
        minList = []
        maxList = []
        for i in range(len(borderList)):
            if borderList[i][0] == min1:
                minList.append(borderList[i][1])
            if borderList[i][0] == max1:
                maxList.append(borderList[i][1])
        depthData.InsertNextValue(depthKey)
        data.InsertNextValue("%0.1f, %0.1f" % (min1, min(minList)))
        if min(minList) != max(minList):
            depthData.InsertNextValue(" ")
            data.InsertNextValue("%0.1f, %0.1f" % (min1, max(minList)))
        depthData.InsertNextValue(" ")
        data.InsertNextValue("%0.1f, %0.1f" % (max1, max(maxList)))
        if min(maxList) != max(maxList):
            depthData.InsertNextValue(" ")
            data.InsertNextValue("%0.1f, %0.1f" % (max1, min(maxList)))

    fieldData.AddArray(data)
    fieldData.AddArray(depthData)

    if len(Label.strip()) > 0:
        simple.RenameSource(Label)

    pdo.SetFieldData(fieldData)
def Save_PV_data_to_picture_file(inputFileName, field_name, node_or_cell, outputFileName):
    pvs._DisableFirstRenderCameraReset()

    #pvs.HideAll(view=None)  # Not available in paraview 5.1.2
    view = pvs.GetActiveView()
    sources = pvs.GetSources().values()
    for aSource in sources:
        pvs.Hide(aSource, view)

    # create a new 'XML Unstructured Grid Reader'
    reader = pvs.XMLUnstructuredGridReader(FileName=[inputFileName])
    if node_or_cell == 'CELLS':
        reader.CellArrayStatus = [field_name]
    elif node_or_cell == 'NODES':
        reader.PointArrayStatus = [field_name]
    else:
        raise ValueError("unknown type: should be CELLS or NODES")

    # get active view
    renderView1 = pvs.GetActiveViewOrCreate('RenderView')
    # uncomment the following to set a specific view size
    # renderView1.ViewSize = [1057, 499]

    # show data in view
    display = pvs.Show(reader, renderView1)
    # trace defaults for the display properties.
    display.ColorArrayName = [None, '']
    display.GlyphType = 'Arrow'
    display.ScalarOpacityUnitDistance = 0.02234159571242408

    # reset view to fit data
    renderView1.ResetCamera()

    # set scalar coloring
    if node_or_cell == 'CELLS':
        pvs.ColorBy(display, ('CELLS', field_name))
    elif node_or_cell == 'NODES':
        pvs.ColorBy(display, ('POINTS', field_name))
    else:
        raise ValueError("unknown type: should be CELLS or NODES")

    # rescale color and/or opacity maps used to include current data range
    display.RescaleTransferFunctionToDataRange(True)
    # show color bar/color legend
    display.SetScalarBarVisibility(renderView1, True)

    pvs.SaveScreenshot(outputFileName + ".png", magnification=1, quality=100, view=renderView1)
    display.SetScalarBarVisibility(renderView1, False)

    if field_name == 'Velocity':
        #pvs.HideAll(view=None)  # Not available in paraview 5.1.2
        view = pvs.GetActiveView()
        sources = pvs.GetSources().values()
        for aSource in sources:
            pvs.Hide(aSource, view)

        # create a new 'Stream Tracer'
        streamTracer1 = pvs.StreamTracer(Input=reader, SeedType='Point Source')
        streamTracer1.Vectors = ['CELLS', 'Velocity']

        # init the 'Point Source' selected for 'SeedType'
        streamTracer1.SeedType.Center = [0.5, 0.5, 0.0]
        streamTracer1.SeedType.Radius = 0.0

        # Properties modified on streamTracer1
        streamTracer1.SeedType = 'High Resolution Line Source'

        # Properties modified on streamTracer1.SeedType
        streamTracer1.SeedType.Point1 = [0.0, 0.0, 0.0]
        streamTracer1.SeedType.Point2 = [1.0, 1.0, 0.0]
        streamTracer1.SeedType.Resolution = 20  # Pb: claims attribute Resolution does not exist

        # show data in view
        streamTracer1Display = pvs.Show(streamTracer1, renderView1)

        # create a new 'Stream Tracer'
        streamTracer2 = pvs.StreamTracer(Input=reader, SeedType='Point Source')
        streamTracer2.Vectors = ['CELLS', 'Velocity']

        # init the 'Point Source' selected for 'SeedType'
        streamTracer2.SeedType.Center = [0.5, 0.5, 0.0]
        streamTracer2.SeedType.Radius = 0.0

        # Properties modified on streamTracer2
        streamTracer2.SeedType = 'High Resolution Line Source'

        # Properties modified on streamTracer2.SeedType
        streamTracer2.SeedType.Point1 = [0.0, 1.0, 0.0]
        streamTracer2.SeedType.Point2 = [1.0, 0.0, 0.0]
        streamTracer2.SeedType.Resolution = 25  # Pb: claims attribute Resolution does not exist

        # show data in view
        streamTracer2Display = pvs.Show(streamTracer2, renderView1)

        pvs.SaveScreenshot(outputFileName + "_streamlines.png", magnification=1,
                           quality=100, view=renderView1)

    pvs.Delete()
def get_trivial_producer():
    for key, value in py2to3.iteritems(simple.GetSources()):
        if 'TrivialProducer' in key[0]:
            return value
    return None
def clearAll(self):
    for f in simple.GetSources().values():
        simple.Delete(f)
def clear(self):
    version = self.version
    rv = self.rv
    simple = self.simple

    print 'Clear the pipeline.'

    # Reset time so that color range is detected correctly on build().
    rv.ViewTime = 0
    rv.StillRender()

    def name(proxy):
        # Return name of proxy.
        return (type(proxy)).__name__

    def cmp_tubes_filters_glyphs_blocks(x, y):
        # Using this function to sort the proxies will assure they are
        # removed in the right order.
        if name(x) in ['GenerateTubes', 'TubeFilter', 'Tube']:
            return -1
        elif name(y) in ['GenerateTubes', 'TubeFilter', 'Tube']:
            return 1
        if name(x) == 'ProgrammableFilter':
            return -1
        elif name(y) == 'ProgrammableFilter':
            return 1
        elif name(x) == 'Glyph' or name(x)[:11] == 'TensorGlyph':
            return -1
        elif name(y) == 'Glyph' or name(y)[:11] == 'TensorGlyph':
            return 1
        if name(x) == 'ExtractBlock':
            return -1
        elif name(y) == 'ExtractBlock':
            return 1
        return cmp(x, y)

    # Remove lookup tables first.
    pxm = servermanager.ProxyManager()
    for proxy in pxm.GetProxiesInGroup('lookup_tables').itervalues():
        servermanager.UnRegister(proxy)

    if version == 4:
        # Then remove the source proxies.
        for proxy in sorted(pxm.GetProxiesInGroup('sources').itervalues(),
                            cmp_tubes_filters_glyphs_blocks):
            if name(proxy) == 'TensorGlyphWithCustomSource':
                # Do nothing.
                # Setting Source or Input gives:
                # 'QAbstractItemModel::endRemoveRows:
                #  Invalid index ( 2 , 0 ) in model
                #  pqPipelineModel(0x26340b0)'
                # http://www.paraview.org/Bug/view.php?id=9312
                pass
            else:
                # Avoid error:
                # 'Connection sink not found in the pipeline model'.
                if hasattr(proxy, "Source"):
                    proxy.Source = None
                if hasattr(proxy, "Input"):
                    proxy.Input = None
            servermanager.UnRegister(proxy)

        # Finally remove the representations.
        for proxy in pxm.GetProxiesInGroup('representations').itervalues():
            servermanager.UnRegister(proxy)
        rv.Representations = []
    else:
        for proxy in sorted(simple.GetSources().itervalues(),
                            cmp_tubes_filters_glyphs_blocks):
            # Avoid error:
            # 'Connection sink not found in the pipeline model'.
            if hasattr(proxy, "Input"):
                proxy.Input = None
            if hasattr(proxy, "GlyphType"):
                proxy.GlyphType = None
            simple.Delete(proxy)

    rv.ResetCamera()
    rv.StillRender()
def get_state(options=None, source_set=[], filter=None, raw=False):
    """Returns the state string"""
    if options:
        options = sm._getPyProxy(options)
        propertiesToTraceOnCreate = options.PropertiesToTraceOnCreate
        skipHiddenRepresentations = options.SkipHiddenDisplayProperties
        skipRenderingComponents = options.SkipRenderingComponents
    else:
        propertiesToTraceOnCreate = RECORD_MODIFIED_PROPERTIES
        skipHiddenRepresentations = True
        skipRenderingComponents = False

    # essential to ensure any obsolete accessors don't linger - can cause havoc
    # when saving state following a Python trace session
    # (paraview/paraview#18994)
    import gc
    gc.collect()

    if sm.vtkSMTrace.GetActiveTracer():
        raise RuntimeError("Cannot generate Python state when tracing is active.")

    if filter is None:
        filter = visible_representations() if skipHiddenRepresentations else supported_proxies()

    # build a set of proxies of interest
    if source_set:
        start_set = source_set
    else:
        # if nothing is specified, we save all views and sources.
        start_set = [x for x in simple.GetSources().values()] + simple.GetViews()
    start_set = [x for x in start_set if filter(x)]

    # now, locate dependencies for the start_set, pruning irrelevant branches
    consumers = set(start_set)
    for proxy in start_set:
        get_consumers(proxy, filter, consumers)

    producers = set()
    for proxy in consumers:
        get_producers(proxy, filter, producers)

    # proxies_of_interest is the set of all proxies that we should trace.
    proxies_of_interest = producers.union(consumers)
    #print ("proxies_of_interest", proxies_of_interest)

    trace_config = smtrace.start_trace(preamble="")
    # this ensures that lookup tables/scalar bars etc. are fully traced.
    trace_config.SetFullyTraceSupplementalProxies(True)
    trace_config.SetSkipRenderingComponents(skipRenderingComponents)

    trace = smtrace.TraceOutput()
    trace.append("# state file generated using %s" % simple.GetParaViewSourceVersion())
    trace.append_separated(smtrace.get_current_trace_output_and_reset(raw=True))

    #--------------------------------------------------------------------------
    # We trace the views and layouts, if any.
    if skipRenderingComponents:
        views = []
    else:
        views = [x for x in proxies_of_interest
                 if smtrace.Trace.get_registered_name(x, "views")]

    if views:
        # sort views by their names, so the state has some structure to it.
        views = sorted(views, key=lambda x:\
                smtrace.Trace.get_registered_name(x, "views"))
        trace.append_separated([\
            "# ----------------------------------------------------------------",
            "# setup views used in the visualization",
            "# ----------------------------------------------------------------"])
        for view in views:
            # FIXME: save view camera positions and size.
            traceitem = smtrace.RegisterViewProxy(view)
            traceitem.finalize()
            del traceitem
        trace.append_separated(smtrace.get_current_trace_output_and_reset(raw=True))

        trace.append_separated(["SetActiveView(None)"])

    # from views, build the list of layouts of interest.
    layouts = set()
    for aview in views:
        l = simple.GetLayout(aview)
        if l:
            layouts.add(simple.GetLayout(aview))

    # trace creation of layouts
    if layouts:
        layouts = sorted(layouts, key=lambda x:\
                smtrace.Trace.get_registered_name(x, "layouts"))
        trace.append_separated([\
            "# ----------------------------------------------------------------",
            "# setup view layouts",
            "# ----------------------------------------------------------------"])
        for layout in layouts:
            traceitem = smtrace.RegisterLayoutProxy(layout)
            traceitem.finalize(filter=lambda x: x in views)
            del traceitem
        trace.append_separated(smtrace.get_current_trace_output_and_reset(raw=True))

    if views:
        # restore the active view after the layouts have been created.
        trace.append_separated([\
            "# ----------------------------------------------------------------",
            "# restore active view",
            "SetActiveView(%s)" % smtrace.Trace.get_accessor(simple.GetActiveView()),
            "# ----------------------------------------------------------------"])

    #--------------------------------------------------------------------------
    # Next, trace data processing pipelines.
    sorted_proxies_of_interest = __toposort(proxies_of_interest)
    sorted_sources = [x for x in sorted_proxies_of_interest \
        if smtrace.Trace.get_registered_name(x, "sources")]
    if sorted_sources:
        trace.append_separated([\
            "# ----------------------------------------------------------------",
            "# setup the data processing pipelines",
            "# ----------------------------------------------------------------"])
        for source in sorted_sources:
            traceitem = smtrace.RegisterPipelineProxy(source)
            traceitem.finalize()
            del traceitem
        trace.append_separated(smtrace.get_current_trace_output_and_reset(raw=True))

    #--------------------------------------------------------------------------
    # Can't decide if the representations should be saved with the pipeline
    # objects or afterwards, opting for afterwards for now since the topological
    # sort doesn't guarantee that the representations will follow their sources
    # anyway.
    sorted_representations = [x for x in sorted_proxies_of_interest \
        if smtrace.Trace.get_registered_name(x, "representations")]
    scalarbar_representations = [x for x in sorted_proxies_of_interest \
        if smtrace.Trace.get_registered_name(x, "scalar_bars")]
    # print ("sorted_representations", sorted_representations)
    # print ("scalarbar_representations", scalarbar_representations)
    if not skipRenderingComponents and (sorted_representations or scalarbar_representations):
        for view in views:
            view_representations = [x for x in view.Representations if x in sorted_representations]
            view_scalarbars = [x for x in view.Representations if x in scalarbar_representations]
            if view_representations or view_scalarbars:
                trace.append_separated([\
                    "# ----------------------------------------------------------------",
                    "# setup the visualization in view '%s'" % smtrace.Trace.get_accessor(view),
                    "# ----------------------------------------------------------------"])
            for rep in view_representations:
                try:
                    producer = rep.Input
                    port = rep.Input.Port
                    traceitem = smtrace.Show(producer, port, view, rep,
                        comment="show data from %s" % smtrace.Trace.get_accessor(producer))
                    traceitem.finalize()
                    del traceitem
                    trace.append_separated(smtrace.get_current_trace_output_and_reset(raw=True))

                    if rep.UseSeparateColorMap:
                        trace.append_separated([\
                            "# set separate color map",
                            "%s.UseSeparateColorMap = True" % (\
                                smtrace.Trace.get_accessor(rep))])
                except AttributeError:
                    pass

            # save the scalar bar properties themselves.
            if view_scalarbars:
                trace.append_separated("# setup the color legend parameters for each legend in this view")
                for rep in view_scalarbars:
                    smtrace.Trace.get_accessor(rep)
                    trace.append_separated(smtrace.get_current_trace_output_and_reset(raw=True))
                    trace.append_separated([\
                        "# set color bar visibility",
                        "%s.Visibility = %s" % (\
                            smtrace.Trace.get_accessor(rep), rep.Visibility)])

            for rep in view_representations:
                try:
                    producer = rep.Input
                    port = rep.Input.Port

                    if rep.IsScalarBarVisible(view):
                        # FIXME: this will save this multiple times, right now,
                        # if two representations use the same LUT.
                        trace.append_separated([\
                            "# show color legend",
                            "%s.SetScalarBarVisibility(%s, True)" % (\
                                smtrace.Trace.get_accessor(rep),
                                smtrace.Trace.get_accessor(view))])

                    if not rep.Visibility:
                        traceitem = smtrace.Hide(producer, port, view)
                        traceitem.finalize()
                        del traceitem
                        trace.append_separated(smtrace.get_current_trace_output_and_reset(raw=True))
                except AttributeError:
                    pass

    #--------------------------------------------------------------------------
    # Now, trace the transfer functions (color maps and opacity maps) used.
    ctfs = set([x for x in proxies_of_interest \
        if smtrace.Trace.get_registered_name(x, "lookup_tables")])
    if not skipRenderingComponents and ctfs:
        trace.append_separated([\
            "# ----------------------------------------------------------------",
            "# setup color maps and opacity maps used in the visualization",
            "# note: the Get..() functions create a new object, if needed",
            "# ----------------------------------------------------------------"])
        for ctf in ctfs:
            smtrace.Trace.get_accessor(ctf)
            if ctf.ScalarOpacityFunction in proxies_of_interest:
                smtrace.Trace.get_accessor(ctf.ScalarOpacityFunction)
        trace.append_separated(smtrace.get_current_trace_output_and_reset(raw=True))

    # Trace extract generators.
    exgens = set([x for x in proxies_of_interest \
        if smtrace.Trace.get_registered_name(x, "extract_generators")])
    if exgens:
        trace.append_separated([\
            "# ----------------------------------------------------------------",
            "# setup extract generators",
            "# ----------------------------------------------------------------"])
        for exgen in exgens:
            # FIXME: this currently doesn't handle multiple output ports
            # correctly.
            traceitem = smtrace.CreateExtractGenerator(\
                xmlname=exgen.Writer.GetXMLName(),
                producer=exgen.Producer,
                generator=exgen,
                registrationName=smtrace.Trace.get_registered_name(exgen, "extract_generators"))
            traceitem.finalize()
            del traceitem
        trace.append_separated(smtrace.get_current_trace_output_and_reset(raw=True))

    # restore the active source since the order in which the pipeline is created
    # in the state file can end up changing the active source to be different
    # than what it was when the state is being saved.
    trace.append_separated([\
        "# ----------------------------------------------------------------",
        "# restore active source",
        "SetActiveSource(%s)" % smtrace.Trace.get_accessor(simple.GetActiveSource()),
        "# ----------------------------------------------------------------"])

    if options:
        # add coda about extracts generation.
        trace.append_separated(["",
            "if __name__ == '__main__':",
            "    # generate extracts",
            "    SaveExtracts(ExtractsOutputDirectory='%s')" % options.ExtractsOutputDirectory])

    del trace_config
    smtrace.stop_trace()
    #print (trace)
    return str(trace) if not raw else trace.raw_data()
def clearFilters(self):
    for f in simple.GetSources().values():
        if f.GetProperty("Input") is not None:
            simple.Delete(f)