def CreateProducer(name):
    """Return the producer proxy for the named simulation input channel.

    Handles both Catalyst 2.0 (via ``vtkInSituInitializationHelper``) and
    legacy Catalyst (via the active data description), caching legacy
    producers on the active Python pipeline module.
    """
    global ActiveDataDescription, ActivePythonPipelineModule
    assert IsInsituInput(name)
    if vtkInSituInitializationHelper.IsInitialized():
        # Catalyst 2.0
        from paraview import servermanager
        producer = servermanager._getPyProxy(
            vtkInSituInitializationHelper.GetProducer(name))
        # since state file may have arbitrary properties being specified
        # on the original source, we ensure we ignore them
        producer.IgnoreUnknownSetRequests = True
        return producer

    # Legacy Catalyst
    module = ActivePythonPipelineModule
    if not hasattr(module, "_producer_map"):
        module._producer_map = {}
    if name in module._producer_map:
        # BUGFIX: previously returned `module[name]`, which raises TypeError
        # (modules are not subscriptable); the cache is `module._producer_map`.
        return module._producer_map[name]

    from paraview import servermanager
    dataDesc = ActiveDataDescription
    ipdesc = dataDesc.GetInputDescriptionByName(name)

    # eventually, we want the Catalyst C++ code to give use the vtkAlgorithm to
    # use; e.g.
    # servermanager._getPyProxy(ipdesc.GetProducer())
    pxm = servermanager.ProxyManager()
    producer = servermanager._getPyProxy(
        pxm.NewProxy("sources", "PVTrivialProducer2"))
    controller = servermanager.ParaViewPipelineController()
    controller.InitializeProxy(producer)
    controller.RegisterPipelineProxy(producer, name)
    # since state file may have arbitrary properties being specified
    # on the original source, we ensure we ignore them
    producer.IgnoreUnknownSetRequests = True

    vtkobject = producer.GetClientSideObject()
    assert vtkobject
    vtkobject.SetWholeExtent(ipdesc.GetWholeExtent())
    vtkobject.SetOutput(ipdesc.GetGrid())
    module._producer_map[name] = producer
    return producer
def __GetLookupTableForArray(self, aArray, **kwargs):
    """Fetch (or lazily create) the lookup table registered for *aArray*
    and apply the given named properties to it."""
    registration_name = '%d.%s.PVLookupTable' % (
        aArray.GetNumberOfComponents(), aArray.GetName())
    lut = servermanager.ProxyManager().GetProxy('lookup_tables',
                                                registration_name)
    if not lut:
        # No LUT registered yet for this array: build a default HSV table
        # and register it under the derived name.
        lut = servermanager.rendering.PVLookupTable(
            ColorSpace="HSV", RGBPoints=[0, 0, 0, 1, 1, 1, 0, 0])
        servermanager.Register(lut, registrationName=registration_name)

    for prop_name, prop_value in kwargs.items():
        if not hasattr(lut, prop_name):
            raise AttributeError("LUT has no property %s" % (prop_name))
        setattr(lut, prop_name, prop_value)
    return lut
def _createSelection(proxyname, **args):
    """Utility function to create a selection source. Basically a factory
    function that creates a proxy of a given name and hands off keyword
    arguments to the newly created proxy."""
    active_session = sm.ActiveConnection.Session
    manager = sm.ProxyManager(active_session)
    raw_proxy = manager.NewProxy('sources', proxyname)
    selection = SelectionProxy(proxy=raw_proxy)
    # Warning: this will add only the attributes initially passed in when
    # creating the proxy.
    for key, value in args.items():
        selection.add_attribute(key, value)
        selection.SetPropertyWithName(key, value)
    return selection
def WriterParametersProxy(self, writer, filename, freq):
    """Creates a client only proxy that will be synchronized with ParaView
    Live, allowing a user to set filename and frequency.
    """
    pipeline_controller = servermanager.ParaViewPipelineController()
    # assume that a client only proxy with the same name as a writer
    # is available in "filters"
    params = servermanager.ProxyManager().NewProxy("filters",
                                                   writer.GetXMLName())
    pipeline_controller.PreInitializeProxy(params)
    params.GetProperty("Input").SetInputConnection(0, writer.Input.SMProxy, 0)
    params.GetProperty("FileName").SetElement(0, filename)
    params.GetProperty("WriteFrequency").SetElement(0, freq)
    pipeline_controller.PostInitializeProxy(params)
    pipeline_controller.RegisterPipelineProxy(params)
    return params
def show(self, proxy):
    """Helper: display *proxy* in the render view, handling the legacy
    ParaView 3.6 (version 4) API as well as the modern simple.Show path."""
    if self.version == 4:
        # Adapted from Show in simple.py (ParaView 3.6).
        rep = servermanager.CreateRepresentation(proxy, self.rv)
        servermanager.ProxyManager().RegisterProxy(
            "representations",
            "my_representation%d" % _funcs_internals.rep_counter, rep)
        _funcs_internals.rep_counter += 1
    else:
        rep = self.simple.Show(proxy)
    rep.Visibility = 1
    return rep
def get_proxy_label(self, xmlgroup, xmlname):
    """Return the fully qualified servermanager constructor string
    ("servermanager.<group>.<Label>") for the writer identified by
    *xmlgroup*/*xmlname*, falling back to the XML name when no prototype
    proxy is available in this build."""
    pxm = servermanager.ProxyManager()
    prototype = pxm.GetPrototypeProxy(xmlgroup, xmlname)
    if not prototype:
        # a bit of a hack but we assume that there's a stub of some
        # writer that's not available in this build but is available
        # with the build used by the simulation code (probably through a plugin)
        # this stub must have the proper name in the coprocessing hints
        print "WARNING: Could not find", xmlname, "writer in", xmlgroup, \
            "XML group. This is not a problem as long as the writer is available with " \
            "the ParaView build used by the simulation code."
        ctor = servermanager._make_name_valid(xmlname)
    else:
        ctor = servermanager._make_name_valid(prototype.GetXMLLabel())
    # TODO: use servermanager.ProxyManager().NewProxy() instead
    # we create the writer proxy such that it is not registered with the
    # ParaViewPipelineController, so its state is not sent to ParaView Live.
    return "servermanager.%s.%s" % (xmlgroup, ctor)
def Glob(path, rootDir=None):
    """Given a path, this function performs globbing on the file names
    inside the input directory. rootDir is an optional parameter that can
    set a relative root directory from which path is defined. This function
    returns the list of files matching the globbing pattern (the wildcard *
    is an example of pattern that can be used) of the input path. Note that
    for this function to work, the globbing pattern needs to only belong to
    the file name at the end of path. fnmatch package is used as the backend
    for processing the input pattern.
    """
    import paraview
    import paraview.simple
    import paraview.servermanager as sm
    import fnmatch
    import os.path

    # Split into the directory to list and the file-name pattern to match.
    head_tail = os.path.split(path)
    dirPath = head_tail[0]
    fileName = head_tail[1]

    fileInfoHelperProxy = sm.ProxyManager().NewProxy("misc",
                                                     "FileInformationHelper")
    fileInfoHelperProxy.GetProperty("DirectoryListing").SetElement(0, True)
    fileInfoHelperProxy.GetProperty("Path").SetElement(0, dirPath)
    fileInfoHelperProxy.GetProperty("GroupFileSequences").SetElement(0, False)
    # FIX: compare against None with identity, not `!= None` (PEP 8).
    if rootDir is not None:
        fileInfoHelperProxy.GetProperty("WorkingDirectory").SetElement(
            0, rootDir)
    fileInfoHelperProxy.UpdateVTKObjects()

    # Gather the server-side directory listing locally.
    localFileInfo = sm.vtkPVFileInformation()
    fileInfoHelperProxy.GatherInformation(localFileInfo)
    contents = localFileInfo.GetContents()  # hoisted out of the loop
    numFiles = contents.GetNumberOfItems()

    foundFiles = []
    for i in range(numFiles):
        name = contents.GetItemAsObject(i).GetName()
        if fnmatch.fnmatch(name, fileName):
            foundFiles.append(dirPath + '/' + name)
    foundFiles.sort()
    return foundFiles
def WriterParametersProxy(self, writer, filename, freq, paddingamount):
    """Creates a client only proxy that will be synchronized with ParaView
    Live, allowing a user to set the filename and frequency.
    """
    controller = servermanager.ParaViewPipelineController()
    # assume that a client only proxy with the same name as a writer
    # is available in "insitu_writer_parameters"

    # Since coprocessor sometimes pass writer as a custom object and not
    # a proxy, we need to handle that. Just creating any arbitrary writer
    # proxy to store the parameters is acceptable. So let's just do that
    # when the writer is not a proxy.
    writerIsProxy = isinstance(writer, servermanager.Proxy)
    helperName = writer.GetXMLName() if writerIsProxy \
        else "XMLPImageDataWriter"
    proxy = servermanager.ProxyManager().NewProxy(
        "insitu_writer_parameters", helperName)
    controller.PreInitializeProxy(proxy)
    if writerIsProxy:
        # it's possible that the writer can take in multiple input connections
        # so we need to go through all of them. the try/except block seems
        # to be the best way to figure out if there are multiple input connections
        try:
            length = len(writer.Input)
            for i in range(length):
                proxy.GetProperty("Input").AddInputConnection(
                    writer.Input[i].SMProxy, 0)
        # FIX: narrowed from a bare `except:`, which would also swallow
        # KeyboardInterrupt/SystemExit; a non-sequence Input raises
        # TypeError/AttributeError, both covered by Exception.
        except Exception:
            proxy.GetProperty("Input").SetInputConnection(
                0, writer.Input.SMProxy, 0)
    proxy.GetProperty("FileName").SetElement(0, filename)
    proxy.GetProperty("WriteFrequency").SetElement(0, freq)
    proxy.GetProperty("PaddingAmount").SetElement(0, paddingamount)
    controller.PostInitializeProxy(proxy)
    controller.RegisterPipelineProxy(proxy)
    return proxy
def run(filename=None):
    """Create a dummy pipeline and save the coprocessing state in the filename
    specified, if any, else dumps it out on stdout."""
    from paraview import simple, servermanager
    wavelet = simple.Wavelet(registrationName="Wavelet1")
    contour = simple.Contour()
    display = simple.Show()
    view = simple.Render()
    viewname = servermanager.ProxyManager().GetProxyName("views", view.SMProxy)
    # screenshot_info value list: [filename, writefreq, fitToScreen,
    # magnification, width, height] -- same layout DumpPipeline documents.
    script = DumpPipeline(
        export_rendering=True,
        simulation_input_map={"Wavelet1": "input"},
        screenshot_info={viewname: ['image.png', '1', '1', '2', '400', '400']})
    if filename:
        f = open(filename, "w")
        f.write(script)
        f.close()
    else:
        # NOTE: Python 2 print statements -- this module targets legacy Catalyst.
        print "# *** Generated Script Begin ***"
        print script
        print "# *** Generated Script End ***"
def DumpPipeline(export_rendering, simulation_input_map, screenshot_info,
                 cinema_tracks, cinema_arrays, enable_live_viz,
                 live_viz_frequency):
    """Method that will dump the current pipeline and return it as a string trace.

    export_rendering
      boolean telling if we want to export rendering

    simulation_input_map
      string->string map with key being the proxyname while value being the
      simulation input name.

    screenshot_info
      map with information about screenshots

      * key -> view proxy name
      * value -> [filename, writefreq, fitToScreen, magnification, width,
        height, cinemacamera options]

    cinema_tracks
      map with information about cinema tracks to record

      * key -> proxy name
      * value -> argument ranges

    cinema_arrays
      map with information about value arrays to be exported

      * key -> proxy name
      * value -> list of array names

    enable_live_viz
      boolean telling if we want to enable Catalyst Live connection

    live_viz_frequency
      integer telling how often to update Live connection. only used if
      enable_live_viz is True
    """
    # reset the global variables.
    reset_cpstate_globals()

    cpstate_globals.export_rendering = export_rendering
    cpstate_globals.simulation_input_map = simulation_input_map
    cpstate_globals.screenshot_info = screenshot_info
    cpstate_globals.cinema_tracks = cinema_tracks
    cpstate_globals.cinema_arrays = cinema_arrays
    cpstate_globals.enable_live_viz = enable_live_viz
    cpstate_globals.live_viz_frequency = live_viz_frequency

    # Initialize the write frequency map
    for key in cpstate_globals.simulation_input_map.values():
        cpstate_globals.write_frequencies[key] = []

    # Start trace
    proxy_filter = cpstate_filter_proxies_to_serialize()
    smtrace.RealProxyAccessor.register_create_callback(cp_hook)
    state = smstate.get_state(filter=proxy_filter, raw=True)
    smtrace.RealProxyAccessor.unregister_create_callback(cp_hook)

    # add in the new style writer proxies
    state = state + NewStyleWriters().make_trace()

    # iterate over all views that were saved in state and update write requencies
    if export_rendering:
        pxm = servermanager.ProxyManager()
        for key, vtuple in screenshot_info.items():
            view = pxm.GetProxy("views", key)
            if not view:
                continue
            image_write_frequency = int(vtuple[1])
            # Locate which simulation input this write is connected to, if any. If so,
            # we update the write_frequencies datastructure accordingly.
            sim_inputs = locate_simulation_inputs_for_view(view)
            for sim_input_name in sim_inputs:
                # BUGFIX: previously tested membership in the
                # write_frequencies dict itself (i.e. against channel names),
                # so duplicate frequencies were never filtered and new
                # frequencies whose value happened to equal a channel key
                # were skipped. Test the per-channel frequency list instead,
                # matching make_trace() and cp_hook().
                if image_write_frequency not in \
                        cpstate_globals.write_frequencies[sim_input_name]:
                    cpstate_globals.write_frequencies[sim_input_name].append(
                        image_write_frequency)
                    cpstate_globals.write_frequencies[sim_input_name].sort()
                if sim_input_name not in cpstate_globals.channels_needed:
                    cpstate_globals.channels_needed.append(sim_input_name)

    if enable_live_viz:
        for key in simulation_input_map:
            sim_input_name = simulation_input_map[key]
            # BUGFIX: same per-channel membership test as above.
            if live_viz_frequency not in \
                    cpstate_globals.write_frequencies[sim_input_name]:
                cpstate_globals.write_frequencies[sim_input_name].append(
                    live_viz_frequency)
                cpstate_globals.write_frequencies[sim_input_name].sort()
            if sim_input_name not in cpstate_globals.channels_needed:
                cpstate_globals.channels_needed.append(sim_input_name)

    # Collect the point/cell array names for every needed channel; the
    # trailing 0/1 flag distinguishes point (0) from cell (1) arrays.
    pxm = servermanager.ProxyManager()
    arrays = {}
    for channel_name in cpstate_globals.channels_needed:
        arrays[channel_name] = []
        p = pxm.GetProxy("sources", channel_name)
        if p:
            for i in range(p.GetPointDataInformation().GetNumberOfArrays()):
                arrays[channel_name].append(
                    [p.GetPointDataInformation().GetArray(i).GetName(), 0])
            for i in range(p.GetCellDataInformation().GetNumberOfArrays()):
                arrays[channel_name].append(
                    [p.GetCellDataInformation().GetArray(i).GetName(), 1])

    # Create global fields values
    pipelineClassDef = "\n"
    pipelineClassDef += "# ----------------------- CoProcessor definition -----------------------\n\n"

    # Create the resulting string that will contains the pipeline definition
    # NOTE(review): the indentation inside the generated-script string
    # literals below appears collapsed to single spaces in this copy of the
    # file -- verify against upstream before relying on the emitted script.
    pipelineClassDef += "def CreateCoProcessor():\n"
    pipelineClassDef += " def _CreatePipeline(coprocessor, datadescription):\n"
    pipelineClassDef += " class Pipeline:\n"

    # add the traced code.
    for original_line in state:
        for line in original_line.split("\n"):
            if line.find("import *") != -1 or \
               line.find("#### import the simple") != -1:
                continue
            if line:
                pipelineClassDef += " " + line + "\n"
            else:
                pipelineClassDef += "\n"
    pipelineClassDef += " return Pipeline()\n"
    pipelineClassDef += "\n"
    pipelineClassDef += " class CoProcessor(coprocessing.CoProcessor):\n"
    pipelineClassDef += " def CreatePipeline(self, datadescription):\n"
    pipelineClassDef += " self.Pipeline = _CreatePipeline(self, datadescription)\n"
    pipelineClassDef += "\n"
    pipelineClassDef += " coprocessor = CoProcessor()\n"
    pipelineClassDef += " # these are the frequencies at which the coprocessor updates.\n"
    pipelineClassDef += " freqs = " + str(
        cpstate_globals.write_frequencies) + "\n"
    pipelineClassDef += " coprocessor.SetUpdateFrequencies(freqs)\n"
    if arrays:
        pipelineClassDef += " if requestSpecificArrays:\n"
        for channel_name in arrays:
            pipelineClassDef += " arrays = " + str(
                arrays[channel_name]) + "\n"
            pipelineClassDef += " coprocessor.SetRequestedArrays('" + channel_name + "', arrays)\n"
    pipelineClassDef += " coprocessor.SetInitialOutputOptions(timeStepToStartOutputAt,forceOutputAtFirstCall)\n"
    pipelineClassDef += "\n"
    pipelineClassDef += " if rootDirectory:\n"
    pipelineClassDef += " coprocessor.SetRootDirectory(rootDirectory)\n"
    pipelineClassDef += "\n"
    pipelineClassDef += " if make_cinema_table:\n"
    pipelineClassDef += " coprocessor.EnableCinemaDTable()\n"
    pipelineClassDef += "\n"
    pipelineClassDef += " return coprocessor\n"
    return pipelineClassDef
def make_trace(self):
    """gather trace for the writer proxies that are not in the trace pipeline
    but rather in the new export state.

    aDIOSWriter1 = servermanager.writers.ADIOSWriter(Input=wavelet1)
    coprocessor.RegisterWriter(aDIOSWriter1, filename='filename.vta', freq=1, paddingamount=0)
    """
    res = []
    res.append("")
    res.append("# Now any catalyst writers")
    pxm = servermanager.ProxyManager()
    globalepxy = pxm.GetProxy("export_global", "catalyst")
    exports = pxm.GetProxiesInGroup(
        "export_writers")  #todo should use ExportDepot
    for x in exports:
        # x is a (registration-name, ...) tuple; the name encodes
        # "<input>|<writer>" and is split below.
        xs = x[0]
        pxy = pxm.GetProxy('export_writers', xs)
        if not pxy.HasAnnotation('enabled'):
            continue
        xmlname = pxy.GetXMLName()
        if xmlname == "Cinema image options":
            # skip the array and property export information we stuff in this proxy
            continue
        inputname = xs.split('|')[0].lower().replace("*", "").replace(".", "")
        writername = xs.split('|')[1]
        xmlgroup = pxy.GetXMLGroup()
        padding_amount = globalepxy.GetProperty(
            "FileNamePadding").GetElement(0)
        write_frequency = pxy.GetProperty("WriteFrequency").GetElement(0)
        filename = pxy.GetProperty("CatalystFilePattern").GetElement(0)
        # Record this writer's frequency against every simulation input
        # feeding it, keeping the per-channel frequency lists sorted.
        sim_inputs = locate_simulation_inputs(pxy)
        for sim_input_name in sim_inputs:
            if not write_frequency in cpstate_globals.write_frequencies[
                    sim_input_name]:
                cpstate_globals.write_frequencies[sim_input_name].append(
                    write_frequency)
                cpstate_globals.write_frequencies[sim_input_name].sort()
            if not sim_input_name in cpstate_globals.channels_needed:
                cpstate_globals.channels_needed.append(sim_input_name)
        prototype = pxm.GetPrototypeProxy(xmlgroup, xmlname)
        if not prototype:
            varname = self.__make_name(xmlname)
        else:
            varname = self.__make_name(prototype.GetXMLLabel())
        f = "%s = servermanager.writers.%s(Input=%s)" % (
            varname, writername, inputname)
        res.append(f)
        if self.__make_temporal_script:
            f = "STP.RegisterWriter(%s, '%s', tp_writers)" % (varname,
                                                              filename)
        else:
            f = "coprocessor.RegisterWriter(%s, filename='%s', freq=%s, paddingamount=%s)" % (
                varname, filename, write_frequency, padding_amount)
        res.append(f)
    res.append("")
    if len(res) == 2:
        return []  # don't clutter output if there are no writers
    return res
except ImportError:
    hasSimpleModule = False

# this import prevents python-source-tools that use introspection from working
# because it prevents import into a normal python
from paraview import servermanager

from PyFoam.Error import warning
from PyFoam.RunDictionary.SolutionDirectory import SolutionDirectory

from math import sqrt
from os import path

from SourceBase import SourceBase

# Module-level proxy manager shared by the helpers below.
proxyManager = servermanager.ProxyManager()


def version():
    """Tries to determine the paraview-version"""
    try:
        # old versions
        return (proxyManager.GetVersionMajor(),
                proxyManager.GetVersionMinor(),
                proxyManager.GetVersionPatch())
    except AttributeError:
        # newer ParaView moved the version accessors to vtkSMProxyManager
        return (servermanager.vtkSMProxyManager.GetVersionMajor(),
                servermanager.vtkSMProxyManager.GetVersionMinor(),
                servermanager.vtkSMProxyManager.GetVersionPatch())


def paraFoamReader():
def getProxyAsPipelineNode(id, view=None):
    """
    Create a representation for that proxy so it can be used within a pipeline
    browser.
    """
    pxm = servermanager.ProxyManager()
    proxy = idToProxy(id)
    rep = simple.GetDisplayProperties(proxy)
    nbActiveComp = 1  # NOTE(review): assigned below but never read afterwards

    pointData = []
    searchArray = ('POINTS' == rep.ColorArrayName[0]) and (len(
        rep.ColorArrayName[1]) > 0)

    if servermanager.ActiveConnection.GetNumberOfDataPartitions() > 1:
        info = { \
            'lutId': 'vtkProcessId_1', \
            'name': 'vtkProcessId', \
            'size': 1, \
            'range': [0, servermanager.ActiveConnection.GetNumberOfDataPartitions()-1] }
        pointData.append(info)

    # FIXME seb
    # dataInfo = rep.GetRepresentedDataInformation()
    # pointData = dataInfo.GetPointDataInformation()
    # cellData = dataInfo.GetCellDataInformation()
    # for idx in pointData.GetNumberOfArrays():
    #     info = pointData.GetArrayInformation(idx)
    #     nbComponents = info.GetNumberOfComponents()
    #     if searchArray and array.Name == rep.ColorArrayName:
    #         nbActiveComp = nbComponents
    #     rangeOn = (-1 if nbComponents == 3 else 0)
    #     info = { \
    #         'lutId': info.GetName() + '_' + str(nbComponents), \
    #         'name': info.GetName, \
    #         'size': nbComponents, \
    #         'range': info.GetRange(rangeOn) }
    #     pointData.append(info)

    for array in proxy.GetPointDataInformation():
        nbComponents = array.GetNumberOfComponents()
        if searchArray and array.Name == rep.ColorArrayName[1]:
            nbActiveComp = nbComponents
        # BUGFIX: was `(nbComponents == 1 if 0 else -1)`, whose condition
        # is the constant 0, so it always evaluated to -1 (magnitude).
        # Scalars should use component 0; only multi-component arrays use
        # the magnitude (-1).
        rangeOn = 0 if nbComponents == 1 else -1
        info = { \
            'lutId': array.Name + '_' + str(nbComponents), \
            'name': array.Name, \
            'size': nbComponents, \
            'range': array.GetRange(rangeOn) }
        pointData.append(info)

    cellData = []
    searchArray = ('CELLS' == rep.ColorArrayName[0]) and (len(
        rep.ColorArrayName[1]) > 0)
    for array in proxy.GetCellDataInformation():
        nbComponents = array.GetNumberOfComponents()
        if searchArray and array.Name == rep.ColorArrayName[1]:
            nbActiveComp = nbComponents
        # BUGFIX: same swapped conditional expression as above.
        rangeOn = 0 if nbComponents == 1 else -1
        info = { \
            'lutId': array.Name + '_' + str(nbComponents), \
            'name': array.Name, \
            'size': nbComponents, \
            'range': array.GetRange(rangeOn) }
        cellData.append(info)

    state = getProxyAsState(proxy.GetGlobalID())
    showScalarbar = 1 if view and vtkSMPVRepresentationProxy.IsScalarBarVisible(
        rep.SMProxy, view.SMProxy) else 0
    repName = 'Hide'
    if rep.Visibility == 1:
        repName = rep.Representation
    return { 'proxy_id'  : proxy.GetGlobalID(), \
             'name'      : pxm.GetProxyName("sources", proxy), \
             'bounds'    : proxy.GetDataInformation().GetBounds(), \
             'pointData' : pointData, \
             'cellData'  : cellData, \
             'activeData': str(rep.ColorArrayName[0]) + ':' + str(rep.ColorArrayName[1]), \
             'diffuseColor'  : str(rep.DiffuseColor), \
             'showScalarBar' : showScalarbar, \
             'representation': repName, \
             'state'         : state, \
             'children'      : [] }
def make_trace(self):
    """gather trace for the writer proxies that are not in the trace pipeline
    but rather in the new export state.
    """
    res = []
    res.append("")
    res.append("# Now any catalyst writers")
    pxm = servermanager.ProxyManager()
    globalepxy = pxm.GetProxy("export_global", "catalyst")
    exports = pxm.GetProxiesInGroup(
        "export_writers")  #todo should use ExportDepot
    for x in exports:
        # x is a (registration-name, ...) tuple; the name encodes
        # "<input>|<writer>" and is split below.
        xs = x[0]
        pxy = pxm.GetProxy('export_writers', xs)
        if not pxy.HasAnnotation('enabled'):
            continue
        xmlname = pxy.GetXMLName()
        if xmlname == "Cinema image options":
            # skip the array and property export information we stuff in this proxy
            continue
        inputname = xs.split('|')[0].lower().replace("*", "").replace(".", "")
        writername = xs.split('|')[1]
        xmlgroup = pxy.GetXMLGroup()
        padding_amount = globalepxy.GetProperty(
            "FileNamePadding").GetElement(0)
        write_frequency = pxy.GetProperty("WriteFrequency").GetElement(0)
        filename = pxy.GetProperty("CatalystFilePattern").GetElement(0)
        # Optional XML-writer properties: each is looked up first and only
        # dereferenced when the writer actually exposes it.
        DataMode = pxy.GetProperty("DataMode")
        if DataMode is not None:
            DataMode = pxy.GetProperty("DataMode").GetElement(0)
        HeaderType = pxy.GetProperty("HeaderType")
        if HeaderType is not None:
            HeaderType = pxy.GetProperty("HeaderType").GetElement(0)
        EncodeAppendedData = pxy.GetProperty("EncodeAppendedData")
        if EncodeAppendedData is not None:
            EncodeAppendedData = pxy.GetProperty(
                "EncodeAppendedData").GetElement(0) != 0
        CompressorType = pxy.GetProperty("CompressorType")
        if CompressorType is not None:
            CompressorType = pxy.GetProperty("CompressorType").GetElement(
                0)
        CompressionLevel = pxy.GetProperty("CompressionLevel")
        if CompressionLevel is not None:
            CompressionLevel = pxy.GetProperty(
                "CompressionLevel").GetElement(0)
        # Record this writer's frequency against every simulation input
        # feeding it, keeping the per-channel frequency lists sorted.
        sim_inputs = locate_simulation_inputs(pxy)
        for sim_input_name in sim_inputs:
            if not write_frequency in cpstate_globals.write_frequencies[
                    sim_input_name]:
                cpstate_globals.write_frequencies[sim_input_name].append(
                    write_frequency)
                cpstate_globals.write_frequencies[sim_input_name].sort()
            if not sim_input_name in cpstate_globals.channels_needed:
                cpstate_globals.channels_needed.append(sim_input_name)
        prototype = pxm.GetPrototypeProxy(xmlgroup, xmlname)
        if not prototype:
            varname = self.__make_name(xmlname)
        else:
            varname = self.__make_name(prototype.GetXMLLabel())
        # Write pass array proxy
        if pxy.GetProperty("ChooseArraysToWrite").GetElement(0) == 1:
            point_arrays = []
            cell_arrays = []
            arrays_property = pxy.GetProperty("PointDataArrays")
            for i in range(arrays_property.GetNumberOfElements()):
                point_arrays.append(arrays_property.GetElement(i))
            arrays_property = pxy.GetProperty("CellDataArrays")
            for i in range(arrays_property.GetNumberOfElements()):
                cell_arrays.append(arrays_property.GetElement(i))
            f = "%s_arrays = PassArrays(Input=%s, PointDataArrays=%s, CellDataArrays=%s)" % \
                (inputname, inputname, str(point_arrays), str(cell_arrays))
            inputname = "%s_arrays" % inputname
            res.append(f)
        # Actual writer
        f = "%s = servermanager.writers.%s(Input=%s)" % (
            varname, writername, inputname)
        res.append(f)
        if self.__make_temporal_script:
            f = "STP.RegisterWriter(%s, '%s', tp_writers)" % (varname,
                                                              filename)
        else:
            f = "coprocessor.RegisterWriter(%s, filename='%s', freq=%s, paddingamount=%s, DataMode='%s', HeaderType='%s', EncodeAppendedData=%s, CompressorType='%s', CompressionLevel='%s')" % (
                varname, filename, write_frequency, padding_amount, DataMode,
                HeaderType, EncodeAppendedData, CompressorType,
                CompressionLevel)
        res.append(f)
    res.append("")
    if len(res) == 2:
        return []  # don't clutter output if there are no writers
    return res
def DumpPipeline(export_rendering, simulation_input_map, screenshot_info):
    """
    Method that will dump the current pipeline and return it as a string trace

    - export_rendering    : boolean telling if we want to export rendering
    - simulation_input_map: string->string map with key being the proxyname
                            while value being the simulation input name.
    - screenshot_info     : map with information about screenshots
                            key -> view proxy name
                            value -> [filename, writefreq, fitToScreen,
                                      magnification, width, height]
    """
    # reset the global variables.
    reset_cpstate_globals()

    cpstate_globals.export_rendering = export_rendering
    cpstate_globals.simulation_input_map = simulation_input_map
    cpstate_globals.screenshot_info = screenshot_info

    # Initialize the write frequency map
    for key in cpstate_globals.simulation_input_map.values():
        cpstate_globals.write_frequencies[key] = []

    # Start trace
    smtrace.start_trace(CaptureAllProperties=True, UseGuiName=True)

    # Disconnect the smtrace module's observer. It should not be
    # active while tracing the state.
    smtrace.reset_trace_observer()

    # update trace globals.
    smtrace.trace_globals.proxy_ctor_hook = staticmethod(cp_hook)
    smtrace.trace_globals.trace_output = []

    # Get list of proxy lists
    proxy_lists = smstate.get_proxy_lists_ordered_by_group(
        WithRendering=cpstate_globals.export_rendering)
    # Now register the proxies with the smtrace module
    for proxy_list in proxy_lists:
        smstate.register_proxies_by_dependency(proxy_list)

    # Calling append_trace causes the smtrace module to sort out all the
    # registered proxies and their properties and write them as executable
    # python.
    smtrace.append_trace()

    # Stop trace and print it to the console
    smtrace.stop_trace()

    # During tracing, cp_hook() will fill up the cpstate_globals.view_proxies
    # list with view proxies, if rendering was enabled.
    for view_proxy in cpstate_globals.view_proxies:
        # Locate which simulation input this write is connected to, if any. If so,
        # we update the write_frequencies datastructure accordingly.
        sim_inputs = locate_simulation_inputs_for_view(view_proxy)
        proxyName = servermanager.ProxyManager().GetProxyName(
            "views", view_proxy)
        image_write_frequency = cpstate_globals.screenshot_info[proxyName][1]
        for sim_input_name in sim_inputs:
            if not image_write_frequency in cpstate_globals.write_frequencies[
                    sim_input_name]:
                cpstate_globals.write_frequencies[sim_input_name].append(
                    image_write_frequency)
                cpstate_globals.write_frequencies[sim_input_name].sort()

    # Create global fields values
    pipelineClassDef = "\n"
    pipelineClassDef += "# ----------------------- CoProcessor definition -----------------------\n\n"

    # Create the resulting string that will contains the pipeline definition
    # NOTE(review): the indentation inside the generated-script string
    # literals below appears collapsed to single spaces in this copy of the
    # file -- verify against upstream before relying on the emitted script.
    pipelineClassDef += "def CreateCoProcessor():\n"
    pipelineClassDef += " def _CreatePipeline(coprocessor, datadescription):\n"
    pipelineClassDef += " class Pipeline:\n"

    # add the traced code.
    for original_line in smtrace.trace_globals.trace_output:
        for line in original_line.split("\n"):
            pipelineClassDef += " " + line + "\n"
    smtrace.clear_trace()
    pipelineClassDef += " return Pipeline()\n"
    pipelineClassDef += "\n"
    pipelineClassDef += " class CoProcessor(coprocessing.CoProcessor):\n"
    pipelineClassDef += " def CreatePipeline(self, datadescription):\n"
    pipelineClassDef += " self.Pipeline = _CreatePipeline(self, datadescription)\n"
    pipelineClassDef += "\n"
    pipelineClassDef += " coprocessor = CoProcessor()\n"
    pipelineClassDef += " freqs = " + str(
        cpstate_globals.write_frequencies) + "\n"
    pipelineClassDef += " coprocessor.SetUpdateFrequencies(freqs)\n"
    pipelineClassDef += " return coprocessor\n"
    return pipelineClassDef
def run(filename=None, nframes=60):
    """
    Runs the benchmark. If a filename is specified, it will write the
    results to that file as csv. The number of frames controls how many
    times a particular configuration is rendered. Higher numbers lead to
    more accurate averages.
    """
    # Turn off progress printing
    if servermanager.progressObserverTag:
        servermanager.ToggleProgressPrinting()

    # Create a sphere source to use in the benchmarks
    pm = servermanager.ProxyManager()
    ss = servermanager.sources.SphereSource(
        ThetaResolution=1000, PhiResolution=500,
        registrationGroup="sources", registrationName="benchmark source")
    # The view and representation
    v = servermanager.GetRenderView()
    if not v:
        v = servermanager.CreateRenderView()
    rep = servermanager.CreateRepresentation(
        ss, v, registrationGroup="representations",
        registrationName="benchmark rep")
    results = []

    # Start with these defaults
    v.RemoteRenderThreshold = 0
    v.UseImmediateMode = 0
    v.UseTriangleStrips = 0

    # Test different configurations
    v.UseImmediateMode = 0
    title = 'display lists, no triangle strips, solid color'
    v.UseTriangleStrips = 0
    results.append(render(ss, v, title, nframes))
    title = 'display lists, triangle strips, solid color'
    v.UseTriangleStrips = 1
    results.append(render(ss, v, title, nframes))

    v.UseImmediateMode = 1
    title = 'no display lists, no triangle strips, solid color'
    v.UseTriangleStrips = 0
    results.append(render(ss, v, title, nframes))
    title = 'no display lists, triangle strips, solid color'
    v.UseTriangleStrips = 1
    results.append(render(ss, v, title, nframes))

    # Color by normals
    lt = servermanager.rendering.PVLookupTable()
    rep.LookupTable = lt
    rep.ColorAttributeType = 0  # point data
    rep.ColorArrayName = "Normals"
    lt.RGBPoints = [-1, 0, 0, 1, 0.0288, 1, 0, 0]
    lt.ColorSpace = 1  # HSV
    lt.VectorComponent = 0

    v.UseImmediateMode = 0
    title = 'display lists, no triangle strips, color by array'
    v.UseTriangleStrips = 0
    results.append(render(ss, v, title, nframes))
    title = 'display lists, triangle strips, color by array'
    v.UseTriangleStrips = 1
    results.append(render(ss, v, title, nframes))

    v.UseImmediateMode = 1
    # NOTE(review): duplicated assignment in the original; harmless.
    v.UseImmediateMode = 1
    title = 'no display lists, no triangle strips, color by array'
    v.UseTriangleStrips = 0
    results.append(render(ss, v, title, nframes))
    title = 'no display lists, triangle strips, color by array'
    v.UseTriangleStrips = 1
    results.append(render(ss, v, title, nframes))

    # Detach the benchmark representation from the view before cleanup.
    newr = []
    for r in v.Representations:
        if r != rep:
            newr.append(r)
    v.Representations = newr

    pm.UnRegisterProxy("sources", "benchmark source", ss)
    pm.UnRegisterProxy("representations", "benchmark rep", rep)
    ss = None
    rep = None
    v.StillRender()
    v = None

    # Emit the results as CSV (Python 2 `print >> file` syntax).
    if filename:
        f = open(filename, "w")
    else:
        f = sys.stdout
    print >> f, 'configuration, %d, %d' % (results[0][1][0], results[0][2][0])
    for i in results:
        print >> f, '"%s", %g, %g' % (i[0], i[1][1], i[2][1])
#-------------------- Start testing -------------------------- print "Start PythonAnnotationFilter testing" options = servermanager.vtkProcessModule.GetProcessModule().GetOptions() dataToLoad = options.GetParaViewDataName() # Load data file reader = OpenDataFile(dataToLoad) reader.GlobalVariables = ['KE', 'XMOM', 'YMOM', 'ZMOM', 'NSTEPS', 'TMSTEP'] reader.UpdatePipeline() # Time management timesteps = servermanager.ProxyManager().GetProxy('timekeeper','TimeKeeper').TimestepValues time = timesteps[5] # Merge blocks ### Just skip the merge = MergeBlocks() # Annotation filter annotation = PythonAnnotation() annotation.Expression = '"%f %f %f" % (inputMB[0].FieldData["XMOM"][t_index], inputMB[0].FieldData["YMOM"][t_index], inputMB[0].FieldData["ZMOM"][t_index])' # Update time and trigger pipeline execution time = timesteps[5] annotation.UpdatePipeline(time) annotation.SMProxy.UpdatePropertyInformation() value = annotation.SMProxy.GetProperty('AnnotationValue').GetElement(0)
def cp_hook(info, ctorMethod, ctorArgs, extraCtorCommands):
    """Callback registered with the smtrace to control the code recorded by the
    trace for simulation inputs and writers, among other things.

    Returns a (ctorMethod, ctorArgs, extraCtorCommands) tuple describing what
    the trace should emit for the proxy described by `info`.  Three cases are
    handled specially: simulation inputs, views (when rendering export is on),
    and writer stubs carrying a `CoProcessing` hint; everything else is passed
    through unchanged.
    """
    if info.ProxyName in cpstate_globals.simulation_input_map.keys():
        # mark this proxy as a simulation input to make it easier to locate the
        # simulation input for the writers.
        info.Proxy.cpSimulationInput = cpstate_globals.simulation_input_map[info.ProxyName]
        return ('coprocessor.CreateProducer',\
            [ 'datadescription', '\"%s\"' % (cpstate_globals.simulation_input_map[info.ProxyName]) ], '')

    # handle views
    proxy = info.Proxy
    if proxy.GetXMLGroup() == 'views' and cpstate_globals.export_rendering:
        # Replace the view constructor with coprocessor.CreateView, passing the
        # screenshot parameters recorded for this view
        # (filename, writefreq, fitToScreen, magnification, width, height).
        proxyName = servermanager.ProxyManager().GetProxyName("views", proxy)
        ctorArgs = [ ctorMethod,
                     "\"%s\"" % cpstate_globals.screenshot_info[proxyName][0],
                     cpstate_globals.screenshot_info[proxyName][1],
                     cpstate_globals.screenshot_info[proxyName][2],
                     cpstate_globals.screenshot_info[proxyName][3],
                     cpstate_globals.screenshot_info[proxyName][4],
                     cpstate_globals.screenshot_info[proxyName][5] ]
        cpstate_globals.view_proxies.append(proxy)
        return ("coprocessor.CreateView", ctorArgs, extraCtorCommands)

    # handle writers.
    if not proxy.GetHints() or \
        not proxy.GetHints().FindNestedElementByName("CoProcessing"):
        # Not a writer stub: record the proxy exactly as traced.
        return (ctorMethod, ctorArgs, extraCtorCommands)

    # this is a writer we are dealing with.
    xmlElement = proxy.GetHints().FindNestedElementByName("CoProcessing")
    xmlgroup = xmlElement.GetAttribute("group")
    xmlname = xmlElement.GetAttribute("name")
    pxm = smtrace.servermanager.ProxyManager()
    ctorMethod = None
    writer_proxy = pxm.GetPrototypeProxy(xmlgroup, xmlname)
    if writer_proxy:
        # we have a valid prototype based on the writer stub
        ctorMethod = \
            smtrace.servermanager._make_name_valid(writer_proxy.GetXMLLabel())
    else:
        # a bit of a hack but we assume that there's a stub of some
        # writer that's not available in this build but is available
        # with the build used by the simulation code (probably through a plugin)
        # this stub must have the proper name in the coprocessing hints
        print "WARNING: Could not find", xmlname, "writer in", xmlgroup, \
            "XML group. This is not a problem as long as the writer is available with " \
            "the ParaView build used by the simulation code."
        ctorMethod = \
            smtrace.servermanager._make_name_valid(xmlname)
    write_frequency = proxy.GetProperty("WriteFrequency").GetElement(0)
    # Rewrite the trace call as coprocessor.CreateWriter(ctor, filename, freq).
    ctorArgs = [ctorMethod, \
        "\"%s\"" % proxy.GetProperty("FileName").GetElement(0),\
        write_frequency]
    ctorMethod = "coprocessor.CreateWriter"

    # Locate which simulation input this write is connected to, if any. If so,
    # we update the write_frequencies datastructure accordingly.
    sim_inputs = locate_simulation_inputs(proxy)
    for sim_input_name in sim_inputs:
        if not write_frequency in cpstate_globals.write_frequencies[sim_input_name]:
            cpstate_globals.write_frequencies[sim_input_name].append(write_frequency)
            cpstate_globals.write_frequencies[sim_input_name].sort()

    return (ctorMethod, ctorArgs, '')
import os.path import sys from paraview import servermanager from paraview import smtesting smtesting.ProcessCommandLineArguments() servermanager.Connect() pvsm_file = os.path.join(smtesting.SMStatesDir, "ProxyPropertyLinks.pvsm") print("State file: %s" % pvsm_file) smtesting.LoadServerManagerState(pvsm_file) pxm = servermanager.ProxyManager() sphere1 = pxm.GetProxy("sources", "Sphere1") sphere2 = pxm.GetProxy("sources", "Sphere2") sphere3 = pxm.GetProxy("sources", "Sphere3") # Create links. proxyLink = servermanager.vtkSMProxyLink() proxyLink.AddLinkedProxy(sphere1.SMProxy, 1) # Input proxyLink.AddLinkedProxy(sphere2.SMProxy, 2) # Output pxm.RegisterLink("MyProxyLink", proxyLink) proxyLink = None propertyLink = servermanager.vtkSMPropertyLink() propertyLink.AddLinkedProperty(sphere3.SMProxy, "EndTheta", 1) # Input. propertyLink.AddLinkedProperty(sphere1.SMProxy, "StartTheta", 2) # Output. pxm.RegisterLink("MyPropertyLink", propertyLink)
def setUp(self): self.pxm = servermanager.ProxyManager()
def clear(self):
    """Tear down the whole pipeline so build() can start from a clean slate.

    Proxies must be removed in a specific order (tubes/programmable filters/
    glyphs/extract-block before their upstream sources) to avoid pipeline
    browser errors; the local cmp function below encodes that ordering.
    """
    version = self.version
    rv = self.rv
    simple = self.simple
    print 'Clear the pipeline.'
    # Reset time so that color range is detected correctly on build().
    rv.ViewTime = 0
    rv.StillRender()

    def name(proxy):
        # Return name of proxy.
        return (type(proxy)).__name__

    def cmp_tubes_filters_glyphs_blocks(x,y):
        # Using this function to sort the proxies will assure they are
        # removed in the right order.
        if name(x) in ['GenerateTubes', 'TubeFilter', 'Tube']:
            return -1
        elif name(y) in ['GenerateTubes', 'TubeFilter', 'Tube']:
            return 1
        if name(x) == 'ProgrammableFilter':
            return -1
        elif name(y) == 'ProgrammableFilter':
            return 1
        elif name(x) == 'Glyph' or name(x)[:11] == 'TensorGlyph':
            return -1
        elif name(y) == 'Glyph' or name(y)[:11] == 'TensorGlyph':
            return 1
        if name(x) == 'ExtractBlock':
            return -1
        elif name(y) == 'ExtractBlock':
            return 1
        return cmp(x,y)

    # Remove lookup tables first.
    pxm = servermanager.ProxyManager()
    for proxy in pxm.GetProxiesInGroup('lookup_tables').itervalues():
        servermanager.UnRegister(proxy)

    if version == 4:
        # ParaView 4 path: unregister sources directly via the proxy manager.
        # Then remove the source proxies.
        for proxy in sorted(pxm.GetProxiesInGroup('sources').itervalues(),
                            cmp_tubes_filters_glyphs_blocks):
            if name(proxy) == 'TensorGlyphWithCustomSource':
                # Do nothing.
                # Setting Source or Input gives:
                # 'QAbstractItemModel::endRemoveRows:
                # Invalid index ( 2 , 0 ) in model
                # pqPipelineModel(0x26340b0)'
                # http://www.paraview.org/Bug/view.php?id=9312
                pass
            else:
                # Avoid error:
                # 'Connection sink not found in the pipeline model'.
                if hasattr(proxy, "Source"):
                    proxy.Source = None
                if hasattr(proxy, "Input"):
                    proxy.Input = None
                servermanager.UnRegister(proxy)

        # Finally remove the representations.
        for proxy in pxm.GetProxiesInGroup('representations').itervalues():
            servermanager.UnRegister(proxy)
        rv.Representations = []
    else:
        # Newer path: delete through the simple module instead.
        for proxy in sorted(simple.GetSources().itervalues(),
                            cmp_tubes_filters_glyphs_blocks):
            # Avoid error:
            # 'Connection sink not found in the pipeline model'.
            if hasattr(proxy, "Input"):
                proxy.Input = None
            if hasattr(proxy, "GlyphType"):
                proxy.GlyphType = None
            simple.Delete(proxy)

    rv.ResetCamera()
    rv.StillRender()
def make_trace(self):
    """gather trace for the writer proxies that are not in the trace pipeline
    but rather in the new export state.

    Returns a list of python source lines that instantiate each enabled
    catalyst writer (optionally preceded by a PassArrays filter when only a
    subset of arrays is written) and register it with the coprocessor; returns
    [] when no writers are enabled.
    """
    res = []
    res.append("")
    res.append("# Now any catalyst writers")
    pxm = servermanager.ProxyManager()
    # Global catalyst export options (e.g. filename padding) live on this proxy.
    globalepxy = pxm.GetProxy("export_global", "catalyst")
    exports = pxm.GetProxiesInGroup("export_writers")
    #todo should use ExportDepot
    for x in exports:
        # x is a registration key; x[0] is the proxy name of form
        # "<input name>|<writer name>" — see the split('|') below.
        xs = x[0]
        pxy = pxm.GetProxy('export_writers', xs)
        if not pxy.HasAnnotation('enabled'):
            continue
        xmlname = pxy.GetXMLName()
        if xmlname == "Cinema image options":
            # skip the array and property export information we stuff in this proxy
            continue
        # note: this logic is not truly correct. the way this is setup,
        # there is no good way to really find the variable name used for the input since the names
        # that smtrace assigns are already cleaned up at this point.
        # Ideally, this class should have been written as a true `Accessor` so it could
        # be traced correctly. Right now, I am hacking this to attempt to get a reasonable name
        # that works in most cases.
        inputname = xs.split('|')[0]
        inputname = servermanager._make_name_valid(inputname)
        # lower-case the first character to match smtrace's variable naming.
        inputname = inputname[0].lower() + inputname[1:]
        writername = xs.split('|')[1]
        xmlgroup = pxy.GetXMLGroup()
        padding_amount = globalepxy.GetProperty("FileNamePadding").GetElement(0)
        write_frequency = pxy.GetProperty("WriteFrequency").GetElement(0)
        filename = pxy.GetProperty("CatalystFilePattern").GetElement(0)

        # Record this writer's frequency and channel against each simulation
        # input it is (transitively) connected to.
        cpstate_globals = get_globals()
        sim_inputs = locate_simulation_inputs(pxy)
        for sim_input_name in sim_inputs:
            if not write_frequency in cpstate_globals.write_frequencies[sim_input_name]:
                cpstate_globals.write_frequencies[sim_input_name].append(write_frequency)
                cpstate_globals.write_frequencies[sim_input_name].sort()
            if not sim_input_name in cpstate_globals.channels_needed:
                cpstate_globals.channels_needed.append(sim_input_name)

        # Prefer the prototype's XML label as the constructor name; fall back
        # to the raw xml name if no prototype is available in this build.
        prototype = pxm.GetPrototypeProxy(xmlgroup, xmlname)
        if not prototype:
            varname = self.__make_name(xmlname)
        else:
            varname = self.__make_name(prototype.GetXMLLabel())

        # Write pass array proxy
        if pxy.GetProperty("ChooseArraysToWrite").GetElement(0) == 1:
            point_arrays = []
            cell_arrays = []
            arrays_property = pxy.GetProperty("PointDataArrays")
            for i in range(arrays_property.GetNumberOfElements()):
                point_arrays.append(arrays_property.GetElement(i))
            arrays_property = pxy.GetProperty("CellDataArrays")
            for i in range(arrays_property.GetNumberOfElements()):
                cell_arrays.append(arrays_property.GetElement(i))
            f = "%s_arrays = PassArrays(Input=%s, PointDataArrays=%s, CellDataArrays=%s)" % \
                (inputname, inputname, str(point_arrays), str(cell_arrays))
            # The writer now consumes the PassArrays output instead.
            inputname = "%s_arrays" % inputname
            res.append(f)

        # Actual writer
        f = "%s = servermanager.writers.%s(Input=%s)" % (varname, writername, inputname)
        res.append(f)

        # set various writer properties, except for the filename since that will be set later.
        # point, cell and field arrays should already be set
        notNeededProperties = ['CatalystFilePattern', 'CellDataArrays', 'ChooseArraysToWrite',
                               'EdgeDataArrays', 'FieldDataArrays', 'FileName', 'FileNameSuffix',
                               'Filenamesuffix', 'Input', 'PointDataArrays', 'WriteFrequency',]
        for p in pxy.ListProperties():
            if p not in notNeededProperties and len(pxy.GetProperty(p)):
                if isinstance(pxy.GetProperty(p).GetData(), servermanager.Proxy):
                    # Proxy-valued property: emit the variable name previously
                    # recorded for that proxy, if we have one.
                    proxyvalue = pxy.GetProperty(p).GetData()
                    cpstate_globals = get_globals()
                    if proxyvalue in cpstate_globals.variable_to_name_map:
                        f = "%s.%s = %s" % (varname, p, cpstate_globals.variable_to_name_map[proxyvalue])
                        res.append(f)
                elif hasattr(pxy.GetProperty(p), 'GetElement'):
                    # Scalar-valued property: only the first element is emitted.
                    value = pxy.GetProperty(p).GetElement(0)
                    if isinstance(value, numbers.Number):
                        f = "%s.%s = %s" % (varname, p, value)
                    elif isinstance(value, str):
                        f = "%s.%s = '%s'" % (varname, p, value)
                    else:
                        f = "%s.%s = %s" % (varname, p, str(value))
                    res.append(f)

        # Register the writer with either the temporal-script helper or the
        # coprocessor, depending on the export mode.
        if self.__make_temporal_script:
            f = "STP.RegisterWriter(%s, '%s', tp_writers)" % (
                varname, filename)
        else:
            f = "coprocessor.RegisterWriter(%s, filename='%s', freq=%s, paddingamount=%s)" % (
                varname, filename, write_frequency, padding_amount)
        res.append(f)
        res.append("")

    if len(res) == 2:
        return [] # don't clutter output if there are no writers
    return res
def DumpPipeline(export_rendering, simulation_input_map, screenshot_info):
    """
    Method that will dump the current pipeline and return it as a string trace
    - export_rendering : boolean telling if we want to export rendering
    - simulation_input_map: string->string map with key being the proxyname
      while value being the simulation input name.
    - screenshot_info : map with information about screenshots
      key -> view proxy name
      value -> [filename, writefreq, fitToScreen, magnification, width, height]
    """
    # reset the global variables.
    reset_cpstate_globals()

    cpstate_globals.export_rendering = export_rendering
    cpstate_globals.simulation_input_map = simulation_input_map
    cpstate_globals.screenshot_info = screenshot_info

    # Initialize the write frequency map
    for key in cpstate_globals.simulation_input_map.values():
        cpstate_globals.write_frequencies[key] = []

    # Start trace.  cp_hook rewrites simulation inputs, views and writers
    # as coprocessor.* calls while the state is being generated.
    # (renamed from `filter` to avoid shadowing the builtin)
    proxy_filter = cpstate_filter_proxies_to_serialize()
    smtrace.RealProxyAccessor.register_create_callback(cp_hook)
    state = smstate.get_state(filter=proxy_filter, raw=True)
    smtrace.RealProxyAccessor.unregister_create_callback(cp_hook)

    # iterate over all views that were saved in state and update write frequencies
    if export_rendering:
        pxm = servermanager.ProxyManager()
        for key, vtuple in screenshot_info.iteritems():
            view = pxm.GetProxy("views", key)
            if not view:
                continue
            image_write_frequency = int(vtuple[1])
            # Locate which simulation input this write is connected to, if any. If so,
            # we update the write_frequencies datastructure accordingly.
            sim_inputs = locate_simulation_inputs_for_view(view)
            for sim_input_name in sim_inputs:
                # BUGFIX: the membership test previously checked the
                # write_frequencies dict itself (i.e. its keys) instead of the
                # per-input frequency list, so view frequencies were appended
                # unconditionally.  Check the per-input list, matching cp_hook.
                if not image_write_frequency in cpstate_globals.write_frequencies[sim_input_name]:
                    cpstate_globals.write_frequencies[sim_input_name].append(
                        image_write_frequency)
                    cpstate_globals.write_frequencies[sim_input_name].sort()

    # Create global fields values
    pipelineClassDef = "\n"
    pipelineClassDef += "# ----------------------- CoProcessor definition -----------------------\n\n"

    # Create the resulting string that will contains the pipeline definition
    pipelineClassDef += "def CreateCoProcessor():\n"
    pipelineClassDef += "  def _CreatePipeline(coprocessor, datadescription):\n"
    pipelineClassDef += "    class Pipeline:\n"

    # add the traced code, re-indented to sit inside the Pipeline class body;
    # drop wildcard-import lines which would be illegal at class scope.
    for original_line in state:
        for line in original_line.split("\n"):
            if line.find("import *") != -1 or \
                line.find("#### import the simple") != -1:
                continue
            if line:
                pipelineClassDef += "      " + line + "\n"
            else:
                pipelineClassDef += "\n"

    pipelineClassDef += "    return Pipeline()\n"
    pipelineClassDef += "\n"
    pipelineClassDef += "  class CoProcessor(coprocessing.CoProcessor):\n"
    pipelineClassDef += "    def CreatePipeline(self, datadescription):\n"
    pipelineClassDef += "      self.Pipeline = _CreatePipeline(self, datadescription)\n"
    pipelineClassDef += "\n"
    pipelineClassDef += "  coprocessor = CoProcessor()\n"
    pipelineClassDef += "  # these are the frequencies at which the coprocessor updates.\n"
    pipelineClassDef += "  freqs = " + str(
        cpstate_globals.write_frequencies) + "\n"
    pipelineClassDef += "  coprocessor.SetUpdateFrequencies(freqs)\n"
    pipelineClassDef += "  return coprocessor\n"
    return pipelineClassDef
def getProxyAsPipelineNode(id, view=None):
    """
    Create a representation for that proxy so it can be used within a pipeline
    browser.

    Returns a dict describing the proxy (name, bounds, point/cell array info,
    active coloring, scalar-bar visibility, representation type, state and an
    empty children list).
    """
    pxm = servermanager.ProxyManager()
    proxy = idToProxy(id)
    rep = simple.GetDisplayProperties(proxy)
    nbActiveComp = 1

    pointData = []
    # True when the representation is colored by a (non-empty) point array.
    searchArray = ("POINTS" == rep.ColorArrayName[0]) and (len(
        rep.ColorArrayName[1]) > 0)

    # In parallel runs expose a synthetic vtkProcessId array for coloring.
    if servermanager.ActiveConnection.GetNumberOfDataPartitions() > 1:
        info = {
            "lutId": "vtkProcessId_1",
            "name": "vtkProcessId",
            "size": 1,
            "range": [
                0,
                servermanager.ActiveConnection.GetNumberOfDataPartitions() - 1,
            ],
        }
        pointData.append(info)

    # FIXME seb
    # dataInfo = rep.GetRepresentedDataInformation()
    # pointData = dataInfo.GetPointDataInformation()
    # cellData = dataInfo.GetCellDataInformation()
    # for idx in pointData.GetNumberOfArrays():
    #     info = pointData.GetArrayInformation(idx)
    #     nbComponents = info.GetNumberOfComponents()
    #     if searchArray and array.Name == rep.ColorArrayName:
    #         nbActiveComp = nbComponents
    #     rangeOn = (nbComponents == 3 if -1 else 0)
    #     info = { \
    #         'lutId': info.GetName() + '_' + str(nbComponents), \
    #         'name': info.GetName, \
    #         'size': nbComponents, \
    #         'range': info.GetRange(rangeOn) }
    #     pointData.append(info)

    for array in proxy.GetPointDataInformation():
        nbComponents = array.GetNumberOfComponents()
        if searchArray and array.Name == rep.ColorArrayName[1]:
            nbActiveComp = nbComponents
        # BUGFIX: the original `nbComponents == 1 if 0 else -1` parses as
        # `(nbComponents == 1) if 0 else -1` and always yields -1.  Intended:
        # component 0 range for scalars, magnitude range (-1) for vectors.
        rangeOn = 0 if nbComponents == 1 else -1
        info = {
            "lutId": array.Name + "_" + str(nbComponents),
            "name": array.Name,
            "size": nbComponents,
            "range": array.GetRange(rangeOn),
        }
        pointData.append(info)

    cellData = []
    # Same lookup, now against the cell-data coloring selection.
    searchArray = ("CELLS" == rep.ColorArrayName[0]) and (len(
        rep.ColorArrayName[1]) > 0)
    for array in proxy.GetCellDataInformation():
        nbComponents = array.GetNumberOfComponents()
        if searchArray and array.Name == rep.ColorArrayName[1]:
            nbActiveComp = nbComponents
        # Same precedence fix as the point-data loop above.
        rangeOn = 0 if nbComponents == 1 else -1
        info = {
            "lutId": array.Name + "_" + str(nbComponents),
            "name": array.Name,
            "size": nbComponents,
            "range": array.GetRange(rangeOn),
        }
        cellData.append(info)

    state = getProxyAsState(proxy.GetGlobalID())
    showScalarbar = (1 if view and vtkSMPVRepresentationProxy.IsScalarBarVisible(
        rep.SMProxy, view.SMProxy) else 0)
    repName = "Hide"
    if rep.Visibility == 1:
        repName = rep.Representation

    return {
        "proxy_id": proxy.GetGlobalID(),
        "name": pxm.GetProxyName("sources", proxy),
        "bounds": proxy.GetDataInformation().GetBounds(),
        "pointData": pointData,
        "cellData": cellData,
        "activeData": str(rep.ColorArrayName[0]) + ":" + str(rep.ColorArrayName[1]),
        "diffuseColor": str(rep.DiffuseColor),
        "showScalarBar": showScalarbar,
        "representation": repName,
        "state": state,
        "children": [],
    }