def createTemporalTransformsAxes(source="default", renderView="default"):
    """Show the orientation part of a TemporalTransforms source.

    The x, y and z unit vectors of the sensor reference frame are
    visualized by creating one calculator per axis.  The source must
    provide transforms; for example it could be a
    TemporalTransformReader, or the trajectory output of Lidar SLAM.

    ParaView pipeline elements are created.  They can be removed later
    with deleteAllTemporalTransformsAxes().
    """
    # GetActiveSource() and GetRenderView() cannot be evaluated inside the
    # default parameter values, so the "default" sentinels are resolved here.
    if source == "default":
        source = smp.GetActiveSource()
    if renderView == "default":
        renderView = smp.GetRenderView()
    smp.SetActiveSource(source)  # maybe useless
    # One calculator per unit axis of the sensor frame.
    for axisName, axisColor, axisFunction in (("RX", [1.0, 0.0, 0.0], fx),
                                              ("RY", [0.0, 1.0, 0.0], fy),
                                              ("RZ", [0.0, 0.0, 1.0], fz)):
        setupCalculator(source, axisName, axisColor, axisFunction, renderView)
    smp.Render()
def ClearSelection(Source=None):
    """Clear the selection on a source proxy.

    Parameters
    ----------
    Source : proxy, optional
        The source whose selection should be cleared.  When omitted (or
        None), the active source is used.
    """
    # Use an identity check: ``== None`` may invoke overloaded comparison
    # operators on proxy objects, while ``is None`` is always safe.
    if Source is None:
        Source = ps.GetActiveSource()
    Source.SMProxy.SetSelectionInput(0, None, 0)
def GetSelectionSource(proxy=None):
    """Return the selection source attached to a proxy, if one exists.

    If *proxy* is not specified, the active source is used.

    Raises
    ------
    RuntimeError
        If no proxy is given and no active source is set.
    """
    if not proxy:
        proxy = smp.GetActiveSource()
    if not proxy:
        # Parenthesized raise form: the original ``raise RuntimeError, msg``
        # is Python-2-only syntax.  Message typo ("of that") also fixed.
        raise RuntimeError(
            "GetSelectionSource() needs a proxy argument or that an "
            "active source is set.")
    return proxy.GetSelectionInput(proxy.Port)
def fitPlane():
    """Fit a plane to the currently selected points and print statistics.

    Extracts the active selection, runs vtkPlaneFitter.PlaneFit on it
    (fixed 32 channels) and prints a tab-separated table with one row
    per channel plus an overall row.  The extraction filter is always
    deleted and the original active source restored.
    """
    src = smp.GetActiveSource()
    selection = GetSelectionSource(src)
    if not selection:
        return
    extractor = smp.ExtractSelection()
    extractor.Selection = selection
    extractor.Input = src
    smp.Show(extractor)
    try:
        pd = extractor.GetClientSideObject().GetOutput()
        # Output containers filled in-place by PlaneFit.
        origin = range(3)
        normal = range(3)
        mind, maxd, stddev = vtk.mutable(0), vtk.mutable(0), vtk.mutable(0)
        channelMean = range(32)
        channelStdDev = range(32)
        channelNpts = range(32)
        vvmod.vtkPlaneFitter.PlaneFit(pd, origin, normal, mind, maxd, stddev,
                                      channelMean, channelStdDev, channelNpts)
        rows = [['overall', origin, normal, 0.0, stddev, stddev,
                 pd.GetNumberOfPoints()]]
        rows = rows + [['%d' % i, origin, normal, channelMean[i],
                        channelStdDev[i],
                        math.sqrt(channelMean[i]**2 + channelStdDev[i]**2),
                        channelNpts[i]]
                       for i in range(32)]

        def rowconverter(x):
            # Format a cell: sequences become tab-joined floats, scalars
            # (possibly vtk.mutable) are formatted individually.
            try:
                return '\t'.join(['%.4f' % d for d in x])
            except TypeError:
                try:
                    x = x.get()
                except AttributeError:
                    pass
                if type(x) == float:
                    return '%.4f' % x
                elif type(x) in (int, long):
                    return '%d' % x
                else:
                    return x

        print('\t'.join(['channel', 'originx', 'originy', 'originz',
                         'normalx', 'normaly', 'normalz',
                         'mean', 'stddev', 'RMS', 'npts']))
        for r in rows:
            if r[-1] == 0:
                # Empty channel: statistics are meaningless.
                r[-4:-1] = ['nan', 'nan', 'nan']
            print('\t'.join([rowconverter(x) for x in r]))
    finally:
        smp.Delete(extractor)
        smp.SetActiveSource(src)
def animate(fn):
    """Open an OpenFOAM case file and loop its time steps forever.

    Colors by the 'U' point array, annotates the current time, and
    renders every time step in an endless loop until interrupted with
    Ctrl-C (which exits the process).
    """
    casefoam = pv.OpenFOAMReader(FileName=fn)
    pv.Show(casefoam)
    displayProps = pv.GetDisplayProperties(casefoam)
    displayProps.SetPropertyWithName('ColorArrayName', ['POINTS', 'U'])
    view = pv.GetActiveView()
    reader = pv.GetActiveSource()
    timeSteps = reader.TimestepValues
    timeAnnotation = pv.AnnotateTimeFilter(reader)
    pv.Show(timeAnnotation)
    pv.Render()
    # Replay the animation until the user interrupts.
    while True:
        try:
            for t in timeSteps:
                view.ViewTime = t
                pv.Render()
        except KeyboardInterrupt:
            sys.exit(0)
def main():
    """Render the active source's atoms as spheres colored by atomic number.

    Works with both ParaView <= 4 (lookup-table API) and newer releases
    (ColorBy API).
    """
    import paraview.simple as para
    version_major = para.servermanager.vtkSMProxyManager.GetVersionMajor()
    source = para.GetActiveSource()
    renderView1 = para.GetRenderView()
    atoms = para.Glyph(
        Input=source,
        GlyphType='Sphere',
        Scalars='radii',
        ScaleMode='scalar',
    )
    para.RenameSource('Atoms', atoms)
    atomsDisplay = para.Show(atoms, renderView1)
    if version_major <= 4:
        # NOTE(review): this assigns an attribute named ``SetScaleFactor``
        # rather than setting the ``ScaleFactor`` property as the newer
        # branch does — presumably intentional for the old proxy API, but
        # worth confirming on a ParaView 4 install.
        atoms.SetScaleFactor = 0.8
        atomicnumbers_PVLookupTable = para.GetLookupTableForArray(
            'atomic numbers', 1)
        atomsDisplay.ColorArrayName = ('POINT_DATA', 'atomic numbers')
        atomsDisplay.LookupTable = atomicnumbers_PVLookupTable
    else:
        atoms.ScaleFactor = 0.8
        para.ColorBy(atomsDisplay, 'atomic numbers')
        atomsDisplay.SetScalarBarVisibility(renderView1, True)
    para.Render()
def get_state(options=None, source_set=[], filter=None, raw=False):
    """Returns the state string"""
    if options:
        options = sm._getPyProxy(options)
        propertiesToTraceOnCreate = options.PropertiesToTraceOnCreate
        skipHiddenRepresentations = options.SkipHiddenDisplayProperties
        skipRenderingComponents = options.SkipRenderingComponents
    else:
        propertiesToTraceOnCreate = RECORD_MODIFIED_PROPERTIES
        skipHiddenRepresentations = True
        skipRenderingComponents = False

    # essential to ensure any obsolete accessors don't linger - can cause havoc
    # when saving state following a Python trace session
    # (paraview/paraview#18994)
    import gc
    gc.collect()

    if sm.vtkSMTrace.GetActiveTracer():
        raise RuntimeError("Cannot generate Python state when tracing is active.")

    if filter is None:
        filter = visible_representations() if skipHiddenRepresentations else supported_proxies()

    # build a set of proxies of interest
    if source_set:
        start_set = source_set
    else:
        # if nothing is specified, we save all views and sources.
        start_set = [x for x in simple.GetSources().values()] + simple.GetViews()
    start_set = [x for x in start_set if filter(x)]

    # now, locate dependencies for the start_set, pruning irrelevant branches
    consumers = set(start_set)
    for proxy in start_set:
        get_consumers(proxy, filter, consumers)

    producers = set()
    for proxy in consumers:
        get_producers(proxy, filter, producers)

    # proxies_of_interest is set of all proxies that we should trace.
    proxies_of_interest = producers.union(consumers)
    #print ("proxies_of_interest", proxies_of_interest)

    trace_config = smtrace.start_trace(preamble="")
    # this ensures that lookup tables/scalar bars etc. are fully traced.
    trace_config.SetFullyTraceSupplementalProxies(True)
    trace_config.SetSkipRenderingComponents(skipRenderingComponents)

    trace = smtrace.TraceOutput()
    trace.append("# state file generated using %s" % simple.GetParaViewSourceVersion())
    trace.append_separated(smtrace.get_current_trace_output_and_reset(raw=True))

    #--------------------------------------------------------------------------
    # We trace the views and layouts, if any.
    if skipRenderingComponents:
        views = []
    else:
        views = [x for x in proxies_of_interest
                 if smtrace.Trace.get_registered_name(x, "views")]

    if views:
        # sort views by their names, so the state has some structure to it.
        views = sorted(views, key=lambda x:
                       smtrace.Trace.get_registered_name(x, "views"))
        trace.append_separated([
            "# ----------------------------------------------------------------",
            "# setup views used in the visualization",
            "# ----------------------------------------------------------------"])
        for view in views:
            # FIXME: save view camera positions and size.
            traceitem = smtrace.RegisterViewProxy(view)
            traceitem.finalize()
            del traceitem
        trace.append_separated(smtrace.get_current_trace_output_and_reset(raw=True))
        trace.append_separated(["SetActiveView(None)"])

    # from views, build the list of layouts of interest.
    layouts = set()
    for aview in views:
        l = simple.GetLayout(aview)
        if l:
            layouts.add(simple.GetLayout(aview))

    # trace create of layouts
    if layouts:
        layouts = sorted(layouts, key=lambda x:
                         smtrace.Trace.get_registered_name(x, "layouts"))
        trace.append_separated([
            "# ----------------------------------------------------------------",
            "# setup view layouts",
            "# ----------------------------------------------------------------"])
        for layout in layouts:
            traceitem = smtrace.RegisterLayoutProxy(layout)
            traceitem.finalize(filter=lambda x: x in views)
            del traceitem
        trace.append_separated(smtrace.get_current_trace_output_and_reset(raw=True))

    if views:
        # restore the active view after the layouts have been created.
        trace.append_separated([
            "# ----------------------------------------------------------------",
            "# restore active view",
            "SetActiveView(%s)" % smtrace.Trace.get_accessor(simple.GetActiveView()),
            "# ----------------------------------------------------------------"])

    #--------------------------------------------------------------------------
    # Next, trace data processing pipelines.
    sorted_proxies_of_interest = __toposort(proxies_of_interest)
    sorted_sources = [x for x in sorted_proxies_of_interest
                      if smtrace.Trace.get_registered_name(x, "sources")]
    if sorted_sources:
        trace.append_separated([
            "# ----------------------------------------------------------------",
            "# setup the data processing pipelines",
            "# ----------------------------------------------------------------"])
        for source in sorted_sources:
            traceitem = smtrace.RegisterPipelineProxy(source)
            traceitem.finalize()
            del traceitem
        trace.append_separated(smtrace.get_current_trace_output_and_reset(raw=True))

    #--------------------------------------------------------------------------
    # Can't decide if the representations should be saved with the pipeline
    # objects or afterwards, opting for afterwards for now since the topological
    # sort doesn't guarantee that the representations will follow their sources
    # anyways.
    sorted_representations = [x for x in sorted_proxies_of_interest
                              if smtrace.Trace.get_registered_name(x, "representations")]
    scalarbar_representations = [x for x in sorted_proxies_of_interest
                                 if smtrace.Trace.get_registered_name(x, "scalar_bars")]
    # print ("sorted_representations", sorted_representations)
    # print ("scalarbar_representations", scalarbar_representations)
    if not skipRenderingComponents and (sorted_representations or scalarbar_representations):
        for view in views:
            view_representations = [x for x in view.Representations
                                    if x in sorted_representations]
            view_scalarbars = [x for x in view.Representations
                               if x in scalarbar_representations]
            if view_representations or view_scalarbars:
                trace.append_separated([
                    "# ----------------------------------------------------------------",
                    "# setup the visualization in view '%s'" % smtrace.Trace.get_accessor(view),
                    "# ----------------------------------------------------------------"])
            for rep in view_representations:
                try:
                    producer = rep.Input
                    port = rep.Input.Port
                    traceitem = smtrace.Show(producer, port, view, rep,
                                             comment="show data from %s" % smtrace.Trace.get_accessor(producer))
                    traceitem.finalize()
                    del traceitem
                    trace.append_separated(smtrace.get_current_trace_output_and_reset(raw=True))

                    if rep.UseSeparateColorMap:
                        trace.append_separated([
                            "# set separate color map",
                            "%s.UseSeparateColorMap = True" % (
                                smtrace.Trace.get_accessor(rep))])
                except AttributeError:
                    pass
            # save the scalar bar properties themselves.
            if view_scalarbars:
                trace.append_separated("# setup the color legend parameters for each legend in this view")
                for rep in view_scalarbars:
                    smtrace.Trace.get_accessor(rep)
                    trace.append_separated(smtrace.get_current_trace_output_and_reset(raw=True))
                    trace.append_separated([
                        "# set color bar visibility",
                        "%s.Visibility = %s" % (
                            smtrace.Trace.get_accessor(rep), rep.Visibility)])

            for rep in view_representations:
                try:
                    producer = rep.Input
                    port = rep.Input.Port

                    if rep.IsScalarBarVisible(view):
                        # FIXME: this will save this multiple times, right now,
                        # if two representations use the same LUT.
                        trace.append_separated([
                            "# show color legend",
                            "%s.SetScalarBarVisibility(%s, True)" % (
                                smtrace.Trace.get_accessor(rep),
                                smtrace.Trace.get_accessor(view))])

                    if not rep.Visibility:
                        traceitem = smtrace.Hide(producer, port, view)
                        traceitem.finalize()
                        del traceitem
                    trace.append_separated(smtrace.get_current_trace_output_and_reset(raw=True))
                except AttributeError:
                    pass

    #--------------------------------------------------------------------------
    # Now, trace the transfer functions (color maps and opacity maps) used.
    ctfs = set([x for x in proxies_of_interest
                if smtrace.Trace.get_registered_name(x, "lookup_tables")])
    if not skipRenderingComponents and ctfs:
        trace.append_separated([
            "# ----------------------------------------------------------------",
            "# setup color maps and opacity mapes used in the visualization",
            "# note: the Get..() functions create a new object, if needed",
            "# ----------------------------------------------------------------"])
        for ctf in ctfs:
            smtrace.Trace.get_accessor(ctf)
            if ctf.ScalarOpacityFunction in proxies_of_interest:
                smtrace.Trace.get_accessor(ctf.ScalarOpacityFunction)
        trace.append_separated(smtrace.get_current_trace_output_and_reset(raw=True))

    # Trace extract generators.
    exgens = set([x for x in proxies_of_interest
                  if smtrace.Trace.get_registered_name(x, "extract_generators")])
    if exgens:
        trace.append_separated([
            "# ----------------------------------------------------------------",
            "# setup extract generators",
            "# ----------------------------------------------------------------"])
        for exgen in exgens:
            # FIXME: this currently doesn't handle multiple output ports
            # correctly.
            traceitem = smtrace.CreateExtractGenerator(
                xmlname=exgen.Writer.GetXMLName(),
                producer=exgen.Producer,
                generator=exgen,
                registrationName=smtrace.Trace.get_registered_name(exgen, "extract_generators"))
            traceitem.finalize()
            del traceitem
        trace.append_separated(smtrace.get_current_trace_output_and_reset(raw=True))

    # restore the active source since the order in which the pipeline is created
    # in the state file can end up changing the active source to be different
    # than what it was when the state is being saved.
    trace.append_separated([
        "# ----------------------------------------------------------------",
        "# restore active source",
        "SetActiveSource(%s)" % smtrace.Trace.get_accessor(simple.GetActiveSource()),
        "# ----------------------------------------------------------------"])

    if options:
        # add coda about extracts generation.
        trace.append_separated(["",
            "if __name__ == '__main__':",
            "    # generate extracts",
            "    SaveExtracts(ExtractsOutputDirectory='%s')" % options.ExtractsOutputDirectory])

    del trace_config
    smtrace.stop_trace()
    #print (trace)
    return str(trace) if not raw else trace.raw_data()
def get_state(
        propertiesToTraceOnCreate=1,  # sm.vtkSMTrace.RECORD_MODIFIED_PROPERTIES,
        skipHiddenRepresentations=True,
        source_set=[],
        filter=None,
        raw=False):
    """Returns the state string"""
    if sm.vtkSMTrace.GetActiveTracer():
        raise RuntimeError(
            "Cannot generate Python state when tracing is active.")

    if filter is None:
        filter = visible_representations(
        ) if skipHiddenRepresentations else supported_proxies()

    # build a set of proxies of interest
    if source_set:
        start_set = source_set
    else:
        # if nothing is specified, we save all views and sources.
        start_set = [x for x in simple.GetSources().values()
                     ] + simple.GetViews()
    start_set = [x for x in start_set if filter(x)]

    # now, locate dependencies for the start_set, pruning irrelevant branches
    consumers = set(start_set)
    for proxy in start_set:
        get_consumers(proxy, filter, consumers)

    producers = set()
    for proxy in consumers:
        get_producers(proxy, filter, producers)

    # proxies_of_interest is set of all proxies that we should trace.
    proxies_of_interest = producers.union(consumers)
    #print ("proxies_of_interest", proxies_of_interest)

    trace_config = smtrace.start_trace()
    # this ensures that lookup tables/scalar bars etc. are fully traced.
    trace_config.SetFullyTraceSupplementalProxies(True)

    trace = smtrace.TraceOutput()
    trace.append("# state file generated using %s" %
                 simple.GetParaViewSourceVersion())

    #--------------------------------------------------------------------------
    # First, we trace the views and layouts, if any.
    # TODO: add support for layouts.
    views = [
        x for x in proxies_of_interest
        if smtrace.Trace.get_registered_name(x, "views")
    ]
    if views:
        # sort views by their names, so the state has some structure to it.
        views = sorted(views, key=lambda x:
                       smtrace.Trace.get_registered_name(x, "views"))
        trace.append_separated([
            "# ----------------------------------------------------------------",
            "# setup views used in the visualization",
            "# ----------------------------------------------------------------"])
        for view in views:
            # FIXME: save view camera positions and size.
            traceitem = smtrace.RegisterViewProxy(view)
            traceitem.finalize()
            del traceitem
        trace.append_separated(
            smtrace.get_current_trace_output_and_reset(raw=True))
        trace.append_separated([
            "# ----------------------------------------------------------------",
            "# restore active view",
            "SetActiveView(%s)" % smtrace.Trace.get_accessor(simple.GetActiveView()),
            "# ----------------------------------------------------------------"])

    #--------------------------------------------------------------------------
    # Next, trace data processing pipelines.
    sorted_proxies_of_interest = __toposort(proxies_of_interest)
    sorted_sources = [x for x in sorted_proxies_of_interest
                      if smtrace.Trace.get_registered_name(x, "sources")]
    if sorted_sources:
        trace.append_separated([
            "# ----------------------------------------------------------------",
            "# setup the data processing pipelines",
            "# ----------------------------------------------------------------"])
        for source in sorted_sources:
            traceitem = smtrace.RegisterPipelineProxy(source)
            traceitem.finalize()
            del traceitem
        trace.append_separated(
            smtrace.get_current_trace_output_and_reset(raw=True))

    #--------------------------------------------------------------------------
    # Can't decide if the representations should be saved with the pipeline
    # objects or afterwords, opting for afterwords for now since the topological
    # sort doesn't guarantee that the representations will follow their sources
    # anyways.
    sorted_representations = [x for x in sorted_proxies_of_interest
                              if smtrace.Trace.get_registered_name(x, "representations")]
    scalarbar_representations = [x for x in sorted_proxies_of_interest
                                 if smtrace.Trace.get_registered_name(x, "scalar_bars")]
    # print ("sorted_representations", sorted_representations)
    # print ("scalarbar_representations", scalarbar_representations)
    if sorted_representations or scalarbar_representations:
        for view in views:
            view_representations = [
                x for x in view.Representations if x in sorted_representations
            ]
            view_scalarbars = [
                x for x in view.Representations
                if x in scalarbar_representations
            ]
            if view_representations or view_scalarbars:
                trace.append_separated([
                    "# ----------------------------------------------------------------",
                    "# setup the visualization in view '%s'" % smtrace.Trace.get_accessor(view),
                    "# ----------------------------------------------------------------"])
            for rep in view_representations:
                try:
                    producer = rep.Input
                    port = rep.Input.Port
                    traceitem = smtrace.Show(
                        producer, port, view, rep,
                        comment="show data from %s" %
                        smtrace.Trace.get_accessor(producer))
                    traceitem.finalize()
                    del traceitem
                    trace.append_separated(
                        smtrace.get_current_trace_output_and_reset(raw=True))

                    if rep.UseSeparateColorMap:
                        trace.append_separated([
                            "# set separate color map",
                            "%s.UseSeparateColorMap = True" % (
                                smtrace.Trace.get_accessor(rep))])
                except AttributeError:
                    pass
            # save the scalar bar properties themselves.
            if view_scalarbars:
                trace.append_separated(
                    "# setup the color legend parameters for each legend in this view"
                )
                for rep in view_scalarbars:
                    smtrace.Trace.get_accessor(rep)
                    trace.append_separated(
                        smtrace.get_current_trace_output_and_reset(raw=True))
                    trace.append_separated([
                        "# set color bar visibility",
                        "%s.Visibility = %s" % (
                            smtrace.Trace.get_accessor(rep), rep.Visibility)])

            for rep in view_representations:
                try:
                    producer = rep.Input
                    port = rep.Input.Port

                    if rep.IsScalarBarVisible(view):
                        # FIXME: this will save this multiple times, right now,
                        # if two representations use the same LUT.
                        trace.append_separated([
                            "# show color legend",
                            "%s.SetScalarBarVisibility(%s, True)" % (
                                smtrace.Trace.get_accessor(rep),
                                smtrace.Trace.get_accessor(view))])

                    if not rep.Visibility:
                        traceitem = smtrace.Hide(producer, port, view)
                        traceitem.finalize()
                        del traceitem
                        trace.append_separated(
                            smtrace.get_current_trace_output_and_reset(
                                raw=True))
                except AttributeError:
                    pass

    #--------------------------------------------------------------------------
    # Now, trace the transfer functions (color maps and opacity maps) used.
    ctfs = set([x for x in proxies_of_interest
                if smtrace.Trace.get_registered_name(x, "lookup_tables")])
    if ctfs:
        trace.append_separated([
            "# ----------------------------------------------------------------",
            "# setup color maps and opacity mapes used in the visualization",
            "# note: the Get..() functions create a new object, if needed",
            "# ----------------------------------------------------------------"])
        for ctf in ctfs:
            smtrace.Trace.get_accessor(ctf)
            if ctf.ScalarOpacityFunction in proxies_of_interest:
                smtrace.Trace.get_accessor(ctf.ScalarOpacityFunction)
        trace.append_separated(
            smtrace.get_current_trace_output_and_reset(raw=True))

    # restore the active source since the order in which the pipeline is created
    # in the state file can end up changing the active source to be different
    # than what it was when the state is being saved.
    trace.append_separated([
        "# ----------------------------------------------------------------",
        "# finally, restore active source",
        "SetActiveSource(%s)" % smtrace.Trace.get_accessor(simple.GetActiveSource()),
        "# ----------------------------------------------------------------"])

    del trace_config
    smtrace.stop_trace()
    #print (trace)
    return str(trace) if not raw else trace.raw_data()
def fitPlane():
    """Fit a plane to the currently selected points and print statistics.

    Variant that merges multiblock input into a single dataset and
    derives the channel count from the maximum ``laser_id`` present
    (rounded up to the next power of two).  Prints a tab-separated
    table with one row per channel plus an overall row.  The extraction
    filter is always deleted and the original active source restored.
    """
    src = smp.GetActiveSource()
    if not src:
        return
    selection = GetSelectionSource(src)
    if not selection:
        return
    extractor = smp.ExtractSelection()
    extractor.Selection = selection
    extractor.Input = src
    smp.Show(extractor)
    try:
        pd = extractor.GetClientSideObject().GetOutput()
        if pd.IsTypeOf("vtkMultiBlockDataSet"):
            # Flatten the multiblock dataset so PlaneFit sees one dataset.
            appendFilter = vtk.vtkAppendFilter()
            for i in range(pd.GetNumberOfBlocks()):
                appendFilter.AddInputData(pd.GetBlock(i))
            appendFilter.Update()
            pd = appendFilter.GetOutput()
        # Channel count: next power of two >= max laser id.
        max_laser_id = pd.GetPointData().GetArray("laser_id").GetRange()[1]
        nchannels = 2**vtk.vtkMath.CeilLog2(int(max_laser_id))
        # Output containers filled in-place by PlaneFit.
        origin = range(3)
        normal = range(3)
        mind, maxd, stddev = vtk.mutable(0), vtk.mutable(0), vtk.mutable(0)
        channelMean = range(nchannels)
        channelStdDev = range(nchannels)
        channelNpts = range(nchannels)
        vvmod.vtkPlaneFitter.PlaneFit(pd, origin, normal, mind, maxd, stddev,
                                      channelMean, channelStdDev, channelNpts,
                                      nchannels)
        rows = [['overall', origin, normal, 0.0, stddev, stddev,
                 pd.GetNumberOfPoints()]]
        rows = rows + [['%d' % i, origin, normal, channelMean[i],
                        channelStdDev[i],
                        math.sqrt(channelMean[i]**2 + channelStdDev[i]**2),
                        channelNpts[i]]
                       for i in range(nchannels)]

        def rowconverter(x):
            # Format a cell: sequences become tab-joined floats, scalars
            # (possibly vtk.mutable) are formatted individually.
            try:
                return '\t'.join(['%.4f' % d for d in x])
            except TypeError:
                try:
                    x = x.get()
                except AttributeError:
                    pass
                if type(x) == float:
                    return '%.4f' % x
                elif type(x) in (int, long):
                    return '%d' % x
                else:
                    return x

        print('\t'.join(['channel', 'originx', 'originy', 'originz',
                         'normalx', 'normaly', 'normalz',
                         'mean', 'stddev', 'RMS', 'npts']))
        for r in rows:
            if r[-1] == 0:
                # Empty channel: statistics are meaningless.
                r[-4:-1] = ['nan', 'nan', 'nan']
            print('\t'.join([rowconverter(x) for x in r]))
    finally:
        smp.Delete(extractor)
        smp.SetActiveSource(src)
iterator.GoToNextItem() append.Update() output.ShallowCopy(append.GetOutput()) """ class WrongInput(Exception): def __init__(self, value): self.value = value def __str__(self): return repr(self.value) # get and check active source source = ps.GetActiveSource() if source is None: raise WrongInput('No source selected.') info = source.GetDataInformation().DataInformation if info is None: raise WrongInput('Source has no information, uncomplete.') composite_class = info.GetCompositeDataClassName() if composite_class is None or not composite_class == 'vtkMultiBlockDataSet': raise WrongInput( 'Source produce wrong type of data. MultiBlockDataSet required.') # make a filter to collect all well lines in order # to workaround a bug that do not porduce selsection labels # on composite datasets merged = ps.ProgrammableFilter() merged.Script = merge_groups_script
def RequestData():
    # R.0.2018.080
    import sys
    sys.path.insert(0, "EMC_SRC_PATH")
    from datetime import datetime
    import numpy as np
    from vtk.numpy_interface import dataset_adapter as dsa
    from vtk.util import numpy_support
    import IrisEMC_Paraview_Lib as lib
    import paraview.simple as simple

    # Reuse an existing SpreadSheetView if one exists; otherwise split the
    # active view to make room for one.
    # NOTE(review): nesting of the layout-split under ``else`` is inferred
    # from the mangled source — confirm against the original plugin.
    views = simple.GetViews(viewtype="SpreadSheetView")
    if len(views) > 0:
        simple.Delete(views[0])
    else:
        view = simple.GetActiveView()
        layout = simple.GetLayout(view)
        locationId = layout.SplitViewVertical(view=view, fraction=0.7)

    myId = simple.GetActiveSource().Input.GetGlobalIDAsString()
    proxies = simple.GetSources()
    proxyList = []
    for key in proxies:
        listElt = {}
        listElt['name'] = key[0]
        listElt['id'] = key[1]
        proxy = proxies[key]
        parentId = '0'
        if hasattr(proxy, 'Input'):
            parentId = proxy.Input.GetGlobalIDAsString()
        listElt['parent'] = parentId
        proxyList.append(listElt)

    pdi = self.GetInput()  # VTK PolyData Type
    # NOTE(review): this rebinds ``np`` (imported above as numpy) to the
    # point count — kept as-is since numpy is not used afterwards.
    np = pdi.GetNumberOfPoints()
    depthMin = 9999999999999.0
    depthMax = -9999999999999.0
    latitude = {}
    longitude = {}

    pdo = self.GetOutput()  # VTK Table Type
    polyData = vtk.vtkPolyData()
    dataPoints = vtk.vtkPoints()

    # Default label: derive it from the name of the input proxy.
    if len(Label.strip()) <= 0:
        pid = simple.GetActiveSource().Input.GetGlobalIDAsString()
        proxies = simple.GetSources()
        for key in proxies:
            if key[1] == pid:
                Label = " ".join(["Coordinates View:", key[0]])
                break

    # Convert each point to (lat, lon, depth) and bucket lat/lon by depth.
    for i in range(np):
        point = pdi.GetPoints().GetPoint(i)
        (lat, lon, depth) = lib.xyz2llz(point[0], point[1], point[2])
        dataPoints.InsertNextPoint((lat, lon, depth))
        key = "%0.1f" % (depth)
        if depthMin >= float(key):
            depthMin = float(key)
            depthMinKey = key
        if depthMax <= float(key):
            depthMax = float(key)
            depthMaxKey = key
        if key not in latitude.keys():
            latitude[key] = []
            longitude[key] = []
        latitude[key].append(float("%0.1f" % (lat)))
        longitude[key].append(float("%0.1f" % (lon)))

    # store boundary metadata
    fieldData = polyData.GetFieldData()
    fieldData.AllocateArrays(3)  # number of fields
    depthData = vtk.vtkStringArray()
    depthData.SetName('Depth\n(km)')
    data = vtk.vtkStringArray()
    data.SetName('Corners (lat,lon)\n(degrees)')

    depthKeys = [depthMinKey, depthMaxKey]
    if depthMinKey == depthMaxKey:
        depthKeys = [depthMinKey]
    for i in range(len(depthKeys)):
        depthKey = depthKeys[i]
        borderLat = []
        borderLon = []
        oldMin = 999999999.0
        oldMax = -99999999.0
        lonList = list(set(sorted(longitude[depthKey])))
        # For each longitude, record where the latitude extent changes so
        # that only the corner points of the boundary are kept.
        for j in range(len(lonList)):
            lon = lonList[j]
            minVal = 999999999.0
            maxVal = -99999999.0
            for i in range(len(longitude[depthKey])):
                if longitude[depthKey][i] == lon:
                    if latitude[depthKey][i] > maxVal:
                        maxVal = latitude[depthKey][i]
                    if latitude[depthKey][i] < minVal:
                        minVal = latitude[depthKey][i]
            if oldMin != minVal or j == len(lonList) - 1:
                if abs(oldMin) < 9999.0:
                    borderLat.append(oldMin)
                    borderLon.append(lon)
                borderLat.append(minVal)
                borderLon.append(lon)
                oldMin = minVal
            if oldMax != maxVal or j == len(lonList) - 1:
                if abs(oldMax) < 9999.0:
                    borderLat.append(oldMax)
                    borderLon.append(lon)
                borderLat.append(maxVal)
                borderLon.append(lon)
                oldMax = maxVal
        borderList = zip(borderLat, borderLon)
        borderList.sort()
        borderList = list(set(borderList))
        min1 = borderList[0][0]
        max1 = borderList[0][0]
        for i in range(len(borderList)):
            if borderList[i][0] < min1:
                min1 = borderList[i][0]
            if borderList[i][0] > max1:
                max1 = borderList[i][0]
        minList = []
        maxList = []
        for i in range(len(borderList)):
            if borderList[i][0] == min1:
                minList.append(borderList[i][1])
            if borderList[i][0] == max1:
                maxList.append(borderList[i][1])
        depthData.InsertNextValue(depthKey)
        data.InsertNextValue("%0.1f, %0.1f" % (min1, min(minList)))
        if min(minList) != max(minList):
            depthData.InsertNextValue(" ")
            data.InsertNextValue("%0.1f, %0.1f" % (min1, max(minList)))
        depthData.InsertNextValue(" ")
        data.InsertNextValue("%0.1f, %0.1f" % (max1, max(maxList)))
        if min(maxList) != max(maxList):
            depthData.InsertNextValue(" ")
            data.InsertNextValue("%0.1f, %0.1f" % (max1, min(maxList)))
    fieldData.AddArray(data)
    fieldData.AddArray(depthData)

    if len(Label.strip()) > 0:
        simple.RenameSource(Label)

    pdo.SetFieldData(fieldData)
def RequestData():
    # R.1.2018.354
    import sys
    sys.path.insert(0, "EMC_SRC_PATH")
    from operator import itemgetter
    from datetime import datetime
    import numpy as np
    from vtk.numpy_interface import dataset_adapter as dsa
    from vtk.util import numpy_support
    import IrisEMC_Paraview_Lib as lib
    import paraview.simple as simple

    # Reuse an existing SpreadSheetView if one exists; otherwise split the
    # active view to make room for one.
    # NOTE(review): nesting of the layout-split under ``else`` is inferred
    # from the mangled source — confirm against the original plugin.
    views = simple.GetViews(viewtype="SpreadSheetView")
    if len(views) > 0:
        # set active view
        view = simple.SetActiveView(views[0])
    else:
        view = simple.GetActiveView()
        layout = simple.GetLayout(view)
        location_id = layout.SplitViewVertical(view=view, fraction=0.7)

    myId = simple.GetActiveSource().Input.GetGlobalIDAsString()
    proxies = simple.GetSources()
    proxyList = []
    for key in proxies:
        list_elt = dict()
        list_elt['name'] = key[0]
        list_elt['id'] = key[1]
        proxy = proxies[key]
        parent_id = '0'
        if hasattr(proxy, 'Input'):
            parent_id = proxy.Input.GetGlobalIDAsString()
        list_elt['parent'] = parent_id
        proxyList.append(list_elt)

    pdi = self.GetInput()  # VTK PolyData Type
    try:
        np = pdi.GetNumberOfPoints()
    except Exception:
        raise Exception('Invalid input!')

    na = pdi.GetPointData().GetNumberOfArrays()
    val_arrays = []
    for i in range(na):
        val_arrays.append(pdi.GetPointData().GetArray(i))

    latitude = {}
    longitude = {}
    value = {}
    depth = {}

    pdo = self.GetOutput()  # VTK Table Type
    poly_data = vtk.vtkPolyData()
    data_points = vtk.vtkPoints()

    # Default label: derive it from the name of the input proxy.
    if len(Label.strip()) <= 0:
        pid = simple.GetActiveSource().Input.GetGlobalIDAsString()
        proxies = simple.GetSources()
        for key in proxies:
            if key[1] == pid:
                Label = " ".join(["Coordinates:", key[0]])
                break

    # Convert each point to (lat, lon, depth) and bucket per depth key.
    for i in range(np):
        point = pdi.GetPoints().GetPoint(i)
        (lat, lon, this_depth) = lib.xyz2llz(point[0], point[1], point[2])
        data_points.InsertNextPoint((lat, lon, this_depth))
        key = "%0.2f" % this_depth
        if key not in list(latitude.keys()):
            latitude[key] = []
            longitude[key] = []
            value[key] = []
        # need to control precision to have a reasonable sort order
        # note that these coordinates are recomputed
        if key not in list(depth.keys()):
            depth[key] = float('%0.4f' % this_depth)
        latitude[key].append(float('%0.4f' % lat))
        longitude[key].append(float('%0.4f' % lon))
        value_array = []
        for j in range(na):
            value_array.append(float(val_arrays[j].GetTuple1(i)))
        value[key].append(value_array)

    # store boundary metadata
    field_data = poly_data.GetFieldData()
    field_data.AllocateArrays(5)  # number of fields
    depth_data = vtk.vtkFloatArray()
    depth_data.SetName('depth')
    lat_data = vtk.vtkFloatArray()
    lat_data.SetName('latitude')
    lon_data = vtk.vtkFloatArray()
    lon_data.SetName('longitude')
    val_data = []
    for j in range(na):
        val_data.append(vtk.vtkFloatArray())
        val_data[j].SetName('value(%s)' % pdi.GetPointData().GetArray(j).GetName())

    depth_keys = list(latitude.keys())
    for i in range(len(depth_keys)):
        depth_key = depth_keys[i]
        lon_list = longitude[depth_key]
        lat_list = latitude[depth_key]
        val_list = value[depth_key]
        # Sort each depth slice by (latitude, longitude) before emitting.
        point_list = list(zip(lat_list, lon_list, val_list))
        point_list.sort(key=itemgetter(0, 1))
        for index, data in enumerate(point_list):
            depth_data.InsertNextValue(float(depth[depth_key]))
            lat_data.InsertNextValue(float(data[0]))
            lon_data.InsertNextValue(float(data[1]))
            for k in range(na):
                point_data = data[2]
                val_data[k].InsertNextValue(point_data[k])

    field_data.AddArray(lat_data)
    field_data.AddArray(lon_data)
    field_data.AddArray(depth_data)
    for j in range(na):
        field_data.AddArray(val_data[j])

    if len(Label.strip()) > 0:
        simple.RenameSource(Label)

    pdo.SetFieldData(field_data)