Example #1
 def RequestData(self, request, inInfo, outInfo):
     # Handle input arrays
     pdi = self.GetInputData(inInfo, 0, 0)
     wpdi = dsa.WrapDataObject(pdi)
     dx = _helpers.get_numpy_array(wpdi, self.__dx_id[0], self.__dx_id[1])
     dy = _helpers.get_numpy_array(wpdi, self.__dy_id[0], self.__dy_id[1])
     dz = _helpers.get_numpy_array(wpdi, self.__dz_id[0], self.__dz_id[1])
     VoxelizePoints.set_deltas(self, dx, dy, dz)
     # call parent and make sure EstimateGrid is set to False
     return VoxelizePoints.RequestData(self, request, inInfo, outInfo)
Example #2
    def boundary_cell_data(self, boundary, sort=None):
        """Return cell-centre coordinates and data from cells adjacent
        to a specific boundary.

        Parameters
        ----------
        boundary : str
            The name of the boundary.
        sort : {None, 'x', 'y'}, optional
            Whether to sort the data along a coordinate. Use 'x' and
            'y' to sort along x and y, respectively. Default is no
            sorting.

        Returns
        -------
        points : ndarray
            The (x, y) coordinates of the adjacent cell centres.
        data : dict
            Dictionary mapping cell-array names to the corresponding values.

        """
        selection = self.extract_boundary_cells(boundary)
        cCenters = vtk.vtkCellCenters()
        cCenters.SetInputData(selection.GetOutput())
        cCenters.Update()

        points = np.array(dsa.WrapDataObject(cCenters.GetOutput()).Points)
        dataVTK = dsa.WrapDataObject(selection.GetOutput()).CellData

        data = {}
        for key in dataVTK.keys():
            data[key] = np.array(dataVTK[key])

        if sort is None:
            return points[:, [0, 1]], data
        elif sort == "x":
            ind = np.argsort(points[:, 0])
        elif sort == "y":
            ind = np.argsort(points[:, 1])
        else:
            raise ValueError("sort should be None, 'x' or 'y', got " + repr(sort))

        points = points[ind]

        for key in data:
            data[key] = data[key][ind]

        return points[:, [0, 1]], data
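
A hypothetical usage sketch for boundary_cell_data(); here case stands for an instance of the class that defines the method, and the boundary name 'inlet' and the cell-array name 'UMean' are placeholders invented for illustration:

# Sketch only: 'case', 'inlet' and 'UMean' are placeholder names.
points, data = case.boundary_cell_data('inlet', sort='y')
y = points[:, 1]          # y-coordinates of the adjacent cell centres, sorted along y
uMean = data['UMean']     # the matching cell data, reordered consistently with points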
Example #3
def test_bifurcation_section_group_ids_correct(aorta_centerline_branches,
                                               aorta_surface_branches,
                                               bifurcation_sections,
                                               expectedValue):
    wrapped_bifur_section = dsa.WrapDataObject(
        bifurcation_sections(aorta_centerline_branches,
                             aorta_surface_branches))
    assert np.allclose(
        wrapped_bifur_section.CellData.GetArray('BifurcationSectionGroupIds'),
        expectedValue)
Example #4
    def RequestData(self, request, inInfoVec, outInfoVec):
        pdi = self.GetInputData(inInfoVec, 0, 0)
        pdo = self.GetOutputData(outInfoVec, 0)
        # Find cell centers
        filt = vtk.vtkCellCenters()
        filt.SetInputDataObject(pdi)
        filt.Update()

        centers = dsa.WrapDataObject(filt.GetOutput()).Points
        # Get CellData
        wpdi = dsa.WrapDataObject(pdi)
        celldata = wpdi.CellData
        keys = celldata.keys()

        # Make poly data of Cell centers:
        pdo.DeepCopy(interface.pointsToPolyData(centers))
        for name in keys:
            pdo.GetPointData().AddArray(pdi.GetCellData().GetArray(name))
        return 1
Example #5
 def _gen_and_check(self, op, check, flip=False):
     # Perform filter
     f = NormalizeArray()
     f.SetNormalization(op)
     f.SetNewArrayName('test')
     # Now test the result
     output = f.Apply(self.t0, self.title)
     wout = dsa.WrapDataObject(output)
     arr = wout.RowData['test']
     self.assertTrue(np.allclose(arr, check, rtol=RTOL))
Example #6
 def check_data_fidelity(self, ido, checkme, points=False):
     """`TableToTimeGrid`: data fidelity"""
     wido = dsa.WrapDataObject(ido)
     for i in range(len(self.titles)):
         if points:
             arr = wido.PointData[self.titles[i]]
         else:
             arr = wido.CellData[self.titles[i]]
         # print(arr, checkme[i])
         self.assertTrue(np.allclose(arr, checkme[i], rtol=RTOL))
Example #7
    def __init__(self, filename=None, vector=None, name="vtkData"):
        """
        Initializes a GHOSTpy data object for a vtk data file
        :param filename: Name of the vtk file to read
        :param vector: Name of the vector to be read within the vtk data file
        :param name: common name for the reader. Default is "vtkData"  used to display the trace mode
        """
        assert isinstance(name, tp.StringType), "Must specify a string Name for the data Mode"
        self.name = name
        assert isinstance(filename, tp.StringType), "Filename must be a valid string"
        self.file = filename
        self.reader = self.vtk_xml_reader
        assert isinstance(vector, tp.StringType), "Vector Name must be a string"

        # Let us not save all of the intermediate work, as it may overwhelm our resources
        odata = self.reader.GetOutput()
        in_data = dsa.WrapDataObject(odata)
        points = in_data.Points
        bvalues = in_data.PointData.GetArray(vector)
        self.extents = self.reader.GetUpdateExtent()
        self.dims = np.array([0,0,0,0])
        self.dims[0] = self.extents[1]+1
        self.dims[1] = self.extents[3]+1
        self.dims[2] = self.extents[5]+1
        self.dims[3] = 3

        # print("Dims: {}".format(self.dims))

        self.gridX, self.gridY, self.gridZ = self.build_grid2(points, self.dims[:-1])
        # self.gridX = cv.cm_to_re(self.gridX)
        # self.gridY = cv.cm_to_re(self.gridY)
        # self.gridZ = cv.cm_to_re(self.gridZ)

        self.dataX, self.dataY, self.dataZ = self.build_grid2(bvalues, self.dims[:-1])

        blade_trees = []
        FLookup = []
        midY = int(self.dims[1]/2)  # Prevents calculating along the X axis
        midZ = int(self.dims[2]/2)  # Prevents calculating along the X axis
        for i in range(self.dims[0]):
            fanX = self.gridX[i, :, :]
            fanY = self.gridY[i, :, :]
            fanZ = self.gridZ[i, :, :]

            self.fanP = zip(fanX.ravel(), fanY.ravel(), fanZ.ravel())
            blade_trees.append(spatial.cKDTree(self.fanP, leafsize=1e6))

            posX = self.gridX[i, midY, midZ]
            posY = self.gridY[i, midY, midZ]
            posZ = self.gridZ[i, midY, midZ]

            FLookup.append(algx.__xyz_to_fan_angle__(xyz=[posX, posY, posZ]))

        self.FanLookup = np.array(FLookup)
        self.blades = np.array(blade_trees)
Example #8
def get_unique_integer_list_from_vtu_array(mesh_vtu, array_type, array_name):
    list_unique = []
    mesh_vtu_wrapped = dsa.WrapDataObject(mesh_vtu)
    if array_type == "cell_array":
        data = mesh_vtu_wrapped.CellData
    elif array_type == "point_array":
        data = mesh_vtu_wrapped.PointData
    else:
        raise ValueError("array_type should be 'cell_array' or 'point_array'")
    if array_name in data.keys():
        array = data[array_name]
        list_unique = list(set(array))
    return list_unique
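
A hypothetical call, assuming mesh_vtu is a vtkUnstructuredGrid (for example read from a .vtu file) carrying an integer cell array; the array name 'MaterialIds' is made up for illustration:

# Sketch only: 'MaterialIds' is a placeholder array name.
material_ids = get_unique_integer_list_from_vtu_array(mesh_vtu, "cell_array", "MaterialIds")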
Example #9
def cutPolySurface(dataSet, point, normal):
	''' Cut a surface with a plane, and return an ordered list
	of points around the circumference of the resulting curve. The cut
	must result in a closed loop, and if the cut produces multiple sub-curves
	the closest one is returned.

	Args:
		:dataSet: (vtkPolyData): surface dataset
		:point: origin of the cutplane
		:normal: normal of the cutplane

	Returns:
		:np.array: List of positions around the circumference of the cut

	Raises:
		RuntimeError: If the cut results in a non-closed loop being formed
	'''

	# Generate surface cutcurve
	cutData = cutDataSet(dataSet, point, normal)
	
	edges = []
	cutLines = cutData.GetLines()
	cutLines.InitTraversal()
	idList = vtk.vtkIdList()
	while cutLines.GetNextCell(idList) == 1:
		edges.append((idList.GetId(0), idList.GetId(1)))	

	# Gather all points by traversing the edge graph starting
	# from the point closest to the centerline point
	locator = vtk.vtkPointLocator()
	locator.SetDataSet(cutData)
	locator.BuildLocator()
	startPtId = locator.FindClosestPoint(point)

	pointIds = [startPtId]
	try:
		while True:
			# Find the edge that starts at the latest point
			pred = (v[1] for v in edges if v[0] == pointIds[-1])
			currentPtId = next(pred)

			# Check if we've returned to the start point
			if currentPtId == startPtId:
				break

			pointIds.append(currentPtId)
	except StopIteration:
		# We reached the end of the edge graph without returning to the start point
		raise RuntimeError('The cut curve does not form a closed loop')
	cutCurve = dsa.WrapDataObject(cutData)
	return cutCurve.Points[pointIds]
Example #10
def get_aggregate_data(poly,a_cell_id,l_aggregate_columns,func="sum"):
	wdo = dsa.WrapDataObject(poly)
	l_output = []
	
	if func=="sum":
		for ix, column in enumerate(l_aggregate_columns):
			l_output.append( np.sum(wdo.CellData[l_aggregate_columns[ix]][a_cell_id].__array__()) )
	else:
		print('func not supported')

	return l_output
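
A hypothetical call of get_aggregate_data(); the cell-id selection and the column name 'area' are invented for illustration, and poly is assumed to be a vtkPolyData carrying that cell array:

# Sketch only: 'area' and the cell ids are placeholders.
selected_cells = np.array([0, 4, 7])
totals = get_aggregate_data(poly, selected_cells, ['area'], func="sum")  # [sum of 'area' over the selected cells]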
Example #11
def write_file(data, xfreq):
    wdata = dsa.WrapDataObject(data)
    array = wdata.PointData['RTData']
    # Note that we flip the dimensions here because
    # VTK's order is Fortran whereas h5py writes in
    # C order. We don't want to do deep copies so we write
    # with dimensions flipped and pretend the array is
    # C order.
    array = array.reshape(wdata.GetDimensions()[::-1])
    f = h5py.File('data%d.h5' % xfreq, 'w')
    f.create_dataset("RTData", data=array)
    f.close()
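
A minimal way to exercise write_file(), as a sketch: vtkRTAnalyticSource is the stock VTK source producing a vtkImageData with an 'RTData' point array, which is the input the function expects (h5py is assumed to be installed):

import vtk

source = vtk.vtkRTAnalyticSource()   # generates an image with an 'RTData' point array
source.SetXFreq(60)
source.Update()
write_file(source.GetOutput(), 60)   # writes data60.h5 with a C-ordered 'RTData' dataset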
Example #12
def arrays(dataobject):
    """
    Iterate over (name, array) for the arrays in this datset.

    :param dataobject The incoming dataset
    :type: vtkDataObject
    """
    do = dsa.WrapDataObject(dataobject)
    for i in range(0, do.PointData.GetNumberOfArrays()):
        name = do.PointData.GetArrayName(i)
        yield (name, get_array(dataobject, name))
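
A usage sketch for arrays(); it assumes the companion get_array() helper referenced above is available in the same module:

import vtk

source = vtk.vtkRTAnalyticSource()
source.Update()
for name, array in arrays(source.GetOutput()):
    print(name, array.shape)   # prints each point-array name with its shape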
Example #13
def test_cell_data_point_start_and_end_xyz_locations(bifur_profiles, pointidstart, numberofpoints, 
                                                     expectedlocationstart, expectedlocationend, paramid):
    bcx = bifur_profiles.GetCell(paramid)
    bw = dsa.WrapDataObject(bifur_profiles)
    
    pointIdEnd = bcx.GetPointId(numberofpoints - 1)
    pointLocationEnd = bw.Points[pointIdEnd]
    pointLocationStart = bw.Points[pointidstart]

    assert np.allclose(np.array(pointLocationStart), expectedlocationstart)
    assert np.allclose(np.array(pointLocationEnd), expectedlocationend)
Example #14
def test_dataset(ds):
    p2c = vtk.vtkPointDataToCellData()
    p2c.SetInputData(ds)
    p2c.Update()

    d1 = dsa.WrapDataObject(p2c.GetOutput())

    vtkm_p2c = vtk.vtkmAverageToCells()
    vtkm_p2c.SetInputData(ds)
    vtkm_p2c.SetInputArrayToProcess(0, 0, 0,
                                    vtk.vtkDataObject.FIELD_ASSOCIATION_POINTS,
                                    "RTData")
    vtkm_p2c.Update()

    d2 = dsa.WrapDataObject(vtkm_p2c.GetOutput())

    # Both filters convert the point data to cell data, so compare the CellData arrays
    rtD1 = d1.CellData['RTData']
    rtD2 = d2.CellData['RTData']

    assert (algs.max(algs.abs(rtD1 - rtD2)) < 10E-4)
Example #15
 def RequestData(self, request, inInfo, outInfo):
     f = h5py.File(self.__FileName, 'r')
     data = f['RTData'][:]
     f.close()
     output = dsa.WrapDataObject(vtk.vtkImageData.GetData(outInfo))
     # Note that we flip the dimensions here because
     # VTK's order is Fortran whereas h5py writes in
     # C order.
     output.SetDimensions(data.shape[::-1])
     output.PointData.append(data.ravel(), 'RTData')
     output.PointData.SetActiveScalars('RTData')
     return 1
Example #16
def set_tilt_angles(dataobject, newarray):
    # replace the tilt angles with the new array
    from vtkmodules.util.vtkConstants import VTK_DOUBLE
    # deep copy avoids having to keep numpy array around, but is more
    # expensive.  I don't expect tilt_angles to be a big array though.
    vtkarray = np_s.numpy_to_vtk(newarray, deep=1, array_type=VTK_DOUBLE)
    vtkarray.Association = dsa.ArrayAssociation.FIELD
    vtkarray.SetName('tilt_angles')
    do = dsa.WrapDataObject(dataobject)
    do.FieldData.RemoveArray('tilt_angles')
    do.FieldData.AddArray(vtkarray)
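
A hypothetical round-trip check for set_tilt_angles() (a sketch, relying on the module-level np_s and dsa imports used above): after the call, the array should be readable back through the FieldData wrapper:

import numpy as np
import vtk

image = vtk.vtkImageData()
set_tilt_angles(image, np.linspace(-60.0, 60.0, 5))
angles = np.asarray(dsa.WrapDataObject(image).FieldData['tilt_angles'])
print(angles)   # [-60. -30.   0.  30.  60.]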
Example #17
def set_scalars(dataobject, newscalars):
    do = dsa.WrapDataObject(dataobject)
    oldscalars = do.PointData.GetScalars()
    name = oldscalars.GetName()
    del oldscalars

    if not is_numpy_vtk_type(newscalars):
        newscalars = newscalars.astype(np.float32)

    do.PointData.append(newscalars, name)
    do.PointData.SetActiveScalars(name)
Example #18
def table_to_data_frame(table):
    """Converts a vtkTable to a pandas DataFrame"""
    if not isinstance(table, vtk.vtkTable):
        raise PVGeoError('Input is not a vtkTable')
    num = table.GetNumberOfColumns()
    names = [table.GetColumnName(i) for i in range(num)]
    data = dsa.WrapDataObject(table).RowData
    df = pd.DataFrame()
    for n in names:
        df[n] = np.array(data[n])
    return df
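
A small usage sketch for table_to_data_frame(); the column names are arbitrary and pandas is assumed to be available:

import numpy as np
import vtk
from vtk.numpy_interface import dataset_adapter as dsa

table = vtk.vtkTable()
wrapped = dsa.WrapDataObject(table)
wrapped.RowData.append(np.arange(5.0), 'depth')                # add two example columns
wrapped.RowData.append(np.linspace(0.0, 1.0, 5), 'porosity')
df = table_to_data_frame(table)   # 5x2 DataFrame with columns 'depth' and 'porosity'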
Example #19
 def RequestData(self, request, inInfo, outInfo):
     info = inInfo[0].GetInformationObject(0)
     inp = dsa.WrapDataObject(vtk.vtkDataSet.GetData(info))
     # Extract the value for the current time step.
     self.ValueOverTime[self.UpdateTimeIndex] =\
         inp.PointData['vectors'][0, 0]
     if self.UpdateTimeIndex < len(self.TimeValues) - 1:
         # If we are not done, ask the pipeline to re-execute us.
         self.UpdateTimeIndex += 1
         request.Set(
             vtk.vtkStreamingDemandDrivenPipeline.CONTINUE_EXECUTING(),
             1)
     else:
         # We are done. Populate the output.
         output = dsa.WrapDataObject(vtk.vtkTable.GetData(outInfo))
         output.RowData.append(self.ValueOverTime, 'u over time')
         # Stop execution
         request.Remove(
             vtk.vtkStreamingDemandDrivenPipeline.CONTINUE_EXECUTING())
     return 1
Example #20
 def _check_data_fidelity(self, table, order):
     wpdi = dsa.WrapDataObject(table)
     tarr = np.zeros((self.nrows, self.ncols))
     for i in range(self.ncols):
         tarr[:, i] = wpdi.RowData[i]
     arrs = np.array(self.arrs).T
     arrs = arrs.flatten()
     arrs = np.reshape(arrs, (self.nrows, self.ncols), order=order)
     self.assertEqual(tarr.shape, arrs.shape)
     self.assertTrue(np.allclose(tarr, arrs, rtol=RTOL))
     return
Example #21
    def ProbeVelocity(self, data):
        # Update the particle locations we sample at
        pts = dsa.numpyTovtkDataArray(self.Points)
        self.ProbePoints.GetPoints().SetData(pts)

        self.Probe.SetSourceData(data)

        # Sample
        self.Probe.Update()
        p = dsa.WrapDataObject(self.Probe.GetOutput())
        # All we care about are the vector values.
        return p.PointData['vectors']
Example #22
 def _number_of_labels_changed(self, value):
     if self.input is None:
         return
     f = self.mask.filter
     inp = self.input.get_output_dataset()
     data_obj = dsa.WrapDataObject(tvtk.to_vtk(inp))
     npts = data_obj.GetNumberOfPoints()
     typ = type(f.on_ratio)
     f.on_ratio = typ(max(npts/value, 1))
     if self.mask.running:
         f.update()
         self.mask.data_changed = True
Example #23
 def _gen_and_check(self, op, check, flip=False):
     # Perform filter
     f = ArrayMath()
     f.SetOperation(op)
     f.SetNewArrayName('test')
     if flip:
         output = f.Apply(self.t0, self.titles[1], self.titles[0])
     else:
         output = f.Apply(self.t0, self.titles[0], self.titles[1])
     wout = dsa.WrapDataObject(output)
     arr = wout.RowData['test']
     self.assertTrue(np.allclose(arr, check, rtol=RTOL))
Example #24
def load_vtk_mesh(fileName):
    reader = vtk.vtkUnstructuredGridReader()
    reader.SetFileName(fileName)
    reader.Update()
    mesh = reader.GetOutput()
    wmesh = dsa.WrapDataObject(mesh)
    point = wmesh.GetPoints()
    cell = wmesh.GetCells()
    cellLocation = wmesh.GetCellLocations()
    cellType = wmesh.GetCellTypes()
    pmesh = PolygonMesh(point[:, [0, 1]], cell, cellLocation, cellType)
    return pmesh
Example #25
    def __init__(self, slice1):
        self._slice = slice1
        # vtkCommonDataModelPython.vtkMultiBlockDataSet
        dset = pv.servermanager.Fetch(slice1)

        if dset.GetClassName() not in ("vtkMultiBlockDataSet",):
            raise ValueError(f"Incompatible type: {type(dset)}")

        # vtk.numpy_interface.dataset_adapter.CompositeDataSet
        self._dset = dset
        self._obj = dsa.WrapDataObject(dset)
        self._ptdata = self._obj.GetPointData()
Example #26
    def __init__(self, fileName, clean=False, pointData=False):
        """
        Create Case from file.

        Parameters
        ----------
        fileName : str
            The file to be read in. Should be data in VTK format.

        clean : bool
            Whether to attempt to clean the data of redundant cells.

        """
        self.fileName = fileName

        # Read in the data
        self._blockData = self.read(clean, pointData)

        # Compute the cell-centres
        self._cellCentres = vtk.vtkCellCenters()
        self._cellCentres.SetInputData(self._blockData.GetBlock(0))
        self._cellCentres.Update()
        self._cellCentres =\
            dsa.WrapDataObject(self._cellCentres.GetOutput()).GetPoints()
        self._cellCentres = np.array(self._cellCentres[:, :2])

        self._vtkData = dsa.WrapDataObject(self._blockData.GetBlock(0))

        self._boundaries = self._fill_boundary_list()

        self._bounds = self._vtkData.VTKObject.GetBounds()[:4]

        self._fields = self._vtkData.CellData.keys()

        plot_limits = self._compute_plot_limits()
        self._xlim = plot_limits[0]
        self._ylim = plot_limits[1]

        self._boundaryCellCoords, self._boundaryCellData = \
            self._compute_boundary_cell_data()
Example #27
def execute(self, expression):
    """
    **Internal Method**
    Called by vtkPythonCalculator in its RequestData(...) method. This is not
    intended for use externally except from within
    vtkPythonCalculator::RequestData(...).
    """

    # Add inputs.
    inputs = []

    for index in range(self.GetNumberOfInputConnections(0)):
        # wrap all input data objects using vtk.numpy_interface.dataset_adapter
        wdo_input = dsa.WrapDataObject(self.GetInputDataObject(0, index))
        t, t_index = get_data_time(self, wdo_input.VTKObject,
                                   self.GetInputInformation(0, index))
        wdo_input.time_value = wdo_input.t_value = t
        wdo_input.time_index = wdo_input.t_index = t_index
        inputs.append(wdo_input)

    # Setup output.
    output = dsa.WrapDataObject(self.GetOutputDataObject(0))

    if self.GetCopyArrays():
        output.GetPointData().PassData(inputs[0].GetPointData())
        output.GetCellData().PassData(inputs[0].GetCellData())

    # get a dictionary for arrays in the dataset attributes. We pass that
    # as the variables in the eval namespace for compute.
    variables = get_arrays(inputs[0].GetAttributes(self.GetArrayAssociation()))
    variables.update({
        "time_value": inputs[0].time_value,
        "t_value": inputs[0].t_value,
        "time_index": inputs[0].time_index,
        "t_index": inputs[0].t_index
    })
    retVal = compute(inputs, expression, ns=variables)
    if retVal is not None:
        output.GetAttributes(self.GetArrayAssociation()).append(\
            retVal, self.GetArrayName())
Example #28
    def RequestData(self, request, inInfo, outInfo):
        info = inInfo[0].GetInformationObject(0)
        inp = dsa.WrapDataObject(vtk.vtkDataSet.GetData(info))
        output = vtk.vtkMultiBlockDataSet.GetData(outInfo)

        # Initialize the number of blocks in the output
        if output.GetNumberOfBlocks() == 0:
            output.SetNumberOfBlocks(self.NumberOfBlocks)

        # Contour the current piece and add to the output
        self.Contour.SetInputData(inp.VTKObject)
        self.Contour.Update()
        #print self.UpdateIndex, self.Contour.GetOutput().GetNumberOfCells()
        contour = dsa.WrapDataObject(self.Contour.GetOutput())
        rtdata = contour.PointData['RTData']
        # We create an array to color by later. To show different
        # pieces.
        color = np.empty_like(rtdata)
        color[:] = self.UpdateIndex
        contour.PointData.append(color, "color")
        contour.PointData.SetActiveScalars("color")
        if contour.GetNumberOfCells() > 0:
            block = vtk.vtkPolyData()
            block.ShallowCopy(contour.VTKObject)
            output.SetBlock(self.UpdateIndex, block)

        # These control streaming.
        if self.UpdateIndex < self.NumberOfBlocks - 1:
            # If we are not done, ask the pipeline to re-execute us.
            self.UpdateIndex += 1
            request.Set(
                vtk.vtkStreamingDemandDrivenPipeline.CONTINUE_EXECUTING(),
                1)
        else:
            # Stop execution
            request.Remove(
                vtk.vtkStreamingDemandDrivenPipeline.CONTINUE_EXECUTING())
            # Reset for next potential execution.
            self.UpdateIndex = 0
        return 1
Example #29
def minimal_length(patchData):
    """ Compute minimal length as sqrt of smallest area on a patch.

    """
    areaFilter = vtk.vtkMeshQuality()
    areaFilter.SetInputData(patchData) 
    areaFilter.SetTriangleQualityMeasureToArea()
    areaFilter.SetQuadQualityMeasureToArea()
    areaFilter.Update()
    area = dsa.WrapDataObject(areaFilter.GetOutput())
    area = area.CellData["Quality"]

    return np.sqrt(np.min(area))
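
A quick sanity check for minimal_length(), as a sketch: the triangulated output of vtkSphereSource is a valid input patch:

import vtk

sphere = vtk.vtkSphereSource()
sphere.Update()
print(minimal_length(sphere.GetOutput()))   # square root of the smallest triangle area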
Example #30
def test_clip_returns_3_groups(aorta_centerline_branches, aorta_surface):
    clipper = branchclipper.vmtkBranchClipper()
    clipper.Centerlines = aorta_centerline_branches
    clipper.Surface = aorta_surface
    clipper.RadiusArrayName = 'MaximumInscribedSphereRadius'
    clipper.BlankingArrayName = 'Blanking'
    clipper.GroupIdsArrayName = 'GroupIds'
    clipper.Execute()

    wrappedClip = dsa.WrapDataObject(clipper.Surface)
    uniqueGroups = np.unique(np.array(wrappedClip.PointData['GroupIds']))

    assert np.allclose(uniqueGroups, np.array([0, 2, 3]))