Exemplo n.º 1
0
def test_get_scalar():
    """Verify get_scalar honors the point/cell preference and missing names."""
    grid = vtki.UnstructuredGrid(ex.hexbeamfile)
    # add arrays to both point and cell data under the same name so the
    # ``preference`` argument is what disambiguates the lookup
    carr = np.random.rand(grid.n_cells)
    grid._add_cell_scalar(carr, 'test_data')
    parr = np.random.rand(grid.n_points)
    grid._add_point_scalar(parr, 'test_data')
    oarr = np.random.rand(grid.n_points)
    grid._add_point_scalar(oarr, 'other')
    assert np.allclose(
        carr, utilities.get_scalar(grid, 'test_data', preference='cell'))
    assert np.allclose(
        parr, utilities.get_scalar(grid, 'test_data', preference='point'))
    assert np.allclose(oarr, utilities.get_scalar(grid, 'other'))
    # a missing name yields None; use identity, not equality (E711)
    assert utilities.get_scalar(grid, 'foo') is None
Exemplo n.º 2
0
 def format_array(key, field):
     """Internal helper to format array information for printing.

     NOTE(review): relies on ``self``, ``get_scalar`` and ``row`` from an
     enclosing scope — presumably defined inside a repr/HTML method; confirm.
     """
     arr = get_scalar(self, key)
     dl, dh = self.get_data_range(key)
     # bold the name of the currently-active scalar in the rendered table
     if key == self.active_scalar_info[1]:
         key = '<b>{}</b>'.format(key)
     return row.format(key, field, arr.dtype, dl, dh)
Exemplo n.º 3
0
 def set_active_scalar(self, name, preference='cell'):
     """Find the scalar array by name and set it as the active scalars.

     Parameters
     ----------
     name : str
         Name of the array to activate.
     preference : str, optional
         Field searched first when the name exists in both point and cell
         data; either ``'point'`` or ``'cell'``.

     Raises
     ------
     RuntimeError
         If the array is found in neither point nor cell data.
     """
     # only the field is needed here; the array itself is discarded
     _, field = get_scalar(self, name, preference=preference, info=True)
     if field == POINT_DATA_FIELD:
         self.GetPointData().SetActiveScalars(name)
     elif field == CELL_DATA_FIELD:
         self.GetCellData().SetActiveScalars(name)
     else:
         # fixed message typo: was "no useable"
         raise RuntimeError('Data field ({}) not usable'.format(field))
     self._active_scalar_info = [field, name]
Exemplo n.º 4
0
 def rename_scalar(self, old_name, new_name, preference='cell'):
     """Rename an existing array by locating it and re-keying it in place."""
     # locate which field (point or cell data) holds the array
     _, field = get_scalar(self, old_name, preference=preference, info=True)
     if field == CELL_DATA_FIELD:
         self.cell_arrays[new_name] = self.cell_arrays.pop(old_name)
     elif field == POINT_DATA_FIELD:
         self.point_arrays[new_name] = self.point_arrays.pop(old_name)
     else:
         raise RuntimeError('Array not found.')
     # keep the active-scalar selection pointing at the renamed array
     if old_name == self.active_scalar_info[1]:
         self.set_active_scalar(new_name, preference=field)
Exemplo n.º 5
0
 def get_data_range(self, name):
     """Return the (min, max) of the named scalar across every block.

     Blocks that are ``None`` or that lack the array are skipped; NaN
     values are ignored via ``nanmin``/``nanmax``.
     """
     lo, hi = np.inf, -np.inf
     for idx in range(self.n_blocks):
         block = self[idx]
         if block is None:
             continue
         # fetch the scalar if available on this block
         values = get_scalar(block, name)
         if values is None:
             continue
         lo = min(lo, np.nanmin(values))
         hi = max(hi, np.nanmax(values))
     return lo, hi
Exemplo n.º 6
0
 def contour(dataset,
             isosurfaces=10,
             scalars=None,
             compute_normals=False,
             compute_gradients=False,
             compute_scalars=True,
             preference='point'):
     """Contours an input dataset by an array.

     ``isosurfaces`` can be an integer specifying the number of isosurfaces
     in the data range or an iterable set of values for explicitly setting
     the isosurfaces.

     Raises
     ------
     RuntimeError
         If the selected array is not point data, or ``isosurfaces`` is
         neither an int nor an iterable.
     """
     # ``collections.Iterable`` was removed in Python 3.10; use the abc form
     from collections.abc import Iterable
     alg = vtk.vtkContourFilter()  #vtkMarchingCubes
     alg.SetInputDataObject(dataset)
     alg.SetComputeNormals(compute_normals)
     alg.SetComputeGradients(compute_gradients)
     alg.SetComputeScalars(compute_scalars)
     # set the array to contour on
     if scalars is None:
         field, scalars = dataset.active_scalar_info
     else:
         _, field = get_scalar(dataset,
                               scalars,
                               preference=preference,
                               info=True)
     # NOTE: only point data is allowed (field 0 is the point-data field)
     if field != 0:
         raise RuntimeError('Can only contour by Point data at this time.')
     alg.SetInputArrayToProcess(
         0, 0, 0, field,
         scalars)  # args: (idx, port, connection, field, name)
     # set the isosurfaces
     if isinstance(isosurfaces, int):
         # generate evenly spaced values across the data range
         alg.GenerateValues(isosurfaces, dataset.get_data_range(scalars))
     elif isinstance(isosurfaces, Iterable):
         alg.SetNumberOfContours(len(isosurfaces))
         for i, val in enumerate(isosurfaces):
             alg.SetValue(i, val)
     else:
         raise RuntimeError('isosurfaces not understood.')
     alg.Update()
     return _get_output(alg)
Exemplo n.º 7
0
    def warp_by_scalar(dataset, scalars=None, scale_factor=1.0, normal=None,
                       in_place=False):
        """
        Warp the dataset's points by a point data scalar array's values.
        This modifies point coordinates by moving points along point normals by
        the scalar amount times the scale factor.

        Parameters
        ----------
        scalars : str, optional
            Name of scalars to warp by. Defaults to currently active scalars.

        scale_factor : float, optional
            A scaling factor to increase the scaling effect.

        normal : np.array, list, tuple of length 3
            User specified normal. If given, data normals will be ignored and
            the given normal will be used to project the warp.

        in_place : bool
            If True, the points of the given dataset will be updated in place
            and ``None`` is returned; otherwise the warped copy is returned.
        """
        if scalars is None:
            field, scalars = dataset.active_scalar_info
        # resolve the array; warping is only defined for point data
        arr, field = get_scalar(dataset, scalars, preference='point', info=True)
        if field != vtki.POINT_DATA_FIELD:
            raise AssertionError('Dataset can only by warped by a point data array.')
        # Run the algorithm
        alg = vtk.vtkWarpScalar()
        alg.SetInputDataObject(dataset)
        alg.SetInputArrayToProcess(0, 0, 0, field, scalars) # args: (idx, port, connection, field, name)
        alg.SetScaleFactor(scale_factor)
        if normal is not None:
            alg.SetNormal(normal)
            alg.SetUseNormal(True)
        alg.Update()
        output = _get_output(alg)
        if in_place:
            # copy warped coordinates back onto the input dataset
            dataset.points = output.points
            return
        return output
Exemplo n.º 8
0
    def get_data_range(self, arr=None, preference='cell'):
        """Return the non-NaN (min, max) of a named or given scalar array.

        Parameters
        ----------
        arr : str, np.ndarray, optional
            The array name to look up on the dataset, or the array itself.
            If None, the active scalar array is used.

        preference : str, optional
            When ``arr`` is a name, the preferred field to search; must be
            either ``'point'`` or ``'cell'``.

        """
        if arr is None:
            # fall back to the name of the active scalar array
            arr = self.active_scalar_info[1]
        if isinstance(arr, str):
            # resolve the name to the actual array
            arr = get_scalar(self, arr, preference=preference)
        # a missing or empty array has no meaningful range
        if arr is None or arr.size == 0:
            return (np.nan, np.nan)
        return np.nanmin(arr), np.nanmax(arr)
Exemplo n.º 9
0
 def format_array(key, field):
     """Format one array's name, field, dtype and data range as a table row."""
     values = get_scalar(self, key)
     low, high = self.get_data_range(key)
     # highlight the currently-active scalar's name
     active_name = self.active_scalar_info[1]
     if key == active_name:
         key = '<b>{}</b>'.format(key)
     return row.format(key, field, values.dtype, low, high)
Exemplo n.º 10
0
 def get_data_range(self, name):
     """Return the NaN-ignoring (min, max) of the named scalar array."""
     values = get_scalar(self, name)
     return np.nanmin(values), np.nanmax(values)
Exemplo n.º 11
0
 def get_scalar(self, name, preference='cell', info=False):
     """ Searches both point and cell data for an array """
     # delegate to the module-level ``get_scalar`` helper; inside the method
     # body the bare name resolves to the global function, not this method
     return get_scalar(self, name, preference=preference, info=info)
Exemplo n.º 12
0
    def threshold(dataset,
                  value,
                  scalars=None,
                  invert=False,
                  continuous=False,
                  preference='cell'):
        """
        This filter will apply a ``vtkThreshold`` filter to the input dataset and
        return the resulting object. This extracts cells where scalar value in each
        cell satisfies threshold criterion.  If scalars is None, the inputs
        active_scalar is used.

        Parameters
        ----------
        dataset : vtk.vtkDataSet object
            Input dataset.

        value : float or iterable
            Single value or (min, max) to be used for the data threshold.  If
            iterable then length must be 2.

        scalars : str
            Name of scalars.

        invert : bool, optional
            If value is a single value, when invert is True cells are kept when
            their values are below parameter "value".  When invert is False
            cells are kept when their value is above the threshold "value".

        continuous : bool, optional
            When True, the continuous interval [minimum cell scalar,
            maximum cell scalar] will be used to intersect the threshold bound,
            rather than the set of discrete scalar values from the vertices.

        preference : str, optional
            When scalars is None, this is the preferred scalar type to search for
            in the dataset.  Must be either 'point' or 'cell'.

        """
        # ``collections.Iterable`` was removed in Python 3.10; use the abc form
        from collections.abc import Iterable
        alg = vtk.vtkThreshold()
        alg.SetInputDataObject(dataset)
        # set the scalars to threshold on
        if scalars is None:
            field, scalars = dataset.active_scalar_info
        else:
            _, field = get_scalar(dataset,
                                  scalars,
                                  preference=preference,
                                  info=True)
        alg.SetInputArrayToProcess(
            0, 0, 0, field,
            scalars)  # args: (idx, port, connection, field, name)
        # set thresholding parameters
        alg.SetUseContinuousCellRange(continuous)
        # check if value is iterable (if so threshold by min max range like ParaView)
        if isinstance(value, Iterable):
            if len(value) != 2:
                raise RuntimeError(
                    'Value range must be length one for a float value or two for min/max; not ({}).'
                    .format(value))
            alg.ThresholdBetween(value[0], value[1])
            # NOTE: Invert for ThresholdBetween is coming in vtk=>8.2.x
            #alg.SetInvert(invert)
        else:
            # just a single value
            if invert:
                alg.ThresholdByLower(value)
            else:
                alg.ThresholdByUpper(value)
        # Run the threshold
        alg.Update()
        return _get_output(alg)
Exemplo n.º 13
0
    def contour(dataset, isosurfaces=10, scalars=None, compute_normals=False,
                compute_gradients=False, compute_scalars=True,  rng=None,
                preference='point'):
        """Contours an input dataset by an array. ``isosurfaces`` can be an integer
        specifying the number of isosurfaces in the data range or an iterable set of
        values for explicitly setting the isosurfaces.

        Parameters
        ----------
        isosurfaces : int or iterable
            Number of isosurfaces to compute across valid data range or an
            iterable of float values to explicitly use as the isosurfaces.

        scalars : str, optional
            Name of scalars to contour on. Defaults to currently active scalars.

        compute_normals : bool, optional
            Passed through to the ``vtkContourFilter``.

        compute_gradients : bool, optional
            Passed through to the ``vtkContourFilter``.

        compute_scalars : bool, optional
            Preserves the scalar values that are being contoured

        rng : tuple(float), optional
            If an integer number of isosurfaces is specified, this is the range
            over which to generate contours. Default is the scalar arrays's full
            data range.

        preference : str, optional
            When scalars is specified, this is the preferred scalar type to
            search for in the dataset.  Must be either ``'point'`` or ``'cell'``

        """
        # ``collections.Iterable`` was removed in Python 3.10; use the abc form
        from collections.abc import Iterable
        # Make sure the input has scalars to contour on
        if dataset.n_scalars < 1:
            raise AssertionError('Input dataset for the contour filter must have scalar data.')
        alg = vtk.vtkContourFilter()
        alg.SetInputDataObject(dataset)
        alg.SetComputeNormals(compute_normals)
        alg.SetComputeGradients(compute_gradients)
        alg.SetComputeScalars(compute_scalars)
        # set the array to contour on
        if scalars is None:
            field, scalars = dataset.active_scalar_info
        else:
            _, field = get_scalar(dataset, scalars, preference=preference, info=True)
        # NOTE: only point data is allowed? well cells works but seems buggy?
        if field != vtki.POINT_DATA_FIELD:
            raise AssertionError('Contour filter only works on Point data. Array ({}) is in the Cell data.'.format(scalars))
        alg.SetInputArrayToProcess(0, 0, 0, field, scalars) # args: (idx, port, connection, field, name)
        # set the isosurfaces
        if isinstance(isosurfaces, int):
            # generate values over the requested (or full) data range
            if rng is None:
                rng = dataset.get_data_range(scalars)
            alg.GenerateValues(isosurfaces, rng)
        elif isinstance(isosurfaces, Iterable):
            alg.SetNumberOfContours(len(isosurfaces))
            for i, val in enumerate(isosurfaces):
                alg.SetValue(i, val)
        else:
            raise RuntimeError('isosurfaces not understood.')
        alg.Update()
        return _get_output(alg)
Exemplo n.º 14
0
    def threshold(dataset, value=None, scalars=None, invert=False, continuous=False,
                  preference='cell'):
        """
        This filter will apply a ``vtkThreshold`` filter to the input dataset and
        return the resulting object. This extracts cells where scalar value in each
        cell satisfies threshold criterion.  If scalars is None, the inputs
        active_scalar is used.

        Parameters
        ----------
        value : float or iterable, optional
            Single value or (min, max) to be used for the data threshold.  If
            iterable, then length must be 2. If no value is specified, the
            non-NaN data range will be used to remove any NaN values.

        scalars : str, optional
            Name of scalars to threshold on. Defaults to currently active scalars.

        invert : bool, optional
            If value is a single value, when invert is True cells are kept when
            their values are below parameter "value".  When invert is False
            cells are kept when their value is above the threshold "value".
            Default is False: yielding above the threshold "value".

        continuous : bool, optional
            When True, the continuous interval [minimum cell scalar,
            maximum cell scalar] will be used to intersect the threshold bound,
            rather than the set of discrete scalar values from the vertices.

        preference : str, optional
            When scalars is specified, this is the preferred scalar type to
            search for in the dataset.  Must be either ``'point'`` or ``'cell'``

        """
        # ``collections.Iterable`` was removed in Python 3.10; use the abc form
        from collections.abc import Iterable
        # set the scalars to threshold on
        if scalars is None:
            field, scalars = dataset.active_scalar_info
        arr, field = get_scalar(dataset, scalars, preference=preference, info=True)

        if arr is None:
            raise AssertionError('No arrays present to threshold.')

        # If using an inverted range, merge the result of two filters:
        if isinstance(value, Iterable) and invert:
            valid_range = [np.nanmin(arr), np.nanmax(arr)]
            # Create two thresholds covering everything outside [value[0], value[1]]
            t1 = dataset.threshold([valid_range[0], value[0]], scalars=scalars,
                    continuous=continuous, preference=preference, invert=False)
            t2 = dataset.threshold([value[1], valid_range[1]], scalars=scalars,
                    continuous=continuous, preference=preference, invert=False)
            # Use an AppendFilter to merge the two results
            appender = vtk.vtkAppendFilter()
            appender.AddInputData(t1)
            appender.AddInputData(t2)
            appender.Update()
            return _get_output(appender)

        # Run a standard threshold algorithm
        alg = vtk.vtkThreshold()
        alg.SetInputDataObject(dataset)
        alg.SetInputArrayToProcess(0, 0, 0, field, scalars) # args: (idx, port, connection, field, name)
        # set thresholding parameters
        alg.SetUseContinuousCellRange(continuous)
        # use valid range if no value given
        if value is None:
            value = dataset.get_data_range(scalars)
        # check if value is iterable (if so threshold by min max range like ParaView)
        if isinstance(value, Iterable):
            if len(value) != 2:
                raise AssertionError('Value range must be length one for a float value or two for min/max; not ({}).'.format(value))
            alg.ThresholdBetween(value[0], value[1])
        else:
            # just a single value
            if invert:
                alg.ThresholdByLower(value)
            else:
                alg.ThresholdByUpper(value)
        # Run the threshold
        alg.Update()
        return _get_output(alg)