Example 1
    def merge_image(self, build_dim: np.ndarray, spacing: np.ndarray,
                    ls_obj: List['PrintObject']) -> Tuple[vtkImageData, int]:

        bg_im = vtkImageData()
        bg_im.SetSpacing(spacing)
        bg_im.SetDimensions(build_dim)
        bg_im.SetOrigin(0, 0, 0)
        bg_im.AllocateScalars(VTK_UNSIGNED_CHAR, 1)
        bg_im.GetPointData().GetScalars().Fill(255)

        w_bg = dsa.WrapDataObject(bg_im)
        data_bg = np.reshape(w_bg.PointData['ImageScalars'],
                             build_dim,
                             order='F')

        top_layer = 0

        for obj in ls_obj:
            vtk_im = dsa.WrapDataObject(obj.sliced_object)
            origin = np.ceil(np.array(vtk_im.GetOrigin()) /
                             spacing).astype(int)
            obj_dim = np.array(vtk_im.GetDimensions()).astype(int)
            top_z = origin[2] + obj_dim[2]
            obj_data = np.reshape(vtk_im.PointData['ImageScalars'],
                                  obj_dim,
                                  order='F')
            data_bg[origin[0]:origin[0] + obj_dim[0],
                    origin[1]:origin[1] + obj_dim[1],
                    origin[2]:top_z] = obj_data

            if (top_z > top_layer):
                top_layer = top_z

        return bg_im, top_layer
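A minimal, self-contained sketch of the wrapping pattern used above (names here are illustrative only): WrapDataObject exposes the image scalars as a NumPy array, and a Fortran-ordered reshape lines indices (i, j, k) up with VTK's x-fastest point ordering, so writes to the view land in the underlying vtkImageData, which is what merge_image relies on.

import numpy as np
from vtkmodules.vtkCommonCore import VTK_UNSIGNED_CHAR
from vtkmodules.vtkCommonDataModel import vtkImageData
from vtkmodules.numpy_interface import dataset_adapter as dsa

im = vtkImageData()
im.SetDimensions(4, 3, 2)
im.AllocateScalars(VTK_UNSIGNED_CHAR, 1)
im.GetPointData().GetScalars().Fill(0)

# Fortran order matches VTK's x-fastest memory layout of point data.
view = np.reshape(dsa.WrapDataObject(im).PointData['ImageScalars'],
                  im.GetDimensions(), order='F')
view[1, 2, 0] = 255  # writes through to the vtkImageData scalars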
Example 2
    def RequestData(self, request, inInfo, outInfo):
        logger.debug("Requesting data...")
        input = self.GetInputDataObject(0, 0)
        trajectory_data = dsa.WrapDataObject(input)
        output = dsa.WrapDataObject(vtkPolyData.GetData(outInfo))

        # Retrieve current time
        time = timesteps_util.get_timestep(self, logger=logger)

        # Retrieve trajectory data
        trajectory_times = trajectory_data.PointData["Time"]
        trajectory_points = trajectory_data.Points

        # Interpolate along the trajectory to find current position
        current_position = [
            np.interp(time, trajectory_times, trajectory_points[:, i])
            for i in range(3)
        ]

        # Expose to VTK
        points_vtk = vtk.vtkPoints()
        verts_vtk = vtk.vtkCellArray()
        verts_vtk.InsertNextCell(1)
        points_vtk.InsertPoint(0, *current_position)
        verts_vtk.InsertCellPoint(0)
        output.SetPoints(points_vtk)
        output.SetVerts(verts_vtk)

        # Interpolate remaining point data along the trajectory
        for dataset in trajectory_data.PointData.keys():
            if dataset == "Time":
                continue
            point_data = trajectory_data.PointData[dataset]
            data_at_position = np.zeros(point_data.shape[1:])
            if len(data_at_position.shape) > 0:
                for i in itertools.product(
                        *map(range, data_at_position.shape)):
                    point_data_i = point_data[(slice(None), ) + i]
                    if len(trajectory_times) == len(point_data_i):
                        data_at_position[i] = np.interp(
                            time, trajectory_times, point_data_i)
                    else:
                        logger.warning(
                            "Unable to interpolate trajectory dataset"
                            f" {dataset}[{i}]: Length of dataset"
                            f" ({len(point_data_i)}) does not match length of"
                            f" trajectory times ({len(trajectory_times)}).")
            else:
                data_at_position = np.interp(time, trajectory_times,
                                             point_data)
            data_vtk = vtknp.numpy_to_vtk(np.array([data_at_position]))
            data_vtk.SetName(dataset)
            output.GetPointData().AddArray(data_vtk)
        return 1
Example 3
def execute(self, expression):
    """
    **Internal Method**
    Called by vtkPythonCalculator in its RequestData(...) method. This is not
    intended for use externally except from within
    vtkPythonCalculator::RequestData(...).
    """

    # Add inputs.
    inputs = []

    for index in range(self.GetNumberOfInputConnections(0)):
        # wrap all input data objects using vtkmodules.numpy_interface.dataset_adapter
        wdo_input = dsa.WrapDataObject(self.GetInputDataObject(0, index))
        t, t_index = get_data_time(self, wdo_input.VTKObject,
                                   self.GetInputInformation(0, index))
        wdo_input.time_value = wdo_input.t_value = t
        wdo_input.time_index = wdo_input.t_index = t_index
        inputs.append(wdo_input)

    # Setup output.
    output = dsa.WrapDataObject(self.GetOutputDataObject(0))

    if self.GetCopyArrays():
        if self.GetInputDataObject(0, 0).IsA('vtkTable'):
            output.GetRowData().PassData(inputs[0].GetRowData())
        else:
            output.GetPointData().PassData(inputs[0].GetPointData())
            output.GetCellData().PassData(inputs[0].GetCellData())

    # get a dictionary for arrays in the dataset attributes. We pass that
    # as the variables in the eval namespace for compute.
    variables = get_arrays(inputs[0].GetAttributes(self.GetArrayAssociation()))
    variables.update({
        "time_value": inputs[0].time_value,
        "t_value": inputs[0].t_value,
        "time_index": inputs[0].time_index,
        "t_index": inputs[0].t_index
    })
    retVal = compute(inputs, expression, ns=variables)

    if retVal is not None:
        if hasattr(retVal, "Association"):
            output.GetAttributes(retVal.Association).append(\
              retVal, self.GetArrayName())
        else:
            # if somehow the association was removed we
            # fall back to the input array association
            output.GetAttributes(self.GetArrayAssociation()).append(\
              retVal, self.GetArrayName())
Example 4
def pointIsNear(locations, distance, inputs):
    array = vtkDoubleArray()
    array.SetNumberOfComponents(3)
    array.SetNumberOfTuples(len(locations))
    for i in range(len(locations)):
        array.SetTuple(i, locations[i])
    node = vtkSelectionNode()
    node.SetFieldType(vtkSelectionNode.POINT)
    node.SetContentType(vtkSelectionNode.LOCATIONS)
    node.GetProperties().Set(vtkSelectionNode.EPSILON(), distance)
    node.SetSelectionList(array)

    from paraview.vtk.vtkFiltersExtraction import vtkLocationSelector
    selector = vtkLocationSelector()
    selector.SetInsidednessArrayName("vtkInsidedness")
    selector.Initialize(node)

    inputDO = inputs[0].VTKObject
    outputDO = inputDO.NewInstance()
    outputDO.CopyStructure(inputDO)

    output = dsa.WrapDataObject(outputDO)
    if outputDO.IsA('vtkCompositeDataSet'):
        it = inputDO.NewIterator()
        it.InitTraversal()
        while not it.IsDoneWithTraversal():
            outputDO.SetDataSet(it, inputDO.GetDataSet(it).NewInstance())
            it.GoToNextItem()
    selector.Execute(inputDO, outputDO)

    return output.PointData.GetArray('vtkInsidedness')
Example 5
def execute_on_attribute_data(self, evaluate_locally):
    """Called by vtkAnnotateAttributeDataFilter."""
    inputDO = self.GetCurrentInputDataObject()
    if not inputDO:
        return True

    inputs = [dsa.WrapDataObject(inputDO)]

    info = self.GetInputArrayInformation(0)
    association = info.Get(vtkDataObject.FIELD_ASSOCIATION())
    # sanitize name
    array_name = paraview.make_name_valid(info.Get(vtkDataObject.FIELD_NAME()))

    # note: _get_ns() needs to be called on all ranks to avoid deadlocks.
    ns = _get_ns(self, inputs[0], association)
    if array_name not in ns:
        print("Failed to locate array '%s'." % array_name, file=sys.stderr)
        raise RuntimeError("Failed to locate array")

    if not evaluate_locally:
        # don't evaluate the expression locally.
        return True

    array = ns[array_name]
    if array.IsA("vtkStringArray"):
        chosen_element = array.GetValue(self.GetElementId())
    else:
        chosen_element = array[self.GetElementId()]
    expression = self.GetPrefix() if self.GetPrefix() else ""
    expression += str(chosen_element)
    self.SetComputedAnnotationValue(expression)
    return True
Example 6
    def _RequestParticleData(self, executive, poutput, outInfo):
        from vtkmodules.numpy_interface import dataset_adapter as dsa
        from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid, vtkPartitionedDataSet

        piece = outInfo.Get(executive.UPDATE_PIECE_NUMBER())
        npieces = outInfo.Get(executive.UPDATE_NUMBER_OF_PIECES())

        data_time = self._get_update_time(outInfo)
        idx = self._timemap[data_time]
        itr = self._series.iterations[idx]

        array_by_species = {}
        narrays = self._particlearrayselection.GetNumberOfArrays()
        for i in range(narrays):
            if self._particlearrayselection.GetArraySetting(i):
                name = self._particlearrayselection.GetArrayName(i)
                names = self._get_particle_array_and_component(itr, name)
                if names[0] and self._speciesselection.ArrayIsEnabled(
                        names[0]):
                    if not names[0] in array_by_species:
                        array_by_species[names[0]] = []
                    array_by_species[names[0]].append(names)
        ids = 0
        for species, arrays in array_by_species.items():
            pds = vtkPartitionedDataSet()
            ugrid = vtkUnstructuredGrid()
            pds.SetPartition(0, ugrid)
            poutput.SetPartitionedDataSet(ids, pds)
            ids += 1
            self._load_species(itr, species, arrays, piece, npieces,
                               dsa.WrapDataObject(ugrid))
Example 7
def cellContainsPoint(inputs, locations):
    array = vtkDoubleArray()
    array.SetNumberOfComponents(3)
    array.SetNumberOfTuples(len(locations))
    for i in range(len(locations)):
        array.SetTuple(i, locations[i])
    node = vtkSelectionNode()
    node.SetFieldType(vtkSelectionNode.CELL)
    node.SetContentType(vtkSelectionNode.LOCATIONS)
    node.SetSelectionList(array)

    from visocyte.vtk.vtkFiltersExtraction import vtkLocationSelector
    selector = vtkLocationSelector()
    selector.Initialize(node, "vtkInsidedness")

    inputDO = inputs[0].VTKObject
    outputDO = inputDO.NewInstance()
    outputDO.CopyStructure(inputDO)

    output = dsa.WrapDataObject(outputDO)
    if outputDO.IsA('vtkCompositeDataSet'):
        it = inputDO.NewIterator()
        it.InitTraversal()
        while not it.IsDoneWithTraversal():
            outputDO.SetDataSet(it, inputDO.GetDataSet(it).NewInstance())
            it.GoToNextItem()
    selector.ComputeSelectedElements(inputDO, outputDO)

    return output.CellData.GetArray('vtkInsidedness')
Example 8
    def RequestData(self, request, inInfo, outInfo):
        logger.info("Loading waveform data...")
        start_time = time.time()

        output = dsa.WrapDataObject(vtkTable.GetData(outInfo))

        if (self._filename is not None and self._subfile is not None
                and len(self.mode_names) > 0):
            with h5py.File(self._filename, "r") as f:
                strain = f[self._subfile]
                t = strain["Y_l2_m2.dat"][:, 0]
                col_time = vtknp.numpy_to_vtk(t, deep=False)
                col_time.SetName("Time")
                output.AddColumn(col_time)

                for mode_name in self.mode_names:
                    logger.debug(f"Reading mode '{mode_name}'...")
                    col_mode = vtknp.numpy_to_vtk(strain[mode_name +
                                                         ".dat"][:, 1:],
                                                  deep=False)
                    col_mode.SetName(mode_name)
                    output.AddColumn(col_mode)

        logger.info(
            f"Waveform data loaded in {time.time() - start_time:.3f}s.")

        return 1
Example 9
    def update_coil_mesh(self, polydata):
        if self.connection is not None:
            wrapped = dataset_adapter.WrapDataObject(polydata)

            points = np.asarray(wrapped.Points)
            polygons_raw = np.asarray(wrapped.Polygons)

            # The polygons are returned as 1d-array of the form
            #
            # [n_0, id_0(0), id_0(1), ..., id_0(n_0),
            #  n_1, id_1(0), id_1(1), ..., id_1(n_1),
            #  ...]
            #
            # where n_i is the number of vertices in polygon i, and id_i's are indices to the vertex list.
            #
            # Assert that all polygons have an equal number of vertices, reshape the array, and drop n_i's.
            #
            assert np.all(polygons_raw[0::self.N_VERTICES_IN_POLYGON +
                                       1] == self.N_VERTICES_IN_POLYGON)

            polygons = polygons_raw.reshape(-1,
                                            self.N_VERTICES_IN_POLYGON + 1)[:,
                                                                            1:]

            self.connection.update_coil_mesh(
                points=points,
                polygons=polygons,
            )
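The comment above describes VTK's flat polygon connectivity layout (a vertex count followed by that many point ids, repeated), which update_coil_mesh then assumes is uniform. As a hedged sketch, a hypothetical helper (not part of the class above) that decodes the same layout when polygon sizes vary:

import numpy as np

def split_polygons(polygons_raw):
    # polygons_raw = [n_0, id_0(0), ..., id_0(n_0 - 1), n_1, id_1(0), ...]
    polygons = []
    i = 0
    while i < len(polygons_raw):
        n = int(polygons_raw[i])                 # vertex count of this polygon
        polygons.append(np.asarray(polygons_raw[i + 1:i + 1 + n]))
        i += 1 + n                               # skip the count and its ids
    return polygons

# Two triangles followed by a quad:
# split_polygons(np.array([3, 0, 1, 2, 3, 2, 3, 0, 4, 4, 5, 6, 7]))
# -> [array([0, 1, 2]), array([2, 3, 0]), array([4, 5, 6, 7])]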
Example 10
    def RequestData(self, request, inInfo, outInfo):
        output = dsa.WrapDataObject(vtkUnstructuredGrid.GetData(outInfo))

        # Use h5py to read the mesh
        import h5py

        f = h5py.File(self._filename, "r")
        mesh = f[list(f.keys())[0]]
        points = mesh["coordinates"][()]
        celltype = mesh["topology"].attrs["celltype"].decode("utf-8")
        cells = mesh["topology"][()]

        # Points
        if points.shape[1] == 2:
            points = np.hstack([points, np.zeros((len(points), 1))])
        output.SetPoints(points)

        # Cells
        vtk_type = dolfinx_to_vtk_type[celltype]
        ncells, npoints_per_cell = cells.shape
        cell_types = np.full(ncells, vtk_type, dtype=np.ubyte)
        cell_offsets = (1 + npoints_per_cell) * np.arange(ncells, dtype=int)
        cell_conn = np.hstack(
            [npoints_per_cell * np.ones((ncells, 1), dtype=int),
             cells]).flatten()
        output.SetCells(cell_types, cell_offsets, cell_conn)

        return 1
Example 11
    def RequestData(self, request, inInfo, outInfo):
        logger.debug("Requesting data...")
        input = self.GetInputDataObject(0, 0)
        trajectory_data = dsa.WrapDataObject(input)
        output = dsa.WrapDataObject(vtkPolyData.GetData(outInfo))

        # Shallow-copy input trajectory data to output
        output.ShallowCopy(input)

        # Retrieve current time
        time = timesteps_util.get_timestep(self, logger=logger)

        # Add age data to the points
        age = time - trajectory_data.PointData["Time"]
        age_vtk = vtknp.numpy_to_vtk(age, deep=True)
        age_vtk.SetName("Age")
        output.GetPointData().AddArray(age_vtk)

        return 1
Example 12
    def RequestData(self, request, inInfoVec, outInfoVec):
        mesh = dsa.WrapDataObject(vtkUnstructuredGrid.GetData(inInfoVec[0]))

        # Read points
        points = np.asarray(mesh.GetPoints())

        # Read cells
        # Adapted from test/legacy_reader.py
        cell_conn = mesh.GetCells()
        cell_offsets = mesh.GetCellLocations()
        cell_types = mesh.GetCellTypes()
        cells_dict = {}
        for vtk_cell_type in np.unique(cell_types):
            offsets = cell_offsets[cell_types == vtk_cell_type]
            ncells = len(offsets)
            npoints = cell_conn[offsets[0]]
            array = np.empty((ncells, npoints), dtype=int)
            for i in range(npoints):
                array[:, i] = cell_conn[offsets + i + 1]
            cells_dict[vtk_to_meshio_type[vtk_cell_type]] = array
        cells = [meshio.CellBlock(key, cells_dict[key]) for key in cells_dict]

        # Read point and field data
        # Adapted from test/legacy_reader.py
        def _read_data(data):
            out = {}
            for i in range(data.VTKObject.GetNumberOfArrays()):
                name = data.VTKObject.GetArrayName(i)
                array = np.asarray(data.GetArray(i))
                out[name] = array
            return out

        point_data = _read_data(mesh.GetPointData())
        field_data = _read_data(mesh.GetFieldData())

        # Read cell data
        cell_data_flattened = _read_data(mesh.GetCellData())
        cell_data = {}
        for name, array in cell_data_flattened.items():
            cell_data[name] = []
            for cell_type in cells_dict:
                vtk_cell_type = meshio_to_vtk_type[cell_type]
                mask_cell_type = cell_types == vtk_cell_type
                cell_data[name].append(array[mask_cell_type])

        # Use meshio to write the mesh
        meshio.write_points_cells(
            self._filename,
            points,
            cells,
            point_data=point_data,
            cell_data=cell_data,
            field_data=field_data,
        )
        return 1
Example 13
def execute_on_global_data(self):
    """Called by vtkAnnotateGlobalDataFilter."""
    inputDO = self.GetCurrentInputDataObject()
    if not inputDO:
        return True

    inputs = [dsa.WrapDataObject(inputDO)]
    association = self.GetArrayAssociation()
    ns = _get_ns(self, inputs[0], association)
    if self.GetFieldArrayName() not in ns:
        print("Failed to locate global array '%s'." % self.GetFieldArrayName(),
              file=sys.stderr)
        raise RuntimeError("Failed to locate global array")

    array = ns[self.GetFieldArrayName()]
    chosen_element = array
    try:
        # if the array has as many elements as the timesteps, pick the element
        # matching the current timestep.
        if self.GetNumberOfTimeSteps() > 0 and \
            array.shape[0] == self.GetNumberOfTimeSteps():
            chosen_element = array[ns["time_index"]]

        # if the array has as many elements as the `mode_shape_range`, pick the
        # element matching the `mode_shape` (BUG #0015322).
        elif "mode_shape" in ns and "mode_shape_range" in ns and \
            ns["mode_shape_range"].shape[1] == 2 and \
            array.shape[0] == (ns["mode_shape_range"].GetValue(1) - ns["mode_shape_range"].GetValue(0) + 1):
            chosen_element = array[ns["mode_shape"].GetValue(0) -
                                   ns["mode_shape_range"].GetValue(0)]
        elif array.shape[0] == 1:
            # for single element arrays, just extract the value.
            # This avoids the extra () when converting to string
            # (see BUG #15321).
            chosen_element = array[0]
    except AttributeError:
        pass

    try:
        # hack for string array.
        if chosen_element.IsA("vtkStringArray"):
            chosen_element = chosen_element.GetValue(0)
    except:
        pass
    expression = self.GetPrefix() if self.GetPrefix() else ""
    try:
        if type(chosen_element) is not dsa.VTKNoneArray:
            expression += self.GetFormat() % (chosen_element, )
    except TypeError:
        expression += chosen_element
        print("Warning: invalid format for Annotate Global Data")
    expression += self.GetPostfix() if self.GetPostfix() else ""
    self.SetComputedAnnotationValue(expression)
    return True
Example 14
    def RequestData(self, request, inInfoVec, outInfoVec):
        from vtkmodules.vtkCommonDataModel import vtkTable
        from vtkmodules.numpy_interface import dataset_adapter as dsa

        table = dsa.WrapDataObject(vtkTable.GetData(inInfoVec[0], 0))
        kwargs = {}
        for aname in table.RowData.keys():
            kwargs[aname] = table.RowData[aname]

        import numpy
        numpy.savez_compressed(self._filename, **kwargs)
        return 1
Example 15
    def RequestData(self, request, inInfoVec, outInfoVec):
        output = dsa.WrapDataObject(vtkUnstructuredGrid.GetData(outInfoVec))

        # Determine how to read the mesh
        self._file_format = get_erdc_extensions(self._filename)
        if (self._file_format):
            mesh = meshiah.read(self._filename)
            points, cells = mesh.points, mesh.cells
        elif (meshioLib and not self._file_format):
            mesh = meshio.read(self._filename, self._file_format)
            points, cells = mesh.points, mesh.cells
        else:
            print(f"Unable to deduce file format from file: {self._filename}")
            return 0

        # Points
        if points.shape[1] == 2:
            points = np.hstack([points, np.zeros((len(points), 1))])
        output.SetPoints(points)

        # CellBlock, adapted from test/legacy_writer.py
        cell_types = np.array([], dtype=np.ubyte)
        cell_offsets = np.array([], dtype=int)
        cell_conn = np.array([], dtype=int)
        for meshio_type, data in cells:
            vtk_type = meshio.vtk._vtk.meshio_to_vtk_type[meshio_type]
            ncells, npoints = data.shape
            cell_types = np.hstack(
                [cell_types,
                 np.full(ncells, vtk_type, dtype=np.ubyte)])
            offsets = len(cell_conn) + (1 + npoints) * \
                np.arange(ncells, dtype=int)
            cell_offsets = np.hstack([cell_offsets, offsets])
            conn = np.hstack([npoints * np.ones((ncells, 1), dtype=int),
                              data]).flatten()
            cell_conn = np.hstack([cell_conn, conn])
        output.SetCells(cell_types, cell_offsets, cell_conn)

        # Point data
        for name, array in mesh.point_data.items():
            output.PointData.append(array, name)

        # Cell data
        for name, data in mesh.cell_data.items():
            array = np.concatenate(data)
            output.CellData.append(array, name)

        # Field data
        for name, array in mesh.field_data.items():
            output.FieldData.append(array, name)

        return 1
Example 16
    def RequestData(self, request, inInfoVec, outInfoVec):
        from vtkmodules.vtkCommonDataModel import vtkTable
        from vtkmodules.numpy_interface import dataset_adapter as dsa

        data_time = self._get_update_time(outInfoVec.GetInformationObject(0))
        raw_data = self._get_raw_data(data_time)
        output = dsa.WrapDataObject(vtkTable.GetData(outInfoVec, 0))
        for name in raw_data.dtype.names:
            if self._arrayselection.ArrayIsEnabled(name):
                output.RowData.append(raw_data[name], name)

        if data_time is not None:
            output.GetInformation().Set(output.DATA_TIME_STEP(), data_time)
        return 1
Example 17
        def f() -> str:
            x_d, y_d, _ = img.GetDimensions()
            single_slice = vtkExtractVOI()
            single_slice.SetSampleRate([1, 1, 1])
            single_slice.SetInputData(img)
            single_slice.SetVOI(0, x_d - 1, 0, y_d - 1, z, z)
            single_slice.Update()

            image = dsa.WrapDataObject(single_slice.GetOutput())
            point_data = image.PointData['ImageScalars'].reshape(
                image.GetDimensions()[0:2], order='F').T
            pil_im = Image.fromarray(point_data)
            pil_im = pil_im.convert('1')
            pil_im.save(os.path.join(self.img_folder, img_name))

            return "M95 P%i I%i X%f Y%f S%f; Print layer\n" % (printhead_num,
                                                               z, x, y, speed)
Example 18
    def RequestData(self, request, inInfoVec, outInfoVec):
        output = dsa.WrapDataObject(vtkUnstructuredGrid.GetData(outInfoVec))

        # Use meshio to read the mesh
        mesh = meshio.read(self._filename, self._file_format)
        points, cells = mesh.points, mesh.cells

        # Points
        if points.shape[1] == 2:
            points = np.hstack([points, np.zeros((len(points), 1))])
        output.SetPoints(points)

        # Cells, adapted from
        # https://github.com/nschloe/meshio/blob/master/test/legacy_writer.py
        cell_types = np.array([], dtype=np.ubyte)
        cell_offsets = np.array([], dtype=int)
        cell_conn = np.array([], dtype=int)
        for meshio_type, data in cells:
            vtk_type = meshio_to_vtk_type[meshio_type]
            ncells, npoints = data.shape
            cell_types = np.hstack(
                [cell_types,
                 np.full(ncells, vtk_type, dtype=np.ubyte)])
            offsets = len(cell_conn) + (1 + npoints) * np.arange(ncells,
                                                                 dtype=int)
            cell_offsets = np.hstack([cell_offsets, offsets])
            conn = np.hstack([npoints * np.ones((ncells, 1), dtype=int),
                              data]).flatten()
            cell_conn = np.hstack([cell_conn, conn])
        output.SetCells(cell_types, cell_offsets, cell_conn)

        # Point data
        for name, array in mesh.point_data.items():
            output.PointData.append(array, name)

        # Cell data
        print(mesh.cell_data)
        for name, data in mesh.cell_data.items():
            array = np.concatenate(data)
            output.CellData.append(array, name)

        # Field data
        for name, array in mesh.field_data.items():
            output.FieldData.append(array, name)

        return 1
Example 19
    def RequestData(self, request, inInfo, outInfo):
        from vtkmodules.numpy_interface import dataset_adapter as dsa
        from vtkmodules.vtkCommonDataModel import vtkUnstructuredGrid
        import vtk
        inData = self.GetInputData(inInfo, 0, 0)
        # outData = self.GetOutputData(outInfo, 0)
        output = dsa.WrapDataObject(vtkUnstructuredGrid.GetData(outInfo, 0))
        output.ShallowCopy(inData)
        # output = dsa.WrapDataObject(vtkUnstructuredGrid.GetData(outData, 0))
        data = self._read_flux_file()
        data_vtk = vtk.vtkDoubleArray()
        data_vtk.SetNumberOfValues(len(data))
        data_vtk.SetName('FluxData')
        for i, val in enumerate(data):
            data_vtk.InsertValue(i, val[0] / self._numlights)
        output.GetCellData().AddArray(data_vtk)
        return 1
Example 20
    def RequestData(self, request, inInfoVec, outInfoVec):
        data_time = self.getCurrentTime(outInfoVec.GetInformationObject(0))
        output = dsa.WrapDataObject(vtkUnstructuredGrid.GetData(outInfoVec))

        currentFile = self.fileList[data_time]

        p = partio.read(currentFile)

        if p is None:
            return 1

        totalParticles = p.numParticles()

        for i in range(p.numAttributes()):
            attr = p.attributeInfo(i)
            if attr.name == "position":
                pos = np.array(p.data_buffer(attr), copy=False)
                output.SetPoints(pos)

        # cell_conn contains tuples (num indices, vertex ids)
        # e.g. particles (1,0), (1,1), (1,2), (1,3), ...
        # e.g. triangles (3, 0, 1, 2), (3, 2, 3, 4), ...
        cell_conn = np.hstack([
            np.ones((totalParticles, 1)),
            np.arange(0, totalParticles, 1, dtype=int).reshape(-1, 1)
        ]).flatten()
        # for particles use type VERTEX=1
        cell_types = np.full((totalParticles), 1, np.ubyte)
        # offset between two particles is 2 since cell_conn always contains the number of indices
        cell_offsets = 2 * np.arange(totalParticles, dtype=int)

        output.SetCells(cell_types, cell_offsets, cell_conn)

        # add field data
        for i in range(p.numAttributes()):
            attr = p.attributeInfo(i)
            if attr.name != "position":
                values = np.array(p.data_buffer(attr), copy=False)
                output.PointData.append(values, attr.name)
        return 1
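For comparison with the triangle layout mentioned in the comment above, a sketch of the same (cell_types, cell_offsets, cell_conn) triple for a strip of triangles, assuming the SetCells convention used in this snippet (VTK_TRIANGLE has cell type 5):

import numpy as np

ntriangles = 4
# connectivity as (count, id0, id1, id2) per triangle: (3, 0, 1, 2), (3, 1, 2, 3), ...
ids = np.arange(ntriangles).reshape(-1, 1) + np.array([0, 1, 2])
cell_conn = np.hstack([np.full((ntriangles, 1), 3), ids]).flatten()
# each triangle occupies 4 entries in cell_conn (its count plus three ids)
cell_offsets = 4 * np.arange(ntriangles, dtype=int)
cell_types = np.full(ntriangles, 5, np.ubyte)  # 5 == VTK_TRIANGLE
# output.SetCells(cell_types, cell_offsets, cell_conn)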
Example 21
    def RequestData(self, request, inInfo, outInfo):
        logger.debug("Requesting data...")
        output = dsa.WrapDataObject(vtkPolyData.GetData(outInfo))

        with h5py.File(self._filename, "r") as trajectory_file:
            subfile = trajectory_file[self._subfile]
            coords = np.array(subfile[self._coords_dataset])
        coords[:, 1:] *= self._radial_scale
        logger.debug(f"Loaded coordinates with shape {coords.shape}.")

        # Construct a line of points
        points_vtk = vtk.vtkPoints()
        # Each ID is composed of (1) the order of the point in the line and (2)
        # the index in the `vtkPoints` constructed above
        line_vtk = vtk.vtkPolyLine()
        point_ids = line_vtk.GetPointIds()
        point_ids.SetNumberOfIds(len(coords))
        for i, point in enumerate(coords):
            points_vtk.InsertPoint(i, *point[1:])
            point_ids.SetId(i, i)
        output.SetPoints(points_vtk)
        # Set the line ordering as "cell data"
        output.Allocate(1, 1)
        output.InsertNextCell(line_vtk.GetCellType(), line_vtk.GetPointIds())

        # Add time data to the points
        time = vtknp.numpy_to_vtk(coords[:, 0])
        time.SetName("Time")
        output.GetPointData().AddArray(time)

        # Add remaining datasets from file to trajectory points
        with h5py.File(self._filename, "r") as trajectory_file:
            subfile = trajectory_file[self._subfile]
            for dataset in subfile:
                if dataset == self._coords_dataset:
                    continue
                dataset_vtk = vtknp.numpy_to_vtk(subfile[dataset][:, 1:])
                dataset_vtk.SetName(dataset.replace(".dat", ""))
                output.GetPointData().AddArray(dataset_vtk)
        return 1
Example 22
def execute(self):
    """Called by vtkPythonAnnotationFilter."""
    expression = self.GetExpression()
    inputDO = self.GetCurrentInputDataObject()
    if not expression or not inputDO:
        return True

    inputs = [dsa.WrapDataObject(inputDO)]

    association = self.GetArrayAssociation()
    ns = _get_ns(self, inputs[0], association)

    try:
        result = calculator.compute(inputs, expression, ns=ns)
    except:
        print("Failed to evaluate expression '%s'. "\
            "The following exception stack should provide additional "\
            "developer specific information. This typically implies a malformed "\
            "expression. Verify that the expression is valid.\n\n" \
            "Variables in current scope are %s \n" % (expression, list(ns)), file=sys.stderr)
        raise
    self.SetComputedAnnotationValue("%s" % result)
    return True
Example 23
    def RequestData(self, request, inInfo, outInfo):
        output = vtkImageData.GetData(outInfo, 0)
        if output is None:
            return 0

        # create 3D coordinate grid
        xs = np.linspace(0.0, 2.0, self.Dimensions[0])
        ys = np.linspace(0.0, 1.0, self.Dimensions[1])
        ts = np.linspace(0.0, 10.0, self.Dimensions[2])
        x, y, t = np.meshgrid(xs, ys, ts, indexing='ij')

        # allocate image data output
        output.SetDimensions(self.Dimensions)
        output.SetSpacing(abs(xs[1] - xs[0]), abs(ys[1] - ys[0]),
                          abs(ts[1] - ts[0]))
        output.SetOrigin(xs[0], ys[0], ts[0])
        output = dsa.WrapDataObject(output)

        # double gyre (https://shaddenlab.berkeley.edu/uploads/LCS-tutorial/examples.html)
        a = self.eps * np.sin(2.0 * np.pi / self.T * t)
        b = 1.0 - 2.0 * self.eps * np.sin(2.0 * np.pi / self.T * t)
        f = a * x**2 + b * x
        df = 2.0 * a * x + b
        dx = -np.pi * self.A * np.sin(np.pi * f) * np.cos(np.pi * y)
        dy = np.pi * self.A * np.cos(np.pi * f) * np.sin(np.pi * y) * df
        dt = np.full_like(t, 1.0)

        # stack and flatten such that layout matches VTK
        v = np.stack([dx, dy, dt], axis=-1)
        v = v.reshape((-1, 3), order='F')
        output.PointData.append(v, 'v')

        # compute and output velocity magnitude
        v_mag = np.linalg.norm(v[..., :2], axis=-1)
        output.PointData.append(v_mag, 'v_mag')

        return 1
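For reference, the velocity computed above is the standard double gyre from the page cited in the comment; with eps, A and T as in the attributes used here, the field derives from a stream function (the extra component dt = 1 simply lifts it to space-time):

\psi(x, y, t) = A \sin(\pi f(x, t)) \sin(\pi y), \qquad f(x, t) = a(t) x^2 + b(t) x
a(t) = \epsilon \sin(2 \pi t / T), \qquad b(t) = 1 - 2 \epsilon \sin(2 \pi t / T)
u = -\partial\psi/\partial y = -\pi A \sin(\pi f) \cos(\pi y), \qquad
v = \partial\psi/\partial x = \pi A \cos(\pi f) \sin(\pi y) \, \partial f/\partial x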
Example 24
    def compute(self, debug=False):
        from vtkmodules.vtkFiltersCore import vtkCellDataToPointData
        from vtkmodules.vtkFiltersSources import vtkPlaneSource
        from vtkmodules.vtkFiltersPoints import vtkVoronoiKernel, vtkPointInterpolator
        from vtkmodules.vtkFiltersVerdict import vtkCellSizeFilter

        d = 1 / self.n
        y = np.linspace(d / 2, 2 - d / 2, 2 * self.n)

        reader = self.read(self.time, debug=debug)
        caseData = reader.GetOutput()
        internalBlock = caseData.GetBlock(0)
        patchBlocks = caseData.GetBlock(1)

        bounds = internalBlock.GetBounds()

        if debug:
            print(bounds)

        fieldNames = dsa.WrapDataObject(internalBlock).GetCellData().keys()

        averaged = {}
        for i, field in enumerate(fieldNames):
            averaged[field] = []

        pointData = vtkCellDataToPointData()
        pointData.SetInputData(internalBlock)
        pointData.Update()

        plane = vtkPlaneSource()
        plane.SetResolution(int(bounds[1] / d), int(bounds[5] / d))

        kernel = vtkVoronoiKernel()

        interpolator = vtkPointInterpolator()
        interpolator.SetSourceData(pointData.GetOutput())
        interpolator.SetKernel(kernel)

        # Internal field, go layer by layer
        for i in range(y.size):
            plane.SetOrigin(0.55 * (bounds[0] + bounds[1]), y[i],
                            0.15 * (bounds[4] + bounds[5]))
            plane.SetPoint1(bounds[0], y[i], bounds[4])
            plane.SetPoint2(bounds[1], y[i], bounds[5])
            plane.Update()

            interpolator.SetInputConnection(plane.GetOutputPort())
            interpolator.Update()

            interpolatedData = dsa.WrapDataObject(
                interpolator.GetOutput()).GetPointData()
            for field in fieldNames:
                averaged[field].append(np.mean(interpolatedData[field],
                                               axis=0))

        # Patch data
        for wall in ["bottomWall", "topWall"]:
            wallBlock = patchBlocks.GetBlock(
                self.get_block_index(patchBlocks, wall))
            cellSizeFilter = vtkCellSizeFilter()
            cellSizeFilter.SetInputData(wallBlock)
            cellSizeFilter.Update()
            area = dsa.WrapDataObject(
                cellSizeFilter.GetOutput()).CellData['Area']

            wallData = dsa.WrapDataObject(wallBlock).CellData

            for field in fieldNames:
                # area weighted average
                avrg = np.sum(wallData[field] * area, axis=0) / np.sum(area)
                if wall == "bottomWall":
                    averaged[field].insert(0, avrg)
                else:
                    averaged[field].append(avrg)

        for field in fieldNames:
            averaged[field] = np.array(averaged[field])

        self.y = np.append(np.append(0, y), 2)
        self.avrgFields = averaged

        self.u = self.avrgFields['UMean'][:, 0]
        self.uu = self.avrgFields['UPrime2Mean'][:, 0]
        self.vv = self.avrgFields['UPrime2Mean'][:, 1]
        self.ww = self.avrgFields['UPrime2Mean'][:, 2]
        self.uv = self.avrgFields['UPrime2Mean'][:, 3]
        self.k = 0.5 * (self.uu + self.vv + self.ww)
        self.nut = self.avrgFields['nutMean']

        self.tau = 0
        if self.wallModel:
            self.wss = self.avrgFields['wallShearStressMean'][:, 0]
            self.tau = 0.5 * (self.wss[0] + self.wss[-1])
        else:
            self.tau = self.nu * 0.5 * (self.u[1] + self.u[-2]) / self.y[1]

        self.uTau = np.sqrt(self.tau)
        self.delta = 0.5 * (self.y[-1] - self.y[0])
        self.uB = simps(self.u, self.y) / (2 * self.delta)
        self.uC = 0.5 * (self.u[int(self.y.size / 2)] +
                         self.u[int(self.y.size / 2) - 1])

        self.yPlus = self.y * self.uTau / self.nu
        self.uPlus = self.u / self.uTau
        self.uuPlus = self.uu / self.uTau**2
        self.vvPlus = self.vv / self.uTau**2
        self.wwPlus = self.ww / self.uTau**2
        self.uvPlus = self.uv / self.uTau**2
        self.kPlus = self.k / self.uTau**2
        self.uRms = np.sqrt(self.uu) / self.uTau
        self.vRms = np.sqrt(self.vv) / self.uTau
        self.wRms = np.sqrt(self.ww) / self.uTau

        self.reTau = self.uTau * self.delta / self.nu
        self.reB = self.uB * self.delta / self.nu
        self.reC = self.uC * self.delta / self.nu

        self.theta = tbl.momentum_thickness(self.y[:int(self.y.size / 2)],
                                            self.u[:int(self.u.size / 2)],
                                            interpolate=True)
        self.delta99 = tbl.delta_99(self.y[:int(self.y.size / 2)],
                                    self.u[:int(self.u.size / 2)],
                                    interpolate=True)
        #
        self.deltaStar = tbl.delta_star(self.y[:int(self.y.size / 2)],
                                        self.u[:int(self.u.size / 2)],
                                        interpolate=True)
        #
        self.reTheta = self.theta * self.uC / self.nu
        self.reDelta99 = self.delta99 * self.uC / self.nu
        self.reDeltaStar = self.deltaStar * self.uC / self.nu
Example 25
 def _get_trajectory_data(self):
     return dsa.WrapDataObject(self.GetInputDataObject(0, 0))
Example 26
from vtkmodules.vtkIOLegacy import vtkPolyDataReader, vtkPolyDataWriter
from vtkmodules.vtkCommonDataModel import vtkPolyData
from vtkmodules.vtkCommonCore import vtkIdList
from vtkmodules.numpy_interface import dataset_adapter as dsa
import numpy

rd = vtkPolyDataReader()
rd.SetFileName('T7.vtk')
rd.Update()

pd = rd.GetOutput()
pdw = dsa.WrapDataObject(pd)

# Get the number of neighbors for each point
pd.BuildLinks()
valence = []
neighbors = vtkIdList()

for i in range(pd.GetNumberOfPoints()):
    pd.GetPointCells(i, neighbors)
    valence.append(neighbors.GetNumberOfIds())

# Add valence and position vectors as pointdata
pdw.PointData.append(numpy.array(valence), 'Valence')
pdw.PointData.append(pdw.Points, 'PositionVectors')

wr = vtkPolyDataWriter()
wr.SetFileName('TryThis.vtk')
wr.SetInputData(pd)
wr.Write()
Example 27
    def RequestData(self, request, inInfoVec, outInfoVec):
        output = dsa.WrapDataObject(vtkUnstructuredGrid.GetData(outInfoVec))

        # Use meshio to read the mesh
        # mesh = meshiah.read(self._filename, self._file_format)
        print(f"Opening mesh: {self._filename}")
        with open(self._filename) as ifile:
            points = []
            facets = []
            cells = []
            mats = []
            for line in ifile.readlines():
                strip = line.strip()
                split = strip.split()

                if split[0] == "ND":
                    # Vertex
                    points.append([float(x) for x in split[2:]])
                elif split[0] == "E3T":
                    # Triangle
                    data = [int(x) for x in split[2:]]
                    facets.append(data[0:3])
                    mats.append(data[3])
                elif split[0] == "E4T":
                    # Tetrahedron
                    data = [int(x) for x in split[2:]]
                    facets.append(data[0:4])
                    mats.append(data[4])
                else:
                    continue

        points_np = np.array(points)
        cells_np = np.array(facets)
        mats_np = np.array(mats, dtype=np.int32)

        if cells_np.shape[1] == 3:
            cells.append(["triangle", cells_np - 1])
        elif cells_np.shape[1] == 4:
            cells.append(["tetrahedron", cells_np - 1])
        else:
            logging.warning(
                "ERDC writer only supports triangles and tetrahedrons at this time. "
                "Skipping {} polygons with {} nodes".format(
                    cells_np.shape[0], cells_np.shape[1]))
        cell_data = {}
        cell_data['Region'] = []
        cell_data['Region'].append(mats_np)

        # Points
        # if points.shape[1] == 2:
        #    points = np.hstack([points, np.zeros((len(points), 1))])
        output.SetPoints(points_np)

        # CellBlock, adapted from test/legacy_writer.py
        cell_types = np.array([], dtype=np.ubyte)
        cell_offsets = np.array([], dtype=int)
        cell_conn = np.array([], dtype=int)
        # triangle - vtk type = 5
        # tetrahedron - vtk type = 10

        for erdc_type, data in cells:
            vtk_type = vtk_type_from_erdc(erdc_type)
            ncells, npoints = data.shape
            cell_types = np.hstack(
                [cell_types,
                 np.full(ncells, vtk_type, dtype=np.ubyte)])
            offsets = len(cell_conn) + (1 + npoints) * np.arange(ncells,
                                                                 dtype=int)
            cell_offsets = np.hstack([cell_offsets, offsets])
            conn = np.hstack([npoints * np.ones((ncells, 1), dtype=int),
                              data]).flatten()
            cell_conn = np.hstack([cell_conn, conn])
        print(f"cell_types: \n {cell_types}")
        print(f"cell_offsets: \n {cell_offsets}")
        print(f"cell_conn: \n {cell_conn}")
        output.SetCells(cell_types, cell_offsets, cell_conn)

        # Point data
        #        for name, array in mesh.point_data.items():
        #            output.PointData.append(array, name)

        # Cell data
        for name, data in cell_data.items():
            array = np.concatenate(data)
            output.CellData.append(array, name)

        # Field data
        # for name, array in mesh.field_data.items():
        #     output.FieldData.append(array, name)

        return 1
Example 28
    def RequestData(self, request, inInfo, outInfo):
        logger.debug("Requesting data...")
        info = outInfo.GetInformationObject(0)
        logger.debug(f"Information object: {info}")
        update_extents = info.Get(self.GetExecutive().UPDATE_EXTENT())
        logger.debug(
            f"Responsible for updating these extents: {update_extents}")

        output = dsa.WrapDataObject(vtkDataSet.GetData(outInfo))

        logger.info("Computing SWSH grid...")
        start_time = time.time()

        # Setup grid
        # TODO: Take the `update_extents` into account to support rendering
        # in parallel
        N = self.num_points_per_dim
        N_y = N // 2 if self.clip_y_normal else N
        size = self.size
        spacing = 2.0 * size / N
        output.SetDimensions(N, N_y, N)
        output.SetOrigin(*(3 * (-size, )))
        output.SetSpacing(*(3 * (spacing, )))

        # Compute the SWSHs on the grid
        swsh_grid, r = swsh_cache.cached_swsh_grid(
            size=size,
            num_points=N,
            spin_weight=self.spin_weight,
            ell_max=self.ell_max,
            clip_y_normal=self.clip_y_normal,
            clip_z_normal=False,
            cache_dir=self.swsh_cache_dir,
        )

        # Expose radial coordinate to VTK
        r_vtk = vtknp.numpy_to_vtk(r, deep=False)
        r_vtk.SetName("RadialCoordinate")
        output.GetPointData().AddArray(r_vtk)

        for l in range(abs(self.spin_weight), self.ell_max + 1):
            for m in range(1, l + 1):
                mode_profile = (swsh_grid[:, LM_index(l, m, 0)] +
                                swsh_grid[:, LM_index(l, -m, 0)])
                mode_name = "Y_l{}_m{}".format(l, m)
                # Expose complex field to VTK as two arrays of floats
                mode_real_vtk = vtknp.numpy_to_vtk(np.real(mode_profile),
                                                   deep=True)
                mode_imag_vtk = vtknp.numpy_to_vtk(np.imag(mode_profile),
                                                   deep=True)
                mode_abs_vtk = vtknp.numpy_to_vtk(np.abs(mode_profile),
                                                  deep=True)
                mode_real_vtk.SetName(mode_name + " Real")
                mode_imag_vtk.SetName(mode_name + " Imag")
                mode_abs_vtk.SetName(mode_name + " Abs")
                output.GetPointData().AddArray(mode_real_vtk)
                output.GetPointData().AddArray(mode_imag_vtk)
                output.GetPointData().AddArray(mode_abs_vtk)

        logger.info(f"SWSH grid computed in {time.time() - start_time:.3f}s.")
        return 1
Example 29
    def RequestData(self, request, inInfoVec, outInfoVec):
        global _has_openpmd
        if not _has_openpmd:
            print_error("Required Python module 'openpmd_api' missing!")
            return 0

        from vtkmodules.vtkCommonDataModel import vtkImageData, vtkUnstructuredGrid
        from vtkmodules.vtkCommonDataModel import vtkPartitionedDataSet, vtkPartitionedDataSetCollection
        from vtkmodules.vtkCommonExecutionModel import vtkExtentTranslator, vtkStreamingDemandDrivenPipeline
        from vtkmodules.numpy_interface import dataset_adapter as dsa

        executive = vtkStreamingDemandDrivenPipeline
        output = vtkPartitionedDataSet.GetData(outInfoVec, 0)
        poutput = vtkPartitionedDataSetCollection.GetData(outInfoVec, 1)
        outInfo = outInfoVec.GetInformationObject(0)
        piece = outInfo.Get(executive.UPDATE_PIECE_NUMBER())
        npieces = outInfo.Get(executive.UPDATE_NUMBER_OF_PIECES())
        nghosts = outInfo.Get(executive.UPDATE_NUMBER_OF_GHOST_LEVELS())
        et = vtkExtentTranslator()

        data_time = self._get_update_time(outInfo)
        idx = self._timemap[data_time]

        itr = self._series.iterations[idx]
        arrays = []
        narrays = self._arrayselection.GetNumberOfArrays()
        for i in range(narrays):
            if self._arrayselection.GetArraySetting(i):
                name = self._arrayselection.GetArrayName(i)
                arrays.append((name, self._find_array(itr, name)))
        shp = None
        spacing = None
        theta_modes = None
        grid_offset = None
        for _, ary in arrays:
            var = ary[0]
            for name, scalar in var.items():
                shape = scalar.shape
                break
            spc = list(ary[1])
            if not spacing:
                spacing = spc
            elif spacing != spc:  # all meshes need to have the same spacing
                return 0
            offset = list(ary[2])
            if not grid_offset:
                grid_offset = offset
            elif grid_offset != offset:  # all meshes need to have the same grid offset
                return 0
            if not shp:
                shp = shape
            elif shape != shp:  # all arrays need to have the same shape
                return 0
            if not theta_modes:
                theta_modes = ary[3]

        if theta_modes:
            et.SetWholeExtent(0, shp[0] - 1, 0, shp[1] - 1, 0, shp[2] - 1)
            et.SetSplitModeToZSlab()  # note: Y and Z are both fine
            et.SetPiece(piece)
            et.SetNumberOfPieces(npieces)
            # et.SetGhostLevel(nghosts)
            et.PieceToExtentByPoints()
            ext = et.GetExtent()

            chunk_offset = [ext[0], ext[2], ext[4]]
            chunk_extent = [
                ext[1] - ext[0] + 1, ext[3] - ext[2] + 1, ext[5] - ext[4] + 1
            ]

            data = []
            nthetas = 100  # user parameter
            thetas = np.linspace(0., 2. * np.pi, nthetas)
            chunk_cyl_shape = (chunk_extent[1], chunk_extent[2], nthetas
                               )  # z, r, theta
            for name, var in arrays:
                cyl_values = np.zeros(chunk_cyl_shape)
                values = self._load_array(var[0], chunk_offset, chunk_extent)
                self._series.flush()

                print(chunk_cyl_shape)
                print(values.shape)
                print("+++++++++++")
                for ntheta in range(nthetas):
                    cyl_values[:, :, ntheta] += values[0, :, :]
                data.append((name, cyl_values))
                # add all other modes via loop
                # for m in range(theta_modes):

            cyl_spacing = [spacing[0], spacing[1], thetas[1] - thetas[0]]

            z_coord = np.linspace(0., cyl_spacing[0] * chunk_cyl_shape[0],
                                  chunk_cyl_shape[0])
            r_coord = np.linspace(0., cyl_spacing[1] * chunk_cyl_shape[1],
                                  chunk_cyl_shape[1])
            t_coord = thetas

            # to cartesian
            print(z_coord.shape, r_coord.shape, t_coord.shape)
            cyl_coords = np.meshgrid(r_coord, z_coord, t_coord)
            rs = cyl_coords[1]
            zs = cyl_coords[0]
            thetas = cyl_coords[2]

            y_coord = rs * np.sin(thetas)
            x_coord = rs * np.cos(thetas)
            z_coord = zs
            # mesh_pts = np.zeros((chunk_cyl_shape[0], chunk_cyl_shape[1], chunk_cyl_shape[2], 3))
            # mesh_pts[:, :, :, 0] = z_coord

            img = vtkImageData()
            img.SetExtent(chunk_offset[1],
                          chunk_offset[1] + chunk_cyl_shape[0] - 1,
                          chunk_offset[2],
                          chunk_offset[2] + chunk_cyl_shape[1] - 1, 0,
                          nthetas - 1)
            img.SetSpacing(cyl_spacing)

            imgw = dsa.WrapDataObject(img)
            output.SetPartition(0, img)
            for name, array in data:
                # print(array.shape)
                # print(array.transpose(2,1,0).flatten(order='C').shape)
                imgw.PointData.append(
                    array.transpose(2, 1, 0).flatten(order='C'), name)

            # data = []
            # for name, var in arrays:
            #     unit_SI = var[0].unit_SI
            #     data.append((name, unit_SI * var[0].load_chunk(chunk_offset, chunk_extent)))
            # self._series.flush()

        else:
            et.SetWholeExtent(0, shp[0] - 1, 0, shp[1] - 1, 0, shp[2] - 1)
            et.SetPiece(piece)
            et.SetNumberOfPieces(npieces)
            et.SetGhostLevel(nghosts)
            et.PieceToExtent()
            ext = et.GetExtent()

            chunk_offset = [ext[0], ext[2], ext[4]]
            chunk_extent = [
                ext[1] - ext[0] + 1, ext[3] - ext[2] + 1, ext[5] - ext[4] + 1
            ]

            data = []
            for name, var in arrays:
                values = self._load_array(var[0], chunk_offset, chunk_extent)
                self._series.flush()
                data.append((name, values))

            img = vtkImageData()
            img.SetExtent(ext[0], ext[1], ext[2], ext[3], ext[4], ext[5])
            img.SetSpacing(spacing)
            img.SetOrigin(grid_offset)

            et.SetGhostLevel(0)
            et.PieceToExtent()
            ext = et.GetExtent()
            ext = [ext[0], ext[1], ext[2], ext[3], ext[4], ext[5]]
            img.GenerateGhostArray(ext)
            imgw = dsa.WrapDataObject(img)
            output.SetPartition(0, img)
            for name, array in data:
                imgw.PointData.append(array, name)

        itr = self._series.iterations[idx]
        array_by_species = {}
        narrays = self._particlearrayselection.GetNumberOfArrays()
        for i in range(narrays):
            if self._particlearrayselection.GetArraySetting(i):
                name = self._particlearrayselection.GetArrayName(i)
                names = self._get_particle_array_and_component(itr, name)
                if names[0] and self._speciesselection.ArrayIsEnabled(
                        names[0]):
                    if not names[0] in array_by_species:
                        array_by_species[names[0]] = []
                    array_by_species[names[0]].append(names)
        ids = 0
        for species, arrays in array_by_species.items():
            pds = vtkPartitionedDataSet()
            ugrid = vtkUnstructuredGrid()
            pds.SetPartition(0, ugrid)
            poutput.SetPartitionedDataSet(ids, pds)
            ids += 1
            self._load_species(itr, species, arrays, piece, npieces,
                               dsa.WrapDataObject(ugrid))

        return 1
Example 30
def execute(inputDO, selectionNode, insidednessArrayName, outputDO):
    field_type = selectionNode.GetFieldType()
    if field_type == selectionNode.CELL:
        attributeType = vtkDataObject.CELL
    elif field_type == selectionNode.POINT:
        attributeType = vtkDataObject.POINT
    elif field_type == selectionNode.ROW:
        attributeType = vtkDataObject.ROW
    else:
        raise RuntimeError("Unsupported field attributeType %r" % field_type)
    # Evaluate expression on the inputDO.
    # This is equivalent to executing the Python Calculator on the input dataset
    # to produce a mask array.

    inputs = []
    inputs.append(dsa.WrapDataObject(inputDO))

    query = selectionNode.GetQueryString()

    # Get a dictionary for arrays in the dataset attributes. We pass that
    # as the variables in the eval namespace for calculator.compute().
    elocals = calculator.get_arrays(inputs[0].GetAttributes(attributeType))
    if ("id" not in elocals) and re.search(r'\bid\b', query):
        # Add "id" array if the query string refers to id.
        # This is a temporary fix. We should look into
        # accelerating id-based selections in the future.
        elocals["id"] = _create_id_array(inputs[0], attributeType)
    try:
        maskArray = calculator.compute(inputs, query, ns=elocals)
    except:
        from sys import stderr
        print ("Error: Failed to evaluate Expression '%s'. "\
            "The following exception stack should provide additional developer "\
            "specific information. This typically implies a malformed "\
            "expression. Verify that the expression is valid.\n" % query, file=stderr)
        raise

    if not maskarray_is_valid(maskArray):
        raise RuntimeError(
            "Expression '%s' did not produce a valid mask array. The value "\
            "produced is of the type '%s'. This typically implies a malformed "\
            "expression. Verify that the expression is valid." % \
            (query, type(maskArray)))

    # Preserve topology. Just add the mask array as vtkSignedCharArray to the
    # output.
    # Note: we must force the data type to VTK_SIGNED_CHAR or the array will
    # be ignored by the freeze selection operation
    from visocyte.vtk.util import numpy_support
    output = dsa.WrapDataObject(outputDO)
    if type(maskArray) is not dsa.VTKNoneArray:
        if isinstance(maskArray, dsa.VTKCompositeDataArray):
            for ds, array in izip(output, maskArray.Arrays):
                if array is not None:
                    insidedness = numpy_support.numpy_to_vtk(
                        array, deep=1, array_type=vtkConstants.VTK_SIGNED_CHAR)
                    insidedness.SetName(insidednessArrayName)
                    ds.GetAttributes(attributeType).VTKObject.AddArray(
                        insidedness)
        else:
            insidedness = numpy_support.numpy_to_vtk(
                maskArray, deep=1, array_type=vtkConstants.VTK_SIGNED_CHAR)
            insidedness.SetName(insidednessArrayName)
            output.GetAttributes(attributeType).VTKObject.AddArray(insidedness)