Example #1
    def mesh_provider(self):
        """Mesh provider operator.

        This operator reads a mesh from the result file. The underlying
        operator symbol is the :class:`ansys.dpf.core.operators.mesh.mesh_provider`
        operator.

        Returns
        -------
        mesh_provider : :class:`ansys.dpf.core.operators.mesh.mesh_provider`
            Mesh provider operator.

        """
        try:
            # run the mesh selection manager provider when available;
            # any failure here is silently ignored
            tmp = Operator("MeshSelectionManagerProvider", server=self._server)
            tmp.inputs.connect(self._stream_provider.outputs)
            tmp.run()
        except:
            pass
        mesh_provider = Operator("MeshProvider", server=self._server)
        if self._stream_provider:
            mesh_provider.inputs.connect(self._stream_provider.outputs)
        else:
            mesh_provider.inputs.connect(self.data_sources)
        return mesh_provider
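
A short usage sketch for this provider: the operator is only built here and evaluated when an output is requested. The example file and the ``types`` import path are assumptions based on the other snippets on this page.

# Usage sketch (assumptions: bundled example file, import path of the types enum).
from ansys.dpf import core as dpf
from ansys.dpf.core import examples
from ansys.dpf.core.common import types  # same enum the source above relies on (path assumed)

model = dpf.Model(examples.complex_rst)
provider = model.metadata.mesh_provider              # MeshProvider operator, not yet run
mesh = provider.get_output(0, types.meshed_region)   # requesting the output reads the mesh
print(mesh)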
Example #2
    def __init__(self, model, result_info):
        self._model = model
        self._time_scoping = None
        self._mesh_scoping = None
        self._location = None
        if isinstance(result_info, str):
            from ansys.dpf.core.available_result import available_result_from_name
            self._result_info = available_result_from_name(result_info)
        else:
            self._result_info = result_info
        self._specific_fc_type = None
        from ansys.dpf.core import operators

        try:
            # create the operator to read its documentation
            # if the operator doesn't exist, the method will not be added
            doc = Operator(self._result_info.operator_name,
                           server=self._model._server).__str__()
            self.__doc__ = doc
            if hasattr(operators, "result") and hasattr(
                    operators.result, self._result_info.name):
                self._operator = getattr(
                    operators.result,
                    self._result_info.name)(server=self._model._server)
            else:
                self._operator = Operator(self._result_info.operator_name,
                                          server=self._model._server)
            self._operator._add_sub_res_operators(
                self._result_info.sub_results)
            self._model.__connect_op__(self._operator)
        except errors.DPFServerException:
            pass
        except Exception as e:
            print(self._result_info.name)
            raise e
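
``Result`` objects are normally produced by the model rather than constructed directly; a minimal sketch of how this ``__init__`` is reached (example file taken from the ``Result`` docstring in Example #12):

# Sketch: model.results builds Result instances through the constructor above.
from ansys.dpf import core as dpf
from ansys.dpf.core import examples

model = dpf.Model(examples.msup_transient)
disp = model.results.displacement    # Result created via __init__ above
fc = disp.eval()                     # evaluate the wrapped displacement operator
print(fc)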
Example #3
    def _load_result_info(self):
        """Returns a result info object"""
        op = Operator("ResultInfoProvider", server=self._server)
        op.inputs.connect(self._stream_provider.outputs)
        try:
            result_info = op.get_output(0, types.result_info)
        except _InactiveRpcError as e:
            # give the user a more helpful error
            if "results file is not defined in the Data sources" in e.details():
                raise RuntimeError("Unable to open result file") from None
            else:
                raise e
        except:
            return None
        return result_info
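
In practice this helper is reached lazily through ``Metadata.result_info`` (see Example #13); a small sketch, assuming the standard ``ResultInfo.analysis_type`` attribute:

# Sketch: result info is cached and exposed through model.metadata.result_info.
from ansys.dpf import core as dpf
from ansys.dpf.core import examples

model = dpf.Model(examples.download_transient_result())
info = model.metadata.result_info    # triggers _cache_result_info -> _load_result_info
print(info.analysis_type)            # attribute assumed; e.g. 'transient'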
Example #4
    def operator(self, name):
        """Operator associated with the data sources of this model.

        Parameters
        ----------
        name : str
            Operator name, which must be valid.

        Examples
        --------
        Create a displacement operator.

        >>> from ansys.dpf.core import Model
        >>> from ansys.dpf.core import examples
        >>> transient = examples.download_transient_result()
        >>> model = Model(transient)
        >>> disp = model.operator('U')

        Create a sum operator.

        >>> sum = model.operator('accumulate')

        """
        op = Operator(name=name, server=self._server)
        self.__connect_op__(op)
        return op
Example #5
    def _cache_streams_provider(self):
        """Create a stream provider and cache it."""
        from ansys.dpf.core import operators

        if hasattr(operators, "metadata") and hasattr(
                operators.metadata, "stream_provider"
        ):
            self._stream_provider = operators.metadata.streams_provider(
                data_sources=self._data_sources, server=self._server
            )
        else:
            self._stream_provider = Operator("stream_provider", server=self._server)
            self._stream_provider.inputs.connect(self._data_sources)
        try:
            self._stream_provider.run()
        except:
            # the streams could not be opened; cache None so callers fall
            # back to connecting the data sources instead
            self._stream_provider = None
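
When the provider cannot be run it is cached as ``None`` and downstream code falls back to the data sources. A sketch of that wiring, following the ``streams_provider`` and ``data_sources`` docstrings in Example #13:

# Sketch: connect either the cached streams provider or the raw data sources.
from ansys.dpf import core as dpf
from ansys.dpf.core import examples

model = dpf.Model(examples.download_transient_result())
op = dpf.operators.result.displacement()
streams = model.metadata.streams_provider
if streams is not None:
    op.inputs.streams_container.connect(streams)
else:
    op.inputs.data_sources.connect(model.metadata.data_sources)
fc = op.outputs.fields_container()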
Example #6
from ansys.dpf.core import Model, Operator


# "allkindofcomplexity" is a pytest fixture providing the path to a result file.
def test_plot_fieldscontainer_on_mesh(allkindofcomplexity):
    model = Model(allkindofcomplexity)
    mesh = model.metadata.meshed_region
    stress = model.results.stress()
    stress.inputs.requested_location.connect("Elemental")
    avg_op = Operator("to_elemental_fc")
    avg_op.inputs.fields_container.connect(stress.outputs.fields_container)
    fc = avg_op.outputs.fields_container()
    mesh.plot(fc)
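
The same averaging can also be written with the generated operators package instead of the string operator name; a sketch under the assumption that ``ansys.dpf.core.operators.averaging.to_elemental_fc`` exists in the installed version, using a downloadable example file in place of the test fixture:

# Sketch: equivalent averaging through the generated operators API (path assumed).
from ansys.dpf import core as dpf
from ansys.dpf.core import Model, examples

model = Model(examples.download_all_kinds_of_complexity())
stress = model.results.stress()
stress.inputs.requested_location.connect("Elemental")
avg_op = dpf.operators.averaging.to_elemental_fc(fields_container=stress)
fc = avg_op.outputs.fields_container()
model.metadata.meshed_region.plot(fc)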
Example #7
from ansys.dpf.core import Model, Operator
from ansys.dpf.core.plotter import DpfPlotter  # plotter module path assumed for this version


def test_plotter_on_fields_container_nodal(allkindofcomplexity):
    model = Model(allkindofcomplexity)
    stress = model.results.stress()
    stress.inputs.requested_location.connect("Elemental")
    avg_op = Operator("to_nodal_fc")
    avg_op.inputs.fields_container.connect(stress.outputs.fields_container)
    fc = avg_op.outputs.fields_container()
    pl = DpfPlotter(model.metadata.meshed_region)
    cpos = pl.plot_contour(fc)
Example #8
    def time_freq_support(self):
        """Time frequency support.

        Returns
        -------
        ansys.dpf.core.time_freq_support.TimeFreqSupport
            Time frequency support.

        Examples
        --------
        >>> from ansys.dpf.core import Model
        >>> from ansys.dpf.core import examples
        >>> transient = examples.download_transient_result()
        >>> model = Model(transient)

        Get the number of sets from the result file.

        >>> tf = model.metadata.time_freq_support
        >>> tf.n_sets
        35

        Get the time values for the active result.

        >>> tf.time_frequencies.data
        array([0.        , 0.019975  , 0.039975  , 0.059975  , 0.079975  ,
               0.099975  , 0.119975  , 0.139975  , 0.159975  , 0.179975  ,
               0.199975  , 0.218975  , 0.238975  , 0.258975  , 0.278975  ,
               0.298975  , 0.318975  , 0.338975  , 0.358975  , 0.378975  ,
               0.398975  , 0.417975  , 0.437975  , 0.457975  , 0.477975  ,
               0.497975  , 0.517975  , 0.53754972, 0.55725277, 0.57711786,
               0.59702054, 0.61694639, 0.63683347, 0.65673452, 0.67662783])

        """
        if self._time_freq_support is None:
            timeProvider = Operator("TimeFreqSupportProvider", server=self._server)
            if self._stream_provider:
                timeProvider.inputs.connect(self._stream_provider.outputs)
            else:
                timeProvider.inputs.connect(self.data_sources)
            self._time_freq_support = timeProvider.get_output(
                0, types.time_freq_support
            )
        return self._time_freq_support
Example #9
from ansys import dpf
from ansys.dpf.core import Model, Operator
from ansys.dpf.core.plotter import DpfPlotter  # plotter module path assumed for this version


def test_plotter_on_field(allkindofcomplexity):
    model = Model(allkindofcomplexity)
    stress = model.results.stress()
    stress.inputs.requested_location.connect("Elemental")
    avg_op = Operator("to_elemental_fc")
    avg_op.inputs.fields_container.connect(stress.outputs.fields_container)
    fc = avg_op.outputs.fields_container()
    field = fc[1]
    pl = DpfPlotter(model.metadata.meshed_region)
    fields_container = dpf.core.FieldsContainer()
    fields_container.add_label("time")
    fields_container.add_field({"time": 1}, field)
    cpos = pl.plot_contour(fields_container)
Example #10
    def _add_split_on_property_type(self, prop):
        previous_mesh_scoping = self._mesh_scoping
        from ansys.dpf.core import operators

        if hasattr(operators, "scoping") and hasattr(operators.scoping,
                                                     "split_on_property_type"):
            self._mesh_scoping = operators.scoping.split_on_property_type()
        else:
            self._mesh_scoping = Operator("scoping::by_property")

        self._mesh_scoping.inputs.requested_location(
            self._result_info.native_scoping_location)
        self._mesh_scoping.inputs.mesh(self._model.metadata.mesh_provider)
        self._mesh_scoping.inputs.label1(prop)
        if previous_mesh_scoping:
            try:
                self._mesh_scoping.inputs.mesh_scoping(previous_mesh_scoping)
            except:
                pass
        return self
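
This helper is what ``split_by_body`` (property ``"mat"``) and ``split_by_shape`` (property ``"elshape"``) delegate to in the full ``Result`` class shown in Example #12; a short sketch:

# Sketch: splitting a stress result by element shape through the helper above.
from ansys.dpf import core as dpf
from ansys.dpf.core import examples

model = dpf.Model(examples.download_all_kinds_of_complexity())
stress = model.results.stress
fc = stress.split_by_shape.eval()    # calls _add_split_on_property_type("elshape")
solid = fc.solid_field()             # ElShapeFieldsContainer accessor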
Example #11
    def _connect_operators(self):
        """Dynamically add operators for results.

        The new operator's subresults are connected to the model's
        streams.

        Examples
        --------
        >>> from ansys.dpf.core import Model
        >>> from ansys.dpf.core import examples
        >>> transient = examples.download_transient_result()
        >>> model = Model(transient)
        >>> disp_operator = model.results.displacement()
        >>> stress_operator = model.results.stress()
        >>> disp_x = model.results.displacement().X()
        >>> disp_y = model.results.displacement().Y()
        >>> disp_z = model.results.displacement().Z()

        """
        if self._result_info is None:
            return
        # dynamically add function based on input type
        self._op_map_rev = {}
        for result_type in self._result_info:
            try:
                doc = Operator(result_type.operator_name,
                               server=self._model._server).__str__()
                bound_method = self.__result__
                method2 = functools.partial(bound_method, result_type)
                setattr(self.__class__, result_type.name,
                        property(method2, doc=doc))

                self._op_map_rev[result_type.name] = result_type.name
            except errors.DPFServerException:
                pass
            except Exception as e:
                print(result_type.name)
                raise e
Example #12
class Result:
    """Helps with using DPF's result providers.

    This class helps to connect common inputs to the operator and
    recover its fields container output. 'Result' is created by the model.

    Examples
    --------
    Create a displacement result from the model and choose its time and mesh scopings.

    >>> from ansys.dpf import core as dpf
    >>> from ansys.dpf.core import examples
    >>> model = dpf.Model(examples.msup_transient)
    >>> disp = model.results.displacement.on_last_time_freq.on_named_selection('_CONSTRAINEDNODES')
    >>> last_time_disp = disp.eval()

    Create a stress result from the model and split the result by element shapes (solid,
    shell, and beam).

    >>> model = dpf.Model(examples.download_all_kinds_of_complexity())
    >>> stress = model.results.stress
    >>> stress_split = stress.split_by_shape.eval()
    >>> solid_stress = stress_split.solid_field()

    Create a strain result from the model on all time sets and recover the
    operator to connect it to other operators.

    >>> model = dpf.Model(examples.msup_transient)
    >>> strain = model.results.elastic_strain.on_all_time_freqs()
    >>> eqv = dpf.operators.invariant.von_mises_eqv_fc(strain)
    >>> strain_eqv = eqv.outputs.fields_container()

    """
    def __init__(self, model, result_info):
        self._model = model
        self._time_scoping = None
        self._mesh_scoping = None
        self._location = None
        if isinstance(result_info, str):
            from ansys.dpf.core.available_result import available_result_from_name
            self._result_info = available_result_from_name(result_info)
        else:
            self._result_info = result_info
        self._specific_fc_type = None
        from ansys.dpf.core import operators

        try:
            # create the operator to read its documentation
            # if the operator doesn't exist, the method will not be added
            doc = Operator(self._result_info.operator_name,
                           server=self._model._server).__str__()
            self.__doc__ = doc
            if hasattr(operators, "result") and hasattr(
                    operators.result, self._result_info.name):
                self._operator = getattr(
                    operators.result,
                    self._result_info.name)(server=self._model._server)
            else:
                self._operator = Operator(self._result_info.operator_name,
                                          server=self._model._server)
            self._operator._add_sub_res_operators(
                self._result_info.sub_results)
            self._model.__connect_op__(self._operator)
        except errors.DPFServerException:
            pass
        except Exception as e:
            print(self._result_info.name)
            raise e

    def __call__(self, time_scoping=None, mesh_scoping=None):
        op = self._operator
        if time_scoping:
            op.inputs.time_scoping(time_scoping)
        elif self._time_scoping:
            op.inputs.time_scoping(self._time_scoping)

        if mesh_scoping:
            op.inputs.mesh_scoping(mesh_scoping)
        elif self._mesh_scoping:
            op.inputs.mesh_scoping(self._mesh_scoping)

        if self._location:
            op.inputs.requested_location(self._location)

        return op

    def eval(self):
        """Evaluate the result provider with the previously specified
        inputs and return the result fields container.

        Returns
        -------
        fields_container : FieldsContainer, ElShapeFieldsContainer, BodyFieldsContainer
            If ``split_by_body`` is used, a ``BodyFieldsContainer`` is returned.
            If ``split_by_shape`` is used, an ``ElShapeFieldsContainer`` is returned.

        Examples
        --------
        >>> from ansys.dpf import core as dpf
        >>> from ansys.dpf.core import examples
        >>> model = dpf.Model(examples.msup_transient)
        >>> disp = model.results.displacement
        >>> fc = disp.on_all_time_freqs.eval()

        """
        fc = self.__call__().outputs.fields_container()
        if self._specific_fc_type == "shape":
            fc = ElShapeFieldsContainer(fields_container=fc, server=fc._server)
        elif self._specific_fc_type == "body":
            fc = BodyFieldsContainer(fields_container=fc, server=fc._server)
        return fc

    @property
    def on_all_time_freqs(self):
        """Sets the time scoping to all the time frequencies available in the time frequency support.

        Returns
        -------
        self : Result

        Examples
        --------
        >>> from ansys.dpf import core as dpf
        >>> from ansys.dpf.core import examples
        >>> model = dpf.Model(examples.msup_transient)
        >>> disp = model.results.displacement
        >>> disp.on_all_time_freqs.eval().get_label_scoping("time").ids
        [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20]

        """
        self._time_scoping = list(
            range(
                1,
                len(self._model.metadata.time_freq_support.time_frequencies) +
                1))
        return self

    @property
    def on_first_time_freq(self):
        """Sets the time scoping to the first time frequency available in the time frequency support.

        Returns
        -------
        self : Result

        Examples
        --------
        >>> from ansys.dpf import core as dpf
        >>> from ansys.dpf.core import examples
        >>> model = dpf.Model(examples.msup_transient)
        >>> disp = model.results.displacement
        >>> disp.on_first_time_freq.eval().get_label_scoping("time").ids
        [1]

        """
        self._time_scoping = 1
        return self

    @property
    def on_last_time_freq(self):
        """Sets the time scoping to the last time frequency available in the time frequency support.

        Returns
        -------
        self : Result

        Examples
        --------
        >>> from ansys.dpf import core as dpf
        >>> from ansys.dpf.core import examples
        >>> model = dpf.Model(examples.msup_transient)
        >>> disp = model.results.displacement
        >>> disp.on_last_time_freq.eval().get_label_scoping("time").ids
        [20]

        """
        self._time_scoping = len(
            self._model.metadata.time_freq_support.time_frequencies)
        return self

    def on_time_scoping(self, time_scoping):
        """Sets the time scoping to a given one.

        Parameters
        ----------
        time_scoping :  float, list[float], int, list[int], Scoping
            One or more times or frequencies.

        Returns
        -------
        self : Result

        Examples
        --------
        Choose time sets.

        >>> from ansys.dpf import core as dpf
        >>> from ansys.dpf.core import examples
        >>> model = dpf.Model(examples.msup_transient)
        >>> stress = model.results.stress
        >>> fc = stress.on_time_scoping([1,2,3,19]).eval()
        >>> len(fc)
        4

        Choose times. If the times chosen are not in the time frequency support,
        results are extrapolated.

        >>> fc = stress.on_time_scoping([0.115,0.125]).eval()
        >>> len(fc)
        2
        >>> fc.time_freq_support.time_frequencies.data
        array([0.115, 0.125])

        """
        self._time_scoping = time_scoping
        return self

    def on_named_selection(self, named_selection):
        """Set the mesh scoping to a given named selection.

        Parameters
        ----------
        named_selection : str
            Name of the named selection or component in upper case.

        Returns
        -------
        self : Result

        Examples
        --------
        Add a requested location to the average result on the nodes.

        >>> from ansys.dpf import core as dpf
        >>> from ansys.dpf.core import examples
        >>> model = dpf.Model(examples.msup_transient)
        >>> stress = model.results.stress
        >>> fc = stress.on_first_time_freq.on_named_selection('_CONSTRAINEDNODES').eval()
        >>> len(fc[0].scoping)
        40

        """

        self._mesh_scoping = self._model.metadata.named_selection(
            named_selection)
        return self

    @property
    def split_by_body(self):
        """Set the mesh scoping to a scopings container where each scoping is a body.

        Returns
        -------
        self : Result

        Examples
        --------
        >>> from ansys.dpf import core as dpf
        >>> from ansys.dpf.core import examples
        >>> model = dpf.Model(examples.download_all_kinds_of_complexity())
        >>> disp = model.results.displacement
        >>> fc_disp = disp.split_by_body.eval()
        >>> len(fc_disp)
        11
        >>> fc_disp.get_mat_scoping().ids
        [1, 5, 6, 10, 2, 7, 8, 13, 4, 12, 15]
        >>> disp_mat_10 = fc_disp.get_field_by_mat_id(10)

        """
        self._specific_fc_type = "body"
        return self._add_split_on_property_type("mat")

    @property
    def split_by_shape(self):
        """Set the mesh scoping to a scopings container where each scoping is an element shape.
        The evaluated fields container will have one field on 'solid',
        one on 'shell', one on 'beam' and one on 'unknown_shape'.

        Returns
        -------
        self : Result

        Examples
        --------
        >>> from ansys.dpf import core as dpf
        >>> from ansys.dpf.core import examples
        >>> model = dpf.Model(examples.download_all_kinds_of_complexity())
        >>> disp = model.results.displacement
        >>> fc_disp = disp.split_by_shape.eval()
        >>> len(fc_disp)
        4

        >>> shell_disp = fc_disp.shell_field()
        >>> solid_disp = fc_disp.solid_field()

        """
        self._specific_fc_type = "shape"
        return self._add_split_on_property_type("elshape")

    def _add_split_on_property_type(self, prop):
        previous_mesh_scoping = self._mesh_scoping
        from ansys.dpf.core import operators

        if hasattr(operators, "scoping") and hasattr(operators.scoping,
                                                     "split_on_property_type"):
            self._mesh_scoping = operators.scoping.split_on_property_type()
        else:
            self._mesh_scoping = Operator("scoping::by_property")

        self._mesh_scoping.inputs.requested_location(
            self._result_info.native_scoping_location)
        self._mesh_scoping.inputs.mesh(self._model.metadata.mesh_provider)
        self._mesh_scoping.inputs.label1(prop)
        if previous_mesh_scoping:
            try:
                self._mesh_scoping.inputs.mesh_scoping(previous_mesh_scoping)
            except:
                pass
        return self

    def on_mesh_scoping(self, mesh_scoping):
        """Set the mesh scoping to a given mesh scoping.

        Parameters
        ----------
        mesh_scoping : Scoping, list[int]

        Returns
        -------
        self : Result

        Examples
        --------
        Use a list of nodes.

        >>> from ansys.dpf import core as dpf
        >>> from ansys.dpf.core import examples
        >>> model = dpf.Model(examples.complex_rst)
        >>> disp = model.results.displacement
        >>> fc = disp.on_mesh_scoping([1,2,3]).eval()
        >>> len(fc[0].scoping)
        3

        Use a scoping to specify a list of entity IDs with their locations.

        >>> stress = model.results.stress
        >>> scop = dpf.Scoping(ids=[3,4,5], location= dpf.locations.nodal)
        >>> fc = stress.on_mesh_scoping(scop).eval()
        >>> len(fc[0].scoping)
        3
        >>> fc[0].location
        'Nodal'

        """
        if isinstance(mesh_scoping, list):
            mesh_scoping = Scoping(
                ids=mesh_scoping,
                location=self._result_info.native_scoping_location,
                server=self._model._server,
            )

        self._mesh_scoping = mesh_scoping
        return self

    def on_location(self, location):
        """Set the requested location of the provider.

        Elemental nodal fields can be averaged to a nodal or elemental location.

        Parameters
        ----------
        location : str, locations

        Returns
        -------
        self : Result

        Examples
        --------
        Add a requested location to the average result on the nodes.

        >>> from ansys.dpf import core as dpf
        >>> from ansys.dpf.core import examples
        >>> model = dpf.Model(examples.complex_rst)
        >>> stress = model.results.stress
        >>> fc = stress.eval()
        >>> fc[0].location
        'ElementalNodal'

        >>> fc = stress.on_location(dpf.locations.nodal).eval()
        >>> fc[0].location
        'Nodal'

        """
        self._location = location
        return self
Example #13
class Metadata:
    """Contains the metadata of a data source.

    Parameters
    ----------
    data_sources : DataSources

    server : server.DPFServer
        Server with the channel connected to the remote or local instance.

    """

    def __init__(self, data_sources, server):
        self._server = server
        self._set_data_sources(data_sources)
        self._meshed_region = None
        self._result_info = None
        self._stream_provider = None
        self._time_freq_support = None
        self._cache_streams_provider()

    def _cache_result_info(self):
        """Store result information."""
        if not self._result_info:
            self._result_info = self._load_result_info()

    def _cache_streams_provider(self):
        """Create a stream provider and cache it."""
        from ansys.dpf.core import operators

        if hasattr(operators, "metadata") and hasattr(
                operators.metadata, "stream_provider"
        ):
            self._stream_provider = operators.metadata.streams_provider(
                data_sources=self._data_sources, server=self._server
            )
        else:
            self._stream_provider = Operator("stream_provider", server=self._server)
            self._stream_provider.inputs.connect(self._data_sources)
        try:
            self._stream_provider.run()
        except:
            # the streams could not be opened; cache None so callers fall
            # back to connecting the data sources instead
            self._stream_provider = None

    @property
    @protect_source_op_not_found
    def time_freq_support(self):
        """Time frequency support.

        Returns
        -------
        ansys.dpf.core.time_freq_support.TimeFreqSupport
            Time frequency support.

        Examples
        --------
        >>> from ansys.dpf.core import Model
        >>> from ansys.dpf.core import examples
        >>> transient = examples.download_transient_result()
        >>> model = Model(transient)

        Get the number of sets from the result file.

        >>> tf = model.metadata.time_freq_support
        >>> tf.n_sets
        35

        Get the time values for the active result.

        >>> tf.time_frequencies.data
        array([0.        , 0.019975  , 0.039975  , 0.059975  , 0.079975  ,
               0.099975  , 0.119975  , 0.139975  , 0.159975  , 0.179975  ,
               0.199975  , 0.218975  , 0.238975  , 0.258975  , 0.278975  ,
               0.298975  , 0.318975  , 0.338975  , 0.358975  , 0.378975  ,
               0.398975  , 0.417975  , 0.437975  , 0.457975  , 0.477975  ,
               0.497975  , 0.517975  , 0.53754972, 0.55725277, 0.57711786,
               0.59702054, 0.61694639, 0.63683347, 0.65673452, 0.67662783])

        """
        if self._time_freq_support is None:
            timeProvider = Operator("TimeFreqSupportProvider", server=self._server)
            if self._stream_provider:
                timeProvider.inputs.connect(self._stream_provider.outputs)
            else:
                timeProvider.inputs.connect(self.data_sources)
            self._time_freq_support = timeProvider.get_output(
                0, types.time_freq_support
            )
        return self._time_freq_support

    @property
    def data_sources(self):
        """Data sources instance.

        This data source can be connected to other operators.

        Returns
        -------
        data_sources : DataSources

        Examples
        --------
        >>> from ansys.dpf import core as dpf
        >>> from ansys.dpf.core import examples
        >>> transient = examples.download_transient_result()
        >>> model = dpf.Model(transient)

        Connect the model data sources to the 'U' operator.

        >>> ds = model.metadata.data_sources
        >>> op = dpf.operators.result.displacement()
        >>> op.inputs.data_sources.connect(ds)

        """
        return self._data_sources

    @property
    def streams_provider(self):
        """Streams provider operator connected to the data sources.

        This streams provider can be connected to other operators.

        Returns
        -------
        streams_provider : :class:`ansys.dpf.core.operators.metadata.streams_provider`

        Examples
        --------
        >>> from ansys.dpf import core as dpf
        >>> from ansys.dpf.core import examples
        >>> transient = examples.download_transient_result()
        >>> model = dpf.Model(transient)

        Connect the model data sources to the ``U`` operator.

        >>> streams = model.metadata.streams_provider
        >>> op = dpf.operators.result.displacement()
        >>> op.inputs.streams_container.connect(streams)

        """
        return self._stream_provider

    def _set_data_sources(self, var_inp):
        from pathlib import Path
        if isinstance(var_inp, dpf.core.DataSources):
            self._data_sources = var_inp
        elif isinstance(var_inp, (str, Path)):
            self._data_sources = DataSources(var_inp, server=self._server)
        else:
            self._data_sources = DataSources(server=self._server)
        self._cache_streams_provider()

    def _load_result_info(self):
        """Returns a result info object"""
        op = Operator("ResultInfoProvider", server=self._server)
        op.inputs.connect(self._stream_provider.outputs)
        try:
            result_info = op.get_output(0, types.result_info)
        except _InactiveRpcError as e:
            # give the user a more helpful error
            if "results file is not defined in the Data sources" in e.details():
                raise RuntimeError("Unable to open result file") from None
            else:
                raise e
        except:
            return None
        return result_info

    @property
    @protect_source_op_not_found
    def meshed_region(self):
        """Meshed region instance.

        Returns
        -------
        mesh : :class:`ansys.dpf.core.meshed_region.MeshedRegion`
            Mesh
        """
        # NOTE: this uses the cached mesh and we might consider
        # changing this
        if self._meshed_region is None:
            self._meshed_region = self.mesh_provider.get_output(0, types.meshed_region)
            self._meshed_region._set_stream_provider(self._stream_provider)

        return self._meshed_region

    @property
    def mesh_provider(self):
        """Mesh provider operator.

        This operator reads a mesh from the result file. The underlying
        operator symbol is the :class:`ansys.dpf.core.operators.mesh.mesh_provider`
        operator.

        Returns
        -------
        mesh_provider : :class:`ansys.dpf.core.operators.mesh.mesh_provider`
            Mesh provider operator.

        """
        try:
            # run the mesh selection manager provider when available;
            # any failure here is silently ignored
            tmp = Operator("MeshSelectionManagerProvider", server=self._server)
            tmp.inputs.connect(self._stream_provider.outputs)
            tmp.run()
        except:
            pass
        mesh_provider = Operator("MeshProvider", server=self._server)
        if self._stream_provider:
            mesh_provider.inputs.connect(self._stream_provider.outputs)
        else:
            mesh_provider.inputs.connect(self.data_sources)
        return mesh_provider

    @property
    @protect_source_op_not_found
    def result_info(self):
        """Result Info instance.

        Returns
        -------
        result_info : :class:`ansys.dpf.core.result_info.ResultInfo`
        """
        self._cache_result_info()

        return self._result_info

    @property
    def available_named_selections(self):
        """List of available named selections.

        Returns
        -------
        named_selections : list of str
        """
        return self.meshed_region.available_named_selections

    def named_selection(self, named_selection):
        """Scoping containing the list of nodes or elements in the named selection.

        Parameters
        ----------
        named_selection : str
            Name of the named selection.

        Returns
        -------
        named_selection : :class:`ansys.dpf.core.scoping.Scoping`
        """
        return self.meshed_region.named_selection(named_selection)
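
A short sketch tying the ``Metadata`` accessors together (the named selection name comes from the ``msup_transient`` example used elsewhere on this page):

# Sketch: mesh, available named selections, and one named-selection scoping.
from ansys.dpf import core as dpf
from ansys.dpf.core import examples

model = dpf.Model(examples.msup_transient)
meta = model.metadata
print(meta.meshed_region)                   # MeshedRegion read through mesh_provider
print(meta.available_named_selections)      # e.g. ['_CONSTRAINEDNODES', ...]
scoping = meta.named_selection('_CONSTRAINEDNODES')
print(len(scoping.ids))                     # number of entities in the named selection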