def test_multi_process_chain_remote_workflow():
    """Chain workflows over several remote gRPC servers.

    A template workflow (displacement -> norm) is instantiated once per
    distributed file, each on its own remote server; the remote outputs are
    then merged through a min/max workflow instantiated on a third server,
    and the global maximum is checked.
    """
    files = examples.download_distributed_files()
    wf = core.Workflow()
    op = ops.result.displacement()
    average = core.operators.math.norm_fc(op)

    wf.add_operators([op, average])
    wf.set_input_name("data_sources", op.inputs.data_sources)
    wf.set_output_name("distrib", average.outputs.fields_container)
    workflows = []
    for i in files:
        data_sources1 = core.DataSources(files[i])

        # Data sources pointing at the remote server address (grpc protocol).
        grpc_stream_provider = ops.metadata.streams_provider()
        grpc_data_sources = core.DataSources()
        grpc_data_sources.set_result_file_path(
            local_servers[i].ip + ":" + str(local_servers[i].port), "grpc")
        grpc_stream_provider.inputs.data_sources(grpc_data_sources)

        # Instantiate the template workflow on that remote server.
        remote_workflow_prov = core.Operator("remote_workflow_instantiate")
        remote_workflow_prov.connect(3, grpc_stream_provider, 0)
        remote_workflow_prov.connect(0, wf)
        remote_workflow = remote_workflow_prov.get_output(
            0, core.types.workflow)

        remote_workflow.connect("data_sources", data_sources1)
        workflows.append(remote_workflow)

    local_wf = core.Workflow()
    merge = ops.utility.merge_fields_containers()
    min_max = ops.min_max.min_max_fc(merge)
    local_wf.add_operator(merge)
    local_wf.add_operator(min_max)
    local_wf.set_output_name("tot_output", min_max.outputs.field_max)
    # Fixed: the loop variable was previously named ``wf``, silently
    # shadowing the template workflow defined above.
    for i, _ in enumerate(workflows):
        local_wf.set_input_name("distrib" + str(i), merge, i)
    grpc_stream_provider = ops.metadata.streams_provider()
    grpc_data_sources = core.DataSources()
    grpc_data_sources.set_result_file_path(
        local_servers[2].ip + ":" + str(local_servers[2].port), "grpc")
    grpc_stream_provider.inputs.data_sources(grpc_data_sources)

    remote_workflow_prov = core.Operator("remote_workflow_instantiate")
    remote_workflow_prov.connect(3, grpc_stream_provider, 0)
    remote_workflow_prov.connect(0, local_wf)
    remote_workflow = remote_workflow_prov.get_output(0, core.types.workflow)

    for i, distrib_wf in enumerate(workflows):
        remote_workflow.connect_with(distrib_wf,
                                     ("distrib", "distrib" + str(i)))

    # Renamed from ``max`` to avoid shadowing the builtin.
    max_field = remote_workflow.get_output("tot_output", core.types.field)
    assert np.allclose(max_field.data, [10.03242272])
Example #2
0
def test_plot_meshes_container_1(multishells):
    """Plot a material-split meshes container with displacements overlaid."""
    mdl = core.Model(multishells)
    splitter = core.Operator("split_mesh")
    splitter.connect(7, mdl.metadata.meshed_region)
    splitter.connect(13, "mat")
    split_meshes = splitter.get_output(0, core.types.meshes_container)
    displacement = core.Operator("U")
    displacement.connect(7, split_meshes)
    displacement.connect(4, core.DataSources(multishells))
    split_meshes.plot(displacement.outputs.fields_container())
def test_simple_remote_workflow(simple_bar, local_server):
    """Run a displacement/norm workflow remotely and post-process locally.

    The remote workflow exposes pin "out"; a local min/max workflow consumes
    it through pin "in" and yields the maximum field on "tot_output".
    """
    data_sources1 = core.DataSources(simple_bar)
    wf = core.Workflow()
    op = ops.result.displacement(data_sources=data_sources1)
    average = core.operators.math.norm_fc(op)

    wf.add_operators([op, average])
    wf.set_output_name("out", average.outputs.fields_container)

    local_wf = core.Workflow()
    min_max = ops.min_max.min_max_fc()
    local_wf.add_operator(min_max)
    local_wf.set_input_name("in", min_max.inputs.fields_container)
    local_wf.set_output_name("tot_output", min_max.outputs.field_max)

    # Data sources pointing at the remote server address (grpc protocol).
    grpc_stream_provider = ops.metadata.streams_provider()
    grpc_data_sources = core.DataSources()
    grpc_data_sources.set_result_file_path(
        local_server.ip + ":" + str(local_server.port), "grpc")
    grpc_stream_provider.inputs.data_sources(grpc_data_sources)

    remote_workflow_prov = core.Operator("remote_workflow_instantiate")
    remote_workflow_prov.connect(3, grpc_stream_provider, 0)
    remote_workflow_prov.connect(0, wf)

    remote_workflow = remote_workflow_prov.get_output(0, core.types.workflow)

    local_wf.connect_with(remote_workflow, ("out", "in"))
    # Renamed from ``max`` to avoid shadowing the builtin.
    max_field = local_wf.get_output("tot_output", core.types.field)
    assert np.allclose(max_field.data, [2.52368345e-05])
Example #4
0
def create_mesh_and_field_mapped(multishells):
    """Map the first displacement field onto four user-defined points.

    Returns the original field, the mapped field, the original mesh and the
    mapped field's mesh support.
    """
    model = core.Model(multishells)
    mesh = model.metadata.meshed_region
    disp_fc = model.results.displacement().outputs.fields_container()
    field = disp_fc[0]
    # Nodal field holding the four target coordinates.
    coordinates = [[-0.02, 0.006, 0.014], [-0.02, 0.006, 0.012],
                   [-0.018, 0.006, 0.012], [-0.018, 0.006, 0.014]]
    coord_field = core.Field()
    coord_field.location = core.locations.nodal
    coord_field.data = coordinates
    nodal_scoping = core.Scoping()
    nodal_scoping.location = core.locations.nodal
    nodal_scoping.ids = list(range(1, len(coordinates) + 1))
    coord_field.scoping = nodal_scoping
    # Run the mapping operator with support creation enabled.
    mapper = core.Operator("mapping")
    mapper.inputs.fields_container.connect(disp_fc)
    mapper.inputs.coordinates.connect(coord_field)
    mapper.inputs.mesh.connect(mesh)
    mapper.inputs.create_support.connect(True)
    fields_mapped = mapper.outputs.fields_container()
    assert len(fields_mapped) == 1
    field_m = fields_mapped[0]
    mesh_m = field_m.meshed_region
    return field, field_m, mesh, mesh_m
Example #5
0
def test_cff(cff_data_sources):
    """CFF reader: node count and SV_DENSITY field size match the fixture."""
    model = dpf.Model(cff_data_sources)
    assert model.metadata.meshed_region.nodes.n_nodes == 1430
    density = dpf.Operator("cff::cas::SV_DENSITY")
    density.connect(4, model.metadata.data_sources)
    density_fc = density.get_output(0, dpf.types.fields_container)
    assert len(density_fc[0]) == 1380
Example #6
0
def test_plot_meshes_container_only(multishells):
    """Plot a meshes container split by material, without result overlay."""
    model = core.Model(multishells)
    splitter = core.Operator("split_mesh")
    splitter.connect(7, model.metadata.meshed_region)
    splitter.connect(13, "mat")
    splitter.get_output(0, core.types.meshes_container).plot()
Example #7
0
def test_eng(engineering_data_sources):
    """Load the composite/engineering-data plugins and run the inistate
    field-variables provider against the fixture data sources."""
    dpf.load_library("composite_operators.dll", "compo")
    dpf.load_library("Ans.Dpf.EngineeringData.dll", "eng")
    model = dpf.Model(engineering_data_sources)
    # Wire every provider to the same data sources.
    stress = dpf.operators.result.stress()
    stress.inputs.data_sources.connect(engineering_data_sources)
    result_info = dpf.operators.metadata.result_info_provider()
    result_info.inputs.data_sources.connect(engineering_data_sources)
    mat_support = dpf.operators.metadata.material_support_provider()
    mat_support.inputs.data_sources.connect(engineering_data_sources)
    ans_mat = dpf.Operator("eng_data::ans_mat_material_provider")
    ans_mat.connect(0, mat_support, 0)
    ans_mat.connect(1, result_info, 0)
    ans_mat.connect(4, engineering_data_sources)
    variable_provider = dpf.Operator(
        "composite::inistate_field_variables_provider")
    variable_provider.connect(4, engineering_data_sources)
    variable_provider.inputs.mesh.connect(model.metadata.mesh_provider)
    variable_provider.run()
Example #8
0
def test_hdf5_ellipsis_any_pins(simple_bar, tmpdir):
    """Connecting two operator outputs plus a path yields 3 connected inputs."""
    h5_path = str(tmpdir.join("hdf5.h5"))
    model = core.Model(simple_bar)
    displacement = model.results.displacement()
    stress = model.operator("S")
    serializer = core.Operator("serialize_to_hdf5")
    serializer.inputs.file_path.connect(h5_path)
    serializer.inputs.data1.connect(displacement.outputs)
    serializer.inputs.data2.connect(stress.outputs)
    assert len(serializer.inputs._connected_inputs) == 3
Example #9
0
def test_plotter_add_mesh(multishells):
    """Add split meshes one by one to a DpfPlotter and render the figure."""
    from ansys.dpf.core.plotter import DpfPlotter

    model = core.Model(multishells)
    splitter = core.Operator("split_mesh")
    splitter.connect(7, model.metadata.meshed_region)
    splitter.connect(13, "mat")
    meshes = splitter.get_output(0, core.types.meshes_container)
    plotter = DpfPlotter()
    # The last ten meshes are deliberately left out of the scene.
    for idx in range(len(meshes) - 10):
        plotter.add_mesh(meshes[idx])
    plotter.show_figure()
Example #10
0
def test_plot_meshes_container_2(multishells):
    """Rebuild trimmed meshes/fields containers label by label and plot them."""
    from ansys.dpf import core
    model = core.Model(multishells)
    splitter = core.Operator("split_mesh")
    splitter.connect(7, model.metadata.meshed_region)
    splitter.connect(13, "mat")
    meshes = splitter.get_output(0, core.types.meshes_container)
    disp = core.Operator("U")
    disp.connect(7, meshes)
    disp.connect(4, core.DataSources(multishells))
    disp_fields = disp.outputs.fields_container()
    trimmed_meshes = core.MeshesContainer()
    trimmed_meshes.labels = meshes.labels
    trimmed_fields = core.FieldsContainer()
    trimmed_fields.labels = meshes.labels
    # Copy everything except the last ten entries.
    for idx in range(len(meshes) - 10):
        label = meshes.get_label_space(idx)
        trimmed_meshes.add_mesh(label, meshes.get_mesh(label))
        trimmed_fields.add_field(label, disp_fields.get_field(label))
    trimmed_meshes.plot(trimmed_fields)
Example #11
0
def test_field_shell_plot_scoping_elemental(multishells):
    """Plot a shell stress field restricted to an elemental scoping.

    Fixes: the ambiguous single-letter name ``l`` (PEP 8 / E741) is removed
    by assigning the ids directly, and the unused local ``mesh`` is dropped.
    """
    model = core.Model(multishells)
    stress = model.results.stress()
    scoping = core.Scoping()
    scoping.location = "Elemental"
    scoping.ids = list(range(3000, 4500))
    stress.inputs.mesh_scoping.connect(scoping)
    # Average the stresses to an elemental location before plotting.
    avg = core.Operator("to_elemental_fc")
    avg.inputs.fields_container.connect(stress.outputs.fields_container)
    s = avg.outputs.fields_container()
    f = s[1]
    f.plot(shell_layers=core.shell_layers.top)
def test_remote_workflow_info(local_server):
    """A remotely instantiated workflow exposes the template's pin names."""
    template = core.Workflow()
    disp = ops.result.displacement()
    norm = core.operators.math.norm_fc(disp)

    template.add_operators([disp, norm])
    template.set_input_name("data_sources", disp.inputs.data_sources)
    template.set_output_name("distrib", norm.outputs.fields_container)
    # Data sources pointing at the remote server address (grpc protocol).
    streams = ops.metadata.streams_provider()
    server_ds = core.DataSources()
    server_ds.set_result_file_path(
        local_server.ip + ":" + str(local_server.port), "grpc")
    streams.inputs.data_sources(server_ds)
    instantiate = core.Operator("remote_workflow_instantiate")
    instantiate.connect(3, streams, 0)
    instantiate.connect(0, template)
    remote = instantiate.get_output(0, core.types.workflow)
    assert "data_sources" in remote.input_names
    assert "distrib" in remote.output_names
Example #13
0
# Download a multi-stage cyclic result file and load it into a model.
cyc = examples.download_multi_stage_cyclic_result()
model = dpf.Model(cyc)
print(model)

###############################################################################
# Expand displacement results
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~
# In this example we expand displacement results, by default on all
# nodes and the first time step.

# Create displacement cyclic operator
UCyc = model.results.displacement()
# read_cyclic pin set to 2 -- presumably requests full cyclic expansion;
# TODO confirm the pin semantics against the operator documentation.
UCyc.inputs.read_cyclic(2)

# expand the displacements and get a total deformation (norm of each field)
nrm = dpf.Operator("norm_fc")
nrm.inputs.connect(UCyc.outputs)
fields = nrm.outputs.fields_container()

# get the expanded mesh, using the same read_cyclic setting as the results
mesh_provider = model.metadata.mesh_provider
mesh_provider.inputs.read_cyclic(2)
mesh = mesh_provider.outputs.mesh()

# plot the expanded result on the expanded mesh
mesh.plot(fields)

###############################################################################
# Expand stresses at a given time step
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Example #14
0
# NOTE(review): ``seqvsum``, ``volsum``, ``mesh``, ``s``, ``vol_field``,
# ``values_to_sum_field``, ``nodes`` and ``volume_check`` are defined in
# earlier parts of this example that are not shown in this chunk.

# use component-wise divide to average the stress by the volume
divide = ops.math.component_wise_divide(seqvsum, volsum)
divide.run()

###############################################################################
# Plot elemental seqv and volume averaged elemental seqv
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
mesh.plot(values_to_sum_field)
mesh.plot(divide.outputs.field())

###############################################################################
# Use the Operator instead
# ~~~~~~~~~~~~~~~~~~~~~~~~~
# An operator with the same algorithm has been implemented
s_fc = s.outputs.fields_container()
# wrap each single field into a one-entry time/freq fields container
single_field_vol_fc = dpf.fields_container_factory.over_time_freq_fields_container(
    [vol_field])

single_field_fc = dpf.fields_container_factory.over_time_freq_fields_container(
    [values_to_sum_field])

op = dpf.Operator("volume_stress")
op.inputs.scoping.connect(nodes)
op.inputs.stress_fields.connect(single_field_fc)
op.inputs.volume_fields(single_field_vol_fc)
# the meaning of the ``volume`` pin value (volume_check * 10.0) is not
# visible here -- TODO confirm against the operator documentation
op.inputs.volume(volume_check * 10.0)

# pin 0 holds the resulting field
out = op.get_output(0, dpf.types.field)
mesh.plot(out)
Example #15
0
# NOTE(review): ``model`` is created in an earlier part of this example
# that is not shown in this chunk.

# Create a total displacement operator and set its time scoping to
# the entire time freq support and its nodes scoping into a user defined nodes.
disp_op = ops.result.raw_displacement(data_sources=model)
time_ids = list(range(1, model.metadata.time_freq_support.n_sets + 1))

# define nodal scoping
nodes = dpf.Scoping()
nodes.ids = [2, 18]

# connect the frequencies and the nodes scopings to the result
# provider operator
disp_op.inputs.mesh_scoping.connect(nodes)
disp_op.inputs.time_scoping.connect(time_ids)

# extract Rz component using the component selector operator
# (component index 5 -- presumably Rz; confirm against the operator docs)
comp = dpf.Operator("component_selector_fc")
comp.inputs.connect(disp_op.outputs)
comp.inputs.component_number.connect(5)

# Compute the multi-harmonic response based on Rz and a set of RPMs
rpms = dpf.Scoping()
rpms.ids = [1, 2, 3]

fft = ops.math.fft_multi_harmonic_minmax()

fft.inputs.connect(comp.outputs)
fft.inputs.rpm_scoping.connect(rpms)

fields = fft.outputs.field_max()
len(fields)  # one multi-harmonic field response per node
Example #16
0
###############################################################################
# Create a model object to establish a connection with an
# example result file:
model = dpf.Model(examples.static_rst)
print(model)

###############################################################################
# Next, create a raw displacement operator ``"U"``.  Each operator
# contains ``input`` and ``output`` pins that can be connected to
# various sources to include other operators.  This allows operators
# to be "chained" to allow for highly efficient operations.
#
# To print out the available inputs and outputs of the
# displacement operator:
disp_op = dpf.Operator("U")
print(disp_op.inputs)
print(disp_op.outputs)

###############################################################################
# Compute the Maximum Normalized Displacement
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# This example demonstrates how to chain various operators. It connects the
# input of the operator to the data sources contained within the ``model``
# object and then the maximum of the norm of the operator.

# Connect to the data sources of the model.
disp_op.inputs.data_sources.connect(model.metadata.data_sources)

# Create a field container norm operator and connect it to the
# displacement operator to chain the operators.
Example #17
0
    def plot_contour(self,
                     field_or_fields_container,
                     notebook=None,
                     shell_layers=None,
                     off_screen=None,
                     show_axes=True,
                     meshed_region=None,
                     **kwargs):
        """Plot the contour result on its mesh support.

        You cannot plot a fields container containing results at several
        time steps.

        Parameters
        ----------
        field_or_fields_container : dpf.core.Field or dpf.core.FieldsContainer
            Field or field container that contains the result to plot.
        notebook : bool, optional
            Whether to plot a static image within an iPython notebook
            if available. The default is `None`, in which case an attempt is
            made to plot a static image within an iPython notebook. When ``False``,
            a plot external to the notebook is generated with an interactive window.
            When ``True``, a plot is always generated within a notebook.
        shell_layers : core.shell_layers, optional
            Enum used to set the shell layers if the model to plot
            contains shell elements.
        off_screen : bool, optional
            Whether to render off screen, which is useful for automated
            screenshots. The default is ``None``.
        show_axes : bool, optional
            Whether to show a VTK axes widget. The default is ``True``.
        meshed_region : optional
            Mesh to plot the result on. The default is ``None``, in which
            case the plotter's own mesh (``self._mesh``) is used.
        **kwargs : optional
            Additional keyword arguments for the plotter. ``background``,
            ``cpos``, ``return_cpos`` and ``text`` are extracted and handled
            here; the rest is forwarded to ``add_mesh``. For more information,
            see ``help(pyvista.plot)``.
        """
        # Silence warnings unless the user explicitly enabled them via -W.
        if not sys.warnoptions:
            import warnings

            warnings.simplefilter("ignore")

        # Normalize the input: wrap a bare Field into a one-entry
        # FieldsContainer labeled by time, so the rest of the method only
        # deals with containers.
        if isinstance(field_or_fields_container,
                      (dpf.core.Field, dpf.core.FieldsContainer)):
            fields_container = None
            if isinstance(field_or_fields_container, dpf.core.Field):
                fields_container = dpf.core.FieldsContainer(
                    server=field_or_fields_container._server)
                fields_container.add_label(DefinitionLabels.time)
                fields_container.add_field({DefinitionLabels.time: 1},
                                           field_or_fields_container)
            elif isinstance(field_or_fields_container,
                            dpf.core.FieldsContainer):
                fields_container = field_or_fields_container
        else:
            raise TypeError("Only field or fields_container can be plotted.")

        # pre-loop to check whether there are several time steps: complex
        # results and multi-time containers cannot be plotted here
        labels = fields_container.get_label_space(0)
        if DefinitionLabels.complex in labels.keys():
            raise dpf_errors.ComplexPlottingError
        if DefinitionLabels.time in labels.keys():
            first_time = labels[DefinitionLabels.time]
            for i in range(1, len(fields_container)):
                label = fields_container.get_label_space(i)
                if label[DefinitionLabels.time] != first_time:
                    raise dpf_errors.FieldContainerPlottingError

        # an explicitly supplied mesh overrides the plotter's own
        if meshed_region is not None:
            mesh = meshed_region
        else:
            mesh = self._mesh

        # get mesh scoping
        location = None
        component_count = None
        name = None

        # pre-loop over the (non-empty) fields to get location and
        # component count; the first non-empty field wins
        for field in fields_container:
            if len(field.data) != 0:
                location = field.location
                component_count = field.component_count
                name = field.name.split("_")[0]
                break

        if location == locations.nodal:
            mesh_location = mesh.nodes
        elif location == locations.elemental:
            mesh_location = mesh.elements
        else:
            raise ValueError(
                "Only elemental or nodal location are supported for plotting.")

        # pre-loop: check if shell layers for each field, if yes, set the shell layers
        changeOp = core.Operator("change_shellLayers")
        for field in fields_container:
            shell_layer_check = field.shell_layers
            if shell_layer_check in [
                    eshell_layers.topbottom,
                    eshell_layers.topbottommid,
            ]:
                changeOp.inputs.fields_container.connect(fields_container)
                sl = eshell_layers.top
                if shell_layers is not None:
                    if not isinstance(shell_layers, eshell_layers):
                        raise TypeError(
                            "shell_layer attribute must be a core.shell_layers instance."
                        )
                    sl = shell_layers
                changeOp.inputs.e_shell_layer.connect(
                    sl.value)  # top layers taken
                fields_container = changeOp.get_output(
                    0, core.types.fields_container)
                break

        # Merge field data into a single array; entities not covered by any
        # field scoping stay NaN and are drawn with ``nan_color``
        if component_count > 1:
            overall_data = np.full((len(mesh_location), component_count),
                                   np.nan)
        else:
            overall_data = np.full(len(mesh_location), np.nan)

        for field in fields_container:
            ind, mask = mesh_location.map_scoping(field.scoping)
            overall_data[ind] = field.data[mask]

        # create the plotter and add the meshes
        background = kwargs.pop("background", None)
        cpos = kwargs.pop("cpos", None)
        return_cpos = kwargs.pop("return_cpos", None)

        # plotter = pv.Plotter(notebook=notebook, off_screen=off_screen)
        if notebook is not None:
            self._internal_plotter._plotter.notebook = notebook
        if off_screen is not None:
            self._internal_plotter._plotter.off_screen = off_screen

        # add meshes
        kwargs.setdefault("show_edges", True)
        kwargs.setdefault("nan_color", "grey")
        kwargs.setdefault("stitle", name)
        text = kwargs.pop('text', None)
        if text is not None:
            self._internal_plotter._plotter.add_text(text,
                                                     position='lower_edge')
        self._internal_plotter._plotter.add_mesh(mesh.grid,
                                                 scalars=overall_data,
                                                 **kwargs)

        if background is not None:
            self._internal_plotter._plotter.set_background(background)

        if cpos is not None:
            self._internal_plotter._plotter.camera_position = cpos

        # show result
        if show_axes:
            self._internal_plotter._plotter.add_axes()
        if return_cpos is None:
            return self._internal_plotter._plotter.show()
        else:
            # return_cpos is only supported by pyvista >= 0.32.0
            import pyvista as pv
            pv_version = pv.__version__
            version_to_reach = '0.32.0'
            meet_ver = meets_version(pv_version, version_to_reach)
            if meet_ver:
                return self._internal_plotter._plotter.show(
                    return_cpos=return_cpos)
            else:
                txt = """To use the return_cpos option, please upgrade
                your pyvista module with a version higher than """
                txt += version_to_reach
                raise core.errors.DpfVersionNotSupported(version_to_reach, txt)
Example #18
0
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# NOTE(review): ``model`` (and the commented-out ``mesh``) come from earlier
# parts of this example that are not shown in this chunk.

# define stress expansion operator and request stresses at time set = 8
scyc_op = model.operator("mapdl::rst::S_cyclic")
scyc_op.inputs.read_cyclic(2)
scyc_op.inputs.time_scoping.connect([8])

# request the results averaged on the nodes
scyc_op.inputs.requested_location.connect("Nodal")

# connect the base mesh and the expanded mesh, to avoid re-expanding the mesh
scyc_op.inputs.sector_mesh.connect(model.metadata.meshed_region)
# scyc_op.inputs.expanded_meshed_region.connect(mesh)

# request equivalent von mises operator and connect it to stress operator
eqv = dpf.Operator("eqv_fc")
eqv.inputs.connect(scyc_op.outputs)

# expand the results and get stress eqv
fields = eqv.outputs.fields_container()

# plot the expanded result on the expanded mesh
# mesh.plot(fields[0])

###############################################################################
# Expand stresses at given sectors
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

# define stress expansion operator and request stresses at time set = 8
scyc_op = model.operator("mapdl::rst::S_cyclic")
scyc_op.inputs.read_cyclic(2)
Example #19
0
def test_hdf5_loaded():
    """The hdf5 serialization operator instantiates and exposes its inputs."""
    hdf5_op = core.Operator("serialize_to_hdf5")
    assert hdf5_op.inputs is not None
Example #20
0
def create_mesh_and_field_mapped_2(multishells):
    """Map a displacement field onto four lines of user-defined coordinates.

    Each line starts at a given origin and extends 100 extra points along x
    in 0.0003 steps. The four previously copy-pasted construction loops are
    folded into one helper; the generated coordinate values are identical.

    Returns
    -------
    tuple
        (original field, mapped field, original mesh, mapped mesh support).
    """
    # get metadata
    model = core.Model(multishells)
    mesh = model.metadata.meshed_region
    disp_fc = model.results.displacement().outputs.fields_container()
    field = disp_fc[0]

    def _append_x_line(coords, start, count=100, step=0.0003):
        # Append ``count`` copies of ``start`` shifted along x by i * step.
        for i in range(1, count + 1):
            coords.append([start[0] + i * step, start[1], start[2]])

    # coordinates field to map: four lines of 101 points each
    coordinates = []
    for origin in ([-0.0195, 0.006, -0.0025],
                   [-0.0155, 0.00600634, -0.0025],
                   [-0.0125, 0.00600507, -0.0025],
                   [-0.0125, 0.00600444, -0.0025]):
        coordinates.append(origin)
        _append_x_line(coordinates, origin)
    field_coord = core.Field()
    field_coord.location = core.locations.nodal
    field_coord.data = coordinates
    scoping = core.Scoping()
    scoping.location = core.locations.nodal
    scoping.ids = list(range(1, len(coordinates) + 1))
    field_coord.scoping = scoping
    # mapping operator
    mapping_operator = core.Operator("mapping")
    mapping_operator.inputs.fields_container.connect(disp_fc)
    mapping_operator.inputs.coordinates.connect(field_coord)
    mapping_operator.inputs.mesh.connect(mesh)
    mapping_operator.inputs.create_support.connect(True)
    fields_mapped = mapping_operator.outputs.fields_container()
    # mesh path
    assert len(fields_mapped) == 1
    field_m = fields_mapped[0]
    mesh_m = field_m.meshed_region
    return field, field_m, mesh, mesh_m
Example #21
0
        "Ans.Dpf.Hdf5.dll",
    ]

local_dir = os.path.dirname(os.path.abspath(__file__))
TARGET_PATH = os.path.join(local_dir, os.pardir, "ansys", "dpf", "core",
                           "operators")
files = glob.glob(os.path.join(TARGET_PATH, "*"))
# Clean out previously generated operator sources, keeping the
# "specification" entry. Removal is best effort: a missing or locked entry
# must not abort generation.
for f in files:
    if Path(f).stem == "specification":
        continue
    try:
        if os.path.isdir(f):
            shutil.rmtree(f)
        else:
            os.remove(f)
    except OSError:
        # Narrowed from a bare ``except``: only filesystem errors are
        # expected here, and they are deliberately ignored.
        pass
core.start_local_server()
code_gen = core.Operator("python_generator")
code_gen.connect(1, TARGET_PATH)
for lib in LIB_TO_GENERATE:
    code_gen.connect(0, lib)
    # pin 2 is True only for the first library -- presumably an
    # "overwrite existing output" flag; TODO confirm against the
    # python_generator operator documentation
    if lib != LIB_TO_GENERATE[0]:
        code_gen.connect(2, False)
    else:
        code_gen.connect(2, True)
    code_gen.run()
    time.sleep(0.1)

core.SERVER.shutdown()
Example #22
0
# NOTE(review): ``model`` is created in an earlier part of this example
# that is not shown in this chunk.

# Create displacement cyclic operator
UCyc = dpf.operators.result.cyclic_expanded_displacement()
UCyc.inputs.data_sources(model.metadata.data_sources)
# Select the sectors to expand on the first stage
UCyc.inputs.sectors_to_expand([0, 1, 2])
# Or select the sectors to expand stage by stage
sectors_scopings = dpf.ScopingsContainer()
sectors_scopings.labels = ["stage"]
sectors_scopings.add_scoping({"stage": 0}, dpf.Scoping(ids=[0, 1, 2]))
sectors_scopings.add_scoping({"stage": 1},
                             dpf.Scoping(ids=[0, 1, 2, 3, 4, 5, 6]))
UCyc.inputs.sectors_to_expand(sectors_scopings)

# expand the displacements and get a total deformation (norm of each field)
nrm = dpf.Operator("norm_fc")
nrm.inputs.connect(UCyc.outputs)
fields = nrm.outputs.fields_container()

# get the expanded mesh
mesh_provider = model.metadata.mesh_provider
mesh_provider.inputs.read_cyclic(2)
mesh = mesh_provider.outputs.mesh()

###############################################################################
# plot the expanded result on the expanded mesh
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
mesh.plot(fields)

###############################################################################
# Choose to expand only some sectors for the mesh