def test_generated_operator_several_output_types2():
    inpt = core.Field(nentities=3)
    inpt.data = [1, 2, 3, 4, 5, 6, 7, 8, 9]
    inpt.scoping.ids = [1, 2, 3]
    inpt.unit = "m"
    uc = op.scoping.rescope(inpt, core.Scoping(ids=[1, 2]))
    f = uc.outputs.fields_as_field()
    assert np.allclose(f.data.flatten("C"), [1, 2, 3, 4, 5, 6])

    fc = core.FieldsContainer()
    fc.labels = ["time"]
    fc.add_field({"time": 1}, inpt)
    uc = op.scoping.rescope(fc, core.Scoping(ids=[1, 2]))
    fc2 = uc.outputs.fields_as_fields_container()
    assert np.allclose(fc2[0].data.flatten("C"), [1, 2, 3, 4, 5, 6])
def create_mesh_and_field_mapped(multishells):
    # get metadata
    model = core.Model(multishells)
    mesh = model.metadata.meshed_region
    disp_fc = model.results.displacement().outputs.fields_container()
    field = disp_fc[0]
    # coordinates field to map
    coordinates = [
        [-0.02, 0.006, 0.014],
        [-0.02, 0.006, 0.012],
        [-0.018, 0.006, 0.012],
        [-0.018, 0.006, 0.014],
    ]
    field_coord = core.Field()
    field_coord.location = core.locations.nodal
    field_coord.data = coordinates
    scoping = core.Scoping()
    scoping.location = core.locations.nodal
    scoping.ids = list(range(1, len(coordinates) + 1))
    field_coord.scoping = scoping
    # mapping operator
    mapping_operator = core.Operator("mapping")
    mapping_operator.inputs.fields_container.connect(disp_fc)
    mapping_operator.inputs.coordinates.connect(field_coord)
    mapping_operator.inputs.mesh.connect(mesh)
    mapping_operator.inputs.create_support.connect(True)
    fields_mapped = mapping_operator.outputs.fields_container()
    # mesh path
    assert len(fields_mapped) == 1
    field_m = fields_mapped[0]
    mesh_m = field_m.meshed_region
    return field, field_m, mesh, mesh_m
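# A minimal sketch of how this helper might be consumed (a hypothetical test;
# the assertions are assumptions, not taken from the original suite):
def test_mapping_helper_sketch(multishells):
    field, field_m, mesh, mesh_m = create_mesh_and_field_mapped(multishells)
    # at most one mapped entity per input coordinate; all four points are
    # expected to fall inside the mesh here
    assert 0 < len(field_m.scoping.ids) <= 4
    # create_support=True builds a support mesh for the mapped field
    assert mesh_m.nodes.n_nodes > 0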
def test_workflowwithgeneratedcode(allkindofcomplexity):
    disp = core.operators.result.displacement()
    ds = core.DataSources(allkindofcomplexity)
    nodes = [1]
    scop = core.Scoping()
    scop.ids = nodes
    scop.location = "Nodal"
    disp.inputs.data_sources.connect(ds)
    disp.inputs.mesh_scoping.connect(scop)
    a = disp.outputs.fields_container.get_data()
    assert a[0].data[0][0] == 7.120546307743541e-07
    assert len(a[0].data[0]) == 3
    assert len(a[0].data) == 1
    norm = core.operators.math.norm()
    norm.inputs.field.connect(disp.outputs.fields_container)
    b = norm.outputs.field()
    assert b.data[0] == 1.26387078548793e-06
    filt = core.operators.filter.scoping_high_pass()
    filt.inputs.field.connect(norm.outputs.field)
    filt.inputs.threshold.connect(1e-05)
    pow_op = core.operators.math.pow()
    pow_op.inputs.factor.connect(3.0)
    pow_op.inputs.field.connect(norm.outputs.field)
    d = pow_op.outputs.field.get_data()
    assert d.data[0] == 2.0188684707833254e-18
def test_scopingdata_property_field():
    pfield = dpf.core.PropertyField()
    list_ids = [1, 2, 4, 6, 7]
    scop = core.Scoping(ids=list_ids, location=locations.nodal)
    pfield.scoping = scop
    list_data = [20, 30, 50, 70, 80]
    pfield.data = list_data
    assert np.allclose(pfield.data, list_data)
    assert np.allclose(pfield.scoping.ids, list_ids)
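# Illustrative sketch (a hypothetical test, not part of the original suite):
# with a scoping attached, entity data can also be read back by id; id 4 is the
# third entity of the scoping above, so its value is 50.
def test_property_field_entity_data_by_id_sketch():
    pfield = dpf.core.PropertyField()
    pfield.scoping = core.Scoping(ids=[1, 2, 4, 6, 7], location=locations.nodal)
    pfield.data = [20, 30, 50, 70, 80]
    assert np.allclose(pfield.get_entity_data_by_id(4), [50])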
def test_set_get_data_property_field():
    field = core.Field(nentities=20, nature=dpf.core.natures.scalar)
    scoping = core.Scoping()
    ids = []
    data = []
    for i in range(0, 20):
        ids.append(i + 1)
        data.append(i + 0.001)
    scoping.ids = ids
    field.scoping = scoping
    field.data = data
    assert np.allclose(field.data, data)
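# Illustrative sketch (a hypothetical test, not part of the original suite):
# ``Scoping.ids`` and ``Field.data`` also accept numpy arrays directly, so the
# explicit loop above can be avoided.
def test_set_get_data_field_numpy_sketch():
    field = core.Field(nentities=20, nature=dpf.core.natures.scalar)
    scoping = core.Scoping()
    scoping.ids = np.arange(1, 21)
    field.scoping = scoping
    data = np.arange(20) + 0.001
    field.data = data
    assert np.allclose(field.data, data)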
def test_throw_on_several_time_steps(plate_msup):
    model = core.Model(plate_msup)
    scoping = core.Scoping()
    scoping.ids = range(
        3, len(model.metadata.time_freq_support.time_frequencies) + 1
    )
    disp = model.results.displacement()
    disp.inputs.time_scoping.connect(scoping)
    fc = disp.outputs.fields_container()
    mesh = model.metadata.meshed_region
    # a fields container holding several time steps cannot be plotted directly
    with pytest.raises(dpf_errors.FieldContainerPlottingError):
        mesh.plot(fc)
def test_plot_fields_on_mesh_scoping_title(multishells):
    model = core.Model(multishells)
    mesh = model.metadata.meshed_region
    stress = model.results.stress()
    stress.inputs.requested_location.connect("Nodal")
    scoping = core.Scoping()
    scoping.location = "Nodal"
    ids = list(range(0, 400))
    ids += list(range(1500, 2000))
    ids += list(range(2200, 2600))
    scoping.ids = ids
    stress.inputs.mesh_scoping.connect(scoping)
    s = stress.outputs.fields_container()
    mesh.plot(s[0], text="test")
def test_field_shell_plot_scoping_elemental(multishells):
    model = core.Model(multishells)
    mesh = model.metadata.meshed_region
    stress = model.results.stress()
    scoping = core.Scoping()
    scoping.location = "Elemental"
    scoping.ids = list(range(3000, 4500))
    stress.inputs.mesh_scoping.connect(scoping)
    avg = core.Operator("to_elemental_fc")
    avg.inputs.fields_container.connect(stress.outputs.fields_container)
    s = avg.outputs.fields_container()
    f = s[1]
    f.plot(shell_layers=core.shell_layers.top)
#
# - Analysis type
# - Available results
# - Size of the mesh
# - Number of results
#
model = dpf.Model(examples.msup_transient)
print(model)

###############################################################################
# Get the stress tensor and connect the time scoping.
# Make sure to define ``"Nodal"`` as the requested location,
# as the labels are supported only for Nodal results.
#
stress_tensor = model.results.stress()
time_scope = dpf.Scoping()
time_scope.ids = [1, 2]
stress_tensor.inputs.time_scoping.connect(time_scope)
stress_tensor.inputs.requested_location.connect("Nodal")

###############################################################################
# This code performs the solution combination of two load cases: LC1 - LC2.
# You can access the individual load cases as the fields of the fields container
# for ``stress_tensor``:
#
# - LC1: ``stress_tensor.outputs.fields_container.get_data()[0]``
# - LC2: ``stress_tensor.outputs.fields_container.get_data()[1]``
#
# Scale LC2 by -1.
field_lc2 = stress_tensor.outputs.fields_container.get_data()[1]
stress_tensor_lc2_sc = dpf.operators.math.scale(field=field_lc2,
    elements_indexes = []
    # get the elements attached to the current set of nodes
    for current_node_index in current_node_indexes:
        elements_indexes.extend(
            nodal_connectivity.get_entity_data(current_node_index).flatten()
        )
    current_node_indexes = []
    for index in elements_indexes:
        # sum up the volume on those elements
        volume += vol.get_entity_data(index)[0]
        # get all nodes of the current elements for the next iteration
        current_node_indexes.extend(connectivity.get_entity_data(index))
    node_index_to_el_ids[i] = dpf.Scoping(
        ids=[elements_ids[index] for index in elements_indexes],
        location=dpf.locations.elemental,
    )
    node_index_to_found_volume[i] = volume

###############################################################################
# Create workflow
# ~~~~~~~~~~~~~~~~
# For each list of elements surrounding a node:
#
# - compute the elemental equivalent (von Mises) stress
# - multiply it by the element volume (seqv . volume)
# - sum these products over the list of elements
# - divide the sum by the total volume of those elements
s = model.results.stress()
to_elemental = ops.averaging.to_elemental_fc(s)
eqv = ops.invariant.von_mises_eqv_fc(to_elemental)
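###############################################################################
# Hypothetical continuation (a sketch, not the example's actual workflow): once
# ``eqv`` is evaluated, the volume-weighted average around each node could be
# assembled from the dictionaries built above. ``eqv_field`` and
# ``node_index_to_avg_seqv`` are names introduced here for illustration.
eqv_field = eqv.outputs.fields_container()[0]
node_index_to_avg_seqv = {}
for node_index, elem_scoping in node_index_to_el_ids.items():
    weighted_sum = 0.0
    for el_id in elem_scoping.ids:
        weighted_sum += (
            eqv_field.get_entity_data_by_id(el_id)[0]
            * vol.get_entity_data_by_id(el_id)[0]
        )
    node_index_to_avg_seqv[node_index] = (
        weighted_sum / node_index_to_found_volume[node_index]
    )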
tf = model.metadata.time_freq_support
print("Number of solution sets:", tf.n_sets)

###############################################################################
# Compute the multi-harmonic response
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# In this example, we compute the Rz multi-harmonic responses for a set of
# selected nodes and a set of EOs (multiple engine orders).

# Create a total displacement operator and set its time scoping to the entire
# time-frequency support and its mesh scoping to a user-defined set of nodes.
disp_op = ops.result.raw_displacement(data_sources=model)
time_ids = list(range(1, model.metadata.time_freq_support.n_sets + 1))

# define the nodal scoping
nodes = dpf.Scoping()
nodes.ids = [2, 18]

# connect the frequencies and the nodes scopings to the result provider operator
disp_op.inputs.mesh_scoping.connect(nodes)
disp_op.inputs.time_scoping.connect(time_ids)

# extract the Rz component using the component selector operator
comp = dpf.Operator("component_selector_fc")
comp.inputs.connect(disp_op.outputs)
comp.inputs.component_number.connect(5)

# Compute the multi-harmonic response based on Rz and a set of RPMs
rpms = dpf.Scoping()
rpms.ids = [1, 2, 3]
# Create a model object to establish a connection with an example result file:
model = dpf.Model(examples.download_all_kinds_of_complexity())
print(model)

###############################################################################
# Choose specific nodes
# ~~~~~~~~~~~~~~~~~~~~~
# If some nodes or elements are of specific interest, a nodal ``mesh_scoping``
# can be connected.
nodes_scoping = dpf.mesh_scoping_factory.nodal_scoping(range(400, 500))
print(nodes_scoping)

###############################################################################
# or
nodes_scoping = dpf.Scoping(ids=range(400, 500), location=dpf.locations.nodal)
print(nodes_scoping)

###############################################################################
disp = model.results.displacement.on_mesh_scoping(nodes_scoping).eval()
model.metadata.meshed_region.plot(disp)

###############################################################################
# Equivalent to:
disp_op = model.results.displacement()
disp_op.inputs.mesh_scoping(nodes_scoping)
disp = disp_op.outputs.fields_container()

###############################################################################
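###############################################################################
# Hypothetical variant (a sketch, not part of the original example): an
# elemental scoping can be built the same way with the factory helper and then
# passed to ``on_mesh_scoping`` for an elemental result such as stress.
elems_scoping = dpf.mesh_scoping_factory.elemental_scoping(range(400, 500))
print(elems_scoping)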
def create_mesh_and_field_mapped_2(multishells):
    # get metadata
    model = core.Model(multishells)
    mesh = model.metadata.meshed_region
    disp_fc = model.results.displacement().outputs.fields_container()
    field = disp_fc[0]
    # coordinates field to map: four base points, each followed by 100 copies
    # offset along x in 0.0003 steps (404 points in total)
    base_points = [
        [-0.0195, 0.006, -0.0025],
        [-0.0155, 0.00600634, -0.0025],
        [-0.0125, 0.00600507, -0.0025],
        [-0.0125, 0.00600444, -0.0025],
    ]
    coordinates = []
    for ref in base_points:
        coordinates.append(list(ref))
        for i in range(1, 101):
            coordinates.append([ref[0] + i * 0.0003, ref[1], ref[2]])
    field_coord = core.Field()
    field_coord.location = core.locations.nodal
    field_coord.data = coordinates
    scoping = core.Scoping()
    scoping.location = core.locations.nodal
    scoping.ids = list(range(1, len(coordinates) + 1))
    field_coord.scoping = scoping
    # mapping operator
    mapping_operator = core.Operator("mapping")
    mapping_operator.inputs.fields_container.connect(disp_fc)
    mapping_operator.inputs.coordinates.connect(field_coord)
    mapping_operator.inputs.mesh.connect(mesh)
    mapping_operator.inputs.create_support.connect(True)
    fields_mapped = mapping_operator.outputs.fields_container()
    # mesh path
    assert len(fields_mapped) == 1
    field_m = fields_mapped[0]
    mesh_m = field_m.meshed_region
    return field, field_m, mesh, mesh_m
)

###############################################################################
# Expand displacement results
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~
# In this example, we expand the displacement results on chosen sectors.

# Create the displacement cyclic operator
UCyc = dpf.operators.result.cyclic_expanded_displacement()
UCyc.inputs.data_sources(model.metadata.data_sources)
# Select the sectors to expand on the first stage
UCyc.inputs.sectors_to_expand([0, 1, 2])
# Or select the sectors to expand stage by stage
sectors_scopings = dpf.ScopingsContainer()
sectors_scopings.labels = ["stage"]
sectors_scopings.add_scoping({"stage": 0}, dpf.Scoping(ids=[0, 1, 2]))
sectors_scopings.add_scoping({"stage": 1}, dpf.Scoping(ids=[0, 1, 2, 3, 4, 5, 6]))
UCyc.inputs.sectors_to_expand(sectors_scopings)

# Expand the displacements and get the total deformation
nrm = dpf.Operator("norm_fc")
nrm.inputs.connect(UCyc.outputs)
fields = nrm.outputs.fields_container()

# Get the expanded mesh
mesh_provider = model.metadata.mesh_provider
mesh_provider.inputs.read_cyclic(2)
mesh = mesh_provider.outputs.mesh()

###############################################################################
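###############################################################################
# Possible follow-up (a sketch based on assumptions, not taken from the original
# example): the expanded total-deformation field can be plotted on the expanded
# mesh returned by the mesh provider.
mesh.plot(fields[0])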