def test_compute_gradients():
    """The gradient filter adds an (n_points, 3) 'gradient' point array and
    raises TypeError for bad or missing scalars."""
    hills = examples.load_random_hills()
    result = hills.compute_gradient()
    assert 'gradient' in result.array_names
    shape = np.shape(result['gradient'])
    assert shape[0] == hills.n_points
    assert shape[1] == 3
    # A non-string scalars argument must be rejected.
    with pytest.raises(TypeError):
        result = hills.compute_gradient(object)
    # With every point array removed there is nothing to differentiate.
    hills.point_arrays.clear()
    with pytest.raises(TypeError):
        result = hills.compute_gradient()
############################################################################### # plot the arrows and the sphere p = pv.Plotter() p.add_mesh(sphere.arrows, scalars='GlyphScale', lighting=False, stitle="Vector Magnitude") p.add_mesh(sphere, color="grey", ambient=0.6, opacity=0.5, show_edges=False) p.show() ############################################################################### # Subset of Glyphs # ++++++++++++++++ # # Sometimes you might not want glyphs for every node in the input dataset. In # this case, you can choose to build glyphs for a subset of the input dataset # by using a merging tolerance. Here we specify a merging tolerance of five # percent which equats to five perfect of the bounding box's length. # Example dataset with normals mesh = examples.load_random_hills() # create a subset of arrows using the glyph filter arrows = mesh.glyph(scale="Normals", orient="Normals", tolerance=0.05) p = pv.Plotter() p.add_mesh(arrows, color="black") p.add_mesh(mesh, scalars="Elevation", cmap="terrain") p.show()
def _check_derivative_arrays(result, n_points, gradient, divergence, vorticity,
                             qcriterion):
    """Assert the four vector-derivative output arrays exist with the expected
    shapes: gradient (n, 9), divergence (n,), vorticity (n, 3), qcriterion (n,).

    Parameters are the result dataset, the expected point count, and the four
    output array names to look up.
    """
    assert gradient in result.array_names
    assert np.shape(result[gradient]) == (n_points, 9)
    assert divergence in result.array_names
    assert np.shape(result[divergence]) == (n_points,)
    assert vorticity in result.array_names
    assert np.shape(result[vorticity]) == (n_points, 3)
    assert qcriterion in result.array_names
    assert np.shape(result[qcriterion]) == (n_points,)


def test_compute_derivatives():
    """compute_derivative produces correctly shaped gradient, divergence,
    vorticity and qcriterion arrays, honors custom output names, and raises
    for invalid arguments."""
    mesh = examples.load_random_hills()
    # A constant unit vector field pointing along +y at every point.
    vector = np.zeros((mesh.n_points, 3))
    vector[:, 1] = np.ones(mesh.n_points)
    mesh['vector'] = vector

    # Default output array names.
    derv = mesh.compute_derivative(scalars='vector', gradient=True,
                                   divergence=True, vorticity=True,
                                   qcriterion=True)
    _check_derivative_arrays(derv, mesh.n_points, 'gradient', 'divergence',
                             'vorticity', 'qcriterion')

    # User-supplied output array names.
    derv = mesh.compute_derivative(scalars='vector', gradient='gradienttest',
                                   divergence='divergencetest',
                                   vorticity='vorticitytest',
                                   qcriterion='qcriteriontest')
    _check_derivative_arrays(derv, mesh.n_points, 'gradienttest',
                             'divergencetest', 'vorticitytest',
                             'qcriteriontest')

    # Scalar input: the gradient has one component per spatial direction.
    grad = mesh.compute_derivative(scalars='Elevation', gradient=True)
    assert 'gradient' in grad.array_names
    assert np.shape(grad['gradient']) == (mesh.n_points, 3)

    # Same check with the faster (cell-averaged) implementation.
    grad = mesh.compute_derivative(scalars='Elevation', gradient=True,
                                   faster=True)
    assert 'gradient' in grad.array_names
    assert np.shape(grad['gradient']) == (mesh.n_points, 3)

    # Vector input with the faster path: full 3x3 Jacobian, flattened to 9.
    grad = mesh.compute_derivative(scalars='vector', gradient=True,
                                   faster=True)
    assert 'gradient' in grad.array_names
    assert np.shape(grad['gradient']) == (mesh.n_points, 9)

    # Requesting no derivative quantity at all is an error.
    with pytest.raises(ValueError):
        grad = mesh.compute_derivative(scalars='Elevation', gradient=False)
    # A non-string scalars argument must be rejected.
    with pytest.raises(TypeError):
        derv = mesh.compute_derivative(object)
    # With every point array removed there is nothing to differentiate.
    mesh.point_arrays.clear()
    with pytest.raises(TypeError):
        derv = mesh.compute_derivative()
def test_load_random_hills():
    """The random-hills example dataset loads with a nonzero cell count."""
    hills = examples.load_random_hills()
    assert hills.n_cells
# surface mesh via the :func:`pyvista.DataSet.Filters.clip_surface` filter. # This will triangulate/tessellate the mesh geometries along the clip. clipped = dataset.clip_surface(surface, invert=False) # Visualize the results p = pv.Plotter() p.add_mesh(surface, color='w', opacity=0.75, label='Surface') p.add_mesh(clipped, color='gold', show_edges=True, label="clipped", opacity=0.75) p.add_legend() p.enable_depth_peeling() p.show() ############################################################################### # Here is another example of clipping a mesh by a surface. This time, we'll # generate a :class:`pyvista.UniformGrid` around a topography surface and then # clip that grid using the surface to create a closed 3D model of the surface surface = examples.load_random_hills() # Create a grid around that surface grid = pv.create_grid(surface) # Clip the grid using the surface model = grid.clip_surface(surface) # Compute height and display it model.elevation().plot()
def test_compute_gradients():
    """The gradient filter yields an (n_points, 3) 'gradient' point array."""
    hills = examples.load_random_hills()
    result = hills.compute_gradient()
    assert 'gradient' in result.array_names
    shape = np.shape(result['gradient'])
    assert shape[0] == hills.n_points
    assert shape[1] == 3
def random_hills():
    # Fixture-style helper that provides the random-hills example mesh.
    # NOTE(review): `mesh` is not defined inside this function — unless a
    # module-level `mesh` object with a `load_mesh` method exists elsewhere in
    # this file, this line raises NameError. Presumably it should simply be
    # `return examples.load_random_hills()`. TODO confirm against the rest of
    # the file before changing.
    return mesh.load_mesh(examples.load_random_hills())
""" Project to a Plane ~~~~~~~~~~~~~~~~~~ :class:`pyvista.PolyData` surfaces and pointsets can easily be projected to a plane defined by a normal and origin """ # sphinx_gallery_thumbnail_number = 2 import pyvista as pv from pyvista import examples poly = examples.load_random_hills() poly.plot() ############################################################################### # Project that surface to a plane underneath the surface origin = poly.center origin[-1] -= poly.length / 3.0 projected = poly.project_points_to_plane(origin=origin) # Display the results p = pv.Plotter() p.add_mesh(poly) p.add_mesh(projected) p.show()