Example no. 1
def _generate_examples():
    # --- Example 1 ---
    ex1 = np.tile(np.linspace(1, 0, 5), [4, 1])
    ex1 = math.expand_dims(math.expand_dims(ex1, -1), 0) - math.mean(ex1)
    # --- Example 2 ---
    ex2 = np.zeros([1, 4, 5, 1])
    ex2[0, :, 2, 0] = 1
    ex2 -= math.mean(ex2)
    # --- Stack examples to batch ---
    return math.concat([ex1, ex2], axis=0)
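
Note: the construction above can be checked with plain NumPy (assuming `phi.math` mirrors NumPy semantics for these calls); a minimal sketch that verifies both examples in the batch are zero-mean:

import numpy as np

# Example 1: a 4x5 horizontal ramp, mean-subtracted, shaped (1, 4, 5, 1)
ex1 = np.tile(np.linspace(1, 0, 5), [4, 1])
ex1 = np.expand_dims(np.expand_dims(ex1, -1), 0) - np.mean(ex1)
# Example 2: a single column of ones, mean-subtracted
ex2 = np.zeros([1, 4, 5, 1])
ex2[0, :, 2, 0] = 1
ex2 -= np.mean(ex2)
batch = np.concatenate([ex1, ex2], axis=0)   # shape (2, 4, 5, 1)
print(np.mean(batch, axis=(1, 2, 3)))        # ~[0, 0]: each example is zero-mean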
Example no. 2
    def points(self,
               points: Tensor or Number or tuple or list,
               values: Tensor or Number = None,
               radius: Tensor or float or int or None = None,
               extrapolation: math.Extrapolation = math.extrapolation.ZERO,
               color: str or Tensor or tuple or list or None = None) -> PointCloud:
        """
        Create a `phi.field.PointCloud` from the given `points`.
        By default, the created field has no channel dimensions and all points carry the value `1`.

        Args:
            points: point locations in physical units
            values: (optional) values of the particles, defaults to 1.
            radius: (optional) size of the particles
            extrapolation: (optional) extrapolation to use, defaults to extrapolation.ZERO
            color: (optional) color used when plotting the points

        Returns:
            `phi.field.PointCloud` object
        """
        extrapolation = extrapolation if isinstance(extrapolation, math.Extrapolation) else self.boundaries[extrapolation]
        if radius is None:
            radius = math.mean(self.bounds.size) * 0.005
        # --- Parse points: tuple / list ---
        if isinstance(points, (tuple, list)):
            if len(points) == 0:  # no points
                points = math.zeros(instance(points=0), channel(vector=1))
            elif isinstance(points[0], Number):  # single point
                points = math.tensor([points], instance('points'), channel('vector'))
            else:
                points = math.tensor(points, instance('points'), channel('vector'))
        elements = Sphere(points, radius)
        if values is None:
            values = math.tensor(1.)
        return PointCloud(elements, values, extrapolation, add_overlapping=False, bounds=self.bounds, color=color)
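
Note: a hypothetical usage sketch. It assumes a phiflow 2.0-era `Domain` (the class this method belongs to was removed in later versions), so names like `CLOSED` and `Box[0:100, 0:100]` may differ in your version:

from phi.flow import *

domain = Domain(x=64, y=64, boundaries=CLOSED, bounds=Box[0:100, 0:100])
# Two particles at physical positions (10, 10) and (80, 40); the radius defaults to 0.5% of the mean domain size.
cloud = domain.points([(10, 10), (80, 40)], color='#ff0000')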
Example no. 3
def respect_boundaries(particles: PointCloud,
                       domain: Domain,
                       not_accessible: list,
                       offset: float = 0.5) -> PointCloud:
    """
    Enforces boundary conditions by correcting possible errors of the advection step and shifting particles out of 
    obstacles or back into the domain.
    
    Args:
        particles: PointCloud holding particle positions as elements
        domain: Domain for which any particles outside should get shifted inwards
        not_accessible: List of Obstacle or Geometry objects where any particles inside should get shifted outwards
        offset: Minimum distance between particles and domain boundary / obstacle surface after particles have been shifted.

    Returns:
        PointCloud where all particles are inside the domain / outside of obstacles.
    """
    new_positions = particles.elements.center
    for obj in not_accessible:
        if isinstance(obj, Obstacle):
            obj = obj.geometry
        new_positions = obj.push(new_positions, shift_amount=offset)
    new_positions = (~domain.bounds).push(new_positions, shift_amount=offset)
    return particles.with_(
        elements=Sphere(new_positions,
                        math.mean(particles.bounds.size) * 0.005))
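
Note: for the domain part, the shift amounts to clamping positions to lie at least `offset` inside the bounds. A minimal NumPy sketch of that idea (the library's `push` additionally handles arbitrary obstacle geometries):

import numpy as np

def push_into_box(positions, lower, upper, offset=0.5):
    """Clamp positions so each point lies at least `offset` inside [lower, upper]."""
    return np.clip(positions, np.asarray(lower) + offset, np.asarray(upper) - offset)

pts = np.array([[-1.0, 3.0], [5.0, 10.5]])
print(push_into_box(pts, lower=[0, 0], upper=[10, 10]))  # [[0.5 3. ] [5.  9.5]]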
Example no. 4
 def grid_sample(self, resolution, size, batch_size=1, channels=None):
     channels = channels or self.channels or len(size)
     shape = (batch_size, ) + tuple(resolution) + (channels, )
     rndj = math.to_complex(
         self.math.random_normal(shape)) + 1j * math.to_complex(
             self.math.random_normal(shape))  # Note: there is no complex32
     k = math.fftfreq(
         resolution) * resolution / size * self.scale  # in physical units
     k = math.sum(k**2, axis=-1, keepdims=True)
     lowest_frequency = 0.1
     weight_mask = 1 / (1 + math.exp(
         (lowest_frequency - k) * 1e3))  # High pass filter
     # --- Compute 1/k ---
     k[(0, ) * len(k.shape)] = np.inf
     inv_k = 1 / k
     inv_k[(0, ) * len(k.shape)] = 0
     # --- Compute result ---
     fft = rndj * inv_k**self.smoothness * weight_mask
     array = math.real(math.ifft(fft))
     array /= math.std(array,
                       axis=tuple(range(1, math.ndims(array))),
                       keepdims=True)
     array -= math.mean(array,
                        axis=tuple(range(1, math.ndims(array))),
                        keepdims=True)
     array = math.to_float(array)
     return array
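
Note: the recipe above (random complex spectrum, 1/k**smoothness falloff, sigmoid low-frequency cutoff, inverse FFT, normalization) can be written as a self-contained NumPy sketch for a single sample without the batch dimension. Function and parameter names here are illustrative, not part of the library:

import numpy as np

def fft_noise(resolution=(64, 64), size=(64, 64), scale=10.0, smoothness=1.0, seed=0):
    rng = np.random.default_rng(seed)
    rndj = rng.standard_normal(resolution) + 1j * rng.standard_normal(resolution)
    k = np.meshgrid(*[np.fft.fftfreq(n) * n / s * scale for n, s in zip(resolution, size)], indexing='ij')
    k2 = sum(ki ** 2 for ki in k)                       # squared wave number in physical units
    weight = 1 / (1 + np.exp((0.1 - k2) * 1e3))         # suppress the lowest frequencies
    with np.errstate(divide='ignore'):
        inv_k = np.where(k2 == 0, 0.0, 1.0 / k2)        # zero out the DC mode
    array = np.real(np.fft.ifftn(rndj * inv_k ** smoothness * weight))
    return (array - array.mean()) / array.std()         # zero mean, unit variance

print(fft_noise().shape)  # (64, 64)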
Example no. 5
    def log_scalars(self, frame: int, **values: float or math.Tensor):
        """
        Adds `values` to the curves by name.
        This can be used to log the evolution of scalar quantities or summaries.

        The values are stored in a text file within the scene directory.
        The curves may also be directly viewed in the user interface.

        Args:
            frame: step
            values: Values and names to append to the curves, must be numbers or `phi.math.Tensor`.
                If a curve does not yet exist, a new one is created.
        """
        for name, value in values.items():
            assert isinstance(name, str)
            value = float(math.mean(value).mean)
            if name not in self._scalars:
                self._scalars[name] = []
                if self.scene is not None:
                    path = self.scene.subpath(f"log_{name}.txt")
                    self._scalar_streams[name] = open(path, "w")
            self._scalars[name].append((frame, value))
            if self.scene is not None:
                self._scalar_streams[name].write(f"{frame} {value}\n")
                self._scalar_streams[name].flush()
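
Note: the same curve-logging pattern stripped of the `Scene` machinery, as a stand-alone sketch (the `log_<name>.txt` file naming mirrors the method above; everything else is illustrative):

scalars, streams = {}, {}

def log_scalars(frame, **values):
    for name, value in values.items():
        if name not in scalars:
            scalars[name] = []
            streams[name] = open(f"log_{name}.txt", "w")
        scalars[name].append((frame, float(value)))
        streams[name].write(f"{frame} {value}\n")
        streams[name].flush()

log_scalars(0, loss=1.25)
log_scalars(1, loss=0.73)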
Example no. 6
 def grid_sample(self,
                 resolution: math.Shape,
                 size,
                 shape: math.Shape = None):
     shape = (self._shape if shape is None else shape) & resolution
     for dim in channel(self._shape):
         if dim.item_names[0] is None:
             warnings.warn(
                 f"Please provide item names for Noise dim {dim} using {dim}='x,y,z'",
                 FutureWarning)
             shape &= channel(**{dim.name: resolution.names})
     rndj = math.to_complex(random_normal(shape)) + 1j * math.to_complex(
         random_normal(shape))  # Note: there is no complex32
     with math.NUMPY:
         k = math.fftfreq(resolution) * resolution / math.tensor(
             size) * math.tensor(self.scale)  # in physical units
         k = math.vec_squared(k)
     lowest_frequency = 0.1
     weight_mask = math.to_float(k > lowest_frequency)
     # --- Compute 1/k ---
     k._native[(0, ) * len(k.shape)] = np.inf
     inv_k = 1 / k
     inv_k._native[(0, ) * len(k.shape)] = 0
     # --- Compute result ---
     fft = rndj * inv_k**self.smoothness * weight_mask
     array = math.real(math.ifft(fft))
     array /= math.std(array, dim=array.shape.non_batch)
     array -= math.mean(array, dim=array.shape.non_batch)
     array = math.to_float(array)
     return array
Example no. 7
 def normalize(self):
     v_length = math.sqrt(
         math.add(
             [self.staggered[..., i]**2 for i in range(self.shape[-1])]))
     global_mean = math.mean(v_length, axis=range(1, self.spatial_rank + 1))
     for i in range(self.spatial_rank + 1):
         global_mean = math.expand_dims(global_mean, -1)
     return StaggeredGrid(self.staggered / global_mean)
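
Note: the same normalization written for a plain NumPy array of shape (batch, *spatial, components), as a reference sketch:

import numpy as np

def normalize_by_mean_magnitude(v):
    """Divide a vector field by its per-batch mean vector length."""
    length = np.sqrt(np.sum(v ** 2, axis=-1, keepdims=True))
    mean_len = np.mean(length, axis=tuple(range(1, v.ndim - 1)), keepdims=True)
    return v / mean_len

v = np.random.randn(2, 4, 4, 2)
lengths = np.linalg.norm(normalize_by_mean_magnitude(v), axis=-1)
print(lengths.mean(axis=(1, 2)))  # ~[1. 1.]: mean magnitude is 1 per batch entry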
Example no. 8
def mean(field: SampledField) -> Tensor:
    """
    Computes the mean value by reducing all spatial / instance dimensions.

    Args:
        field: `SampledField`

    Returns:
        `phi.Tensor`
    """
    return math.mean(field.values, field.shape.non_channel.non_batch)
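
Note: a hypothetical usage sketch, assuming a phiflow 2.x `CenteredGrid`; constructor details vary between versions:

from phi.flow import *

grid = CenteredGrid(Noise(), extrapolation.PERIODIC, x=32, y=32)
print(field.mean(grid))  # spatial dims are reduced; channel and batch dims, if any, are kept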
Example no. 9
 def test_grid_sample_gradient_1d(self):
     for backend in BACKENDS:
         if backend.supports(Backend.gradients):
             with backend:
                 grid = math.tensor([0., 1, 2, 3], spatial('x'))
                 coords = math.tensor([0.5, 1.5], instance('points'))
                 with math.record_gradients(grid, coords):
                     sampled = math.grid_sample(grid, coords, extrapolation.ZERO)
                     loss = math.mean(math.l2_loss(sampled)) / 2
                     grad_grid, grad_coords = math.gradients(loss, grid, coords)
                 math.assert_close(grad_grid, math.tensor([0.125, 0.5, 0.375, 0], spatial('x')), msg=backend)
                 math.assert_close(grad_coords, math.tensor([0.25, 0.75], instance('points')), msg=backend)
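
Note: the expected values in this test can be reproduced by hand. A NumPy sketch of the backward pass through linear interpolation, assuming `math.l2_loss(x)` reduces to 0.5 * sum(x**2), so the loss above equals sum(sampled**2) / 4:

import numpy as np

grid = np.array([0., 1., 2., 3.])
coords = np.array([0.5, 1.5])
i = np.floor(coords).astype(int)
w = coords - i
sampled = (1 - w) * grid[i] + w * grid[i + 1]      # [0.5, 1.5]
d_sampled = sampled / 2                            # d(loss)/d(sampled)
grad_grid = np.zeros_like(grid)
np.add.at(grad_grid, i, (1 - w) * d_sampled)       # scatter-add through interpolation weights
np.add.at(grad_grid, i + 1, w * d_sampled)
grad_coords = d_sampled * (grid[i + 1] - grid[i])  # chain rule through the sample positions
print(grad_grid)    # [0.125 0.5   0.375 0.   ]
print(grad_coords)  # [0.25 0.75]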
Example no. 10
 def step(self, u, dt=1.0, **dependent_states):
     assert isinstance(u, CenteredGrid)
     grad = u.gradient()
     laplace = u.laplace()
     laplace2 = laplace.laplace()
     du_dt = -laplace - laplace2 - 0.5 * grad**2
     result = u + dt * du_dt
     result -= math.mean(result.data,
                         axis=tuple(
                             range(1, len(math.staticshape(result.data)))),
                         keepdims=True)
     return result.copied_with(age=u.age + dt, name=u.name)
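
Note: the right-hand side matches the Kuramoto-Sivashinsky equation (this identification is inferred from the code, not stated in it); the spatial mean is removed after each explicit Euler step to keep the solution centered:

    ∂u/∂t = -∇²u - ∇⁴u - ½ |∇u|²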
Example no. 11
def _run_higher_order_fft_reconstruction(in_field, set_accuracy, tolerance=20, order=2):
    # Higher Order FFT test
    mean = math.mean(in_field).data
    centered_field = in_field - mean
    fft_poisson = math.fourier_poisson(centered_field, times=order)  # use the mean-centered field computed above
    fft_poisson += mean
    fft_reconst = math.fourier_laplace(fft_poisson)
    for _ in range(order - 1):
        fft_reconst = math.fourier_laplace(fft_reconst)
    error = (in_field - fft_reconst) / in_field
    max_error = np.max(np.abs(error.data))
    passed = max_error < tolerance * set_accuracy
    print("{:.2g} vs. {:.2g}".format(max_error, tolerance * set_accuracy))
Example no. 12
def _test_reconstruction_first_order(in_field, solve_func, laplace_func, set_accuracy, name, first_order_tolerance=2):
    # Test Reconstruction
    mean = math.mean(in_field).data
    centered_field = in_field - mean
    ret = solve_func(centered_field)
    try:
        solved_field, it = ret
    except (TypeError, ValueError):  # solve_func may return only the solved field
        solved_field = ret
    reconst1 = laplace_func(solved_field) + mean  # Reconstruct Input
    error = (in_field - reconst1) / in_field
    max_error = np.max(np.abs(error.data))
    print("{:.2g}/{:.2g}".format(max_error, first_order_tolerance * set_accuracy))
    assert max_error < first_order_tolerance * set_accuracy, "{} reconstruction not within set accuracy. {:.2g} vs. {:.2g}".format(name, max_error, first_order_tolerance * set_accuracy)
Example no. 13
@contextmanager  # from contextlib; allows `with plot_solves() as solves:` below
def plot_solves():
    """
    Context manager that records all solves performed inside the `with` block and, on exit,
    plots their residual trajectories and prints a summary of each solve.

    While `plot_solves()` is active, certain performance optimizations and algorithm implementations may be disabled.
    """
    from . import math
    import pylab
    cycle = pylab.rcParams['axes.prop_cycle'].by_key()['color']
    with math.SolveTape(record_trajectories=True) as solves:
        try:
            yield solves
        finally:
            for i, result in enumerate(solves):
                assert isinstance(result, math.SolveInfo)
                from phi.math._tensors import disassemble_tree
                _, (residual, ) = disassemble_tree(result.residual)
                residual_mse = math.mean(math.sqrt(math.sum(residual**2)),
                                         residual.shape.without('trajectory'))
                residual_mse_max = math.max(
                    math.sqrt(math.sum(residual**2)),
                    residual.shape.without('trajectory'))
                # residual_mean = math.mean(math.abs(residual), residual.shape.without('trajectory'))
                residual_max = math.max(math.abs(residual),
                                        residual.shape.without('trajectory'))
                pylab.plot(residual_mse.numpy(),
                           label=f"{i}: {result.method}",
                           color=cycle[i % len(cycle)])
                pylab.plot(residual_max.numpy(),
                           '--',
                           alpha=0.2,
                           color=cycle[i % len(cycle)])
                pylab.plot(residual_mse_max.numpy(),
                           alpha=0.2,
                           color=cycle[i % len(cycle)])
                print(
                    f"Solve {i}: {result.method} ({1000 * result.solve_time:.1f} ms)\n"
                    f"\t{result.solve}\n"
                    f"\t{result.msg}\n"
                    f"\tConverged: {result.converged}\n"
                    f"\tDiverged: {result.diverged}\n"
                    f"\tIterations: {result.iterations}\n"
                    f"\tFunction evaluations: {result.function_evaluations.trajectory[-1]}"
                )
            pylab.yscale('log')
            pylab.ylabel("Residual: MSE / max / individual max")
            pylab.xlabel("Iteration")
            pylab.title(f"Solve Convergence")
            pylab.legend(loc='upper right')
            pylab.savefig(f"pressure-solvers-FP32.png")
            pylab.show()
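
Note: a hypothetical usage sketch (phiflow 2.x names such as `fluid.make_incompressible` and `Solve`; exact signatures vary between versions, and `plot_solves` must be in scope and decorated as a context manager as above):

from phi.flow import *

velocity = StaggeredGrid(Noise(), extrapolation.ZERO, x=64, y=64)
with plot_solves():
    velocity, pressure = fluid.make_incompressible(velocity, solve=Solve('CG', 1e-5, 0))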
Example no. 14
 def grid_sample(self, resolution, size, batch_size=1, dtype=np.float32):
     shape = (batch_size,) + tuple(resolution) + (self.channels,)
     rndj = math.randn(shape, dtype) + 1j * math.randn(shape, dtype)
     k = math.fftfreq(resolution) * resolution / size * self.scale  # in physical units
     k = math.sum(k ** 2, axis=-1, keepdims=True)
     lowest_frequency = 0.1
     weight_mask = 1 / (1 + math.exp((lowest_frequency - k) * 1e3))  # High pass filter
     # --- Compute 1/k ---
     k[(0,) * len(k.shape)] = np.inf
     inv_k = 1 / k
     inv_k[(0,) * len(k.shape)] = 0
     # --- Compute result ---
     fft = rndj * inv_k ** self.smoothness * weight_mask
     array = math.real(math.ifft(fft)).astype(dtype)
     array /= math.std(array, axis=tuple(range(1, math.ndims(array))), keepdims=True)
     array -= math.mean(array, axis=tuple(range(1, math.ndims(array))), keepdims=True)
     return array
Example no. 15
 def _sample(self, geometry: Geometry) -> Tensor:
     if geometry == self.bounds:
         return math.mean(self._values, self._resolution)
     if isinstance(geometry, GeometryStack):
         sampled = [self.sample(g) for g in geometry.geometries]
         return math.stack(sampled, geometry.stack_dim)
     if isinstance(geometry, GridCell):
         if self.elements == geometry:
             return self.values
         elif math.close(self.dx, geometry.size):
             fast_resampled = self._shift_resample(geometry.resolution,
                                                   geometry.bounds)
             if fast_resampled is not NotImplemented:
                 return fast_resampled
     points = geometry.center
     local_points = self.box.global_to_local(points) * self.resolution - 0.5
     return math.grid_sample(self.values, local_points, self.extrapolation)
Example no. 16
def _plot_points(axis, data: PointCloud, **plt_args):
    x, y = [d.numpy() for d in data.points.vector.unstack_spatial('x,y')]
    color = [d.native() for d in data.color.points.unstack(len(x))]
    if isinstance(data.elements, Sphere):
        symbol = 'o'
        size = data.elements.bounding_radius().numpy() * 1.41
    elif isinstance(data.elements, BaseBox):
        symbol = 's'
        size = math.mean(data.elements.bounding_half_extent(), 'vector').numpy()
    elif isinstance(data.elements, Point):
        symbol = 'x'
        size = 6 / _get_pixels_per_unit(axis.figure, axis)
    else:
        symbol = '*'
        size = data.elements.bounding_radius().numpy()
    size_px = size * _get_pixels_per_unit(axis.figure, axis)
    axis.scatter(x, y, marker=symbol, color=color, s=size_px ** 2, alpha=0.8)
    _annotate_points(axis, data.points, instance(data))
Example no. 17
 def grid_sample(self, resolution: math.Shape, size, shape: math.Shape = None):
     shape = (self._shape if shape is None else shape).combined(resolution)
     rndj = math.to_complex(random_normal(shape)) + 1j * math.to_complex(random_normal(shape))  # Note: there is no complex32
     with math.NUMPY_BACKEND:
         k = math.fftfreq(resolution) * resolution / size * self.scale  # in physical units
         k = math.vec_squared(k)
     lowest_frequency = 0.1
     weight_mask = 1 / (1 + math.exp((lowest_frequency - k) * 1e3))  # High pass filter
     # --- Compute 1/k ---
     k.native()[(0,) * len(k.shape)] = np.inf
     inv_k = 1 / k
     inv_k.native()[(0,) * len(k.shape)] = 0
     # --- Compute result ---
     fft = rndj * inv_k ** self.smoothness * weight_mask
     array = math.real(math.ifft(fft))
     array /= math.std(array, dim=array.shape.non_batch)
     array -= math.mean(array, dim=array.shape.non_batch)
     array = math.to_float(array)
     return array
Example no. 18
def poisson_solve(input_field, poisson_domain, solver=None, guess=None, gradient='implicit'):
    """
    Solves the Poisson equation Δp = input_field for p.

    :param gradient: one of ('implicit', 'autodiff', 'inverse')
        If 'autodiff', the built-in autodiff is used for backpropagation.
        The intermediate results of each loop iteration are then stored permanently, which increases memory usage.
        If 'implicit', a forward pressure solve is performed during reverse-accumulation backpropagation.
        This requires less memory but is only accurate if the solution is fully converged.
        If 'inverse', the gradient is approximated by applying the Laplace operator to the incoming gradient.
    :param input_field: CenteredGrid
    :param poisson_domain: PoissonDomain instance
    :param solver: PoissonSolver to use, None for default
    :param guess: CenteredGrid with same size and resolution as input_field
    :return: p as CenteredGrid, iteration count as int or None if not available
    :rtype: CenteredGrid, int
    """
    assert isinstance(input_field, CenteredGrid)
    if guess is not None:
        assert isinstance(guess, CenteredGrid)
        assert guess.compatible(input_field)
        guess = guess.data
    if isinstance(poisson_domain, Domain):
        poisson_domain = PoissonDomain(poisson_domain)
    if solver is None:
        solver = _choose_solver(input_field.resolution, math.choose_backend([input_field.data, poisson_domain.active.data, poisson_domain.accessible.data]))
    if not struct.any(Material.open(poisson_domain.domain.boundaries)):  # has no open boundary
        input_field = input_field - math.mean(input_field.data, axis=tuple(range(1, 1 + input_field.rank)), keepdims=True)  # Subtract mean divergence

    assert gradient in ('autodiff', 'implicit', 'inverse')
    if gradient == 'autodiff':
        pressure, iteration = solver.solve(input_field.data, poisson_domain, guess, enable_backprop=True)
    else:
        if gradient == 'implicit':
            def poisson_gradient(_op, grad):
                return poisson_solve(CenteredGrid.sample(grad, poisson_domain.domain), poisson_domain, solver, None, gradient=gradient)[0].data
        else:  # gradient = 'inverse'
            def poisson_gradient(_op, grad):
                return CenteredGrid.sample(grad, poisson_domain.domain).laplace(physical_units=False).data
        pressure, iteration = math.with_custom_gradient(solver.solve, [input_field.data, poisson_domain, guess, False], poisson_gradient, input_index=0, output_index=0, name_base='poisson_solve')

    pressure = CenteredGrid(pressure, input_field.box, extrapolation=input_field.extrapolation, name='pressure')
    return pressure, iteration
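
Note: the core of the FFT-based variant (`fourier_poisson`) and the mean-subtraction trick for closed domains can be illustrated with a self-contained NumPy sketch on a periodic grid with the discrete 5-point Laplacian (this is not the library's masked solver; names are illustrative):

import numpy as np

def fft_poisson(f, dx=1.0):
    """Solve the discrete Poisson equation lap(p) = f on a periodic grid via FFT."""
    f = f - f.mean()                                  # analogue of subtracting the mean divergence above
    freqs = np.meshgrid(*[np.fft.fftfreq(n) for n in f.shape], indexing='ij')
    eig = sum(2.0 * (np.cos(2 * np.pi * q) - 1.0) for q in freqs) / dx ** 2   # 5-point Laplacian eigenvalues
    f_hat = np.fft.fftn(f)
    with np.errstate(divide='ignore', invalid='ignore'):
        p_hat = np.where(eig == 0, 0.0, f_hat / eig)
    return np.real(np.fft.ifftn(p_hat))

f = np.random.randn(32, 32)
p = fft_poisson(f)
lap = sum(np.roll(p, 1, a) + np.roll(p, -1, a) for a in (0, 1)) - 4 * p   # discrete Laplacian, dx=1
print(np.max(np.abs(lap - (f - f.mean()))))  # ~1e-13: the centered input is reconstructed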
Example no. 19
def _test_reconstruction_second_order(in_field, solve_func, laplace_func, set_accuracy, name, second_order_tolerance=20):
    # Calculate 1st order
    mean = math.mean(in_field).data
    centered_field = in_field - mean
    ret = solve_func(centered_field)
    try:
        solved_field, it = ret
    except (TypeError, ValueError):  # solve_func may return only the solved field
        solved_field = ret
    # Calculate 2nd order
    ret2 = solve_func(solved_field)
    try:
        solved_field2, it = ret2
    except (TypeError, ValueError):
        solved_field2 = ret2
    reconst2 = laplace_func(laplace_func(solved_field2)) + mean
    error2 = (in_field - reconst2) / in_field
    max_error2 = np.max(np.abs(error2.data))
    passed = max_error2 < second_order_tolerance*set_accuracy
    print("{}^2 reconstruction {}within set accuracy: {:.2g} vs. {:.2g}".format(name, 'NOT ' if not passed else '', max_error2, second_order_tolerance * set_accuracy))
Example no. 20
def divergence_free(velocity,
                    domain=None,
                    obstacles=(),
                    pressure_solver=None,
                    return_info=False):
    """
    Projects the given velocity field by solving for and subtracting the pressure.
    :param return_info: if True, returns a dict holding information about the solve as a second object
    :param velocity: StaggeredGrid
    :param domain: Domain matching the velocity field, used for boundary conditions
    :param obstacles: list of Obstacles
    :param pressure_solver: PressureSolver. Uses default solver if none provided.
    :return: divergence-free velocity as StaggeredGrid
    """
    assert isinstance(velocity, StaggeredGrid)
    # --- Set up FluidDomain ---
    if domain is None:
        domain = Domain(velocity.resolution, OPEN)
    obstacle_mask = mask(union([obstacle.geometry for obstacle in obstacles]),
                         antialias=False)
    if obstacle_mask is not None:
        obstacle_grid = obstacle_mask.at(
            velocity.center_points).copied_with(extrapolation='constant')
        active_mask = 1 - obstacle_grid
    else:
        active_mask = math.ones(
            domain.centered_shape(name='active', extrapolation='constant'))
    accessible_mask = active_mask.copied_with(
        extrapolation=Material.accessible_extrapolation_mode(
            domain.boundaries))
    fluiddomain = FluidDomain(domain,
                              active=active_mask,
                              accessible=accessible_mask)
    # --- Boundary Conditions, Pressure Solve ---
    velocity = fluiddomain.with_hard_boundary_conditions(velocity)
    for obstacle in obstacles:
        if not obstacle.is_stationary:
            obs_mask = mask(obstacle.geometry, antialias=True)
            angular_velocity = AngularVelocity(
                location=obstacle.geometry.center,
                strength=obstacle.angular_velocity,
                falloff=None)
            velocity = ((1 - obs_mask) * velocity + obs_mask *
                        (angular_velocity + obstacle.velocity)).at(velocity)
    divergence_field = velocity.divergence(physical_units=False)
    if not struct.any(Material.open(
            domain.boundaries)):  # has no open boundary
        divergence_field = divergence_field - math.mean(
            divergence_field.data,
            axis=tuple(range(1, 1 + divergence_field.rank)),
            keepdims=True)  # Subtract mean divergence
    pressure, iterations = solve_pressure(divergence_field,
                                          fluiddomain,
                                          pressure_solver=pressure_solver)
    pressure *= velocity.dx[0]
    gradp = StaggeredGrid.gradient(pressure)
    velocity -= fluiddomain.with_hard_boundary_conditions(gradp)
    return velocity if not return_info else (velocity, {
        'pressure': pressure,
        'iterations': iterations,
        'divergence': divergence_field
    })
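
Note: to see the projection idea in isolation: on a fully periodic domain with no obstacles, subtracting the pressure gradient reduces to a spectral Helmholtz projection. A minimal NumPy sketch (not the library's masked pressure solve; names are illustrative):

import numpy as np

def project_divergence_free(u, v):
    """Remove the divergent part of a periodic 2D velocity field."""
    kx, ky = np.meshgrid(np.fft.fftfreq(u.shape[0]), np.fft.fftfreq(u.shape[1]), indexing='ij')
    k2 = kx ** 2 + ky ** 2
    u_hat, v_hat = np.fft.fft2(u), np.fft.fft2(v)
    div_hat = kx * u_hat + ky * v_hat                 # divergence up to a constant factor
    with np.errstate(divide='ignore', invalid='ignore'):
        factor = np.where(k2 == 0, 0.0, div_hat / k2)
    return np.real(np.fft.ifft2(u_hat - kx * factor)), np.real(np.fft.ifft2(v_hat - ky * factor))

u, v = np.random.randn(64, 64), np.random.randn(64, 64)
u_df, v_df = project_divergence_free(u, v)
kx, ky = np.meshgrid(np.fft.fftfreq(64), np.fft.fftfreq(64), indexing='ij')
residual = kx * np.fft.fft2(u_df) + ky * np.fft.fft2(v_df)   # spectral divergence of the result
print(np.max(np.abs(residual)))  # ~0 (machine precision)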
Example no. 21
def _plot(axis, data, show_color_bar, vmin, vmax, **plt_args):
    if isinstance(data, Grid) and data.spatial_rank == 1:
        x = data.points.staggered_direction[0].vector[0].numpy()
        requires_legend = False
        for c in channel(data).meshgrid(names=True):
            label = ", ".join([i for dim, i in c.items() if isinstance(i, str)])
            values = data.values[c].numpy()
            if values.dtype in (np.complex64, np.complex128):
                axis.plot(x, values.real, label=f"real({label})" if label else "real")
                axis.plot(x, values.imag, label=f"imag({label})" if label else "imag")
                requires_legend = True
            else:
                axis.plot(x, values, label=label)
                requires_legend = requires_legend or label
        if requires_legend:
            axis.legend()
    elif isinstance(data, Grid) and channel(data).volume == 1 and data.spatial_rank == 2:
        dims = spatial(data)
        if data.bounds.upper.vector.item_names is not None:
            left, bottom = data.bounds.lower.vector[dims]
            right, top = data.bounds.upper.vector[dims]
        else:
            dim_indices = data.resolution.indices(dims)
            left, bottom = data.bounds.lower.vector[dim_indices]
            right, top = data.bounds.upper.vector[dim_indices]
        extent = (float(left), float(right), float(bottom), float(top))
        im = axis.imshow(data.values.numpy(dims.reversed), origin='lower', extent=extent, vmin=vmin, vmax=vmax, **plt_args)
        if show_color_bar:
            axis.figure.colorbar(im, ax=axis)  # adds a new Axis to the figure
        axis.set_xlabel(dims.names[0])
        axis.set_ylabel(dims.names[1])
    elif isinstance(data, Grid) and data.spatial_rank == 2:  # vector field
        if isinstance(data, StaggeredGrid):
            data = data.at_centers()
        x, y = [d.numpy('x,y') for d in data.points.vector.unstack_spatial('x,y')]
        u, v = [d.numpy('x,y') for d in data.values.vector.unstack_spatial('x,y')]
        color = axis.xaxis.label.get_color()
        axis.quiver(x, y, u, v, color=color, units='xy', scale=1)
        axis.set_aspect('equal', adjustable='box')
    elif isinstance(data, Grid) and channel(data).volume > 1 and data.spatial_rank == 3:
        x, y, z = [d.numpy('x,y,z') for d in data.points.vector.unstack_spatial('x,y,z')]
        u, v, w = [d.numpy('x,y,z') for d in data.values.vector.unstack_spatial('x,y,z')]
        axis.quiver(x, y, z, u, v, w)
        axis.set_xlabel('x')
        axis.set_ylabel('y')
        axis.set_zlabel('z')
    elif isinstance(data, Grid) and channel(data).volume == 1 and data.spatial_rank == 3:
        x, y, z = [d.numpy('x,y,z') for d in data.points.vector.unstack_spatial('x,y,z')]
        values = data.values.numpy('x,y,z')
        cmap = plt.get_cmap('viridis')
        norm = matplotlib.colors.Normalize(vmin=np.min(values), vmax=np.max(values))
        colors = cmap(norm(values))
        axis.voxels(values, facecolors=colors, edgecolor='k')
    elif isinstance(data, PointCloud) and data.spatial_rank == 2 and 'vector' in channel(data):
        axis.set_aspect('equal', adjustable='box')
        vector = data.points.shape['vector']
        x, y = math.reshaped_native(data.points, [vector, data.shape.without('vector')], to_numpy=True, force_expand=True)
        u, v = math.reshaped_native(data.values, [vector, data.shape.without('vector')], to_numpy=True, force_expand=True)
        lower_x, lower_y = [float(d) for d in data.bounds.lower.vector]
        upper_x, upper_y = [float(d) for d in data.bounds.upper.vector]
        axis.set_xlim((lower_x, upper_x))
        axis.set_ylim((lower_y, upper_y))
        if data.color.shape:
            color = data.color.numpy(data.shape.non_channel).reshape(-1)
        else:
            color = data.color.native()
        axis.quiver(x, y, u, v, color=color, units='xy', scale=1)
        if data.points.vector.item_names:
            axis.set_xlabel(data.points.vector.item_names[0])
            axis.set_ylabel(data.points.vector.item_names[1])
    elif isinstance(data, PointCloud) and data.spatial_rank == 2:
        axis.set_aspect('equal', adjustable='box')
        lower_x, lower_y = [float(d) for d in data.bounds.lower.vector]
        upper_x, upper_y = [float(d) for d in data.bounds.upper.vector]
        axis.set_xlim((lower_x, upper_x))
        axis.set_ylim((lower_y, upper_y))
        if data.points.shape.non_channel.rank > 1:  # multiple instance / spatial dimensions
            data_list = field.unstack(data, data.points.shape.non_channel[0].name)
            for d in data_list:
                _plot_points(axis, d, **plt_args)
        else:
            _plot_points(axis, data, **plt_args)
    elif isinstance(data, PointCloud) and data.spatial_rank == 3:
        if data.points.shape.non_channel.rank > 1:
            data_list = field.unstack(data, data.points.shape.non_channel[0].name)
            for d in data_list:
                _plot(axis, d, show_color_bar, vmin, vmax, **plt_args)
        else:
            x, y, z = [d.numpy() for d in data.points.vector.unstack_spatial('x,y,z')]
            color = [d.native() for d in data.color.points.unstack(len(x))]
            M = axis.transData.get_matrix()
            x_scale, y_scale, z_scale = M[0, 0], M[1, 1], M[2, 2]
            if isinstance(data.elements, Sphere):
                symbol = 'o'
                size = data.elements.bounding_radius().numpy() * 0.4
            elif isinstance(data.elements, BaseBox):
                symbol = 's'
                size = math.mean(data.elements.bounding_half_extent(), 'vector').numpy() * 0.35
            elif isinstance(data.elements, Point):
                symbol = 'x'
                size = 6 / (0.5 * (x_scale+y_scale+z_scale)/3)
            else:
                symbol = 'X'
                size = data.elements.bounding_radius().numpy()
            axis.scatter(x, y, z, marker=symbol, color=color, s=(size * 0.5 * (x_scale+y_scale+z_scale)/3) ** 2)
        lower_x, lower_y, lower_z = [float(d) for d in data.bounds.lower.vector.unstack_spatial('x,y,z')]
        upper_x, upper_y, upper_z = [float(d) for d in data.bounds.upper.vector.unstack_spatial('x,y,z')]
        axis.set_xlim((lower_x, upper_x))
        axis.set_ylim((lower_y, upper_y))
        axis.set_zlim((lower_z, upper_z))
    else:
        raise NotImplementedError(f"No figure recipe for {data}")
Example no. 22
 def test_mean_collapsed(self):
     ones = math.ones(x=40000, y=30000)
     data = math.spatial_stack([ones, ones * 2], 'vector')
     self.assertEqual(1.5, math.mean(data))
Example no. 23
def _plot(data: SampledField,
          fig: graph_objects.Figure,
          size: tuple,
          colormap: str or None,
          show_color_bar: bool,
          row: int = None, col: int = None,
          ):
    subplot = fig.get_subplot(row, col)
    vector = data.points.shape['vector']
    if data.spatial_rank == 1 and isinstance(data, Grid):
        x = data.points.vector[0].numpy().flatten()
        channels = data.values.shape.channel
        if channels.rank == 1 and channels.get_item_names(0) is not None:
            for i, name in enumerate(channels.get_item_names(0)):
                y = math.reshaped_native(real_values(data[{channels.name: i}]), [data.shape.spatial], to_numpy=True)
                fig.add_trace(graph_objects.Scatter(x=x, y=y, mode='lines+markers', name=name), row=row, col=col)
            fig.update_layout(showlegend=True)
        else:
            for ch_idx in channels.meshgrid():
                y = math.reshaped_native(real_values(data[ch_idx]), [data.shape.spatial], to_numpy=True)
                fig.add_trace(graph_objects.Scatter(x=x, y=y, mode='lines+markers', name='Multi-channel'), row=row, col=col)
            fig.update_layout(showlegend=False)
    elif data.spatial_rank == 2 and isinstance(data, Grid) and 'vector' not in data.shape:  # heatmap
        dims = spatial(data)
        values = real_values(data).numpy(dims.reversed)
        x = data.points.vector[dims[0].name].dimension(dims[1].name)[0].numpy()
        y = data.points.vector[dims[1].name].dimension(dims[0].name)[0].numpy()
        min_val, max_val = numpy.nanmin(values), numpy.nanmax(values)
        min_val, max_val = min_val if numpy.isfinite(min_val) else 0, max_val if numpy.isfinite(max_val) else 0
        color_scale = get_div_map(min_val, max_val, equal_scale=True, colormap=colormap)
        # color_bar = graph_objects.heatmap.ColorBar(x=1.15)   , colorbar=color_bar
        fig.add_heatmap(row=row, col=col, x=x, y=y, z=values, zauto=False, zmin=min_val, zmax=max_val, colorscale=color_scale, showscale=show_color_bar)
        subplot.xaxis.update(scaleanchor=f'y{subplot.yaxis.plotly_name[5:]}', scaleratio=1, constrain='domain', title=dims.names[0])
        subplot.yaxis.update(constrain='domain', title=dims.names[1])
    elif data.spatial_rank == 2 and isinstance(data, Grid):  # vector field
        if isinstance(data, StaggeredGrid):
            data = data.at_centers()
        x, y = math.reshaped_native(data.points.vector[spatial(data)], [vector, data.shape.without(vector)], to_numpy=True, force_expand=True)
        extra_channels = data.shape.channel.without('vector')
        data_x, data_y = math.reshaped_native(data.values, [vector, extra_channels, spatial(data)], to_numpy=True, force_expand=True)
        lower_x, lower_y = [float(l) for l in data.bounds.lower.vector.unstack_spatial('x,y')]
        upper_x, upper_y = [float(u) for u in data.bounds.upper.vector.unstack_spatial('x,y')]
        x_range = [lower_x, upper_x]
        y_range = [lower_y, upper_y]
        for ch in range(data_x.shape[0]):
            # quiver = figure_factory.create_quiver(x, y, data_x[ch], data_y[ch], scale=1.0)  # 7 points per arrow
            # fig.add_trace(quiver, row=row, col=col)
            data_y_flat = data_y[ch].flatten()
            data_x_flat = data_x[ch].flatten()
            # lines_y = numpy.stack([y, y + data_y_flat, [None] * len(x)], -1).flatten()  # 3 points per arrow
            # lines_x = numpy.stack([x, x + data_x_flat, [None] * len(x)], -1).flatten()
            lines_y = numpy.stack([y - data_y_flat / 2, y + data_y_flat / 2, [None] * len(x)], -1).flatten()  # 3 points per arrow
            lines_x = numpy.stack([x - data_x_flat / 2, x + data_x_flat / 2, [None] * len(x)], -1).flatten()
            name = extra_channels.get_item_names(0)[ch] if extra_channels.rank == 1 and extra_channels.get_item_names(0) is not None else None
            fig.add_scatter(x=lines_x, y=lines_y, mode='lines', row=row, col=col, name=name)
        if data_x.shape[0] == 1:
            fig.update_layout(showlegend=False)
        subplot.xaxis.update(range=x_range)
        subplot.yaxis.update(range=y_range)
        subplot.xaxis.update(scaleanchor=f'y{subplot.yaxis.plotly_name[5:]}', scaleratio=1, constrain='domain')
        subplot.yaxis.update(constrain='domain')
    elif data.spatial_rank == 3 and isinstance(data, Grid) and data.shape.channel.volume == 1:  # 3D heatmap
        values = real_values(data).numpy('z,y,x')
        x = data.points.vector['x'].numpy('z,y,x')
        y = data.points.vector['y'].numpy('z,y,x')
        z = data.points.vector['z'].numpy('z,y,x')
        min_val, max_val = numpy.nanmin(values), numpy.nanmax(values)
        min_val, max_val = min_val if numpy.isfinite(min_val) else 0, max_val if numpy.isfinite(max_val) else 0
        color_scale = get_div_map(min_val, max_val, equal_scale=True, colormap=colormap)
        fig.add_volume(x=x.flatten(), y=y.flatten(), z=z.flatten(), value=values.flatten(),
                       showscale=show_color_bar, colorscale=color_scale, cmin=min_val, cmax=max_val, cauto=False,
                       isomin=0.1, isomax=0.8,
                       opacity=0.1,  # needs to be small to see through all surfaces
                       surface_count=17,  # needs to be a large number for good volume rendering
                       row=row, col=col)
        fig.update_layout(uirevision=True)
    elif data.spatial_rank == 3 and isinstance(data, Grid):  # 3D vector field
        if isinstance(data, StaggeredGrid):
            data = data.at_centers()
        u = real_values(data).vector['x'].numpy('z,y,x')
        v = real_values(data).vector['y'].numpy('z,y,x')
        w = real_values(data).vector['z'].numpy('z,y,x')
        x = data.points.vector['x'].numpy('z,y,x')
        y = data.points.vector['y'].numpy('z,y,x')
        z = data.points.vector['z'].numpy('z,y,x')
        fig.add_cone(x=x.flatten(), y=y.flatten(), z=z.flatten(), u=u.flatten(), v=v.flatten(), w=w.flatten(),
                     colorscale='Blues',
                     sizemode="absolute", sizeref=1,
                     row=row, col=col)
    elif isinstance(data, PointCloud) and data.spatial_rank == 2 and 'vector' in channel(data):
        x, y = math.reshaped_native(data.points, [vector, data.shape.without('vector')], to_numpy=True, force_expand=True)
        u, v = math.reshaped_native(data.values, [vector, data.shape.without('vector')], to_numpy=True, force_expand=True)
        lower_x, lower_y = [float(d) for d in data.bounds.lower.vector]
        upper_x, upper_y = [float(d) for d in data.bounds.upper.vector]
        subplot.xaxis.update(range=[lower_x, upper_x])
        subplot.yaxis.update(range=[lower_y, upper_y])
        quiver = figure_factory.create_quiver(x, y, u, v, scale=1.0).data[0]  # 7 points per arrow
        if data.color.shape:
            # color = data.color.numpy(data.shape.non_channel).reshape(-1)
            warnings.warn("Multi-colored vector plots not yet supported")
        else:
            color = data.color.native()
            quiver.line.update(color=color)
        fig.add_trace(quiver, row=row, col=col)
        if data.points.vector.item_names:
            subplot.xaxis.update(title=data.points.vector.item_names[0])
            subplot.yaxis.update(title=data.points.vector.item_names[1])
        subplot.xaxis.update(scaleanchor=f'y{subplot.yaxis.plotly_name[5:]}', scaleratio=1, constrain='domain')
        subplot.yaxis.update(constrain='domain')
    elif isinstance(data, PointCloud) and data.spatial_rank == 2:
        lower_x, lower_y = [float(d) for d in data.bounds.lower.vector]
        upper_x, upper_y = [float(d) for d in data.bounds.upper.vector]
        if data.points.shape.non_channel.rank > 1:
            data_list = field.unstack(data, data.points.shape.non_channel[0].name)
            for d in data_list:
                _plot(d, fig, size, colormap, show_color_bar, row, col)
        else:
            x, y = [d.numpy() for d in data.points.vector.unstack_spatial('x,y')]
            color = data.color.native()
            subplot_height = (subplot.yaxis.domain[1] - subplot.yaxis.domain[0]) * size[1]
            if isinstance(data.elements, Sphere):
                symbol = 'circle'
                marker_size = data.elements.bounding_radius().numpy() * 1.9
            elif isinstance(data.elements, BaseBox):
                symbol = 'square'
                marker_size = math.mean(data.elements.bounding_half_extent(), 'vector').numpy() * 1
            elif isinstance(data.elements, Point):
                symbol = 'x'
                marker_size = 12 / (subplot_height / (upper_y - lower_y))
            else:
                symbol = 'asterisk'
                marker_size = data.elements.bounding_radius().numpy()
            marker_size *= subplot_height / (upper_y - lower_y)
            marker = graph_objects.scatter.Marker(size=marker_size, color=color, sizemode='diameter', symbol=symbol)
            fig.add_scatter(mode='markers', x=x, y=y, marker=marker, row=row, col=col)
        subplot.xaxis.update(range=[lower_x, upper_x])
        subplot.yaxis.update(range=[lower_y, upper_y])
        fig.update_layout(showlegend=False)
        subplot.xaxis.update(scaleanchor=f'y{subplot.yaxis.plotly_name[5:]}', scaleratio=1, constrain='domain')
        subplot.yaxis.update(constrain='domain')
    elif isinstance(data, PointCloud) and data.spatial_rank == 3:
        lower_x, lower_y, lower_z = [float(d) for d in data.bounds.lower.vector.unstack_spatial('x,y,z')]
        upper_x, upper_y, upper_z = [float(d) for d in data.bounds.upper.vector.unstack_spatial('x,y,z')]
        if data.points.shape.non_channel.rank > 1:
            data_list = field.unstack(data, data.points.shape.non_channel[0].name)
            for d in data_list:
                _plot(d, fig, size, colormap, show_color_bar, row, col)
        else:
            x, y, z = [d.numpy() for d in data.points.vector.unstack_spatial('x,y,z')]
            color = data.color.native()
            # if data.color.shape.instance_rank == 0:
            #     color = str(data.color)
            # else:
            #     color = [str(d) for d in math.unstack(data.color, instance)]
            domain_y = fig.layout[subplot.plotly_name].domain.y
            if isinstance(data.elements, Sphere):
                symbol = 'circle'
                marker_size = data.elements.bounding_radius().numpy() * 2
            elif isinstance(data.elements, BaseBox):
                symbol = 'square'
                marker_size = math.mean(data.elements.bounding_half_extent(), 'vector').numpy() * 1
            elif isinstance(data.elements, Point):
                symbol = 'x'
                marker_size = 4 / (size[1] * (domain_y[1] - domain_y[0]) / (upper_y - lower_y) * 0.5)
            else:
                symbol = 'asterisk'
                marker_size = data.elements.bounding_radius().numpy()
            marker_size *= size[1] * (domain_y[1] - domain_y[0]) / (upper_y - lower_y) * 0.5
            marker = graph_objects.scatter3d.Marker(size=marker_size, color=color, sizemode='diameter', symbol=symbol)
            fig.add_scatter3d(mode='markers', x=x, y=y, z=z, marker=marker, row=row, col=col)
        subplot.xaxis.update(range=[lower_x, upper_x])
        subplot.yaxis.update(range=[lower_y, upper_y])
        subplot.zaxis.update(range=[lower_z, upper_z])
        fig.update_layout(showlegend=False)
    else:
        raise NotImplementedError(f"No figure recipe for {data}")
Example no. 24
def mean(field: Grid):
    return math.mean(field.values, field.shape.spatial)
Example no. 25
 def test_mean_collapsed(self):
     ones = math.ones(spatial(x=40000, y=30000))
     data = math.stack([ones, ones * 2], spatial('vector'))
     math.assert_close(1.5, math.mean(data))