def test_linear_operator(self):
    GLOBAL_AXIS_ORDER.x_last()
    direct = math.random_normal(batch=3, x=4, y=3)  # , vector=2
    op = lin_placeholder(direct)

    def linear_function(val):
        val = -val
        val *= 2
        val = math.pad(val, {'x': (2, 0), 'y': (0, 1)}, extrapolation.PERIODIC)
        val = val.x[:-2].y[1:] + val.x[2:].y[:-1]
        val = math.pad(val, {'x': (0, 0), 'y': (0, 1)}, extrapolation.ZERO)
        val = math.pad(val, {'x': (2, 2), 'y': (0, 1)}, extrapolation.BOUNDARY)
        return val

    functions = [
        linear_function,
        lambda val: math.gradient(val, difference='forward', padding=extrapolation.ZERO, dims='x').gradient[0],
        lambda val: math.gradient(val, difference='backward', padding=extrapolation.PERIODIC, dims='x').gradient[0],
        lambda val: math.gradient(val, difference='central', padding=extrapolation.BOUNDARY, dims='x').gradient[0],
    ]
    for f in functions:
        direct_result = f(direct)
        # print(direct_result.batch[0], 'Direct result')
        op_result = f(op)
        # print(op_result.build_sparse_coordinate_matrix().todense())
        self.assertIsInstance(op_result, ShiftLinOp)
        op_result = NativeTensor(op_result.native(), op_result.shape)
        # print(op_result.batch[0], 'Placeholder result')
        math.assert_close(direct_result, op_result)
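# The test above checks that tracing an arbitrary linear function through a
# placeholder reproduces the direct computation. A minimal NumPy sketch of the
# underlying idea (not phiflow's implementation): the matrix of any linear
# function can be recovered by probing it with standard basis vectors, and the
# recovered matrix must then reproduce the function on arbitrary inputs.
# `matrix_of` is a hypothetical helper introduced only for this illustration.
import numpy as np

def matrix_of(f, n):
    # Column j of the matrix is f applied to the j-th standard basis vector.
    columns = [f(np.eye(n)[:, j]) for j in range(n)]
    return np.stack(columns, axis=-1)

f = lambda v: 2 * (np.roll(v, 1) - v)   # a shift stencil, linear in v
A = matrix_of(f, 5)
v = np.random.randn(5)
assert np.allclose(A @ v, f(v))         # the recovered matrix matches the function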
def test_gradient_vector(self):
    meshgrid = math.meshgrid(x=4, y=3)
    cases = dict(difference=('central', 'forward', 'backward'),
                 padding=(None, extrapolation.ONE, extrapolation.BOUNDARY, extrapolation.PERIODIC, extrapolation.SYMMETRIC),
                 dx=(0.1, 1),
                 dims=(None, ('x', 'y')))
    for case_dict in [dict(zip(cases, v)) for v in product(*cases.values())]:
        grad = math.gradient(meshgrid, **case_dict)
        inner = grad.x[1:-1].y[1:-1]
        math.assert_close(inner.spatial_gradient[0].vector[1], 0)
        math.assert_close(inner.spatial_gradient[1].vector[0], 0)
        math.assert_close(inner.spatial_gradient[0].vector[0], 1 / case_dict['dx'])
        math.assert_close(inner.spatial_gradient[1].vector[1], 1 / case_dict['dx'])
        self.assertEqual(grad.shape.vector, 2)
        self.assertEqual(grad.shape.spatial_gradient, 2)
        ref_shape = (4, 3) if case_dict['padding'] is not None else ((2, 1) if case_dict['difference'] == 'central' else (3, 2))
        self.assertEqual((grad.shape.x, grad.shape.y), ref_shape)
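# Why the assertions above expect 1 / dx: math.meshgrid(x=4, y=3) stores the
# cell index along each axis, so each coordinate channel rises by exactly 1 per
# cell along its own axis, and the finite difference divided by dx yields 1 / dx
# on interior points. A plain-NumPy check of the same arithmetic (dx = 0.1
# assumed, matching one of the tested cases):
import numpy as np

x = np.arange(4, dtype=float)            # x-channel of the meshgrid along x
dx = 0.1
interior = (x[2:] - x[:-2]) / (2 * dx)   # central difference, interior points only
print(interior)                          # [10. 10.] == 1 / dx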
def divergence(field: Grid) -> CenteredGrid:
    """
    Computes the divergence of a grid using finite differences.

    This function can operate in two modes depending on the type of `field`:

    * `CenteredGrid` approximates the divergence at cell centers using central differences
    * `StaggeredGrid` exactly computes the divergence at cell centers

    Args:
        field: vector field as `CenteredGrid` or `StaggeredGrid`

    Returns:
        Divergence field as `CenteredGrid`
    """
    if isinstance(field, StaggeredGrid):
        components = []
        for i, dim in enumerate(field.shape.spatial.names):
            div_dim = math.gradient(field.values.vector[i], dx=field.dx[i], difference='forward', padding=None, dims=[dim]).gradient[0]
            components.append(div_dim)
        data = math.sum(components, 0)
        return CenteredGrid(data, field.box, field.extrapolation.gradient())
    elif isinstance(field, CenteredGrid):
        left, right = shift(field, (-1, 1), stack_dim='div_')
        grad = (right - left) / (field.dx * 2)
        components = [grad.vector[i].div_[i] for i in range(grad.div_.size)]
        result = sum(components)
        return result
    else:
        raise NotImplementedError(f"{type(field)} not supported. Only StaggeredGrid and CenteredGrid allowed.")
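# The CenteredGrid branch above applies the standard central-difference
# divergence stencil. A minimal sketch of the same stencil in plain NumPy,
# restricted to interior points and ignoring the extrapolation handling that
# shift() performs at the boundary:
import numpy as np

def divergence_centered(v, dx):
    # v has shape (2, nx, ny): vector components sampled at cell centers.
    du_dx = (v[0, 2:, 1:-1] - v[0, :-2, 1:-1]) / (2 * dx)
    dv_dy = (v[1, 1:-1, 2:] - v[1, 1:-1, :-2]) / (2 * dx)
    return du_dx + dv_dy

v = np.random.randn(2, 8, 6)
print(divergence_centered(v, dx=0.5).shape)  # (6, 4): interior cells only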
def gradient(self, physical_units=True):
    if not physical_units or self.has_cubic_cells:
        # A single scalar spacing is only valid when all cells are cubic.
        data = math.gradient(self.data, dx=np.mean(self.dx), padding=_pad_mode(self.extrapolation))
        return self.copied_with(data=data, extrapolation=_gradient_extrapolation(self.extrapolation), flags=())
    else:
        raise NotImplementedError('Only cubic cells supported.')
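# Why gradient() above refuses non-cubic cells: it collapses self.dx to the
# scalar np.mean(self.dx), which only equals the true spacing when every axis
# has the same cell size. A plain-NumPy illustration of what a per-axis
# treatment would have to do instead (dx and dy chosen for illustration):
import numpy as np

values = np.random.randn(8, 4)
dx, dy = 0.5, 1.0                                    # anisotropic cells
ddx = (values[2:, :] - values[:-2, :]) / (2 * dx)    # x-derivative scaled by dx
ddy = (values[:, 2:] - values[:, :-2]) / (2 * dy)    # y-derivative scaled by dy
# Using np.mean([dx, dy]) == 0.75 for both axes would mis-scale each derivative.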
def test_gradient_scalar(self):
    ones = tensor(np.ones([2, 4, 3]), 'batch,x,y')
    cases = dict(difference=('central', 'forward', 'backward'),
                 padding=(None, extrapolation.ONE, extrapolation.BOUNDARY, extrapolation.PERIODIC, extrapolation.SYMMETRIC))
    for case_dict in [dict(zip(cases, v)) for v in product(*cases.values())]:
        scalar_grad = math.gradient(ones, dx=0.1, **case_dict)
        math.assert_close(scalar_grad, 0)
        self.assertEqual(scalar_grad.shape.names, ('batch', 'x', 'y', 'gradient'))
        ref_shape = (2, 4, 3, 2) if case_dict['padding'] is not None else ((2, 2, 1, 2) if case_dict['difference'] == 'central' else (2, 3, 2, 2))
        self.assertEqual(scalar_grad.shape.sizes, ref_shape)
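# Why scalar_grad is 0 for every tested padding mode: the field is all ones,
# and each listed extrapolation (ONE, BOUNDARY, PERIODIC, SYMMETRIC) extends a
# ones-field with more ones, so every finite difference vanishes even at the
# boundary. A quick NumPy check for the extrapolation.ONE case:
import numpy as np

ones = np.ones(5)
padded = np.pad(ones, 1, mode='constant', constant_values=1)  # like extrapolation.ONE
central = (padded[2:] - padded[:-2]) / (2 * 0.1)
print(central)  # [0. 0. 0. 0. 0.]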
def spatial_gradient(field: CenteredGrid, type: type = CenteredGrid, stack_dim='vector'):
    """
    Finite difference spatial_gradient.

    This function can operate in two modes:

    * `type=CenteredGrid` approximates the spatial_gradient at cell centers using central differences
    * `type=StaggeredGrid` computes the spatial_gradient at face centers of neighbouring cells

    Args:
        field: centered grid of any number of dimensions (scalar field, vector field, tensor field)
        type: either `CenteredGrid` or `StaggeredGrid`
        stack_dim: name of the dimension to be added. This dimension lists the spatial_gradient w.r.t. the spatial dimensions.
            `field` must not have a dimension of the same name.

    Returns:
        spatial_gradient field of type `type`.
    """
    if type == CenteredGrid:
        values = math.gradient(field.values, field.dx.vector.as_channel(name=stack_dim), difference='central', padding=field.extrapolation, stack_dim=stack_dim)
        return CenteredGrid(values, field.bounds, field.extrapolation.spatial_gradient())
    elif type == StaggeredGrid:
        assert stack_dim == 'vector'
        return stagger(field, lambda lower, upper: (upper - lower) / field.dx, field.extrapolation.spatial_gradient())
    raise NotImplementedError(f"{type} not supported. Only CenteredGrid and StaggeredGrid allowed.")
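# The two modes above differ in where the derivative is sampled. A NumPy sketch
# of the two stencils in 1-D (periodic wrap-around stands in for the centered
# grid's extrapolation):
import numpy as np

u = np.random.randn(6)
dx = 0.5
centered = (np.roll(u, -1) - np.roll(u, 1)) / (2 * dx)  # at the 6 cell centers
staggered = (u[1:] - u[:-1]) / dx                       # at the 5 faces between cells
print(centered.shape, staggered.shape)                  # (6,) (5,)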