Example No. 1
 def test_grid_sample_gradient_1d(self):
     for backend in BACKENDS:
         if backend.supports(Backend.gradients):
             with backend:
                 grid = math.tensor([0., 1, 2, 3], spatial('x'))
                 coords = math.tensor([0.5, 1.5], instance('points'))
                 with math.record_gradients(grid, coords):
                     sampled = math.grid_sample(grid, coords, extrapolation.ZERO)
                     loss = math.mean(math.l2_loss(sampled)) / 2
                     grad_grid, grad_coords = math.gradients(loss, grid, coords)
                 math.assert_close(grad_grid, math.tensor([0.125, 0.5, 0.375, 0], spatial('x')), msg=backend)
                 math.assert_close(grad_coords, math.tensor([0.25, 0.75], instance('points')), msg=backend)
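The snippets on this page are test methods from PhiFlow's test suite and omit their module-level imports. Below is a minimal standalone sketch of the same 1D gradient check, assuming a PhiFlow 2.x installation with at least one autodiff-capable backend (TensorFlow or PyTorch) available; the import paths for `Backend` and `BACKENDS` are assumptions and may differ between PhiFlow versions.

 # Minimal standalone sketch of the pattern used in these tests (PhiFlow 2.x assumed;
 # the exact import paths for Backend and BACKENDS may differ between versions).
 from phi import math
 from phi.math import extrapolation, spatial, instance
 from phi.math.backend import Backend, BACKENDS  # assumed location of the backend registry

 for backend in BACKENDS:
     if backend.supports(Backend.gradients):  # skip backends without autodiff, e.g. NumPy
         with backend:
             grid = math.tensor([0., 1, 2, 3], spatial('x'))       # values to sample from
             coords = math.tensor([0.5, 1.5], instance('points'))  # sample locations
             with math.record_gradients(grid, coords):             # mark tensors as differentiable
                 sampled = math.grid_sample(grid, coords, extrapolation.ZERO)
                 loss = math.l2_loss(sampled)
                 grad_grid, grad_coords = math.gradients(loss, grid, coords)
             print(backend, grad_grid, grad_coords)

Examples 2 and 6 follow the same record_gradients / gradients pattern but differentiate through fluid.make_incompressible and compare the resulting gradients between the TensorFlow and PyTorch backends.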
Example No. 2
 def test_make_incompressible_gradients_equal_tf_torch(self):
     DOMAIN = Domain(x=16, y=16, boundaries=OPEN, bounds=Box[0:100, 0:100])  # TODO CLOSED solve fails because div is not subtracted from dx
     velocity0 = DOMAIN.staggered_grid(Noise(vector=2))
     grads = []
     for backend in [TF_BACKEND, TORCH_BACKEND]:
         with backend:
             velocity = param = velocity0.with_(values=math.tensor(velocity0.values))
             with math.record_gradients(param.values):
                 solve = math.LinearSolve()
                 velocity, _, _, _ = fluid.make_incompressible(velocity, DOMAIN, solve_params=solve)
                 loss = field.l2_loss(velocity)
                 assert math.isfinite(loss)
                 grad = math.gradients(loss, param.values)
                 assert math.all(math.isfinite(grad))
                 grads.append(grad)
     math.assert_close(*grads, abs_tolerance=1e-5)
Example No. 3
 def test_grid_sample_gradient_2d(self):
     grads_grid = []
     grads_coords = []
     for backend in BACKENDS:
         if backend.supports(Backend.gradients):
             with backend:
                 grid = math.tensor([[1., 2, 3], [1, 2, 3]], spatial('x,y'))
                 coords = math.tensor([(0.5, 0.5), (1, 1.1), (-0.8, -0.5)], instance('points'), channel('vector'))
                 with math.record_gradients(grid, coords):
                     sampled = math.grid_sample(grid, coords, extrapolation.ZERO)
                     loss = math.sum(sampled) / 3
                     grad_grid, grad_coords = math.gradients(loss, grid, coords)
                     grads_grid.append(grad_grid)
                     grads_coords.append(grad_coords)
     math.assert_close(*grads_grid)
     math.assert_close(*grads_coords)
Example No. 4
 def test_grid_sample_gradient_2d(self):
     grads_grid = []
     grads_coords = []
     for backend in [tf.TF_BACKEND, torch.TORCH_BACKEND]:
         with backend:
             grid = math.tensor([[1., 2, 3], [1, 2, 3]], 'x,y')
             coords = math.tensor([(0.5, 0.5), (1, 1.1), (-0.8, -0.5)],
                                  'points,vector')
             with math.record_gradients(grid, coords):
                 sampled = math.grid_sample(grid, coords,
                                            extrapolation.ZERO)
                 loss = math.sum(sampled)
                 grad_grid, grad_coords = math.gradients(loss, grid, coords)
                 grads_grid.append(grad_grid)
                 grads_coords.append(grad_coords)
     math.assert_close(*grads_grid)
     math.assert_close(*grads_coords)
Example No. 5
 def test_grid_sample_gradient_1d(self):
     grads_grid = []
     grads_coords = []
     for backend in BACKENDS:
         if backend.supports(Backend.gradients):
             print(backend)
             with backend:
                 grid = math.tensor([0., 1, 2, 3], 'x')
                 coords = math.tensor([0.5, 1.5], 'points')
                 with math.record_gradients(grid, coords):
                     sampled = math.grid_sample(grid, coords,
                                                extrapolation.ZERO)
                     loss = math.l2_loss(sampled)
                     grad_grid, grad_coords = math.gradients(
                         loss, grid, coords)
                     grads_grid.append(grad_grid)
                     grads_coords.append(grad_coords)
     math.assert_close(*grads_grid, math.tensor([0.125, 0.5, 0.375, 0],
                                                'x'))
     math.assert_close(*grads_coords, math.tensor([0.25, 0.75], 'points'))
Example No. 6
 def test_make_incompressible_gradients_equal_tf_torch(self):
     velocity0 = StaggeredGrid(Noise(),
                               ZERO,
                               x=16,
                               y=16,
                               bounds=Box[0:100, 0:100])
     grads = []
     for backend in BACKENDS:
         if backend.supports(Backend.record_gradients):
             with backend:
                 velocity = param = velocity0.with_values(
                     math.tensor(velocity0.values))
                 with math.record_gradients(param.values):
                     velocity, _ = fluid.make_incompressible(velocity)
                     loss = field.l2_loss(velocity)
                     assert math.isfinite(loss).all
                     grad = math.gradients(loss, param.values)
                     assert math.isfinite(grad).all
                     grads.append(grad)
     math.assert_close(*grads, abs_tolerance=1e-5)