def test_Jacobian(self):
    """
    Verify that the analytic Jacobian from Dofs.jac() agrees with both
    one-sided and centered finite-difference Jacobians for several
    combinations of objects, with randomly chosen dof values and
    randomly fixed degrees of freedom.
    """
    # Tolerances for comparing analytic vs finite-difference Jacobians.
    rtol = 1e-6
    atol = 1e-6
    for _ in range(1, 20):
        v1 = np.random.rand() * 4 - 2
        v2 = np.random.rand() * 4 - 2
        o = TestObject2(v1, v2)
        o.adder.set_dofs(np.random.rand(2) * 4 - 2)
        o.t.set_dofs([np.random.rand() * 4 - 2])
        o.t.adder1.set_dofs(np.random.rand(3) * 4 - 2)
        o.t.adder2.set_dofs(np.random.rand(2) * 4 - 2)
        r = Rosenbrock(b=3.0)
        r.set_dofs(np.random.rand(2) * 3 - 1.5)
        a = Affine(nparams=3, nvals=3)

        # Randomly fix some of the degrees of freedom
        o.fixed = np.random.rand(2) > 0.5
        o.adder.fixed = np.random.rand(2) > 0.5
        o.t.adder1.fixed = np.random.rand(3) > 0.5
        o.t.adder2.fixed = np.random.rand(2) > 0.5
        r.fixed = np.random.rand(2) > 0.5
        a.fixed = np.random.rand(3) > 0.5

        for j in range(4):
            # Try different sets of the objects:
            if j == 0:
                dofs = Dofs([o.J, r.terms, o.t.J])
                nvals = 4
                nvals_per_func = [1, 2, 1]
            elif j == 1:
                dofs = Dofs([r.term2, r.terms])
                nvals = 3
                nvals_per_func = [1, 2]
            elif j == 2:
                dofs = Dofs([r.term2, Target(o.t, 'f'), r.term1, Target(o, 'f')])
                nvals = 4
                nvals_per_func = [1, 1, 1, 1]
            elif j == 3:
                dofs = Dofs([a, o])
                nvals = 4
                nvals_per_func = [3, 1]
            jac = dofs.jac()
            fd_jac = dofs.fd_jac()
            fd_jac_centered = dofs.fd_jac(centered=True)
            np.testing.assert_allclose(jac, fd_jac, rtol=rtol, atol=atol)
            np.testing.assert_allclose(fd_jac, fd_jac_centered, rtol=rtol, atol=atol)
            self.assertEqual(dofs.nvals, nvals)
            self.assertEqual(list(dofs.nvals_per_func), nvals_per_func)
def test_gradient(self):
    """
    Check the analytic gradients of both Rosenbrock terms against
    finite differences, supplying the target first as a function and
    then as an attribute.
    """
    rtol = 1e-6
    atol = 1e-6
    for _ in range(1, 10):
        r = Rosenbrock(b=np.random.rand() * 2)  # Note b must be > 0.
        r.set_dofs(np.random.rand(2) * 4 - 2)
        # Exercise term1 and term2 identically: (function target,
        # attribute name for Target(), analytic-gradient callable).
        cases = [(r.term1, "term1prop", r.dterm1),
                 (r.term2, "term2prop", r.dterm2)]
        for func, prop_name, dterm in cases:
            grad_prop = getattr(r, "d" + prop_name)
            # Supply a function to finite_difference():
            fd_grad = Dofs([func]).fd_jac().flatten()
            np.testing.assert_allclose(fd_grad, grad_prop, rtol=rtol, atol=atol)
            np.testing.assert_allclose(fd_grad, dterm(), rtol=rtol, atol=atol)
            # Supply an attribute to finite_difference():
            fd_grad = Dofs([Target(r, prop_name)]).fd_jac().flatten()
            np.testing.assert_allclose(fd_grad, grad_prop, rtol=rtol, atol=atol)
            np.testing.assert_allclose(fd_grad, dterm(), rtol=rtol, atol=atol)
def test_solve_quadratic_fixed_supplying_attributes(self):
    """
    Same as test_solve_quadratic_fixed, except supplying attributes
    rather than functions as targets.
    """
    for solver in solvers:
        iden1, iden2, iden3 = Identity(), Identity(), Identity()
        # Give each Identity a starting value and a dof name.
        for iden, start, label in zip((iden1, iden2, iden3),
                                      (4, 5, 6),
                                      ('x1', 'x2', 'x3')):
            iden.x = start
            iden.names = [label]
        # Fix dofs 1 and 3, so only iden2.x is free to move.
        iden1.fixed = [True]
        iden3.fixed = [True]
        # Try a mix of explicit LeastSquaresTerms and tuples
        terms = [LeastSquaresTerm(Target(iden1, 'x'), 1, 1),
                 (iden2, 'x', 2, 1 / 4.),
                 (iden3, 'x', 3, 1 / 9.)]
        prob = LeastSquaresProblem(terms)
        solver(prob)
        self.assertAlmostEqual(prob.objective(), 10)
        self.assertAlmostEqual(iden1.x, 4)
        self.assertAlmostEqual(iden2.x, 2)
        self.assertAlmostEqual(iden3.x, 6)
def test_solve_quadratic_fixed_supplying_properties(self):
    """
    Same as test_solve_quadratic_fixed, except supplying @properties
    rather than functions as targets.
    """
    for solver in solvers:
        iden1, iden2, iden3 = Identity(), Identity(), Identity()
        # Give each Identity a starting value and a dof name.
        for iden, start, label in zip((iden1, iden2, iden3),
                                      (4, 5, 6),
                                      ('x1', 'x2', 'x3')):
            iden.x = start
            iden.names = [label]
        # Fix dofs 1 and 3, so only iden2.x is free to move.
        iden1.fixed = [True]
        iden3.fixed = [True]
        # Try a mix of explicit LeastSquaresTerms and lists
        terms = [[iden1, 'f', 1, 1],
                 [iden2, 'f', 2, 1 / 4.],
                 LeastSquaresTerm.from_sigma(Target(iden3, 'f'), 3, sigma=3)]
        prob = LeastSquaresProblem(terms)
        solver(prob)
        self.assertAlmostEqual(prob.objective(), 10)
        self.assertAlmostEqual(iden1.x, 4)
        self.assertAlmostEqual(iden2.x, 2)
        self.assertAlmostEqual(iden3.x, 6)
def test_gradient(self):
    """
    Compare the analytic gradient of TestObject2 against finite
    differences, supplying the target as a function, as an object, and
    as an attribute, with randomly fixed degrees of freedom.
    """
    # Looser tolerances than the Jacobian test, since the one-sided
    # finite difference here is less accurate.
    rtol = 1e-4
    atol = 1e-4
    for _ in range(1, 20):
        v1 = np.random.rand() * 4 - 2
        v2 = np.random.rand() * 4 - 2
        o = TestObject2(v1, v2)
        o.adder.set_dofs(np.random.rand(2) * 4 - 2)
        o.t.set_dofs([np.random.rand() * 4 - 2])
        o.t.adder1.set_dofs(np.random.rand(3) * 4 - 2)
        o.t.adder2.set_dofs(np.random.rand(2) - 0.5)

        # Randomly fix some of the degrees of freedom
        o.fixed = np.random.rand(2) > 0.5
        o.adder.fixed = np.random.rand(2) > 0.5
        o.t.adder1.fixed = np.random.rand(3) > 0.5
        o.t.adder2.fixed = np.random.rand(2) > 0.5

        dofs = Dofs([o.J])
        # Mask selecting the non-fixed entries of the analytic gradient,
        # since fd_jac() only differentiates with respect to free dofs.
        mask = np.logical_not(np.array(dofs.func_fixed[0]))

        # Supply a function to finite_difference():
        fd_grad = dofs.fd_jac().flatten()
        np.testing.assert_allclose(fd_grad, o.df[mask], rtol=rtol, atol=atol)
        np.testing.assert_allclose(fd_grad, o.dJ()[mask], rtol=rtol, atol=atol)

        # Supply an object to finite_difference():
        fd_grad = Dofs([o]).fd_jac().flatten()
        np.testing.assert_allclose(fd_grad, o.df[mask], rtol=rtol, atol=atol)
        np.testing.assert_allclose(fd_grad, o.dJ()[mask], rtol=rtol, atol=atol)

        # Supply an attribute to finite_difference():
        fd_grad = Dofs([Target(o, "f")]).fd_jac().flatten()
        np.testing.assert_allclose(fd_grad, o.df[mask], rtol=rtol, atol=atol)
        np.testing.assert_allclose(fd_grad, o.dJ()[mask], rtol=rtol, atol=atol)
def test_supply_attribute(self):
    """
    Test that we can supply an attribute instead of a function.
    """
    iden = Identity()
    term = LeastSquaresTerm(Target(iden, 'x'), 3, weight=0.1)
    # The goal and weight should be stored as given.
    self.assertEqual(term.goal, 3)
    self.assertAlmostEqual(term.weight, 0.1, places=13)
    iden.set_dofs([17])
    self.assertEqual(term.f_in(), 17)
    # f_out = weight * (f_in - goal)^2
    expected = 0.1 * ((17 - 3) ** 2)
    self.assertAlmostEqual(term.f_out(), expected, places=13)
def test_supply_property(self):
    """
    Test that we can supply a property instead of a function.
    """
    iden = Identity()
    term = LeastSquaresTerm.from_sigma(Target(iden, 'f'), 3, sigma=0.1)
    # from_sigma converts sigma to a weight of 1 / sigma^2.
    self.assertEqual(term.goal, 3)
    self.assertAlmostEqual(term.weight, 100, places=13)
    iden.set_dofs([17])
    self.assertEqual(term.f_in(), 17)
    # f_out = ((f_in - goal) / sigma)^2
    expected = ((17 - 3) / 0.1) ** 2
    self.assertAlmostEqual(term.f_out(), expected, places=11)
def test_gradient(self):
    """
    Finite-difference gradients of an Adder must match its analytic
    gradient whether the target is supplied as an object, a function,
    or an attribute.
    """
    for n in range(1, 10):
        a = Adder(n)
        a.set_dofs(np.random.rand(n) * 4 - 2)
        # Supply an object, then a function, then an attribute to
        # finite_difference(); all three must agree with a.df and a.dJ().
        for target in (a, a.J, Target(a, "f")):
            fd_grad = Dofs([target]).fd_jac().flatten()
            np.testing.assert_allclose(fd_grad, a.df)
            np.testing.assert_allclose(fd_grad, a.dJ())
def test_gradient(self):
    """
    Finite-difference gradients of an Identity must match its analytic
    gradient whether the target is supplied as an object, a function,
    or an attribute.
    """
    iden = Identity()
    for _ in range(1, 10):
        iden.set_dofs([np.random.rand() * 4 - 2])
        # Supply an object, then a function, then an attribute to
        # finite_difference(); all three must agree with iden.df and iden.dJ().
        for target in (iden, iden.J, Target(iden, "f")):
            fd_grad = Dofs([target]).fd_jac().flatten()
            np.testing.assert_allclose(fd_grad, iden.df)
            np.testing.assert_allclose(fd_grad, iden.dJ())