def test_least_squares_combination(self):
    """
    Two single-term least-squares problems added together should behave
    like one combined problem: each residual matches its own term.
    """
    first = Identity()
    second = Identity()
    part_a = LeastSquaresProblem.from_sigma(3, 2, opts_in=[first])
    part_b = LeastSquaresProblem.from_sigma(-4, 5, opts_in=[second])
    combined = part_a + part_b

    first.x = [10]
    # Residuals: (10 - 3) / 2 = 3.5 and (0 - (-4)) / 5 = 0.8
    self.assertAlmostEqual(np.abs(combined.residuals()[0]), 3.5)
    self.assertAlmostEqual(np.abs(combined.residuals()[1]), 0.8)
def test_solve_rosenbrock(self):
    """
    Minimize the Rosenbrock function via a least-squares formulation and
    confirm every available solver drives the objective to zero.
    """
    for solver in solvers:
        rosen = Rosenbrock()
        prob = LeastSquaresProblem(0, 1, opts_in=rosen)
        solver(prob)
        # The Rosenbrock minimum has objective value exactly 0.
        self.assertAlmostEqual(prob.objective(), 0)
def test_exceptions(self):
    """
    Test that ValueError is raised when invalid inputs are provided.

    Fix: the original bound the constructor result to an unused local
    ``lst`` inside each ``assertRaises`` block (flake8 F841); the call
    alone is sufficient since the exception fires during construction.
    """
    iden = Identity()

    # sigma cannot be zero: residuals divide by sigma.
    with self.assertRaises(ValueError):
        LeastSquaresProblem.from_sigma(3, 0, opts_in=iden)

    # Weight cannot be negative.
    with self.assertRaises(ValueError):
        LeastSquaresProblem(3, -1.0, opts_in=iden)
def test_parent_dof_transitive_behavior(self):
    """
    Setting a dof on one of the attached objects should propagate
    transitively into the parent problem's residuals.
    """
    obj_a = Identity()
    obj_b = Identity()
    prob = LeastSquaresProblem.from_sigma([3, -4], [2, 5],
                                          opts_in=[obj_a, obj_b])

    obj_a.x = [10]
    # Residuals: (10 - 3) / 2 = 3.5 and (0 - (-4)) / 5 = 0.8
    self.assertAlmostEqual(np.abs(prob.residuals()[0]), 3.5)
    self.assertAlmostEqual(np.abs(prob.residuals()[1]), 0.8)
def test_single_value_opt_in(self):
    """
    A problem built from a single Identity object reports residuals
    (x - goal) / sigma, whether x is taken from the object's state,
    set through the problem, or passed explicitly to residuals().
    """
    iden = Identity()
    prob = LeastSquaresProblem.from_sigma(3, 0.1, opts_in=iden)

    iden.x = [17]
    expected = (17 - 3) / 0.1  # residual, not the squared objective
    self.assertAlmostEqual(np.abs(prob.residuals()[0]), expected,
                           places=11)

    iden.x = [0]
    term = LeastSquaresProblem.from_sigma(3, 2, opts_in=iden)
    self.assertAlmostEqual(np.abs(term.residuals()[0]), 1.5)

    term.x = [10]
    self.assertAlmostEqual(np.abs(term.residuals()[0]), 3.5)
    # An explicit x argument evaluates at that point.
    self.assertAlmostEqual(np.abs(term.residuals(x=[0])), 1.5)
    self.assertAlmostEqual(np.abs(term.residuals(x=[5])), 1)
def test_multiple_funcs_single_input(self):
    """
    Two Identity objects feeding one problem, with objective
    f(x, y) = ((x - 3) / 2) ** 2 + ((y + 4) / 5) ** 2.
    Residuals are checked via stored state and via an explicit x.
    """
    obj_x = Identity(x=10)
    obj_y = Identity()
    prob = LeastSquaresProblem.from_sigma([3, -4], [2, 5],
                                          opts_in=[obj_x, obj_y])

    # At (10, 0): residuals are 3.5 and 0.8.
    self.assertAlmostEqual(np.abs(prob.residuals()[0]), 3.5)
    self.assertAlmostEqual(np.abs(prob.residuals()[1]), 0.8)

    # At (5, -7): residuals are 1.0 and 0.6.
    prob.x = [5, -7]
    self.assertAlmostEqual(np.abs(prob.residuals()[0]), 1.0)
    self.assertAlmostEqual(np.abs(prob.residuals()[1]), 0.6)

    # Passing x explicitly gives the same values at those points.
    self.assertAlmostEqual(np.abs(prob.residuals([10, 0])[0]), 3.5)
    self.assertAlmostEqual(np.abs(prob.residuals([10, 0])[1]), 0.8)
    self.assertAlmostEqual(np.abs(prob.residuals([5, -7])[0]), 1.0)
    self.assertAlmostEqual(np.abs(prob.residuals([5, -7])[1]), 0.6)
def test_parallel_optimization(self):
    """
    Run a full least-squares optimization in parallel for several MPI
    group partitionings and verify the optimum (1, 1) is reached.

    Fix: the original computed ``nprocs = MPI.COMM_WORLD.Get_size()``
    but never used it (flake8 F841), and carried dead commented-out
    ``grad`` loop code; both are removed.
    """
    rank_world = MPI.COMM_WORLD.Get_rank()
    logger.info(f"rank world is {rank_world}")

    for ngroups in range(1, 4):
        mpi = MpiPartition(ngroups=ngroups)
        o = TestFunction3(mpi.comm_groups)
        term1 = (o.f0, 0, 1)
        term2 = (o.f1, 0, 1)
        prob = LeastSquaresProblem.from_tuples([term1, term2])
        least_squares_mpi_solve(prob, mpi)
        # TestFunction3's minimum is at (1, 1).
        self.assertAlmostEqual(prob.full_x[0], 1)
        self.assertAlmostEqual(prob.full_x[1], 1)
def test_solve_quadratic_fixed(self):
    """
    Same as test_solve_quadratic, except with different weights and
    with x and z fixed, so only y is optimized.
    """
    for solver in solvers:
        fixed_x = Identity(4, dof_name='x1', dof_fixed=True)
        free_y = Identity(5, dof_name='x2')
        fixed_z = Identity(6, dof_name='x3', dof_fixed=True)
        prob = LeastSquaresProblem.from_tuples([
            (fixed_x.f, 1, 1),
            (free_y.f, 2, 1 / 4.),
            (fixed_z.f, 3, 1 / 9.),
        ])
        solver(prob)
        # Only x2 is free; the fixed dofs keep their initial values,
        # leaving a residual objective of 10.
        self.assertAlmostEqual(prob.objective(), 10)
        self.assertTrue(np.allclose(fixed_x.x, [4]))
        self.assertTrue(np.allclose(free_y.x, [2]))
        self.assertTrue(np.allclose(fixed_z.x, [6]))
def test_solve_quadratic(self):
    """
    Minimize f(x,y,z) = 1*(x-1)^2 + 2*(y-2)^2 + 3*(z-3)^2.
    The optimum is at (x,y,z) = (1,2,3), where f = 0.
    """
    for solver in solvers:
        idens = [Identity(), Identity(), Identity()]
        tuples = [(iden.f, goal, weight)
                  for iden, (goal, weight)
                  in zip(idens, [(1, 1), (2, 2), (3, 3)])]
        prob = LeastSquaresProblem.from_tuples(tuples)
        solver(prob)
        self.assertAlmostEqual(prob.objective(), 0)
        # Each dof should land on its goal value.
        for iden, goal in zip(idens, [1, 2, 3]):
            self.assertTrue(np.allclose(iden.x, [goal]))
# Start with a default surface, which is axisymmetric with major # radius 1 and minor radius 0.1. surf = SurfaceRZFourier() # Parameters are all non-fixed by default, meaning they will be # optimized. You can choose to exclude any subset of the variables # from the space of independent variables by setting their 'fixed' # property to True. surf.fix('rc(0,0)') surf.print_return_fn_names() # Approach 1 prob1 = LeastSquaresProblem(opts_in=surf, goals=[desired_area, desired_volume], weights=[1, 1]) least_squares_serial_solve(prob1) print("At the optimum using approach 1,") print(" rc(m=1,n=0) = ", surf.get_rc(1, 0)) print(" zs(m=1,n=0) = ", surf.get_zs(1, 0)) print(" volume = ", surf.volume()) print(" area = ", surf.area()) print(" objective function = ", prob1.objective()) print(" -------------------------\n\n") # Approach 2 surf2 = SurfaceRZFourier()