Code Example #1
def build_single_variable_quartic(instance: int = 0):
    """ Implements: f(x) = (x-2)(x-3)(x+1)^2

    This function as both a local optimum at x = -1 and a global optima at
    x = (13 + sqrt(57)) / 8.

    Arg:
        Instance: Either 0 or 1 indicating whether initial condition should be
            set to find the global optimum (0) or the local optimum (1).
    """
    # Test quartic optimization: f(x) = (x - 2)(x - 3)(x + 1)^2.
    x_var = Variable(1)
    obj = Product([
        Sum([x_var, Constant(-2)]),
        Sum([x_var, Constant(-3)]),
        Sum([x_var, Constant(1)]),
        Sum([x_var, Constant(1)])
    ])
    if instance == 0:
        # Optimization with x0 = 1 should land at the global optimum at
        # (13 + sqrt(57)) / 8.
        param = DirectParam(np.array([1]), bounds=[-10, 10])
        return (obj, param, (13 + np.sqrt(57)) / 8)
    elif instance == 1:
        # Optimization with x0 = 0 should land at the local optimum at x = -1.
        param = DirectParam(np.array([0]), bounds=[-10, 10])
        return (obj, param, -1)
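The two optima quoted in the docstring follow from the derivative: expanding
gives f(x) = x^4 - 3x^3 - 3x^2 + 7x + 6, so f'(x) = 4x^3 - 9x^2 - 6x + 7 =
(x + 1)(4x^2 - 13x + 7), with roots x = -1 and x = (13 +/- sqrt(57)) / 8. A
quick standalone NumPy check of the critical points (independent of the
classes above):

import numpy as np

# Roots of f'(x) = 4x^3 - 9x^2 - 6x + 7 for f(x) = (x - 2)(x - 3)(x + 1)^2.
critical_points = np.sort(np.roots([4, -9, -6, 7]))
print(critical_points)  # [-1, (13 - sqrt(57)) / 8, (13 + sqrt(57)) / 8]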
Code Example #2
    def test_single_variable_step(self):
        # Test that the first step is correct.

        # Test quadratic optimization: f(x) = x^2 - 4x + 1
        # Setup objective.
        x_var = Variable(1)
        x_var_squared = Product([x_var, x_var])
        obj = Sum([x_var_squared, Product([Constant(-4), x_var]), Constant(1)])

        # Optimization with x0 = 0
        param = DirectParam([0], bounds=[-10, 10])
        opt = GradientDescent(obj, param, 0.05, normalize_gradient=False)

        # Iterate once and check against manual gradient.
        opt.iterate()
        self.assertEqual(param.encode(), [0.2])
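The asserted value follows from one explicit gradient step: f'(x) = 2x - 4,
so with learning rate 0.05, x_1 = x_0 - 0.05 * f'(x_0) = 0 - 0.05 * (-4) = 0.2.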
Code Example #3
    def test_sanity(self):
        # Quick sanity checks.
        x = Variable(3)
        opt = OptimizationProblem(x[0],
                                  cons_eq=(x[1],),
                                  cons_ineq=(x[2] + 2, x[1] - 1))
        slack_opt = SlackOptimizationProblem(opt)
        # Test that number of constraints is correct.
        self.assertEqual(len(slack_opt.get_inequality_constraints()), 0)
        self.assertEqual(len(slack_opt.get_equality_constraints()), 3)
        # Test that we can use SlackParam.
        slack_param = slack_opt.build_param(DirectParam([3, 1, 2]))
        self.assertEqual(slack_opt.calculate_objective_function(slack_param),
                         3)
        self.assertEqual(
            slack_opt.calculate_gradient(slack_param).tolist(),
            [1, 0, 0, 0, 0])
        eq_cons, ineq_cons = slack_opt.calculate_constraints(slack_param)
        self.assertEqual(eq_cons.tolist(), [1, 4, 0])
        self.assertEqual(ineq_cons.tolist(), [])
        eq_grad, ineq_grad = slack_opt.calculate_constraint_gradients(
            slack_param)
        self.assertEqual(len(eq_grad), 3)
        self.assertEqual(len(ineq_grad), 0)
        self.assertEqual(eq_grad[0].tolist(), [0, 1, 0, 0, 0])
        self.assertEqual(eq_grad[1].tolist(), [0, 0, 1, 1, 0])
        self.assertEqual(eq_grad[2].tolist(), [0, 1, 0, 0, 1])
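The expected values can be worked out by hand. Each inequality constraint
g(x) <= 0 becomes the equality g(x) + s = 0 with a new slack variable
s >= 0 (the sign convention implied by the asserted values), so the one
original equality plus the two converted inequalities yield three equalities
and no inequalities, and the parameter vector grows from 3 to 5 entries. At
x = (3, 1, 2) with slack s = (0, 0):

eq = [x[1], x[2] + 2 + s_0, x[1] - 1 + s_1] = [1, 2 + 2 + 0, 1 - 1 + 0] = [1, 4, 0]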
Code Example #4
    def test_gradient(self):
        # Create a 3x3 2D grid to brute force check adjoint gradients.
        shape = [3, 3, 1]
        # Setup epsilon (pure vacuum).
        epsilon = [np.ones(shape) for i in range(3)]
        # Setup dxes. Assume dx = 40.
        dxes = [[np.ones(shape[i]) * 40 for i in range(3)] for j in range(2)]
        # Setup a point source in the center.
        J = [np.zeros(shape) for i in range(3)]
        J[2][1, 1, 0] = 1
        # Setup frequency.
        omega = 2 * np.pi / 1500
        # Avoid complexities of selection matrix by setting to the identity.
        # Number of elements: 3 field components * grid size
        S = np.identity(3 * np.prod(shape))
        # Use 2D solver.
        sim = FdfdSimulation(DirectSolver(), shape, omega, dxes, J, S, epsilon)

        # Setup target fields.
        target_fields = [
            np.zeros(shape).astype(np.complex128) for i in range(3)
        ]
        target_fields[2][:, :, 0] = 20j

        objective = SimpleEmObjective(sim, fdfd_tools.vec(target_fields))

        # Check gradient for initial parametrization of 0.5 everywhere.
        param = DirectParam(0.5 * np.ones(np.prod(shape) * 3))

        f = objective.calculate_objective_function(param)
        gradient = objective.calculate_gradient(param)

        # Now brute-force the gradient.
        eps = 1e-7  # Empirically 1e-6 to 1e-7 is the best step size.
        vec = param.encode()
        brute_gradient = np.zeros_like(vec)
        for i in range(len(vec)):
            temp_vec = np.array(vec)
            temp_vec[i] += eps
            new_f = objective.calculate_objective_function(
                DirectParam(temp_vec))
            brute_gradient[i] = (new_f - f) / eps

        np.testing.assert_almost_equal(gradient, brute_gradient, decimal=4)
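The brute-force loop above is the standard forward-difference gradient
check. The same pattern as a minimal standalone sketch (the quadratic test
function here is illustrative, not part of the library):

import numpy as np

def finite_difference_gradient(f, x0, eps=1e-7):
    """Forward-difference approximation of the gradient of f at x0."""
    f0 = f(x0)
    grad = np.zeros_like(x0)
    for i in range(len(x0)):
        x = np.array(x0)
        x[i] += eps
        grad[i] = (f(x) - f0) / eps
    return grad

# Check an analytic gradient: f(x) = sum(x^2) has gradient 2x.
x0 = np.array([0.3, -0.7, 1.2])
np.testing.assert_almost_equal(
    finite_difference_gradient(lambda x: np.sum(x**2), x0), 2 * x0, decimal=5)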
Code Example #5
def build_single_variable_quadratic():
    """ Implements f(x) = x^2 - 4x + 1 """
    # Setup objective.
    x_var = Variable(1)
    x_var_squared = Product([x_var, x_var])
    obj = Sum([x_var_squared, Product([Constant(-4), x_var]), Constant(1)])

    # Optimization with x0 = 0
    param = DirectParam(np.array([0]), bounds=[-10, 10])
    return (obj, param, [2])
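The expected optimum [2] follows by completing the square:
f(x) = x^2 - 4x + 1 = (x - 2)^2 - 3, so the minimum sits at x = 2 with
value -3.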
Code Example #6
    def test_stop_tolerance(self):
        # Check that optimization actually stops after hitting tolerance.
        objective = Variable(1)**2 + 3
        param = DirectParam([0.5], bounds=(0, 1))

        opt = AdaptiveGradientDescent(objective, param, 1)
        opt.max_iters = 10
        # We should hit stop tolerance in two steps.
        opt.optimize()
        np.testing.assert_almost_equal(opt.param.to_vector(), 0)
        self.assertEqual(opt.iter, 2)
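The iteration count can be traced by hand: f'(x) = 2x, so step 1 with
learning rate 1 moves x from 0.5 to 0.5 - 1 * 1 = -0.5, which the bounds
(0, 1) project back to x = 0. Step 2 then sees f'(0) = 0, the update falls
below the stop tolerance, and the optimizer halts after two iterations.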
Code Example #7
def build_constrained_ellipsoidal_problem():
    """ Implements f(x, y) = x^2 + 2y^2 - 5y - 2xy constrained to x - y >= 1. """
    var = Variable(2)
    x_var = var[0]
    y_var = var[1]

    obj = x_var**2 + 2 * y_var**2 - 5 * y_var - 2 * x_var * y_var
    cons_ineq = [y_var - x_var + 1]
    opt = OptimizationProblem(obj, cons_ineq=cons_ineq)
    param = DirectParam(np.array([0, 0]), bounds=[-10, 10])
    return (opt, param, [7 / 2, 5 / 2])
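The expected optimum can be derived by substitution: the unconstrained
minimum (5/2, 5/2) violates x - y >= 1, so the constraint is active.
Setting x = y + 1 gives f(y + 1, y) = y^2 - 5y + 1, which is minimized at
y = 5/2, hence x = 7/2.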
Code Example #8
def build_two_variable_quadratic():
    """ Implements f(x) = x^2 + 2y^2 - 5y - 2xy """
    var = Variable(2)
    x_var = var[0]
    y_var = var[1]

    obj = Sum([
        Product([x_var, x_var]),
        Product([Constant(2), y_var, y_var]),
        Product([Constant(-5), y_var]),
        Product([Constant(-2), x_var, y_var])
    ])
    param = DirectParam(np.array([0, 0]), bounds=[-10, 10])
    return (obj, param, [5 / 2, 5 / 2])
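The expected optimum [5/2, 5/2] solves the stationarity conditions
df/dx = 2x - 2y = 0 and df/dy = 4y - 5 - 2x = 0: the first gives x = y,
and substituting into the second gives 2y - 5 = 0, i.e. x = y = 5/2.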
Code Example #9
def build_constrained_linear_problem(instance: int = 0):
    """ Builds a linear (plane) objective with unit-circle constraints.

    The constraints are unit circles centered at (0, 0) and (1, 0); each
    instance uses one as an equality constraint and the other as an
    inequality constraint.
    """
    obj = PlaneObjective()
    cons0 = -SphereObjective(radius=1.0, r0=np.array([0, 0]))
    cons1 = -SphereObjective(radius=1.0, r0=np.array([1, 0]))
    param = DirectParam(np.array([0.5, 0.5]), bounds=(-1, 1))
    if instance == 0:
        cons_eq = [cons0]
        cons_ineq = [cons1]
        res = [1 / np.sqrt(2), 1 / np.sqrt(2)]
    elif instance == 1:
        cons_eq = [cons1]
        cons_ineq = [cons0]
        res = [0.5, np.sqrt(1 - 0.5**2)]
    opt = OptimizationProblem(obj, cons_eq, cons_ineq)
    return (opt, param, res)
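Both expected solutions lie on the circle used as the equality constraint,
which can be checked directly: for instance 0, (1/sqrt(2))^2 +
(1/sqrt(2))^2 = 1 on the circle centered at (0, 0); for instance 1,
(0.5 - 1)^2 + (1 - 0.5^2) = 0.25 + 0.75 = 1 on the circle centered at
(1, 0).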
Code Example #10
def build_rosenbrock_function(a: float = 1, b: float = 100):
    """ Implements f(x) = (a - x)^2 + b(y - x^2)^2.

    This function has a valley that is easy to get to but hard to find the
    global optimum.
    """
    var = Variable(2)
    x_var = var[0]
    y_var = var[1]

    x_minus_a = Sum([x_var, Constant(-a)])
    y_minus_x2 = Sum([y_var, Product([Constant(-1), x_var, x_var])])
    obj = Sum([
        Product([x_minus_a] * 2),
        Product([Constant(b), Product([y_minus_x2] * 2)])
    ])
    param = DirectParam(np.array([0, 0]), bounds=[-10, 10])
    return (obj, param, [a, a**2])
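The returned optimum is exact: both squared terms are nonnegative and
vanish simultaneously only when x = a and y = x^2 = a^2, so f(a, a^2) = 0
is the global minimum.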
Code Example #11
def build_constrained_quadratic_problem(instance: int = 0):
    """ Builds a quadratic objective with unit-circle constraints.

    The quadratic objective is built around (-0.5, 0), and the constraints
    are unit circles centered at (0, 0) and (1, 0).
    """
    obj = QuadObjective(np.array([-0.5, 0]))
    cons0 = -SphereObjective(radius=1.0, r0=np.array([0, 0]))
    cons1 = -SphereObjective(radius=1.0, r0=np.array([1, 0]))
    param = DirectParam(np.array([0.5, 0.5]), bounds=(-1, 1))
    if instance == 0:
        cons_eq = [cons1]
        cons_ineq = [cons0]
        res = [0, 0]
    elif instance == 1:
        cons_eq = [cons1]
        cons_ineq = []
        res = [0, 0]
    elif instance == 2:
        cons_eq = []
        cons_ineq = [cons0]
        res = [-0.5, 0]
    opt = OptimizationProblem(obj, cons_eq, cons_ineq)
    return (opt, param, res)
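The expected results are consistent with the constraint geometry (assuming
QuadObjective attains its minimum at the given center, as instance 2
suggests): [0, 0] lies on the unit circle centered at (1, 0), the equality
constraint in instances 0 and 1, and is the point on that circle closest
to (-0.5, 0); instance 2 drops the equality and recovers the unconstrained
optimum [-0.5, 0] directly.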
Code Example #12
    def test_constrained_optimization(self):
        # Test that constrained optimization works on SLSQP.
        # Implements f(x, y) = x^2 + 2y^2 - 5y - 2xy constrained to x - y >= 1.
        var = Variable(2)
        x_var = var[0]
        y_var = var[1]

        obj = Sum([
            Product([x_var, x_var]),
            Product([Constant(2), y_var, y_var]),
            Product([Constant(-5), y_var]),
            Product([Constant(-2), x_var, y_var])
        ])
        param = DirectParam(np.array([0, 0]), bounds=[-10, 10])
        constraints = [{
            'type': 'ineq',
            'fun': lambda z: z[0] - z[1] - 1,
            'jac': lambda z: np.array([1, -1])
        }]

        opt = ScipyOptimizer(obj, param, 'SLSQP', constraints=constraints)
        opt.optimize()
        np.testing.assert_almost_equal(opt.param.to_vector(), [7 / 2, 5 / 2])
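Note the SciPy convention here: for 'ineq' entries, scipy.optimize.minimize
requires fun(z) >= 0, so z[0] - z[1] - 1 >= 0 encodes x - y >= 1, and SLSQP
recovers the same optimum [7/2, 5/2] derived for Code Example #7.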
Code Example #13
    def test_direct_param(self):
        # Test by wrapping a DirectParam.

        # No slack variables.
        orig_param = DirectParam([0.1, 2, -3], bounds=(0, 1))
        param = SlackParam(orig_param, 0)
        self.assertEqual(param.get_structure().tolist(), [0.1, 2, -3])
        np.testing.assert_array_equal(
            np.array(param.calculate_gradient().todense()),
            [[1, 0, 0], [0, 1, 0], [0, 0, 1]])
        self.assertEqual(param.get_param(), orig_param)
        param.project()
        np.testing.assert_array_equal(param.get_structure(), [0.1, 1, 0])
        self.assertEqual(param.get_bounds(), ((0, 0, 0), (1, 1, 1)))

        np.testing.assert_array_equal(param.to_vector(), [0.1, 1, 0])
        param.from_vector([0.1, 0.2, 0.3])
        np.testing.assert_array_equal(param.to_vector(), [0.1, 0.2, 0.3])
        param.deserialize(param.serialize())
        np.testing.assert_array_equal(param.to_vector(), [0.1, 0.2, 0.3])

        # One slack variable.
        orig_param = DirectParam([0.1, 2, -3], bounds=(0, 1))
        param = SlackParam(orig_param, 1)
        self.assertEqual(param.get_structure().tolist(), [0.1, 2, -3])
        np.testing.assert_array_equal(
            np.array(param.calculate_gradient().todense()),
            [[1, 0, 0], [0, 1, 0], [0, 0, 1]])
        self.assertEqual(param.get_param(), orig_param)
        param.project()
        np.testing.assert_array_equal(param.get_structure(), [0.1, 1, 0])
        self.assertEqual(param.get_bounds(), ((0, 0, 0, 0), (1, 1, 1, None)))

        np.testing.assert_array_equal(param.to_vector(), [0.1, 1, 0, 0])
        param.from_vector([0.1, 0.2, 0.3, 1])
        np.testing.assert_array_equal(param.to_vector(), [0.1, 0.2, 0.3, 1])
        param.deserialize(param.serialize())
        np.testing.assert_array_equal(param.to_vector(), [0.1, 0.2, 0.3, 1])

        self.assertEqual(param.get_slack_variable(0), 1)

        # Two slack variables.
        orig_param = DirectParam([0.1, 2, -3], bounds=(0, 1))
        param = SlackParam(orig_param, 2)
        self.assertEqual(param.get_structure().tolist(), [0.1, 2, -3])
        np.testing.assert_array_equal(
            np.array(param.calculate_gradient().todense()),
            [[1, 0, 0], [0, 1, 0], [0, 0, 1]])
        self.assertEqual(param.get_param(), orig_param)
        param.project()
        np.testing.assert_array_equal(param.get_structure(), [0.1, 1, 0])
        self.assertEqual(param.get_bounds(),
                         ((0, 0, 0, 0, 0), (1, 1, 1, None, None)))

        np.testing.assert_array_equal(param.to_vector(), [0.1, 1, 0, 0, 0])
        param.from_vector([0.1, 0.2, 0.3, 1, 2])
        np.testing.assert_array_equal(param.to_vector(), [0.1, 0.2, 0.3, 1, 2])
        param.deserialize(param.serialize())
        np.testing.assert_array_equal(param.to_vector(), [0.1, 0.2, 0.3, 1, 2])

        self.assertEqual(param.get_slack_variable(0), 1)
        self.assertEqual(param.get_slack_variable(1), 2)

        # Quick check for wrapping params with no bounds.
        param = SlackParam(DirectParam([1, 2, 3], bounds=None), 2)
        self.assertEqual(param.get_bounds(), ((None, None, None, 0, 0),
                                              (None, None, None, None, None)))
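Across the three cases, the pattern the asserts exercise is: SlackParam
appends its slack variables after the wrapped parameters, so the combined
vector has the layout [p_0, p_1, p_2, s_0, ...], and get_bounds() extends
the original bounds with (0, None) per slack variable, i.e. slack variables
are nonnegative and unbounded above.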