Example #1
 def test_addition_and_subtraction(self):
     # data for tests
     s0 = Signomial.from_dict({(0, ): 1, (1, ): 2, (2, ): 3})
     t0 = Signomial.from_dict({(-1, ): 5})
     # tests
     s = s0 - s0
     s = s.without_zeros()
     assert s.m == 1 and set(s.c) == {0}
     s = -s0 + s0
     s = s.without_zeros()
     assert s.m == 1 and set(s.c) == {0}
     s = s0 + t0
     assert s.alpha_c == {(-1, ): 5, (0, ): 1, (1, ): 2, (2, ): 3}
Example #2
 def test_signomial_multiplication(self):
     # data for tests
     s0 = Signomial.from_dict({(0, ): 1, (1, ): 2, (2, ): 3})
     t0 = Signomial.from_dict({(-1, ): 1})
     q0 = Signomial.from_dict({(5, ): 0})
     # tests
     s = s0 * t0
     s = s.without_zeros()
     assert s.alpha_c == {(-1, ): 1, (0, ): 2, (1, ): 3}
     s = t0 * s0
     s = s.without_zeros()
     assert s.alpha_c == {(-1, ): 1, (0, ): 2, (1, ): 3}
     s = s0 * q0
     s = s.without_zeros()
     assert s.alpha_c == {(0, ): 0}
Example #3
 def test_unconstrained_sage_3(self):
     # Background
     #
     #       This is Example 2.5 from the original SAGE paper by Chandrasekaran and Shah.
     #       The signomial s(x1,x2,x3) = (exp(x1) - exp(x2) - exp(x3))**2 is nonnegative
     #       over R^3, but it is not SAGE.
     #
     # Tests
     #
     #       (1) Show that the standard SAGE hierarchy produces no finite bound on "s",
     #           for ell \in {0, 1}.
     #
     # Notes
     #
     #       It is suspected that the standard SAGE hierarchy never produces a finite bound
     #       for this signomial.
     #
     s = Signomial.from_dict({(1, 0, 0): 1,
                              (0, 1, 0): -1,
                              (0, 0, 1): -1})
     s = s ** 2
     expected = -np.inf
     pd0, _ = primal_dual_vals(s, 0)
     assert pd0[0] == expected and pd0[1] == expected
     pd1, _ = primal_dual_vals(s, 1)
     assert pd1[0] == expected and pd1[1] == expected
Example #4
 def test_sage_multiplier_search(self):
     # Background
     #
     #       This example was constructed solely as a test case for sageopt.
     #
     #       The problem is to find a bound on the nonnegative signomial
     #       s(x) = (exp(x)  - exp(-x))**4, using the machinery of SAGE certificates.
     #
     # Tests
     #
     #       (1) Show that there is no SAGE signomial "f" (over the same exponents as "s")
     #           such that f * s is SAGE.
     #
     #       (2) Obtain a loose (but finite) bound on "s", via a SAGE relaxation with ell == 1.
     #
     #       (3) Improve the finite bound from Test 2 by verifying nonnegativity of an
     #           appropriate translate of "s".
     #
     s = Signomial.from_dict({(1,): 1, (-1,): -1}) ** 4
     prob0 = sage_multiplier_search(s, level=1)
     res0 = prob0.solve(solver='ECOS', verbose=False)
     val0 = res0[1]
     assert val0 == -np.inf
     prob1 = sig_relaxation(s, form='primal', ell=1)
     res1 = prob1.solve(solver='ECOS', verbose=False)
     s_bound = res1[1]
     assert -np.inf < s_bound < 0
     s_shifted = s - 0.5 * s_bound  # s_shifted is nonnegative, and not SAGE, by construction.
     prob2 = sage_multiplier_search(s_shifted, level=1)
     res2 = prob2.solve(solver='ECOS', verbose=False)
     val2 = res2[1]
     assert val2 == 0.
Example #5
 def test_constrained_sage_2(self):
     # Background
     #
     #       This is a signomial formulation of a nonnegative polynomial optimization problem.
     #
     #       The problem can be found on page 16 of the gloptipoly3 manual
     #                   http://homepages.laas.fr/henrion/papers/gloptipoly3.pdf
     #       among other places. The optimal objective is -4.
     #
     # Tests - (p, q, ell) = (0, 1, 0)
     #
     #       (1) Check for similar primal / dual objectives.
     #
     x = standard_sig_monomials(3)
     f = -2 * x[0] + x[1] - x[2]
     g1 = Signomial.from_dict({(0, 0, 0): 24,
                               (1, 0, 0): -20,
                               (0, 1, 0): 9,
                               (0, 0, 1): -13,
                               (2, 0, 0): 4,
                               (1, 1, 0): -4,
                               (1, 0, 1): 4,
                               (0, 2, 0): 2,
                               (0, 1, 1): -2,
                               (0, 0, 2): 2})
     g2 = 4 - x[0] - x[1] - x[2]
     g3 = 6 - 3*x[1] - x[2]
     g4 = 2 - x[0]
     g5 = 3 - x[2]
     gts = [g1, g2, g3, g4, g5]
     res01, _ = constrained_primal_dual_vals(f, gts, [], p=0, q=1, ell=0, X=None)
     expect = -6
     assert abs(res01[0] - expect) < 1e-4
     assert abs(res01[1] - expect) < 1e-4
     assert abs(res01[0] - res01[1]) < 1e-5
Example #6
 def test_signomial_shift_coordinates(self):
     f = Signomial.from_dict({(0,): 1, (1,): 2, (2,): 3})
     g = Signomial.from_dict({(-1,): 1})
     h = Signomial.from_dict({(2, 3): 1,
                              (1, -3): -2})
     x0 = -1.2345
     x_test = 3.21
     f_shift = f.shift_coordinates(x0)
     self.assertAlmostEqual(f(x_test + x0), f_shift(x_test), places=4)
     g_shift = g.shift_coordinates(x0)
     self.assertAlmostEqual(g(x_test + x0), g_shift(x_test), places=4)
     x0 = np.array([1.1, 2.2])
     x_test = np.array([-0.5, 3])
     h_shift = h.shift_coordinates(x0)
     self.assertAlmostEqual(h(x_test + x0), h_shift(x_test), places=4)
     self.assertRaises(ValueError, f.shift_coordinates, np.array([1, 1j]))
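Each term of a signomial is c_i * exp(alpha_i . x), so shifting coordinates by x0 amounts to rescaling coefficients: f(x + x0) = sum_i (c_i * exp(alpha_i . x0)) * exp(alpha_i . x). A minimal by-hand check of that identity against the data from the test above (a sketch, assuming numpy is imported as np and Signomial as in the other examples):

    f = Signomial.from_dict({(0,): 1, (1,): 2, (2,): 3})
    x0 = np.array([-1.2345])
    # rescaling coefficients by exp(alpha @ x0) should match f.shift_coordinates(x0)
    f_by_hand = Signomial(f.alpha, f.c * np.exp(f.alpha @ x0))
    x_test = np.array([3.21])
    assert abs(f_by_hand(x_test) - f(x_test + x0)) < 1e-10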
Example #7
 def test_signomial_evaluation(self):
     s = Signomial.from_dict({(1, ): 1})
     assert s(0) == 1 and abs(s(1) - np.exp(1)) < 1e-10
     zero = np.array([0])
     one = np.array([1])
     assert s(zero) == 1 and abs(s(one) - np.exp(1)) < 1e-10
     zero_one = np.array([[0, 1]])
     assert np.allclose(s(zero_one), np.exp(zero_one), rtol=0, atol=1e-10)
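The evaluation semantics exercised here can be reproduced directly from the (alpha, c) data: s(x) = sum_i c_i * exp(alpha_i . x). A small sketch making that explicit for the single-term signomial above (illustrative only; assumes numpy is imported as np):

    s = Signomial.from_dict({(1,): 1})          # s(x) = exp(x)
    x = np.array([0.75])
    by_hand = float(np.sum(s.c * np.exp(s.alpha @ x)))
    assert abs(s(x) - by_hand) < 1e-12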
Example #8
 def test_exponentiation(self):
     x = standard_sig_monomials(2)
     y0 = (x[0] - x[1])**2
     y1 = x[0]**2 - 2 * x[0] * x[1] + x[1]**2
     assert y0 == y1
     z0 = x[0]**0.5
     z1 = Signomial.from_dict({(0.5, 0): 1})
     assert z0 == z1
Example #9
def gpkit_hmap_to_sageopt_sig(curhmap, vkmap):
    n_vks = len(vkmap)
    temp_sig_dict = dict()
    for expinfo, coeff in curhmap.items():
        tup = n_vks * [0]
        for vk, expval in expinfo.items():
            tup[vkmap[vk]] = expval
        temp_sig_dict[tuple(tup)] = coeff
    s = Signomial.from_dict(temp_sig_dict)
    return s
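The converter only relies on curhmap being a mapping from {variable-key: exponent} dicts to coefficients, and on vkmap assigning each variable key a column index. The sketch below exercises it with plain strings standing in for gpkit variable keys and a tiny hashable-dict stand-in for gpkit's hash maps; the names here are illustrative and not part of either library:

    class _FrozenMap(dict):
        # hashable stand-in for the exponent maps that gpkit supplies
        def __hash__(self):
            return hash(frozenset(self.items()))

    curhmap = {_FrozenMap({'x': 2.0}): 3.0, _FrozenMap({'y': -1.0}): -1.0}
    vkmap = {'x': 0, 'y': 1}
    s = gpkit_hmap_to_sageopt_sig(curhmap, vkmap)
    print(s.alpha_c)   # roughly {(2.0, 0.0): 3.0, (0.0, -1.0): -1.0}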
Example #10
 def _constrained_sage_1():
     # Background
     #
     #       This is Example 3.3 from Chandrasekaran and Shah's original paper on SAGE relaxations.
     #       The problem is to minimize a nonconvex signomial, over a convex set defined by a single
     #       posynomial inequality.
     #
     s0 = Signomial.from_dict({(10.2, 0, 0): 10, (0, 9.8, 0): 10, (0, 0, 8.2): 10})
     s1 = Signomial.from_dict({(1.5089, 1.0981, 1.3419): -14.6794})
     s2 = Signomial.from_dict({(1.0857, 1.9069, 1.6192): -7.8601})
     s3 = Signomial.from_dict({(1.0459, 0.0492, 1.6245): 8.7838})
     f = s0 + s1 + s2 + s3
     g = Signomial.from_dict({(10.2, 0, 0): -8,
                              (0, 9.8, 0): -8,
                              (0, 0, 8.2): -8,
                              (1.0857, 1.9069, 1.6192): -6.4,
                              (0, 0, 0): 1})
     gs = [g]
     return f, gs
Example #11
 def test_construction(self):
     # data for tests
     alpha = np.array([[0], [1], [2]])
     c = np.array([1, -1, -2])
     alpha_c = {(0, ): 1, (1, ): -1, (2, ): -2}
     # Construction with two numpy arrays as arguments
     s = Signomial(alpha, c)
     assert s.n == 1 and s.m == 3 and s.alpha_c == alpha_c
     # Construction with a vector-to-coefficient dictionary
     s = Signomial.from_dict(alpha_c)
     recovered_alpha_c = dict()
     for i in range(s.m):
         recovered_alpha_c[tuple(s.alpha[i, :])] = s.c[i]
     assert s.n == 1 and s.m == 3 and alpha_c == recovered_alpha_c
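Both constructions encode the same function, s(x) = 1 - exp(x) - 2*exp(2x), since Signomial(alpha, c) represents sum_i c[i] * exp(alpha[i, :] @ x). A quick numerical cross-check of that reading (a sketch, assuming numpy is imported as np):

    alpha = np.array([[0], [1], [2]])
    c = np.array([1, -1, -2])
    s = Signomial(alpha, c)
    x = np.array([0.3])
    assert abs(s(x) - np.sum(c * np.exp(alpha @ x))) < 1e-12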
Example #12
 def test_broadcasting(self):
     # any signomial will do.
     alpha_c = {(0, ): 1, (1, ): -1, (2, ): -2}
     s = Signomial.from_dict(alpha_c)
     other = np.array([1, 2])
     t1 = s + other
     self.assertIsInstance(t1, np.ndarray)
     t2 = other + s
     self.assertIsInstance(t2, np.ndarray)
     delta = t1 - t2
     d1 = delta[0].without_zeros()
     d2 = delta[1].without_zeros()
     self.assertEqual(d1.m, 1)
     self.assertEqual(d2.m, 1)
Example #13
def valid_monomial_equations(eqs):
    conv_eqs = []
    for g in eqs:
        # g defines a constraint g(x) == 0.
        if np.count_nonzero(g.c) > 2:
            # cannot convexify
            continue
        pos_loc = np.where(g.c > 0)[0]
        if pos_loc.size == 1:
            pos_loc = pos_loc[0]
            inverse_term = Signomial.from_dict(
                {tuple(-g.alpha[pos_loc, :]): 1})
            conv_eqs.append(g * inverse_term)
    return conv_eqs
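The convexification step works because, for a two-term equation c1*exp(a1 . x) - c2*exp(a2 . x) == 0 with c1 > 0, multiplying by the monomial exp(-a1 . x) turns the positive term into the constant c1, so the left-hand side becomes a constant minus a single monomial. An illustrative run (the particular signomial is arbitrary; assumes the imports used elsewhere in these examples):

    x = standard_sig_monomials(2)
    g = 3 * x[0] - x[1]             # the constraint 3*exp(x0) - exp(x1) == 0
    conv, = valid_monomial_equations([g])
    print(conv.alpha_c)             # roughly {(0, 0): 3, (-1, 1): -1}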
Example #14
    def from_dict(d):
        """
        Construct a Polynomial object which represents the function::

            lambda x: np.sum([ d[a] * np.prod(np.power(x, a)) for a in d]).

        Parameters
        ----------
        d : Dict[Tuple[Float], Float]

        Returns
        -------
        s : Polynomial
        """
        s = Signomial.from_dict(d)
        p = s.as_polynomial()
        p._alpha_c = s.alpha_c
        return p
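Under the convention in the docstring, the dict {(2, 0): 1, (0, 1): -3} represents p(x) = x0**2 - 3*x1. A hedged usage sketch (assuming Polynomial is importable from sageopt and numpy is imported as np):

    p = Polynomial.from_dict({(2, 0): 1, (0, 1): -3})
    assert abs(p(np.array([2.0, 1.0])) - 1.0) < 1e-12    # 2**2 - 3*1 == 1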
Example #15
def valid_posynomial_inequalities(gs):
    conv_gs = []
    for g in gs:
        # g defines a constraint g(x) >= 0
        num_pos = np.count_nonzero(g.c > 0)
        if num_pos >= 2:
            # cannot convexify
            continue
        elif num_pos == 0 and np.count_nonzero(
                g.c < 0) > 0:  # pragma: no cover
            raise RuntimeError(
                'Attempting to convexify an infeasible signomial inequality constraint.'
            )
        else:
            pos_loc = np.where(g.c > 0)[0][0]
            inverse_term = Signomial.from_dict(
                {tuple(-g.alpha[pos_loc, :]): 1})
            conv_gs.append(g * inverse_term)
    return conv_gs
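This is the usual geometric-programming reduction: a signomial inequality with a single positive term, c_+ * exp(a_+ . x) >= p(x), is divided through by that positive monomial, so the constant c_+ ends up on one side and a posynomial on the other. An illustrative run (the constraint itself is arbitrary; assumes the imports used elsewhere in these examples):

    x = standard_sig_monomials(2)
    g = 4 * x[0] - x[1] - 1         # the constraint 4*exp(x0) - exp(x1) - 1 >= 0
    conv, = valid_posynomial_inequalities([g])
    print(conv.alpha_c)             # roughly {(0, 0): 4, (-1, 1): -1, (-1, 0): -1}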
Example #16
 def test_unconstrained_sage_4(self, presolve=False, compactdual=False, kernel_basis=False):
     # Background
     #
     #       This example was constructed solely as a test case for sageopt.
     #
     #       Minimize s(x) = exp(3*x) - 4*exp(2*x) + 7*exp(x) + exp(-x), over x \in R.
     #
     # Tests
     #
     #       (1) Check that primal / dual objectives are close to reference values, for ell \in {0, 1, 2}.
     #
     #       (2) Recover a globally optimal solution from the dual relaxation, when ell == 3.
     #
     # Notes
     #
     #       It may not be obvious, but the signomial "s" is actually convex!
     #
     initial_presolve = sage_cones.SETTINGS['presolve_trivial_age_cones']
     initial_compactdual = sage_cones.SETTINGS['compact_dual']
     initial_kb = sage_cones.SETTINGS['kernel_basis']
     cl.presolve_trivial_age_cones(presolve)
     cl.compact_sage_duals(compactdual)
     cl.kernel_basis_age_witnesses(kernel_basis)
     s = Signomial.from_dict({(3,): 1, (2,): -4, (1,): 7, (-1,): 1})
     expected = [3.464102, 4.60250026, 4.6217973]
     pds = [primal_dual_vals(s, ell) for ell in range(3)]
     for ell in range(3):
         assert abs(pds[ell][0][0] - expected[ell]) < 1e-5
         assert abs(pds[ell][0][1] - expected[ell]) < 1e-5
     dual = sig_relaxation(s, form='dual', ell=3)
     dual.solve(solver='ECOS', verbose=False)
     optsols = sig_solrec(dual)
     assert s(optsols[0]) < 1e-6 + dual.value
     cl.presolve_trivial_age_cones(initial_presolve)
     cl.compact_sage_duals(initial_compactdual)
     cl.kernel_basis_age_witnesses(initial_kb)
Example #17
 def test_signomial_grad_val(self):
     f = Signomial.from_dict({(2, ): 1, (0, ): -1})
     actual = f.grad_val(np.array([3]))
     expect = 2 * np.exp(2 * 3)
     assert abs(actual[0] - expect) < 1e-8
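The reference value is just the chain rule: d/dx exp(a*x) = a*exp(a*x), so for f(x) = exp(2x) - 1 the gradient at x = 3 is 2*exp(6). A finite-difference spot check of the same quantity (a sketch, not part of the test suite; assumes numpy is imported as np):

    f = Signomial.from_dict({(2,): 1, (0,): -1})
    h = 1e-6
    fd = (f(np.array([3 + h])) - f(np.array([3 - h]))) / (2 * h)
    assert abs(fd - 2 * np.exp(6)) < 1e-3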
Example #18
 def test_signomial_hess_val(self):
     f = Signomial.from_dict({(-2, ): 1, (0, ): -1})
     actual = f.hess_val(np.array([3]))
     expect = 4 * np.exp(-2 * 3)
     assert abs(actual[0] - expect) < 1e-8
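Similarly, d^2/dx^2 exp(a*x) = a**2 * exp(a*x), so for f(x) = exp(-2x) - 1 the Hessian entry at x = 3 is 4*exp(-6). A central-difference spot check (again just a sketch; assumes numpy is imported as np):

    f = Signomial.from_dict({(-2,): 1, (0,): -1})
    h = 1e-3
    d2 = (f(np.array([3 + h])) - 2 * f(np.array([3.0])) + f(np.array([3 - h]))) / h**2
    assert abs(d2 - 4 * np.exp(-6)) < 1e-5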