def test_standard_monomials(self):
    """Products and squared sums of the standard monomial vector."""
    # x1 * x2 * x3 is the monomial with exponent (1, 1, 1).
    monos = standard_poly_monomials(3)
    prod_actual = np.prod(monos)
    prod_expect = Polynomial.from_dict({(1, 1, 1): 1})
    assert TestPolynomials.are_equal(prod_actual, prod_expect)
    # (x1 + x2) ** 2 expands to x1**2 + 2*x1*x2 + x2**2.
    monos = standard_poly_monomials(2)
    square_actual = np.sum(monos) ** 2
    square_expect = Polynomial.from_dict({(2, 0): 1, (1, 1): 2, (0, 2): 1})
    assert TestPolynomials.are_equal(square_actual, square_expect)
def test_polynomial_exponentiation(self):
    """Integer powers of univariate polynomials, and the invalid-exponent error."""
    # (x - 1) ** 2 == x**2 - 2*x + 1
    base = Polynomial.from_dict({(0,): -1, (1,): 1})
    expected = Polynomial.from_dict({(0,): 1, (1,): -2, (2,): 1})
    assert base ** 2 == expected
    # (2*x + 5) ** 3 == 8*x**3 + 60*x**2 + 150*x + 125
    base = Polynomial.from_dict({(0,): 5, (1,): 2})
    expected = Polynomial.from_dict({(0,): 125, (1,): 150, (2,): 60, (3,): 8})
    assert base ** 3 == expected
    # Raising a Polynomial to a Polynomial power is rejected.
    self.assertRaises(RuntimeError, Polynomial.__pow__, base, base)
def test_composition(self):
    """Compose polynomials with scalar and vector polynomial arguments."""
    # outer(t) = t**2 composed with inner(x) = 2*x - 1 gives (2*x - 1) ** 2.
    outer = Polynomial.from_dict({(2,): 1})
    inner = Polynomial.from_dict({(1,): 2, (0,): -1})
    composed = outer(inner)
    assert composed(0.5) == 0
    assert composed(1) == 1
    assert composed(0) == 1
    # The 3-variable product monomial, composed with a vector of
    # polynomials in 2 variables.
    x = standard_poly_monomials(3)
    prod_poly = np.prod(x)
    y = standard_poly_monomials(2)
    arg_vec = np.array([y[0], y[0] - y[1], y[1]])
    composed = prod_poly(arg_vec)
    assert composed.n == 2
    assert composed(np.array([1, 1])) == 0
    assert composed(np.array([1, -2])) == -6
def test_unconstrained_3(self):
    """Minimization of the six-hump camel back function.

    sigrep_ell=0 has a decent bound, and sigrep_ell=1 is nearly optimal.
    ECOS is unable to solve sigrep_ell=2 due to conditioning problems.
    MOSEK easily solves sigrep_ell=2, and this is globally optimal.
    """
    p = Polynomial.from_dict({(0, 0): 0, (2, 0): 4, (1, 1): 1, (0, 2): -4,
                              (4, 0): -2.1, (0, 4): 4, (6, 0): 1.0 / 3.0})
    res00 = primal_dual_unconstrained(p, poly_ell=0, sigrep_ell=0)
    expect00 = -1.18865
    # First component is the primal bound, second is the dual bound;
    # both should agree and be close to the reference value.
    assert abs(res00[0] - res00[1]) <= 1e-6
    assert abs(res00[0] - expect00) <= 1e-3
    res10 = primal_dual_unconstrained(p, poly_ell=1, sigrep_ell=0)
    expect10 = -1.03416
    assert abs(res10[0] - res10[1]) <= 1e-6
    assert abs(res10[0] - expect10) <= 1e-3
    if cl.Mosek.is_installed():
        res01 = primal_dual_unconstrained(p, poly_ell=0, sigrep_ell=1, solver='MOSEK')
        expect01 = -1.03221
        assert abs(res01[0] - res01[1]) <= 1e-6
        assert abs(res01[0] - expect01) <= 1e-3
        res02 = primal_dual_unconstrained(p, poly_ell=0, sigrep_ell=2, solver='MOSEK')
        expect02 = -1.0316
        assert abs(res02[0] - res02[1]) <= 1e-6
        assert abs(res02[0] - expect02) <= 1e-3
def test_unconstrained_2(self):
    # Background
    #
    # Unconstrained minimization of a polynomial in 2 variables.
    # This is Example 4.1 from a 2018 paper by Seidler and de Wolff
    # (https://arxiv.org/abs/1808.08431).
    #
    # Tests
    #
    # (1) primal / dual consistency for (poly_ell, sigrep_ell) \in {(0, 0), (1, 0), (0, 1)}.
    #
    # (2) Show that the bound with (poly_ell=0, sigrep_ell=1) is stronger than
    # the bound with (poly_ell=1, sigrep_ell=0).
    #
    # Notes
    #
    # The global minimum of this polynomial (as verified by gloptipoly3) is 0.85018.
    #
    # The furthest we could progress up the hierarchy before encountering a solver failure
    # was (poly_ell=0, sigrep_ell=5). In this case the SAGE bound was 0.8336.
    #
    p = Polynomial.from_dict({
        (0, 0): 1,
        (2, 6): 3,
        (6, 2): 2,
        (2, 2): 6,
        (1, 2): -1,
        (2, 1): 2,
        (3, 3): -3
    })
    # Primal and dual values must coincide, and match the reference bound.
    res00 = primal_dual_unconstrained(p, poly_ell=0, sigrep_ell=0)
    expect00 = 0.6932
    assert abs(res00[0] - res00[1]) <= 1e-6
    assert abs(res00[0] - expect00) <= 1e-3
    res10 = primal_dual_unconstrained(p, poly_ell=1, sigrep_ell=0)
    expect10 = 0.7587
    assert abs(res10[0] - res10[1]) <= 1e-5
    assert abs(res10[0] - expect10) <= 1e-3
    if cl.Mosek.is_installed():  # ECOS fails
        res01 = primal_dual_unconstrained(p, poly_ell=0, sigrep_ell=1, solver='MOSEK')
        expect01 = 0.7876
        assert abs(res01[0] - res01[1]) <= 1e-5
        assert abs(res01[0] - expect01) <= 1e-3
def test_composition_sigs(self):
    """Compose polynomials with signomial arguments (scalar and vector)."""
    # poly(t) = 2*t - 1 composed with sig(x) = 1 - exp(2*x)
    # gives comp(x) = -2*exp(2*x) + 1.
    poly = Polynomial.from_dict({(1,): 2, (0,): -1})
    sig = Signomial.from_dict({(2,): -1, (0,): 1})
    comp = poly(sig)
    self.assertAlmostEqual(comp(0.5), -2 * np.exp(1.0) + 1, places=4)
    self.assertAlmostEqual(comp(1), -2 * np.exp(2.0) + 1, places=4)
    # The 3-variable product monomial composed with a vector of
    # signomials in 2 variables.
    poly = np.prod(standard_poly_monomials(3))
    g = standard_sig_monomials(2)
    comp = poly(np.array([g[0], g[0] - g[1], 1.0 / g[1]]))
    self.assertEqual(comp.n, 2)
    self.assertEqual(comp(np.array([1, 1])), 0)
    self.assertAlmostEqual(comp(np.array([-3, 3])),
                           np.exp(-6) * (np.exp(-3) - np.exp(3)), places=4)
def test_sigrep_3(self):
    """Signomial representative of a polynomial whose coefficients contain a Variable.

    NOTE(review): this method was originally named ``test_sigrep_1``, the same
    name as a later method in this class — the later definition shadowed this
    one, so this test never ran. Renamed so both tests are collected.
    """
    p = Polynomial.from_dict({(0, 0): -1, (1, 2): 1, (2, 2): 10})
    gamma = cl.Variable(shape=(), name='gamma')
    p -= gamma
    sr, sr_cons = p.sig_rep
    # Even though there is a Variable in p.c, no auxiliary
    # variables should have been introduced by defining this
    # signomial representative.
    assert len(sr_cons) == 0
    count_nonconstants = 0
    for i, ci in enumerate(sr.c):
        if isinstance(ci, cl.base.ScalarExpression):
            if not ci.is_constant():
                # The only non-constant coefficient should involve gamma alone.
                assert len(ci.variables()) == 1
                count_nonconstants += 1
                assert ci.variables()[0].name == 'gamma'
        elif sr.alpha[i, 0] == 1 and sr.alpha[i, 1] == 2:
            # Non-even lattice point: sign of the coefficient flips.
            assert ci == -1
        elif sr.alpha[i, 0] == 2 and sr.alpha[i, 1] == 2:
            # Even lattice point: coefficient is unchanged.
            assert ci == 10
        else:
            assert False
    assert count_nonconstants == 1
def test_sigrep_2(self):
    """Signomial representative: one non-even point flips sign, another does not."""
    # (1, 1) already carries a negative coefficient and is left alone;
    # (3, 3) has its coefficient negated.
    p = Polynomial.from_dict({(0, 0): 0, (1, 1): -1, (3, 3): 5})
    sr, cons = p.sig_rep
    assert len(cons) == 0
    expected = {(0, 0): 0, (1, 1): -1, (3, 3): -5}
    assert sr.alpha_c == expected
def test_sigrep_1(self):
    """Signomial representative: the single non-even lattice point flips sign."""
    p = Polynomial.from_dict({(0, 0): -1, (1, 2): 1, (2, 2): 10})
    sr, cons = p.sig_rep
    assert len(cons) == 0
    expected = {(0, 0): -1, (1, 2): -1, (2, 2): 10}
    assert sr.alpha_c == expected
def test_polynomial_hess_val(self):
    """Hessian of f(t) = t**3 - 1 at a point; f''(t) = 6*t."""
    poly = Polynomial.from_dict({(3, ): 1, (0, ): -1})
    t0 = 0.1234
    computed = poly.hess_val(np.array([t0]))
    assert abs(computed[0] - 6 * t0) < 1e-8
def test_polynomial_grad_val(self):
    """Gradient of f(t) = t**3 - 1 at a point; f'(t) = 3*t**2."""
    poly = Polynomial.from_dict({(3, ): 1, (0, ): -1})
    t0 = 0.5
    computed = poly.grad_val(np.array([t0]))
    assert abs(computed[0] - 3 * t0 ** 2) < 1e-8