Example #1
def sage_feasibility(f, X=None, additional_cons=None):
    """
    Constructs a coniclifts maximization Problem which is feasible if and only if
    ``f`` admits an X-SAGE decomposition (:math:`X=R^{\\texttt{f.n}}` by default).

    Parameters
    ----------
    f : Signomial
        We want to test if this function admits an X-SAGE decomposition.
    X : SigDomain
        If ``X`` is None, then we test nonnegativity of ``f`` over :math:`R^{\\texttt{f.n}}`.
    additional_cons : :obj:`list` of :obj:`sageopt.coniclifts.Constraint`
        This is mostly used for SAGE polynomials. When provided, it should be a list of Constraints over
        coniclifts Variables appearing in ``f.c``.

    Returns
    -------
    prob : sageopt.coniclifts.Problem
        A coniclifts maximization Problem. If ``f`` admits an X-SAGE decomposition, then we should have
        ``prob.value > -np.inf``, once ``prob.solve()`` has been called.
    """
    f = f.without_zeros()
    con = primal_sage_cone(f, name=str(f), X=X)
    constraints = [con]
    if additional_cons is not None:
        constraints += additional_cons
    prob = cl.Problem(cl.MAX, cl.Expression([0]), constraints)
    cl.clear_variable_indices()
    return prob
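A minimal usage sketch for the routine above (the import paths are assumptions, since the listing does not show them): build the feasibility problem for a univariate signomial with a single negative term and check whether it admits a SAGE decomposition over R^1.

import numpy as np
from sageopt import Signomial                                 # assumed public import
from sageopt.relaxations.sage_sigs import sage_feasibility    # assumed module path

# f(x) = exp(2x) - 2*exp(x) + 1 = (exp(x) - 1)^2, nonnegative and (ordinary) SAGE.
f = Signomial(np.array([[2.0], [1.0], [0.0]]), np.array([1.0, -2.0, 1.0]))
prob = sage_feasibility(f)
prob.solve(verbose=False)
print(prob.value > -np.inf)   # True exactly when an X-SAGE decomposition exists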
Example #2
def poly_primal(f, poly_ell=0, sigrep_ell=0, X=None):
    if poly_ell == 0:
        sr, _ = f.sig_rep
        prob = sage_sigs.sig_primal(sr, sigrep_ell, X=X)
        if AUTO_CLEAR_INDICES:  # pragma:no cover
            cl.clear_variable_indices()
        return prob
    else:
        poly_modulator = f.standard_multiplier()**poly_ell
        gamma = cl.Variable(shape=(), name='gamma')
        lagrangian = (f - gamma) * poly_modulator
        if sigrep_ell > 0:
            sr, cons = lagrangian.sig_rep
            sig_modulator = Signomial(sr.alpha,
                                      np.ones(shape=(sr.m, )))**sigrep_ell
            sig_under_test = sr * sig_modulator
            con_name = 'Lagrangian modulated sigrep sage'
            con = sage_sigs.primal_sage_cone(sig_under_test, con_name, X=X)
            constraints = [con] + cons
        else:
            con_name = 'Lagrangian sage poly'
            constraints = primal_sage_poly_cone(lagrangian,
                                                con_name,
                                                log_AbK=X)
        obj = gamma
        prob = cl.Problem(cl.MAX, obj, constraints)
        if AUTO_CLEAR_INDICES:  # pragma:no cover
            cl.clear_variable_indices()
        return prob
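A sketch of how the two branches above are typically exercised (import paths are assumptions): with poly_ell=0 the bound comes from the signomial representative of f - gamma, while poly_ell >= 1 multiplies f - gamma by a standard modulator before imposing the SAGE constraint.

import numpy as np
from sageopt import Polynomial                              # assumed public import
from sageopt.relaxations.sage_polys import poly_primal      # assumed module path

# p(x, y) = 1 + x^2 + y^2 - 1.9*x*y, which is bounded below on R^2.
p = Polynomial(np.array([[0, 0], [2, 0], [0, 2], [1, 1]]),
               np.array([1.0, 1.0, 1.0, -1.9]))
prob0 = poly_primal(p, poly_ell=0)   # sigrep-based relaxation
prob0.solve(verbose=False)
prob1 = poly_primal(p, poly_ell=1)   # modulated-Lagrangian relaxation
prob1.solve(verbose=False)
print(prob0.value, prob1.value)      # both are lower bounds on inf p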
Example #3
def poly_constrained_primal(f, gts, eqs, p=0, q=1, ell=0, X=None):
    """
    Construct the primal SAGE-(p, q, ell) relaxation for the polynomial optimization problem

        inf{ f(x) : g(x) >= 0 for g in gts,
                    g(x) == 0 for g in eqs,
                    and x in X }

    where :math:`X = R^{\\texttt{f.n}}` by default.
    """
    lagrangian, ineq_lag_mults, _, gamma = make_poly_lagrangian(f, gts, eqs, p=p, q=q)
    metadata = {'lagrangian': lagrangian}
    if ell > 0:
        alpha_E_q = hierarchy_e_k([f] + list(gts) + list(eqs), k=1)
        modulator = Polynomial(2 * alpha_E_q, np.ones(alpha_E_q.shape[0])) ** ell
        lagrangian = lagrangian * modulator
        metadata['modulator'] = modulator
    # The Lagrangian (after possible multiplication, as above) must be a SAGE polynomial.
    con_name = 'Lagrangian sage poly'
    constrs = primal_sage_poly_cone(lagrangian, con_name, log_AbK=X)
    #  Lagrange multipliers (for inequality constraints) must be SAGE polynomials.
    for s_h, _ in ineq_lag_mults:
        con_name = str(s_h) + ' domain'
        cons = primal_sage_poly_cone(s_h, con_name, log_AbK=X)
        constrs += cons
    # Construct the coniclifts problem.
    prob = cl.Problem(cl.MAX, gamma, constrs)
    prob.metadata = metadata
    cl.clear_variable_indices()
    return prob
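A small constrained sketch (import paths are assumptions): lower-bound x^4 - x over the set {x : 1 - x^2 >= 0} with the default (p, q, ell) = (0, 1, 0) relaxation.

import numpy as np
from sageopt import Polynomial                                        # assumed public import
from sageopt.relaxations.sage_polys import poly_constrained_primal    # assumed module path

f = Polynomial(np.array([[4], [1]]), np.array([1.0, -1.0]))   # x^4 - x
g = Polynomial(np.array([[0], [2]]), np.array([1.0, -1.0]))   # 1 - x^2 >= 0
prob = poly_constrained_primal(f, [g], [])
prob.solve(verbose=False)
print(prob.value)   # a lower bound on min{ x^4 - x : -1 <= x <= 1 }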
Example #4
def poly_dual(f, poly_ell=0, sigrep_ell=0, X=None):
    if poly_ell == 0:
        sr, _ = f.sig_rep
        prob = sage_sigs.sig_dual(sr, sigrep_ell, X=X)
        if AUTO_CLEAR_INDICES:  # pragma:no cover
            cl.clear_variable_indices()
        return prob
    elif sigrep_ell == 0:
        modulator = f.standard_multiplier()**poly_ell
        gamma = cl.Variable()
        lagrangian = (f - gamma) * modulator
        v = cl.Variable(shape=(lagrangian.m, 1), name='v')
        con_base_name = v.name + ' domain'
        constraints = relative_dual_sage_poly_cone(lagrangian,
                                                   v,
                                                   con_base_name,
                                                   log_AbK=X)
        a = sym_corr.relative_coeff_vector(modulator, lagrangian.alpha)
        constraints.append(a.T @ v == 1)
        f_mod = Polynomial(f.alpha, f.c) * modulator
        obj_vec = sym_corr.relative_coeff_vector(f_mod, lagrangian.alpha)
        obj = obj_vec.T @ v
        prob = cl.Problem(cl.MIN, obj, constraints)
        if AUTO_CLEAR_INDICES:  # pragma:no cover
            cl.clear_variable_indices()
        return prob
    else:  # pragma: no cover
        raise NotImplementedError()
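The dual relaxation should reproduce the primal bound whenever strong duality holds, which makes solving both a cheap consistency check. A sketch, with the same assumed import paths as above:

import numpy as np
from sageopt import Polynomial
from sageopt.relaxations.sage_polys import poly_primal, poly_dual   # assumed module path

p = Polynomial(np.array([[0, 0], [2, 0], [0, 2], [1, 1]]),
               np.array([1.0, 1.0, 1.0, -1.9]))
prob_p = poly_primal(p, poly_ell=1)
prob_p.solve(verbose=False)
prob_d = poly_dual(p, poly_ell=1)
prob_d.solve(verbose=False)
print(prob_p.value, prob_d.value)   # should agree up to solver tolerance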
Example #5
def poly_constrained_dual(f, gts, eqs, p=0, q=1, ell=0, X=None, slacks=False):
    """
    Construct the dual SAGE-(p, q, ell) relaxation for the polynomial optimization problem

        inf{ f(x) : g(x) >= 0 for g in gts,
                    g(x) == 0 for g in eqs,
                    and x in X }

    where :math:`X = R^{\\texttt{f.n}}` by default.
    """
    lagrangian, ineq_lag_mults, eq_lag_mults, _ = make_poly_lagrangian(f, gts, eqs, p=p, q=q)
    metadata = {'lagrangian': lagrangian, 'f': f, 'gts': gts, 'eqs': eqs, 'X': X}
    if ell > 0:
        alpha_E_1 = hierarchy_e_k([f, f.upcast_to_polynomial(1)] + gts + eqs, k=1)
        modulator = Polynomial(2 * alpha_E_1, np.ones(alpha_E_1.shape[0])) ** ell
        lagrangian = lagrangian * modulator
        f = f * modulator
    else:
        modulator = f.upcast_to_polynomial(1)
    metadata['modulator'] = modulator
    # In primal form, the Lagrangian is constrained to be a SAGE polynomial.
    # Introduce a dual variable "v" for this constraint.
    v = cl.Variable(shape=(lagrangian.m, 1), name='v')
    metadata['v_poly'] = v
    constraints = relative_dual_sage_poly_cone(lagrangian, v, 'Lagrangian', log_AbK=X)
    for s_g, g in ineq_lag_mults:
        # These generalized Lagrange multipliers "s_g" are SAGE polynomials.
        # For each such multiplier, introduce an appropriate dual variable "v_g", along
        # with constraints over that dual variable.
        g_m = g * modulator
        c_g = sym_corr.moment_reduction_array(s_g, g_m, lagrangian)
        name_base = 'v_' + str(g)
        if slacks:
            v_g = cl.Variable(name=name_base, shape=(s_g.m, 1))
            con = c_g @ v == v_g
            con.name += str(g) + ' >= 0'
            constraints.append(con)
        else:
            v_g = c_g @ v
        constraints += relative_dual_sage_poly_cone(s_g, v_g,
                                                    name_base=(name_base + ' domain'), log_AbK=X)
    for z_g, g in eq_lag_mults:
        # These generalized Lagrange multipliers "z_g" are arbitrary polynomials.
        # They dualize to homogeneous equality constraints.
        g_m = g * modulator
        c_g = sym_corr.moment_reduction_array(z_g, g_m, lagrangian)
        con = c_g @ v == 0
        con.name += str(g) + ' == 0'
        constraints.append(con)
    # Equality constraint (for the Lagrangian to be bounded).
    a = sym_corr.relative_coeff_vector(modulator, lagrangian.alpha)
    constraints.append(a.T @ v == 1)
    # Define the dual objective function.
    obj_vec = sym_corr.relative_coeff_vector(f, lagrangian.alpha)
    obj = obj_vec.T @ v
    # Return the coniclifts Problem.
    prob = cl.Problem(cl.MIN, obj, constraints)
    prob.metadata = metadata
    cl.clear_variable_indices()
    return prob
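The slacks flag above only changes how the dual variables enter the model: with slacks=True each v_g is an explicit coniclifts Variable tied to c_g @ v by an equality constraint, which can make the dual solution easier to inspect at the cost of extra variables. A sketch (import paths are assumptions):

import numpy as np
from sageopt import Polynomial
from sageopt.relaxations.sage_polys import poly_constrained_dual   # assumed module path

f = Polynomial(np.array([[4], [1]]), np.array([1.0, -1.0]))   # x^4 - x
g = Polynomial(np.array([[0], [2]]), np.array([1.0, -1.0]))   # 1 - x^2 >= 0
prob = poly_constrained_dual(f, [g], [], slacks=True)
prob.solve(verbose=False)
print(prob.value)                      # matches the primal bound under strong duality
print(prob.metadata['v_poly'].value)   # the dual (pseudo-)moment vector v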
Example #6
def sig_constrained_primal(f, gts, eqs, p=0, q=1, ell=0, X=None):
    """
    Construct the SAGE-(p, q, ell) primal problem for the signomial program

        min{ f(x) : g(x) >= 0 for g in gts,
                    g(x) == 0 for g in eqs,
                    and x in X }

    where :math:`X = R^{\\texttt{f.n}}` by default.
    """
    lagrangian, ineq_lag_mults, _, gamma = make_sig_lagrangian(f, gts, eqs, p=p, q=q)
    metadata = {'lagrangian': lagrangian, 'X': X}
    if ell > 0:
        alpha_E_1 = hierarchy_e_k([f, f.upcast_to_signomial(1)] + gts + eqs, k=1)
        modulator = Signomial(alpha_E_1, np.ones(alpha_E_1.shape[0])) ** ell
        lagrangian = lagrangian * modulator
    else:
        modulator = f.upcast_to_signomial(1)
    metadata['modulator'] = modulator
    # The Lagrangian (after possible multiplication, as above) must be a SAGE signomial.
    con = primal_sage_cone(lagrangian, name='Lagrangian is SAGE', X=X)
    constrs = [con]
    #  Lagrange multipliers (for inequality constraints) must be SAGE signomials.
    expcovers = None
    for i, (s_h, _) in enumerate(ineq_lag_mults):
        con_name = 'SAGE multiplier for signomial inequality # ' + str(i)
        con = primal_sage_cone(s_h, name=con_name, X=X, expcovers=expcovers)
        expcovers = con.ech.expcovers  # only *really* needed in first iteration, but keeps code flat.
        constrs.append(con)
    # Construct the coniclifts Problem.
    prob = cl.Problem(cl.MAX, gamma, constrs)
    prob.metadata = metadata
    cl.clear_variable_indices()
    return prob
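A signomial analogue for the constrained primal (import paths are assumptions): bound exp(x) + exp(-x) subject to 2 - exp(x) >= 0, whose true minimum is 2 at x = 0.

import numpy as np
from sageopt import Signomial                                       # assumed public import
from sageopt.relaxations.sage_sigs import sig_constrained_primal    # assumed module path

f = Signomial(np.array([[1.0], [-1.0]]), np.array([1.0, 1.0]))   # exp(x) + exp(-x)
g = Signomial(np.array([[0.0], [1.0]]), np.array([2.0, -1.0]))   # 2 - exp(x) >= 0
prob = sig_constrained_primal(f, [g], [])
prob.solve(verbose=False)
print(prob.value)   # a lower bound on the constrained minimum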
Example #7
def sig_dual(f, ell=0, X=None, modulator_support=None):
    f = f.without_zeros()
    # Signomial definitions (for the objective).
    lagrangian = f - cl.Variable(name='gamma')
    if modulator_support is None:
        modulator_support = lagrangian.alpha
    t_mul = Signomial(modulator_support,
                      np.ones(modulator_support.shape[0]))**ell
    metadata = {'f': f, 'lagrangian': lagrangian, 'modulator': t_mul, 'X': X}
    lagrangian = lagrangian * t_mul
    f_mod = f * t_mul
    # C_SAGE^STAR (v must belong to the set defined by these constraints).
    v = cl.Variable(shape=(lagrangian.m, 1), name='v')
    con = relative_dual_sage_cone(lagrangian,
                                  v,
                                  name='Lagrangian SAGE dual constraint',
                                  X=X)
    constraints = [con]
    # Equality constraint (for the Lagrangian to be bounded).
    a = sym_corr.relative_coeff_vector(t_mul, lagrangian.alpha)
    a = a.reshape(a.size, 1)
    constraints.append(a.T @ v == 1)
    # Objective definition and problem creation.
    obj_vec = sym_corr.relative_coeff_vector(f_mod, lagrangian.alpha)
    obj = obj_vec.T @ v
    # Create coniclifts Problem
    prob = cl.Problem(cl.MIN, obj, constraints)
    prob.metadata = metadata
    cl.clear_variable_indices()
    return prob
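A sketch for the unconstrained dual (import paths are assumptions), including how the dual variable v can be read back from the solved problem:

import numpy as np
from sageopt import Signomial
from sageopt.relaxations.sage_sigs import sig_dual   # assumed module path

# f(x) = exp(2x) - 3*exp(x) + 2, with inf f = -1/4.
f = Signomial(np.array([[2.0], [1.0], [0.0]]), np.array([1.0, -3.0, 2.0]))
prob = sig_dual(f, ell=0)
prob.solve(verbose=False)
print(prob.value)   # a lower bound on inf f; expected to be close to -0.25 here
v = next(var for var in prob.all_variables if var.name == 'v')
print(v.value.ravel())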
Example #8
def _least_squares_magnitude_recovery(con, alpha_reduced, v_reduced, zero_tol):
    v_abs = np.abs(v_reduced).ravel()
    if con.X is not None:
        n = con.X.A.shape[1]
    else:
        n = con.alpha.shape[1]
    if n > con.alpha.shape[1]:
        padding = np.zeros(shape=(alpha_reduced.shape[0],
                                  n - con.alpha.shape[1]))
        alpha_reduced = np.hstack((alpha_reduced, padding))
    y = cl.Variable(shape=(n, ), name='abs moment mag recovery')
    are_nonzero = v_abs > np.sqrt(zero_tol)
    t = cl.Variable(shape=(1, ), name='t')
    residual = alpha_reduced[are_nonzero, :] @ y - np.log(v_abs[are_nonzero])
    constraints = [cl.vector2norm(residual) <= t]
    if np.any(~are_nonzero):
        tempcon = alpha_reduced[~are_nonzero, :] @ y <= np.log(zero_tol)
        constraints.append(tempcon)
    if con.X is not None:
        A, b, K = con.X.A, con.X.b, con.X.K
        tempcon = cl.PrimalProductCone(A @ y + b, K)
        constraints.append(tempcon)
    prob = cl.Problem(cl.MIN, t, constraints)
    prob.solve(verbose=False)
    cl.clear_variable_indices()
    if prob.status in {cl.SOLVED, cl.INACCURATE} and prob.value < np.inf:
        mag = np.exp(y.value.astype(np.longdouble))
        return mag
    else:
        return None
Example #9
def sig_constrained_dual(f, gts, eqs, p=0, q=1, ell=0, X=None, slacks=False):
    """
    Construct the SAGE-(p, q, ell) dual problem for the signomial program

        min{ f(x) : g(x) >= 0 for g in gts,
                    g(x) == 0 for g in eqs,
                    and x in X }

    where :math:`X = R^{\\texttt{f.n}}` by default.
    """
    lagrangian, ineq_lag_mults, eq_lag_mults, _ = make_sig_lagrangian(f, gts, eqs, p=p, q=q)
    metadata = {'lagrangian': lagrangian, 'f': f, 'gts': gts, 'eqs': eqs, 'level': (p, q, ell), 'X': X}
    if ell > 0:
        alpha_E_1 = hierarchy_e_k([f, f.upcast_to_signomial(1)] + list(gts) + list(eqs), k=1)
        modulator = Signomial(alpha_E_1, np.ones(alpha_E_1.shape[0])) ** ell
        lagrangian = lagrangian * modulator
        f = f * modulator
    else:
        modulator = f.upcast_to_signomial(1)
    metadata['modulator'] = modulator
    # In primal form, the Lagrangian is constrained to be a SAGE signomial.
    # Introduce a dual variable "v" for this constraint.
    v = cl.Variable(shape=(lagrangian.m, 1), name='v')
    con = relative_dual_sage_cone(lagrangian, v, name='Lagrangian SAGE dual constraint', X=X)
    constraints = [con]
    expcovers = None
    for i, (s_h, h) in enumerate(ineq_lag_mults):
        # These generalized Lagrange multipliers "s_h" are SAGE signomials.
        # For each such multiplier, introduce an appropriate dual variable "v_h", along
        # with constraints over that dual variable.
        h_m = h * modulator
        c_h = sym_corr.moment_reduction_array(s_h, h_m, lagrangian)
        if slacks:
            v_h = cl.Variable(name='v_' + str(h), shape=(s_h.m, 1))
            constraints.append(c_h @ v == v_h)
        else:
            v_h = c_h @ v
        con_name = 'SAGE dual for signomial inequality # ' + str(i)
        con = relative_dual_sage_cone(s_h, v_h, name=con_name, X=X, expcovers=expcovers)
        expcovers = con.ech.expcovers  # only *really* needed in first iteration, but keeps code flat.
        constraints.append(con)
    for s_h, h in eq_lag_mults:
        # These generalized Lagrange multipliers "s_h" are arbitrary signomials.
        # They dualize to homogeneous equality constraints.
        h = h * modulator
        c_h = sym_corr.moment_reduction_array(s_h, h, lagrangian)
        constraints.append(c_h @ v == 0)
    # Equality constraint (for the Lagrangian to be bounded).
    a = sym_corr.relative_coeff_vector(modulator, lagrangian.alpha)
    constraints.append(a.T @ v == 1)
    # Define the dual objective function.
    obj_vec = sym_corr.relative_coeff_vector(f, lagrangian.alpha)
    obj = obj_vec.T @ v
    # Return the coniclifts Problem.
    prob = cl.Problem(cl.MIN, obj, constraints)
    prob.metadata = metadata
    cl.clear_variable_indices()
    return prob
Example #10
def sig_primal(f, ell=0, X=None, modulator_support=None):
    f = f.without_zeros()
    gamma = cl.Variable(name='gamma')
    lagrangian = f - gamma
    if modulator_support is None:
        modulator_support = lagrangian.alpha
    t = Signomial(modulator_support, np.ones(modulator_support.shape[0]))
    s_mod = lagrangian * (t ** ell)
    con = primal_sage_cone(s_mod, name=str(s_mod), X=X)
    constraints = [con]
    obj = gamma.as_expr()
    prob = cl.Problem(cl.MAX, obj, constraints)
    cl.clear_variable_indices()
    return prob
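An unconstrained primal sketch (import paths are assumptions), using a two-variable AM-GM example where the ell = 0 relaxation is already tight:

import numpy as np
from sageopt import Signomial
from sageopt.relaxations.sage_sigs import sig_primal   # assumed module path

# f(x, y) = exp(x) + exp(y) - exp((x + y)/2), with inf f = 0 by the AM-GM inequality.
f = Signomial(np.array([[1.0, 0.0], [0.0, 1.0], [0.5, 0.5]]),
              np.array([1.0, 1.0, -1.0]))
prob = sig_primal(f, ell=0)
prob.solve(verbose=False)
print(prob.value)   # expected to be numerically close to 0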
Example #11
def sage_multiplier_search(f, level=1, X=None):
    """
    Constructs a coniclifts maximization Problem which is feasible if ``f`` can be certified as nonnegative
    over ``X``, by using an appropriate X-SAGE modulating function.

    Parameters
    ----------
    f : Polynomial
        We want to test if ``f`` is nonnegative over ``X``.
    level : int
        Controls the complexity of the X-SAGE modulating function. Must be a positive integer.
    X : PolyDomain or None
        If ``X`` is None, then we test nonnegativity of ``f`` over :math:`R^{\\texttt{f.n}}`.

    Returns
    -------
    prob : sageopt.coniclifts.Problem

    Notes
    -----
    This function provides an alternative to moving up the reference SAGE hierarchy, for the
    goal of certifying nonnegativity of a polynomial ``f`` over some set ``X`` where ``|X|``
    is log-convex. In general, the approach is to introduce a polynomial

        ``mult = Polynomial(alpha_hat, c_tilde)``

    where the rows of ``alpha_hat`` are all ``level``-wise sums of rows from ``f.alpha``, and ``c_tilde``
    is a coniclifts Variable defining a nonzero SAGE polynomial. Then we can check if
    ``f_mod = f * mult`` is SAGE for any choice of ``c_tilde``.
    """
    constraints = []
    # Make the multiplier polynomial (and require that it be SAGE)
    mult_alpha = hierarchy_e_k([f], k=level)
    c_tilde = cl.Variable(shape=(mult_alpha.shape[0], ), name='c_tilde')
    mult = Polynomial(mult_alpha, c_tilde)
    temp_cons = primal_sage_poly_cone(mult,
                                      name=(c_tilde.name + ' domain'),
                                      log_AbK=X)
    constraints += temp_cons
    constraints.append(cl.sum(c_tilde) >= 1)
    # Make "f_mod := f * mult", and require that it be SAGE.
    f_mod = mult * f
    temp_cons = primal_sage_poly_cone(f_mod, name='f_mod sage poly', log_AbK=X)
    constraints += temp_cons
    # noinspection PyTypeChecker
    prob = cl.Problem(cl.MAX, 0, constraints)
    if AUTO_CLEAR_INDICES:  # pragma:no cover
        cl.clear_variable_indices()
    return prob
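A usage sketch (import paths are assumptions): when a nonnegative polynomial resists a direct SAGE certificate, a level-1 multiplier search may still succeed; the feasibility test is the same prob.value > -inf check used elsewhere.

import numpy as np
from sageopt import Polynomial
from sageopt.relaxations.sage_polys import sage_multiplier_search   # assumed module path

# p(x) = x^4 - 3*x^2 + 3, which is strictly positive on R.
p = Polynomial(np.array([[4], [2], [0]]), np.array([1.0, -3.0, 3.0]))
prob = sage_multiplier_search(p, level=1)
prob.solve(verbose=False)
print(prob.value > -np.inf)   # True if some level-1 SAGE multiplier certifies p >= 0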
Example #12
def _make_dummy_lagrangian(f, gts, eqs):
    dummy_gamma = cl.Variable(shape=())
    if len(gts) > 0:
        dummy_slacks = cl.Variable(shape=(len(gts),))
        ineq_term = sum([gts[i] * dummy_slacks[i] for i in range(len(gts))])
    else:
        ineq_term = 0
    if len(eqs) > 0:
        dummy_multipliers = cl.Variable(shape=(len(eqs),))
        eq_term = sum([eqs[i] * dummy_multipliers[i] for i in range(len(eqs))])
    else:
        eq_term = 0
    dummy_L = f - dummy_gamma - ineq_term - eq_term
    cl.clear_variable_indices()
    return dummy_L
Example #13
def _constrained_least_squares(con, alpha, log_v):
    A, b, K = con.X.A, con.X.b, con.X.K
    lifted_n = A.shape[1]
    n = con.alpha.shape[1]
    x = cl.Variable(shape=(lifted_n,))
    t = cl.Variable(shape=(1,))
    cons = [cl.vector2norm(log_v - alpha @ x[:n]) <= t,
            cl.PrimalProductCone(A @ x + b, K)]
    prob = cl.Problem(cl.MIN, t, cons)
    cl.clear_variable_indices()
    res = prob.solve(verbose=False)
    if res[0] in {cl.SOLVED, cl.INACCURATE}:
        mu_ls = x.value[:n]
        return mu_ls
    else:
        return None
Example #14
def test_case_1_unpickle_then_solve(self):
    prob, expect_status, expect_value, expect_x = self.case_1()
    pickled_prob = pickle.dumps(prob)
    del prob
    cl.clear_variable_indices()
    prob = pickle.loads(pickled_prob)
    res = prob.solve(solver='ECOS', verbose=False)
    assert res[0] == expect_status
    assert abs(res[1] - expect_value) < 1e-6
    x = None
    for v in prob.all_variables:
        if v.name == 'x':
            x = v
            assert x.is_proper()
            break
    x_star = x.value
    assert np.allclose(x_star, expect_x, atol=1e-4)
Example #15
def sage_multiplier_search(f, level=1, X=None):
    """
    Constructs a coniclifts maximization Problem which is feasible if ``f`` can be certified as nonnegative
    over ``X``, by using an appropriate X-SAGE modulating function.

    Parameters
    ----------
    f : Signomial
        We want to test if ``f`` is nonnegative over ``X``.
    level : int
        Controls the complexity of the X-SAGE modulating function. Must be a positive integer.
    X : SigDomain
        If ``X`` is None, then we test nonnegativity of ``f`` over :math:`R^{\\texttt{f.n}}`.

    Returns
    -------
    prob : sageopt.coniclifts.Problem

    Notes
    -----
    This function provides an alternative to moving up the reference SAGE hierarchy, for the goal of certifying
    nonnegativity of a signomial ``f`` over some convex set ``X``.  In general, the approach is to introduce
    a signomial

        ``mult = Signomial(alpha_hat, c_tilde)``

    where the rows of ``alpha_hat`` are all ``level``-wise sums of rows from ``f.alpha``, and ``c_tilde``
    is a coniclifts Variable defining a nonzero X-SAGE function. Then we check if ``f_mod = f * mult``
    is X-SAGE for any choice of ``c_tilde``.
    """
    f = f.without_zeros()
    constraints = []
    mult_alpha = hierarchy_e_k([f, f.upcast_to_signomial(1)], k=level)
    c_tilde = cl.Variable(mult_alpha.shape[0], name='c_tilde')
    mult = Signomial(mult_alpha, c_tilde)
    constraints.append(cl.sum(c_tilde) >= 1)
    sig_under_test = mult * f
    con1 = primal_sage_cone(mult, name=str(mult), X=X)
    con2 = primal_sage_cone(sig_under_test, name=str(sig_under_test), X=X)
    constraints.append(con1)
    constraints.append(con2)
    prob = cl.Problem(cl.MAX, cl.Expression([0]), constraints)
    if AUTO_CLEAR_INDICES:  # pragma:no cover
        cl.clear_variable_indices()
    return prob
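The signomial version follows the same pattern (import paths are assumptions):

import numpy as np
from sageopt import Signomial
from sageopt.relaxations.sage_sigs import sage_multiplier_search   # assumed module path

# f(x) = exp(x) - 1.9*exp(x/2) + 1, strictly positive since t^2 - 1.9t + 1 > 0 for all t.
f = Signomial(np.array([[1.0], [0.5], [0.0]]), np.array([1.0, -1.9, 1.0]))
prob = sage_multiplier_search(f, level=1)
prob.solve(verbose=False)
print(prob.value > -np.inf)   # True if f * mult is X-SAGE for some admissible mult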
Example #16
def sage_feasibility(f, X=None):
    """
    Constructs a coniclifts maximization Problem which is feasible iff ``f`` admits an X-SAGE decomposition.

    Parameters
    ----------
    f : Polynomial
        We want to test if this function admits an X-SAGE decomposition.
    X : PolyDomain or None
        If ``X`` is None, then we test nonnegativity of ``f`` over :math:`R^{\\texttt{f.n}}`.

    Returns
    -------
    prob : sageopt.coniclifts.Problem
        A coniclifts maximization Problem. If ``f`` admits an X-SAGE decomposition, then we should have
        ``prob.value > -np.inf``, once ``prob.solve()`` has been called.
    """
    sr, cons = f.sig_rep
    prob = sage_sigs.sage_feasibility(sr, X=X, additional_cons=cons)
    cl.clear_variable_indices()
    return prob
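Since this routine works through the signomial representative, a classic check (import paths are assumptions) is the Motzkin polynomial, whose nonnegativity follows from AM-GM and is therefore expected to be certified as SAGE:

import numpy as np
from sageopt import Polynomial
from sageopt.relaxations.sage_polys import sage_feasibility   # assumed module path

# Motzkin polynomial: x^4*y^2 + x^2*y^4 - 3*x^2*y^2 + 1.
motzkin = Polynomial(np.array([[4, 2], [2, 4], [2, 2], [0, 0]]),
                     np.array([1.0, 1.0, -3.0, 1.0]))
prob = sage_feasibility(motzkin)
prob.solve(verbose=False)
print(prob.value > -np.inf)   # expected True: the signomial representative is AGE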
Example #17
def sig_primal(f, ell=0, X=None, modulator_support=None):
    f = f.without_zeros()
    gamma = cl.Variable(name='gamma')
    lagrangian = f - gamma
    if modulator_support is None:
        modulator_support = lagrangian.alpha
    t = Signomial(modulator_support, np.ones(modulator_support.shape[0]))
    t_mul = t**ell
    s_mod = lagrangian * t_mul
    con = primal_sage_cone(s_mod, name=str(s_mod), X=X)
    constraints = [con]
    obj = gamma.as_expr()
    prob = cl.Problem(cl.MAX, obj, constraints)
    prob.metadata = {
        'f': f,
        'lagrangian': lagrangian,
        'modulator': t_mul,
        'X': X
    }
    if AUTO_CLEAR_INDICES:  # pragma:no cover
        cl.clear_variable_indices()
    return prob
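The metadata dictionary attached above records the objects used to build the relaxation (the Lagrangian, the modulator, and X), so they can be inspected after solving. A brief sketch with the same assumed imports as the earlier sig_primal example:

import numpy as np
from sageopt import Signomial
from sageopt.relaxations.sage_sigs import sig_primal   # assumed module path

f = Signomial(np.array([[1.0, 0.0], [0.0, 1.0], [0.5, 0.5]]),
              np.array([1.0, 1.0, -1.5]))
prob = sig_primal(f, ell=1)
prob.solve(verbose=False)
print(prob.value)                        # a lower bound on inf f
print(prob.metadata['modulator'].alpha)  # support of the level-1 modulator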