Example No. 1
def nlinprog(obj, cons, Vars):
    """nlinprog

    Parameters
    ----------
    obj :
    cons :

    Returns
    -------

    Notes
    ------
    """
    # Pure feasibility check: there is no objective to minimize.
    if obj == 0:
        res = check_feasibility(obj, cons, Vars)
        # TODO: additionally validate the result with res_is_valid(Vars, cons, res)
        if res.success:
            return res
        else:
            return spec.OPTRES(0, None, 'OK', False)
    else:
        import nonlinprog.ipopt as ipopt

        # Find a feasible starting point and use it to warm-start IPOPT;
        # fall back to the feasibility result if the IPOPT run fails.
        res = check_feasibility(obj, cons, Vars)
        res_nlopt = ipopt.nlinprog(obj, cons, Vars, x0=res.x)
        if not res_nlopt.success:
            return res
        else:
            return res_nlopt
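
Every example in this listing returns a spec.OPTRES value. The spec module itself is not shown; the sketch below is an assumption inferred from the call sites (the field names fval, x, status, success are made up here), just to make the return convention concrete.

import collections

# Hypothetical sketch of the OPTRES container; the real `spec` module may differ.
OPTRES = collections.namedtuple('OPTRES', ['fval', 'x', 'status', 'success'])

# e.g. a feasible point found at x = [0, 0] with objective value 0
res = OPTRES(0.0, [0.0, 0.0], 'OK', True)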
Example No. 2
def uncons_opt(obj, cons, Vars, x0):
    """nlinprog

    Parameters
    ----------
    obj :
    cons :

    Returns
    -------

    Notes
    ------
    """
    raise NotImplementedError  # everything below is unreachable until this is removed

    # Recast feasibility (every constraint lhs <= 0) as an unconstrained
    # minimization of the sum of squared constraint left-hand sides.
    assert obj == 0
    for c in cons:
        assert isinstance(c, sym.LessThan)
        assert c.rhs == 0
        obj += c.lhs**2

    eval_f = ft.partial(eval_expr, sym.lambdify(Vars, obj))
    eval_grad_f = ft.partial(eval_grad_obj, sym.lambdify(Vars, grad(Vars, obj)))
    eval_hessian_f = ft.partial(eval_expr, sym.lambdify(Vars, sym.hessian(obj, Vars)))

    # Return codes in IpReturnCodes_inc.h
    res_x, mL, mU, Lambda, res_obj, status = pyipopt.fmin_unconstrained(
            eval_f,
            x0,
            fprime=eval_grad_f,
            fhess=eval_hessian_f,
            )

    return spec.OPTRES(res_obj, res_x, 'OK', res_obj <= 0)
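
This example (and Example No. 4 below) turns a pure feasibility problem into an unconstrained minimization of the sum of squared constraint left-hand sides. A small standalone illustration of that reformulation with plain SymPy (the variables and constraints here are made up for the illustration):

import sympy as sym

x, y = sym.symbols('x y')
cons = [sym.LessThan(x + y - 1, 0), sym.LessThan(-x, 0)]

penalty = 0
for c in cons:
    assert isinstance(c, sym.LessThan) and c.rhs == 0
    penalty += c.lhs**2   # squared residual of each constraint

print(penalty)  # e.g. x**2 + (x + y - 1)**2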
Example No. 3
def check_feasibility(obj, cons, Vars):
    """Check feasibility of cons by dumping them to SMT-LIB2 and calling dReal."""

    var_str_to_idx = {str(v): idx for idx, v in enumerate(Vars)}

    # Z3
    solver = z3.Solver()
    sym2Z3_varmap, z3_cons = sympy2z3(cons)
    solver.add(z3_cons)

    # dReal3: reuse Z3's SMT-LIB2 printer, prepending the logic line dReal expects
    LOGIC = '(set-logic QF_NRA)\n'
    smt2_str = LOGIC + solver.to_smt2()
    fp.overwrite(FNAME, smt2_str)
    output = U.strict_call_get_op([DREAL, FNAME, '--model', '--precision', DELTA_SAT_PREC])
    res_x = parse_dreal_res(var_str_to_idx, output)
    return spec.OPTRES(0, res_x, 'OK', res_x is not None)
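
The dReal call above piggybacks on Z3's SMT-LIB printer: the constraints go into a z3.Solver, are dumped with to_smt2(), and a QF_NRA set-logic line is prepended. A standalone sketch of just that serialization step (the z3 variables and constraints here are made up for illustration):

import z3

x, y = z3.Reals('x y')
solver = z3.Solver()
solver.add(x * x + y * y <= 1, x + y >= 0.5)

# Z3 emits a plain SMT-LIB2 benchmark; dReal additionally wants the logic line.
smt2_str = '(set-logic QF_NRA)\n' + solver.to_smt2()
print(smt2_str)  # text that can be written to a file and fed to dReal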
Example No. 4
def uncons_opt(obj, cons, Vars):
    assert obj == 0  # else we can't tell whether the optimization was successful
    # Recast feasibility as minimizing the sum of squared constraint left-hand sides.
    for c in cons:
        assert isinstance(c, sym.LessThan)
        assert c.rhs == 0
        obj += c.lhs**2

    x0 = np.zeros(len(Vars))

    retcode, res_x, res_f = call_fmincon(Vars, obj, [], x0)
    print('res_f:', res_f)
    print('retcode:', retcode)

    success = res_f <= 0

    return spec.OPTRES(res_f, res_x, 'OK', success)
Example No. 5
def nlinprog(obj, cons, Vars):
    """nlinprog

    Parameters
    ----------
    obj :
    cons :

    Returns
    -------

    Notes
    ------
    """
    # Ignore the objective; only feasibility is checked for now.
    if obj != 0:
        raise NotImplementedError

    solver = z3.Solver()
    # Objective and constraints should be polynomials
    # assert(isinstance(obj, Poly))
    sym2Z3_varmap, z3_cons = sympy2z3(cons)
    solver.add(z3_cons)

    #smt_vars = z3.Reals(','.join('x{}'.format(v) for v in nvars))
    #     for c in cons:
    #         solver.add(poly2z3(c, smt_vars))

    res = solver.check()
    if res == z3.sat:
        model = solver.model()
        #varval_map = {sv: real2float(model[zv]) for sv, zv in sym2Z3_varmap.iteritems()}
        res_x = np.array([real2float(model[sym2Z3_varmap[v]]) for v in Vars])
    elif res == z3.unsat:
        #varval_map = None
        res_x = None
    else:
        raise RuntimeError(solver.reason_unknown())

    #return (res == z3.sat), varval_map
    return spec.OPTRES(0, res_x, 'OK', res == z3.sat)
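
real2float is not shown in this snippet; it has to turn a Z3 model value (a rational or algebraic number) into a Python float. One plausible way to do that with the z3 API, shown on a toy model, is sketched below; this is an assumption about real2float, not its actual code.

import z3

x = z3.Real('x')
s = z3.Solver()
s.add(x * x == 2, x > 0)
if s.check() == z3.sat:
    m = s.model()
    # as_decimal() gives a decimal string; algebraic values carry a trailing '?'
    val = float(m[x].as_decimal(12).rstrip('?'))
    print(val)  # approximately 1.41421356...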
Example No. 6
def cons_opt(obj, cons, Vars):

    #     def debugf(f, e, x):
    #         y = f(*x)
    #         print(x, ':', e.args[0] <= 0, ':', f(*x))
    #         print(e.args[0] <= 0, ':', f(*x))
    #         return y

    # l must be bound as a default argument; otherwise late binding would make
    # every constraint function call the last lambdified expression.
    #cons_f = tuple(lambda x, l=l: l(*x) for l in lambdafied)
    #cons_f = tuple(ft.partial(debugf, l, e) for l, e in zip(lambdafied, cons))

    x0 = np.zeros(len(Vars))

    #retcode, res_x, res_f = fmincon(obj_f, x0, A=[], B=[], C=cons_f)
    retcode, res_x, res_f = call_fmincon(Vars, obj, cons, x0)
    print('retcode:', retcode)

    return spec.OPTRES(res_f, res_x, 'OK', retcode == 0)
Example No. 7
def cons_opt(obj, cons, Vars, x0):
    """nlinprog

    Parameters
    ----------
    obj :
    cons :

    Returns
    -------

    Notes
    ------
    """
    nvars = len(Vars)

    x_L = np.array((pyipopt.NLP_LOWER_BOUND_INF,)*nvars)
    x_U = np.array((pyipopt.NLP_UPPER_BOUND_INF,)*nvars)
    #x_L = -20.*np.ones(nvars)
    #x_U = 20.*np.ones(nvars)

    g_L, g_U, g = [], [], []
    for gc in group_cons_by_ub_lb(cons):
        g_L.append(gc.lb)
        g_U.append(gc.ub)
        g.append(gc.expr)
    ncon = len(g)

    g_L, g_U = np.array(g_L), np.array(g_U)
    eval_g = ft.partial(list2array_wrap, sym.lambdify(Vars, g))

    js = jac(Vars, g)
    jrow, jcol, jdata = np.asarray(js.row, dtype=int), np.asarray(js.col, dtype=int), js.data
    eval_jac_g = ft.partial(eval_jac_cons, (jrow, jcol, sym.lambdify(Vars, jdata.tolist())))

    eval_f = ft.partial(eval_expr, sym.lambdify(Vars, obj))
    eval_grad_f = ft.partial(eval_grad_obj, sym.lambdify(Vars, grad(Vars, obj)))
    #eval_hessian_f = ft.partial(eval_expr, sym.lambdify(Vars, sym.hessian(obj, Vars)))

    nnzj = js.nnz
    nnzh = 0

    if debug:
        for gi, lb, ub in zip(g, g_L, g_U):
            print('{} in [{}, {}]'.format(gi, lb, ub))

    nlp = pyipopt.create(nvars, x_L, x_U, ncon, g_L, g_U, nnzj, nnzh, eval_f, eval_grad_f, eval_g, eval_jac_g)
    # Verbosity level \in [0, 12]
    nlp.int_option('print_level', print_level)
    nlp.num_option('constr_viol_tol', constr_viol_tol)
    res_x, zl, zu, constraint_multipliers, res_obj, status = nlp.solve(x0)
    nlp.close()

    if debug:

        def print_variable(variable_name, value):
            for i in range(len(value)):
                print(variable_name + "[" + str(i) + "] =", value[i])

        print()
        print("Solution of the primal variables, x")
        print_variable("x", res_x)
        print()
        print("Solution of the bound multipliers, z_L and z_U")
        print_variable("z_L", zl)
        print_variable("z_U", zu)
        print()
        print("Solution of the constraint multipliers, lambda")
        print_variable("lambda", constraint_multipliers)
        print()
        print("Objective value")
        print("f(x*) =", res_obj)

    # Return codes in IpReturnCodes_inc.h
    print('status:', status)
    return spec.OPTRES(res_obj, res_x, 'OK', status in (0, 1))
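
The jac helper used above is not shown; from its use it must expose .row, .col, .data (symbolic nonzero partial derivatives) and .nnz, i.e. a COO-style sparse symbolic Jacobian. The following is a hypothetical reconstruction under that assumption, not the project's actual helper.

import collections
import numpy as np
import sympy as sym

COOJac = collections.namedtuple('COOJac', ['row', 'col', 'data', 'nnz'])

def jac_sketch(Vars, exprs):
    """Hypothetical jac(): nonzero symbolic partials of exprs w.r.t. Vars in COO form."""
    rows, cols, data = [], [], []
    for i, e in enumerate(exprs):
        for j, v in enumerate(Vars):
            d = sym.diff(e, v)
            if d != 0:
                rows.append(i)
                cols.append(j)
                data.append(d)
    return COOJac(np.array(rows), np.array(cols), np.array(data, dtype=object), len(data))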
Example No. 8
def nlinprog_cons(obj, cons, Vars):
    """nlinprog

    Parameters
    ----------
    obj :
    cons :

    Returns
    -------

    Notes
    ------
    """
    cons = list(cons)

    # In case Vars is an unordered collection, freeze the variable order.
    all_vars = tuple(Vars)

    # Lambdify the objective once instead of on every evaluation.
    obj_lambda = sym.lambdify(all_vars, obj)
    obj_f = lambda x: obj_lambda(*x)

    # scipy encodes 'ineq' constraints as g(x) >= 0, hence the sign is flipped below
    #cons_f = tuple(sym.lambdify(all_vars, -c) for c in cons)

    def debugf(f, e, x):
        y = f(*x)
        #print(x, ':', -e.args[0] >= 0, ':', f(*x))
        print(-e.args[0] >= 0, ':', f(*x))
        return y

    # The below constraint encoding assumes all cons
    # (constraint exprs) are of the form f(x) <= 0
    lambdafied = []
    for c in cons:
        assert (isinstance(c, sym.LessThan))
        assert (c.args[1] == 0)
        lambdafied.append(sym.lambdify(all_vars, -c.args[0], str('numpy')))

    # more concise but can not put in asserts
    #lambdafied = tuple(sym.lambdify(all_vars, -c.args[0]) for c in cons)
    #cons_f = tuple({'type': 'ineq', 'fun': lambda x: sym.lambdify(all_vars, -c)(*x)} for c in cons)

    # l must be bound as a default argument; otherwise late binding would make
    # every constraint function call the last lambdified expression.
    cons_f = tuple({
        'type': 'ineq',
        'fun': lambda x, l=l: l(*x)
    } for l in lambdafied)
    #cons_f = tuple({'type': 'ineq', 'fun': ft.partial(debugf, l, e)} for l, e in zip(lambdafied, cons))
    #cons_f2 = [ft.partial(debugf, l, e) for l, e in zip(lambdafied, cons)]

    #cons = ({'type': 'ineq', 'fun': lambda x:  x[0] - 2 * x[1] + 2},

    bounds = None  #[(-100, 100) for v in Vars]
    x0 = np.zeros(len(all_vars))

    # Refer:
    # https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.minimize.html#scipy.optimize.minimize
    # Signature:
    # scipy.optimize.minimize(fun, x0, args=(), method=None, jac=None, hess=None, hessp=None,
    #                          bounds=None, constraints=(), tol=None, callback=None, options=None)

    res = spopt.minimize(obj_f,
                         x0,
                         method=METHOD,
                         jac=None,
                         hess=None,
                         hessp=None,
                         bounds=bounds,
                         constraints=cons_f,
                         tol=TOL,
                         callback=None,
                         options={
                             'disp': False,
                             'maxiter': 1000
                         })

    #     res_ = spopt.fmin_slsqp(obj_f, x0, ieqcons=cons_f2, bounds=(),
    #                             iter=1000, acc=1e-06, iprint=2, disp=True,
    #                             full_output=True)

    #     embed()

    print(res.message, res.status, res.success)
    #varval_map = {var: val for var, val in zip(all_vars, res.x)}
    #print(varval_map)
    return spec.OPTRES(res.fun, res.x, res.status, res.success)
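
The lambda x, l=l: l(*x) idiom above is the standard workaround for Python's late binding of closure variables: without the l=l default, every generated constraint function would call the last lambdified expression. A minimal standalone demonstration of the difference:

# Late binding: all three closures see the final value of i.
bad = [lambda x: x + i for i in range(3)]
print([f(0) for f in bad])    # [2, 2, 2]

# Binding i as a default argument freezes its value per closure.
good = [lambda x, i=i: x + i for i in range(3)]
print([f(0) for f in good])   # [0, 1, 2]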
Example No. 9
def nlinprog_uncons(obj, cons, Vars):
    """nlinprog

    Parameters
    ----------
    obj :
    cons :

    Returns
    -------

    Notes
    ------
    """
    cons = list(cons)

    # Fold the constraint left-hand sides into the objective as penalty terms.
    objective = obj
    for c in cons:
        objective += c.args[0]
    #embed()

    print(objective)
    objective_lambda = sym.lambdify(Vars, objective, str('numpy'))

    def obj_f(x):
        return objective_lambda(*x)

    bounds = ()  #[(-100, 100) for v in Vars]
    x0 = np.zeros(len(Vars))

    # Refer:
    # https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.minimize.html#scipy.optimize.minimize
    # Signature:
    # scipy.optimize.minimize(fun, x0, args=(), method=None, jac=None, hess=None, hessp=None,
    #                          bounds=None, constraints=(), tol=None, callback=None, options=None)

    res = spopt.minimize(obj_f,
                         x0,
                         method=METHOD,
                         jac=None,
                         hess=None,
                         hessp=None,
                         bounds=bounds,
                         constraints=(),
                         tol=TOL,
                         callback=None,
                         options={
                             'disp': False,
                             'maxiter': 1000
                         })

    #     res_ = spopt.fmin_slsqp(obj_f, x0, ieqcons=cons_f2, bounds=(),
    #                             iter=1000, acc=1e-06, iprint=2, disp=True,
    #                             full_output=True)

    #     embed()

    print(res.message, res.status, res.success)
    #varval_map = {var: val for var, val in zip(all_vars, res.x)}
    #print(varval_map)
    return spec.OPTRES(res.fun, res.x, res.status, res.success)
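
For reference, here is a minimal standalone run of the same scipy.optimize.minimize pattern on a lambdified SymPy objective. METHOD and TOL are module-level settings not shown in the snippet, so the method and tolerance below are chosen arbitrarily for the sketch.

import numpy as np
import sympy as sym
import scipy.optimize as spopt

x, y = sym.symbols('x y')
objective = (x - 1)**2 + (y + 2)**2

# Lambdify once, then adapt the signature from f(x, y) to f(vector).
obj_lam = sym.lambdify((x, y), objective, 'numpy')
res = spopt.minimize(lambda v: obj_lam(*v), np.zeros(2), method='SLSQP', tol=1e-6)
print(res.fun, res.x, res.status, res.success)  # ~0.0, approx [1, -2], 0, True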