Example #1
def test_optimize(name_solver, solver, tol, loss, penalty):
    """Test a method on both the backtracking and fixed step size strategy."""
    max_iter = 1000
    for alpha in np.logspace(-1, 3, 3):
        obj = loss(A, b, alpha)
        prox_1 = _get_prox(penalty[0])
        prox_2 = _get_prox(penalty[1])
        trace = cp.utils.Trace(obj)
        opt = solver(
            obj.f_grad,
            np.zeros(n_features),
            prox_1=prox_1,
            prox_2=prox_2,
            tol=1e-12,
            max_iter=max_iter,
            callback=trace,
        )
        assert opt.certificate < tol, name_solver

        opt_2 = solver(
            obj.f_grad,
            np.zeros(n_features),
            prox_1=prox_1,
            prox_2=prox_2,
            max_iter=max_iter,
            tol=1e-12,
            line_search=False,
            step_size=1.0 / obj.lipschitz,
        )
        assert opt.certificate < tol, name_solver
        assert opt_2.certificate < tol, name_solver
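
These test functions are excerpts from a larger module, so they rely on imports and module-level fixtures (A, b, n_features, loss_funcs, and the parametrized solver/loss/penalty objects) that are not shown here. A minimal sketch of the kind of setup they assume; the problem sizes and random data below are illustrative assumptions, not the library's actual test fixtures:

import numpy as np
from scipy import optimize   # optimize.check_grad is used in Examples #3 and #4
import copt as cp
import copt.loss

# assumed synthetic problem shared by the snippets on this page
n_samples, n_features = 20, 10
A = np.random.randn(n_samples, n_features)
b = np.random.rand(n_samples)

# loss classes exercised by the gradient checks (see Example #3)
loss_funcs = [copt.loss.LogLoss, copt.loss.SquareLoss, copt.loss.HuberLoss]
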
Example #2
def test_optimize(accelerated, loss, penalty):
    """Test a method on both the line_search and fixed step size strategy."""
    max_iter = 200
    for alpha in np.logspace(-1, 3, 3):
        obj = loss(A, b, alpha)
        if penalty is not None:
            prox = penalty(1e-3).prox
        else:
            prox = None
        opt = cp.minimize_proximal_gradient(
            obj.f_grad,
            np.zeros(n_features),
            prox=prox,
            jac=True,
            step="backtracking",
            max_iter=max_iter,
            accelerated=accelerated,
        )
        grad_x = obj.f_grad(opt.x)[1]
        assert certificate(opt.x, grad_x, prox) < 1e-5

        opt_2 = cp.minimize_proximal_gradient(
            obj.f_grad,
            np.zeros(n_features),
            prox=prox,
            jac=True,
            max_iter=max_iter,
            step=lambda x: 1 / obj.lipschitz,
            accelerated=accelerated,
        )
        grad_2x = obj.f_grad(opt_2.x)[1]
        assert certificate(opt_2.x, grad_2x, prox) < 1e-5
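
The certificate helper used above is also defined elsewhere in the test module. A plausible sketch, assuming it measures the fixed-point residual of a unit-step proximal-gradient update (zero exactly at a minimizer); the exact form used by the test suite may differ:

def certificate(x, grad_x, prox):
    """Norm of the proximal-gradient fixed-point residual at x."""
    if prox is None:
        # no penalty: a stationary point simply has zero gradient
        return np.linalg.norm(grad_x)
    return np.linalg.norm(x - prox(x - grad_x, 1.0))
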
Example #3
def test_loss_grad():
    for A in (A_dense, A_sparse):
        for loss in [copt.loss.LogLoss, copt.loss.SquareLoss, copt.loss.HuberLoss]:
            f = loss(A, b)
            err = optimize.check_grad(
                f, lambda x: f.f_grad(x)[1], np.random.randn(n_features)
            )
            assert err < 1e-6
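
A_dense and A_sparse are again module-level fixtures. A minimal sketch of how such a pair might be built; the shapes and density are assumptions:

from scipy import sparse

A_dense = np.random.randn(n_samples, n_features)
A_sparse = sparse.random(n_samples, n_features, density=0.5, format="csr")
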
Example #4
def test_gradient():
    for _ in range(20):
        A = np.random.randn(10, 5)
        b = np.random.rand(10)
        for loss in loss_funcs:
            f_grad = loss(A, b).f_grad
            f = lambda x: f_grad(x)[0]
            grad = lambda x: f_grad(x)[1]
            eps = optimize.check_grad(f, grad, np.random.randn(5))
            assert eps < 0.001
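
scipy.optimize.check_grad builds a finite-difference approximation of the gradient and returns the 2-norm of its difference from the analytic gradient, so a small return value means the two agree. A self-contained sketch on a simple quadratic:

import numpy as np
from scipy import optimize

f = lambda x: 0.5 * np.dot(x, x)   # f(x) = ||x||^2 / 2
grad = lambda x: x                 # its exact gradient
err = optimize.check_grad(f, grad, np.random.randn(5))
assert err < 1e-6                  # finite-difference error is around 1e-8 here
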
Example #5
    # three-operator splitting run with a 10x larger step size, traced through cb_adatos
    cb_adatos = cp.utils.Trace()
    x0 = np.zeros(n_features)
    adatos = cp.minimize_three_split(
        f.f_grad,
        x0,
        g_prox,
        h_prox,
        step_size=10 * step_size,
        max_iter=max_iter,
        tol=1e-14,
        verbose=1,
        callback=cb_adatos,
        h_Lipschitz=beta,
    )
    trace_ls = [loss(x, beta) for x in cb_adatos.trace_x]
    all_trace_ls.append(trace_ls)
    all_trace_ls_time.append(cb_adatos.trace_time)
    out_img.append(adatos.x.reshape(img.shape))

    # reference three-operator splitting run with the base step size
    cb_tos = cp.utils.Trace()
    x0 = np.zeros(n_features)
    cp.minimize_three_split(
        f.f_grad,
        x0,
        g_prox,
        h_prox,
        step_size=step_size,
        max_iter=max_iter,
        tol=1e-14,
        verbose=1,
Example #6
    # three-operator splitting with a 5x larger step size, traced through cb_tosls
    cb_tosls = cp.utils.Trace()
    x0 = np.zeros(n_features)
    tos_ls = cp.minimize_three_split(
        f.f_grad,
        x0,
        G2.prox,
        G1.prox,
        step_size=5 * step_size,
        max_iter=max_iter,
        tol=1e-14,
        verbose=1,
        callback=cb_tosls,
        h_Lipschitz=beta,
    )
    trace_ls = np.array([loss(x) for x in cb_tosls.trace_x])
    all_trace_ls.append(trace_ls)
    all_trace_ls_time.append(cb_tosls.trace_time)

    # same problem with the base step size and line_search=False
    cb_tos = cp.utils.Trace()
    x0 = np.zeros(n_features)
    tos = cp.minimize_three_split(
        f.f_grad,
        x0,
        G1.prox,
        G2.prox,
        step_size=step_size,
        max_iter=max_iter,
        tol=1e-14,
        verbose=1,
        line_search=False,