Example #1
def test_f2backtrack(gf):
    # least-squares fit with the Forward2Backtrack processor: the ProjSplitFit
    # objective should come within 1e-2 of the cached lstsq optimum
    projSplit = ps.ProjSplitFit()
    m = 10
    d = 20
    if getNewOptVals and (gf == 1.0):
        A = np.random.normal(0, 1, [m, d])
        y = np.random.normal(0, 1, m)
        cache['Af2bt'] = A
        cache['yf2bt'] = y
    else:
        A = cache['Af2bt']
        y = cache['yf2bt']

    # backtracking loss processor; the trial stepsize may grow by growFactor every growFreq iterations
    processor = lp.Forward2Backtrack(growFactor=gf, growFreq=10)

    projSplit.setDualScaling(1e-1)
    projSplit.addData(A, y, 2, processor, intercept=True, normalize=True)
    projSplit.run(maxIterations=None,
                  keepHistory=True,
                  primalTol=1e-3,
                  dualTol=1e-3,
                  nblocks=5)
    ps_val = projSplit.getObjective()

    # reference value: unregularized least squares with an explicit intercept column
    if getNewOptVals and (gf == 1.0):
        AwithIntercept = np.zeros((m, d + 1))
        AwithIntercept[:, 0] = np.ones(m)
        AwithIntercept[:, 1:(d + 1)] = A
        result = np.linalg.lstsq(AwithIntercept, y, rcond=None)
        xhat = result[0]
        LSval = 0.5 * np.linalg.norm(AwithIntercept.dot(xhat) - y, 2)**2 / m
        cache['optf2bt'] = LSval
    else:
        LSval = cache['optf2bt']

    assert ps_val - LSval < 1e-2
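
# --- Hedged sketch (not part of the original fragment): module-level scaffolding
# --- that test_f2backtrack assumes. The import paths, cache file name, and the
# --- parametrized grow factors below are assumptions, not confirmed by this code.
import pickle

import numpy as np
import pytest

import projSplitFit as ps      # assumed import alias for the ProjSplitFit class
import lossProcessors as lp    # assumed module providing Forward2Backtrack etc.

getNewOptVals = False          # True: regenerate the data and reference optimum
if getNewOptVals:
    cache = {}
else:
    with open('results/cache_f2bt', 'rb') as file:   # hypothetical cache file
        cache = pickle.load(file)


# When getNewOptVals is True, gf == 1.0 must be listed first so that A, y, and the
# lstsq optimum are cached before the other grow factors run; the non-unit values
# below are illustrative.
@pytest.mark.parametrize("gf", [1.0, 1.1, 1.2])
def test_f2backtrack(gf):
    ...  # body as shown above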
Example #2


stepsize = 1e-1
f2fixed = lp.Forward2Fixed(stepsize)
f2backtrack = lp.Forward2Backtrack()
f2affine = lp.Forward2Affine()
f1fixed = lp.Forward1Fixed(stepsize)
f1bt = lp.Forward1Backtrack()
back_exact = lp.BackwardExact()
backCG = lp.BackwardCG()
backLBFGS = lp.BackwardLBFGS()
ToDo = []
firsttest = True
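# enumerate test configurations: two boolean options, nblocks from 1 to 4, and each loss processor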
for i in [False, True]:
    for j in [False, True]:
        for blk in range(1, 5):
            for process in [
                    backLBFGS, f2fixed, f2backtrack, f2affine, f1fixed, f1bt,
                    back_exact, backCG
            ]:
Example #3
            results[(i, 'tseng')] = outtseng.finalFuncVal

        if False:  # disabled: forward-reflect-backward (frb) baseline
            outfrb = algo.for_reflect_back(theFunc, proxfstar_4_tseng, proxg, theGrad, init,
                                           iter=maxIter, gamma0=tuned, gamma1=tuned,
                                           G=G, Gt=Gt, verbose=False, getFuncVals=False)
            results[(i, 'frb')] = outfrb.finalFuncVal

        if loss == "log":
            loss2use = "logistic"
        else:
            loss2use = 2

        gamma = 1.0
        if True:
            psObj = ps.ProjSplitFit(gamma)
            proc = lp.Forward2Backtrack()
            psObj.addData(X, y, loss2use, linearOp=H, normalize=False, process=proc,
                          embed=regularizers.L1(scaling=(1 - mu) * lam))
            (nbeta, ngamma) = H.shape
            shape = (ngamma - 1, ngamma)
            # G_for_ps drops the last coordinate; its adjoint pads a zero back on
            G_for_ps = sl.LinearOperator(shape,
                                         matvec=lambda x: x[:-1],
                                         rmatvec=lambda x: np.concatenate((x, np.array([0]))))
            psObj.addRegularizer(regularizers.L1(scaling=mu * lam, step=tuned),
                                 linearOp=G_for_ps)
            psObj.run(nblocks=10, maxIterations=maxIter, verbose=False, keepHistory=False,
                      primalTol=0.0, dualTol=0.0, blockActivation="greedy")
            results[(i, 'ps2fembed_g')] = psObj.getObjective()


        if False:
            psObj = ps.ProjSplitFit(gamma)
            psObj.addData(X, y, loss2use, linearOp=H, normalize=False,
                          process=lp.Forward1Backtrack(),
                          embed=regularizers.L1(scaling=(1 - mu) * lam))
    embed = False
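    # embed=False: the (1 - mu) * lam L1 penalty is added with addRegularizer instead
    # of being embedded in the loss processor (see the if/else below)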

    if run2f:
        print("2f")
        gamma2f = gamma2fs[i]
        t0 = time.time()
        psObj = ps.ProjSplitFit(gamma2f)

        if embed:
            psObj.addData(X,
                          y,
                          loss2use,
                          linearOp=H,
                          normalize=False,
                          process=lp.Forward2Backtrack(),
                          embed=regularizers.L1(scaling=(1 - mu) * lam))
        else:
            psObj.addData(X,
                          y,
                          loss2use,
                          linearOp=H,
                          normalize=False,
                          process=lp.Forward2Backtrack())
            psObj.addRegularizer(regularizers.L1(scaling=(1 - mu) * lam),
                                 linearOp=H)

        (nbeta, ngamma) = H.shape
        shape = (ngamma - 1, ngamma)
        G_for_ps = sl.LinearOperator(shape,
                                     matvec=lambda x: x[:-1],
Example #5
#from matplotlib import pyplot as plt

# True: recompute the reference optima (CVXPY and the utils helpers) and rebuild the
# cache; False: load previously computed values from the pickled cache.
if getNewOptVals:
    from utils import runCVX_lasso
    from utils import getLSdata
    import cvxpy as cvx
    cache = {}
else:
    np.random.seed(1)
    with open('results/cache_L1LS', 'rb') as file:
        cache = pickle.load(file)

stepsize = 1e-1
f2fixed = lp.Forward2Fixed(stepsize)
f1fixed = lp.Forward1Fixed(stepsize)
f2bt = lp.Forward2Backtrack(growFactor=1.1, growFreq=10)
f2affine = lp.Forward2Affine()
f1bt = lp.Forward1Backtrack()


@pytest.mark.parametrize("processor,testNumber", [(f2fixed, 0), (f2bt, 1),
                                                  (f2affine, 2), (f1fixed, 3),
                                                  (f1bt, 4)])
def test_user_defined_embedded(processor, testNumber):
    def val1(x):
        return 0.5 * np.linalg.norm(x, 2)**2

    def prox1(x, scale):
        # prox of scale * 0.5*||x||^2: argmin_z { scale*0.5*||z||^2 + 0.5*||z - x||^2 } = x / (1 + scale)
        return (1 + scale)**(-1) * x

    def val2(x):
Example #6
print(f"frb running time: {outfrb.times[-1]}")
print("================")

print("running ProjSplitFit")
X = sp.load_npz('data/trip_advisor/S_train.npz') # training matrix
H = sp.load_npz('data/trip_advisor/S_A.npz')     # this matrix is called H
y = np.load('data/trip_advisor/y_train.npy')     # training labels

f_ps2fg = []
t_ps2fg = []
f_psbg = []
t_psbg = []
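# f_* / t_* collect the objective-value and timing histories from getHistory();
# erg selects plain iterates (False) or an ergodic average ('simple' / 'weighted')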
for erg in [False, 'simple', 'weighted']:
    t0 = time.time()
    psObj = ps.ProjSplitFit(gamma2fg)
    psObj.addData(X, y, 2, linearOp=H, normalize=False, process=lp.Forward2Backtrack())
    psObj.addRegularizer(regularizers.L1(scaling=(1 - mu) * lam), linearOp=H)
    (nbeta, ngamma) = H.shape
    shape = (ngamma - 1, ngamma)
    G_for_ps = sl.LinearOperator(shape,
                                 matvec=lambda x: x[:-1],
                                 rmatvec=lambda x: np.concatenate((x, np.array([0]))))
    psObj.addRegularizer(regularizers.L1(scaling=mu * lam), linearOp=G_for_ps)
    psObj.run(nblocks=10, maxIterations=iterOverwrite, verbose=False, keepHistory=True,
              historyFreq=1, primalTol=0.0, dualTol=0.0, ergodic=erg)
    f_ps2fg.append(psObj.getHistory()[0])
    t_ps2fg.append(psObj.getHistory()[1])
    t1 = time.time()
    print(f"ps2fbt_g total running time {t1-t0}")

    t0 = time.time()
    psObj = ps.ProjSplitFit(gammabg)
    psObj.addData(X, y, 2, linearOp=H, normalize=False, process=lp.BackwardCG())