Example No. 1
import numpy as np
import pandas as pd
import spams
from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import StratifiedKFold


def SRIG(pathway_id_and_filepath_and_graph_struct_and_lambda):
    # Single packed-tuple argument so the function can be mapped over a worker pool.
    # `nodes`, `srig_predict` and `num_correct` are assumed module-level helpers.
    pathway_id, filepath, graph, lambda1 = pathway_id_and_filepath_and_graph_struct_and_lambda

    print()
    print('-----------------')
    print(pathway_id)
    print('lambda1 = ' + str(lambda1))
    print()

    # we had done dataset.to_csv(filename, index=True, header=True)
    dataset = pd.read_csv(filepath, index_col=0)
    y = LabelEncoder().fit_transform(dataset.index.tolist())
    Y = np.asfortranarray(np.expand_dims(y, axis=1)).astype(float)
    Y = spams.normalize(Y)

    # Reorder columns to match the node order used to build the graph structure.
    dataset = dataset.transpose().reindex(index=nodes).transpose()
    X = np.asfortranarray(dataset.values).astype(float)
    X = spams.normalize(X)

    features = []
    accuracies = []

    for train, test in StratifiedKFold(n_splits=10).split(X, y):

        print()
        print('fold')
        print()

        W0 = np.zeros((X.shape[1], Y.shape[1]), dtype=np.float64, order="F")

        # Fit on the training fold only; SPAMS expects Fortran-ordered arrays.
        Xtr = np.asfortranarray(X[train])
        Ytr = np.asfortranarray(Y[train])
        (W, optim_info) = spams.fistaGraph(Ytr,
                                           Xtr,
                                           W0,
                                           graph,
                                           loss='square',
                                           regul='graph',
                                           lambda1=lambda1,
                                           return_optim_info=True)

        yhat = srig_predict(X[test], W)
        num_cor = num_correct(y[test], yhat)
        accuracy = num_cor / float(len(test))

        # flatten so the per-fold weight vectors stack into the DataFrame below
        features.append(W.ravel())
        accuracies.append(accuracy)

    features = pd.DataFrame(features, columns=dataset.columns)
    features = features.columns[(features != 0).any()].tolist()

    return pathway_id, accuracies, features
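
Because SRIG takes a single packed tuple, it can be mapped directly over a process pool. The driver below is a minimal sketch and not part of the original code: pathways (a pathway_id -> CSV path mapping), the shared graph struct and the lambda value 0.1 are assumed placeholders.

from multiprocessing import Pool

# Hypothetical inputs: pathway_id -> CSV path, one shared graph struct, one lambda value.
jobs = [(pid, path, graph, 0.1) for pid, path in pathways.items()]

with Pool() as pool:
    for pathway_id, accuracies, selected in pool.map(SRIG, jobs):
        print(pathway_id, sum(accuracies) / len(accuracies), len(selected))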
			epoch_res["Xwrt_r%d_t%d"%(r,t)] = dc(Xwrt)
			for n in range(N):
				DSn = (i * N + n)
				DSw = (i * W )
				Dstack.rows[DSn] = range(DSw,DSw + W)
				Dstack.data[DSn] = Xwrt[n*W:n*W + W]
				epoch_res["Dstack_%d"%DSn] = dc(Dstack[DSn,:])
			i+=1 
	
	Yntrflat = array([ (Y-b_hat-Y_mean).transpose([2,1,0]).flatten()]).T
	Yspams = asfortranarray(Yntrflat)
	Xspams = Dstack.tocsc()
	wr0 = asfortranarray(zeros((Xspams.shape[1],Yspams.shape[1])))
	
	wr = spams.fistaGraph(
		Yspams, Xspams, wr0, graph,False,**graphparams
	)
	epoch_res["D"] = {}
	epoch_res["D"]['params'] = dc(graphparams)
	epoch_res["D"]['Xspams'] = dc(Xspams)
	epoch_res["D"]['Yspams'] = dc(Yspams)
	epoch_res["D"]['wr0'] = dc(wr0)
	epoch_res["D"]['wr'] = dc(wr)
	w_hat = wr.reshape([R,T,W])
	Yest = diagonal(diagonal(w_hat.dot(D),axis1=1,axis2=2),axis1=0,axis2=2)

	D = np.diagonal(np.tensordot(u_hat, X, axes=([2],[2])), axis1=3, axis2=0)
	Yest = diagonal(
		diagonal(
			w_hat.dot(D),axis1=1,axis2=2
		),axis1=0,axis2=2
            # Fragment: assumes star-style imports in the source module (array,
            # asfortranarray, zeros, diagonal from numpy; norm from numpy.linalg;
            # dc = copy.deepcopy) and enclosing loops over r and t that are not shown.
            Xwrt = Xwr.dot(u_hat[r, t, :])
            epoch_res["Xwrt_r%d_t%d" % (r, t)] = dc(Xwrt)
            for n in range(N):
                DSn = (i * N + n)
                DSw = (i * W)
                Dstack.rows[DSn] = range(DSw, DSw + W)
                Dstack.data[DSn] = Xwrt[n * W:n * W + W]
                epoch_res["Dstack_%d" % DSn] = dc(Dstack[DSn, :])
            i += 1

    Yntrflat = array([(Y - b_hat - Y_mean).transpose([2, 1, 0]).flatten()]).T
    Yspams = asfortranarray(Yntrflat)
    Xspams = Dstack.tocsc()
    wr0 = asfortranarray(zeros((Xspams.shape[1], Yspams.shape[1])))

    wr = spams.fistaGraph(Yspams, Xspams, wr0, graph, False, **graphparams)
    epoch_res["D"] = {}
    epoch_res["D"]['params'] = dc(graphparams)
    epoch_res["D"]['Xspams'] = dc(Xspams)
    epoch_res["D"]['Yspams'] = dc(Yspams)
    epoch_res["D"]['wr0'] = dc(wr0)
    epoch_res["D"]['wr'] = dc(wr)
    w_hat = wr.reshape([R, T, W])
    Yest = diagonal(diagonal(w_hat.dot(D), axis1=1, axis2=2), axis1=0, axis2=2)

    D = np.diagonal(np.tensordot(u_hat, X, axes=([2], [2])), axis1=3, axis2=0)
    Yest = diagonal(diagonal(w_hat.dot(D), axis1=1, axis2=2), axis1=0, axis2=2)
    flatwhat = array([w_hat.transpose([0, 1, 2]).flatten()]).T
    regulFlatwhat = regulGroups(flatwhat, groups_var)
    errorAfterWord = norm(Yest + b_hat - Y -
                          Y_mean) / 2 + regulFlatwhat * graphparams['lambda1']
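
For reference, with regul='graph' the penalty SPAMS minimizes is a weighted sum of l-infinity norms over the (possibly overlapping) groups encoded in groups_var, which is presumably what a helper such as regulGroups evaluates above. A minimal, hypothetical re-implementation of that evaluation (the function name and the default unit weights are assumptions):

import numpy as np

def regul_groups(w, groups_var, eta_g=None):
    # Hypothetical stand-in for regulGroups: sum over groups g of
    # eta_g[g] * max_j |w[j]| for the variables j belonging to group g.
    w = np.asarray(w, dtype=float).ravel()
    gv = groups_var.toarray() if hasattr(groups_var, "toarray") else np.asarray(groups_var)
    gv = gv.astype(bool)                       # (n_variables, n_groups) membership matrix
    if eta_g is None:
        eta_g = np.ones(gv.shape[1])           # unit group weights by default
    return sum(eta_g[g] * np.abs(w[gv[:, g]]).max()
               for g in range(gv.shape[1]) if gv[:, g].any())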
Example No. 5
import time

import numpy as np
import scipy.sparse as ssp
import spams

# The SPAMS test suite defines a shared float type; double precision is assumed here.
myfloat = np.float64


def test_fistaGraph():
    np.random.seed(0)
    num_threads = -1  # all cores (-1 by default)
    verbose = False   # verbosity, false by default
    lambda1 = 0.1  # regularization term
    it0 = 1      # frequency for duality gap computations
    max_it = 100  # maximum number of iterations
    L0 = 0.1
    tol = 1e-5
    intercept = False
    pos = False

    eta_g = np.array([1, 1, 1, 1, 1], dtype=myfloat)

    groups = ssp.csc_matrix(np.array([[0, 0, 0, 1, 0],
                                      [0, 0, 0, 0, 0],
                                      [0, 0, 0, 0, 0],
                                      [0, 0, 0, 0, 0],
                                      [0, 0, 1, 0, 0]], dtype=bool), dtype=bool)

    groups_var = ssp.csc_matrix(np.array([[1, 0, 0, 0, 0],
                                          [1, 0, 0, 0, 0],
                                          [1, 0, 0, 0, 0],
                                          [1, 1, 0, 0, 0],
                                          [0, 1, 0, 1, 0],
                                          [0, 1, 0, 1, 0],
                                          [0, 1, 0, 0, 1],
                                          [0, 0, 0, 0, 1],
                                          [0, 0, 0, 0, 1],
                                          [0, 0, 1, 0, 0]], dtype=bool), dtype=bool)

    graph = {'eta_g': eta_g, 'groups': groups, 'groups_var': groups_var}
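    # Rough shape of the SPAMS graph struct used by fistaGraph/proximalGraph:
    #   eta_g      - one positive weight per group (5 groups here),
    #   groups     - ngroups x ngroups boolean matrix encoding inclusion relations
    #                between groups,
    #   groups_var - nvariables x ngroups boolean matrix mapping each of the 10
    #                variables to the groups that contain it.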

    verbose = True
    X = np.asfortranarray(np.random.normal(size=(100, 10)))
    X = np.asfortranarray(
        X - np.tile(np.mean(X, 0), (X.shape[0], 1)), dtype=myfloat)
    X = spams.normalize(X)
    Y = np.asfortranarray(np.random.normal(size=(100, 1)))
    Y = np.asfortranarray(
        Y - np.tile(np.mean(Y, 0), (Y.shape[0], 1)), dtype=myfloat)
    Y = spams.normalize(Y)
    W0 = np.zeros((X.shape[1], Y.shape[1]), dtype=myfloat, order="F")
    # Regression experiments
    # 100 regression problems with the same design matrix X.
    print('\nVarious regression experiments')
    compute_gram = True
#
    print('\nFISTA + Regression graph')
    loss = 'square'
    regul = 'graph'
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y, X, W0, graph, True, numThreads=num_threads, verbose=verbose,
        lambda1=lambda1, it0=it0, max_it=max_it, L0=L0, tol=tol,
        intercept=intercept, pos=pos, compute_gram=compute_gram,
        loss=loss, regul=regul)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), t, np.mean(optim_info[3, :])))
#
    print('\nADMM + Regression graph')
    admm = True
    lin_admm = True
    c = 1
    delta = 1
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y, X, W0, graph, True, numThreads=num_threads, verbose=verbose,
        lambda1=lambda1, it0=it0, max_it=max_it, L0=L0, tol=tol,
        intercept=intercept, pos=pos, compute_gram=compute_gram,
        loss=loss, regul=regul, admm=admm, lin_admm=lin_admm, c=c, delta=delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), t, np.mean(optim_info[3, :])))
#
    admm = False
    max_it = 5
    it0 = 1
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y, X, W0, graph, True, numThreads=num_threads, verbose=verbose,
        lambda1=lambda1, it0=it0, max_it=max_it, L0=L0, tol=tol,
        intercept=intercept, pos=pos, compute_gram=compute_gram,
        loss=loss, regul=regul, admm=admm, lin_admm=lin_admm, c=c, delta=delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), t, np.mean(optim_info[3, :])))
#
#  works also with non graph-structured regularization. graph is ignored
    print('\nFISTA + Regression Fused-Lasso')
    regul = 'fused-lasso'
    lambda2 = 0.01
    lambda3 = 0.01
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y, X, W0, graph, True, numThreads=num_threads, verbose=verbose,
        lambda1=lambda1, it0=it0, max_it=max_it, L0=L0, tol=tol,
        intercept=intercept, pos=pos, compute_gram=compute_gram,
        loss=loss, regul=regul, admm=admm, lin_admm=lin_admm, c=c,
        lambda2=lambda2, lambda3=lambda3, delta=delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, time: %f, number of iterations: %f' %
          (np.mean(optim_info[0, :]), t, np.mean(optim_info[3, :])))
#
    print('\nFISTA + Regression graph with intercept')
    regul = 'graph'
    intercept = True
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y, X, W0, graph, True, numThreads=num_threads, verbose=verbose,
        lambda1=lambda1, it0=it0, max_it=max_it, L0=L0, tol=tol,
        intercept=intercept, pos=pos, compute_gram=compute_gram,
        loss=loss, regul=regul, admm=admm, lin_admm=lin_admm, c=c,
        lambda2=lambda2, lambda3=lambda3, delta=delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), t, np.mean(optim_info[3, :])))
    intercept = False

# Classification
    print('\nOne classification experiment')
    Y = np.asfortranarray(
        2 * np.asfortranarray(np.random.normal(size=(100, Y.shape[1])) > 0, dtype=myfloat) - 1)
    print('\nFISTA +  Logistic + graph-linf')
    loss = 'logistic'
    regul = 'graph'
    lambda1 = 0.01
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y, X, W0, graph, True, numThreads=num_threads, verbose=verbose,
        lambda1=lambda1, it0=it0, max_it=max_it, L0=L0, tol=tol,
        intercept=intercept, pos=pos, compute_gram=compute_gram,
        loss=loss, regul=regul, admm=admm, lin_admm=lin_admm, c=c,
        lambda2=lambda2, lambda3=lambda3, delta=delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), t, np.mean(optim_info[3, :])))
#
# can be used of course with other regularization functions, intercept,...

# Multi-Class classification

    Y = np.asfortranarray(
        np.ceil(5 * np.random.random(size=(100, Y.shape[1]))) - 1, dtype=myfloat)
    loss = 'multi-logistic'
    regul = 'graph'
    print('\nFISTA + Multi-Class Logistic + graph')
    nclasses = np.max(Y) + 1
    W0 = np.zeros((X.shape[1], int(nclasses) * Y.shape[1]),
                  dtype=myfloat, order="F")
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y, X, W0, graph, True, numThreads=num_threads, verbose=verbose,
        lambda1=lambda1, it0=it0, max_it=max_it, L0=L0, tol=tol,
        intercept=intercept, pos=pos, compute_gram=compute_gram,
        loss=loss, regul=regul, admm=admm, lin_admm=lin_admm, c=c,
        lambda2=lambda2, lambda3=lambda3, delta=delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), t, np.mean(optim_info[3, :])))
#
# can be used of course with other regularization functions, intercept,...
# Multi-Task regression
    Y = np.asfortranarray(np.random.normal(size=(100, Y.shape[1])))
    Y = np.asfortranarray(
        Y - np.tile(np.mean(Y, 0), (Y.shape[0], 1)), dtype=myfloat)
    Y = spams.normalize(Y)
    W0 = np.zeros((X.shape[1], Y.shape[1]), dtype=myfloat, order="F")
    compute_gram = False
    verbose = True
    loss = 'square'
    print('\nFISTA + Regression multi-task-graph')
    regul = 'multi-task-graph'
    lambda2 = 0.01
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y, X, W0, graph, True, numThreads=num_threads, verbose=verbose,
        lambda1=lambda1, it0=it0, max_it=max_it, L0=L0, tol=tol,
        intercept=intercept, pos=pos, compute_gram=compute_gram,
        loss=loss, regul=regul, admm=admm, lin_admm=lin_admm, c=c,
        lambda2=lambda2, lambda3=lambda3, delta=delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), t, np.mean(optim_info[3, :])))
#
# Multi-Task Classification
    print('\nFISTA + Logistic + multi-task-graph')
    regul = 'multi-task-graph'
    lambda2 = 0.01
    loss = 'logistic'
    Y = np.asfortranarray(
        2 * np.asfortranarray(np.random.normal(size=(100, Y.shape[1])) > 0, dtype=myfloat) - 1)
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y, X, W0, graph, True, numThreads=num_threads, verbose=verbose,
        lambda1=lambda1, it0=it0, max_it=max_it, L0=L0, tol=tol,
        intercept=intercept, pos=pos, compute_gram=compute_gram,
        loss=loss, regul=regul, admm=admm, lin_admm=lin_admm, c=c,
        lambda2=lambda2, lambda3=lambda3, delta=delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), t, np.mean(optim_info[3, :])))
# Multi-Class + Multi-Task Regularization
    verbose = False
    print('\nFISTA + Multi-Class Logistic + multi-task-graph')
    Y = np.asfortranarray(
        np.ceil(5 * np.random.random(size=(100, Y.shape[1]))) - 1, dtype=myfloat)
    loss = 'multi-logistic'
    regul = 'multi-task-graph'
    nclasses = np.max(Y) + 1
    W0 = np.zeros((X.shape[1], int(nclasses) * Y.shape[1]),
                  dtype=myfloat, order="F")
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y, X, W0, graph, True, numThreads=num_threads, verbose=verbose,
        lambda1=lambda1, it0=it0, max_it=max_it, L0=L0, tol=tol,
        intercept=intercept, pos=pos, compute_gram=compute_gram,
        loss=loss, regul=regul, admm=admm, lin_admm=lin_admm, c=c,
        lambda2=lambda2, lambda3=lambda3, delta=delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), t, np.mean(optim_info[3, :])))
# can be used of course with other regularization functions, intercept,...

    return None
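
Each experiment above repeats the same reporting line; a small helper like the following (illustrative, not part of the original test) would collapse them. It only restates what the prints already use: row 0 of optim_info is the loss, row 2 the relative duality gap and row 3 the iteration count.

def report(optim_info, elapsed):
    # Summarize fistaGraph's optim_info exactly as the prints above do.
    return ('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f'
            % (np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), elapsed,
               np.mean(optim_info[3, :])))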
Example No. 6
    def _call(self, x, y, w0):
        # y, x, w0 map to the (Y, X, W0) arguments of spams.fistaGraph; the
        # positional False disables return_optim_info, so only W is returned.
        w = spams.fistaGraph(y, x, w0, self.graph, False, **self.params)
        return w
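
Example No. 6 shows only the solver call of a larger estimator class. The wrapper below is a hypothetical sketch of the surrounding context; the class name, constructor and fit method are illustrative, and only the _call body and the graph/params attributes come from the snippet.

import numpy as np
import spams


class GraphFistaEstimator:
    # Hypothetical wrapper: stores the SPAMS graph struct and fistaGraph keyword args.

    def __init__(self, graph, **params):
        self.graph = graph            # dict with eta_g, groups, groups_var
        self.params = params          # e.g. loss='square', regul='graph', lambda1=0.1

    def _call(self, x, y, w0):
        # Same call as in the example: positional False disables return_optim_info.
        w = spams.fistaGraph(y, x, w0, self.graph, False, **self.params)
        return w

    def fit(self, x, y):
        w0 = np.asfortranarray(np.zeros((x.shape[1], y.shape[1])))
        self.coef_ = self._call(x, y, w0)
        return self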