Example #1
# Imports reconstructed for this snippet; `nodes` (the graph's node list),
# `srig_predict`, and `num_correct` are module-level helpers in the original
# project and are assumed to be defined elsewhere.
import numpy as np
import pandas as pd
import spams
from sklearn.model_selection import StratifiedKFold
from sklearn.preprocessing import LabelEncoder


def SRIG(pathway_id_and_filepath_and_graph_struct_and_lambda):

    pathway_id, filepath, graph, lambda1 = pathway_id_and_filepath_and_graph_struct_and_lambda

    print()
    print('-----------------')
    print(pathway_id)
    # `sparsity_low`/`sparsity_high` were not defined in this snippet;
    # report the unpacked regularization strength instead.
    print('lambda1 = ' + str(lambda1))
    print()

    # we had done dataset.to_csv(filename, index=True, header=True)
    dataset = pd.read_csv(filepath, index_col=0)
    y = LabelEncoder().fit_transform(dataset.index.tolist())
    Y = np.asfortranarray(np.expand_dims(y, axis=1)).astype(float)
    Y = spams.normalize(Y)

    # align the columns with the graph's node order
    dataset = dataset.reindex(columns=nodes)
    X = np.asfortranarray(dataset.values).astype(float)
    X = spams.normalize(X)

    features = []
    accuracies = []

    for train, test in StratifiedKFold(n_splits=10).split(X, y):

        print()
        print('fold')
        print()

        W0 = np.zeros((X.shape[1], Y.shape[1]), dtype=np.float64, order="F")

        # fit on the training split only; spams expects Fortran-ordered
        # arrays, so re-wrap the row subsets
        Xtr = np.asfortranarray(X[train])
        Ytr = np.asfortranarray(Y[train])
        (W, optim_info) = spams.fistaGraph(Ytr,
                                           Xtr,
                                           W0,
                                           graph,
                                           loss='square',
                                           regul='graph',
                                           lambda1=lambda1,
                                           return_optim_info=True)

        yhat = srig_predict(X[test], W)
        num_cor = num_correct(y[test], yhat)
        accuracy = num_cor / float(len(test))

        features.append(W)
        accuracies.append(accuracy)

    # one row of weights per fold; keep the names of features that were
    # selected (nonzero) in at least one fold
    features = pd.DataFrame([w.ravel() for w in features], columns=dataset.columns)
    features = features.columns[(features != 0).any()].tolist()

    return pathway_id, accuracies, features
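
A minimal sketch of how this worker might be driven; the job tuples and the graph_structs lookup are illustrative placeholders, not names from the original project:

from multiprocessing import Pool

# one (pathway_id, filepath, graph, lambda1) tuple per pathway -- hypothetical values
jobs = [('hsa04110', 'hsa04110.csv', graph_structs['hsa04110'], 0.1),
        ('hsa04151', 'hsa04151.csv', graph_structs['hsa04151'], 0.1)]

with Pool(processes=4) as pool:
    for pathway_id, accuracies, selected in pool.map(SRIG, jobs):
        print(pathway_id, np.mean(accuracies), len(selected))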
Example #2
def ksvd(X, lambda1=None, model=None, return_model=False, n_atoms=100,
         n_iter=1000, n_jobs=8):
    if model is None:
        D = np.random.randn(X.shape[1], n_atoms)
        D = spams.normalize(np.asfortranarray(D, dtype=D.dtype))

        A = np.random.randn(n_atoms, X.shape[0])
    else:
        D = model[0].T
        A = model[1].T

    E = np.zeros(n_iter)
    for i in range(n_iter):
        print(i)
        # Update code; spams.omp returns a scipy.sparse matrix, so densify
        # it before the per-atom updates below
        A = np.asarray(spams.omp(np.asfortranarray(X.T), D, L=lambda1,
                                 numThreads=n_jobs).todense())
        # Update Dico  --- should be parallelized
        # Update dictionary, one atom at a time --- could be parallelized
        for k in np.arange(n_atoms):
            print(k, A.shape, D.shape, np.dot(D, A).shape, X.T.shape)
            res = ksvd_optim_dk(X.T, D, A, k)
            if res is not None:
                D[:, k] = res[0]
                A[k, res[2]] = res[1]
        E[i] = ((X.T - np.dot(D, A))**2).sum()
        print('ksvd iter', i, ' --> ', E[i])
    
    if return_model:
        return D.T, [A.T, D.T]
    
    return D.T, None
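
The helper ksvd_optim_dk is not shown in this example. A minimal sketch of the classic K-SVD rank-1 atom update, assuming the calling convention implied above (res[0] = new atom, res[1] = new coefficients, res[2] = indices of the signals that use atom k):

def ksvd_optim_dk(Xt, D, A, k):
    """Hypothetical reconstruction of the rank-1 update of atom k."""
    idx = np.nonzero(A[k, :])[0]        # signals whose code uses atom k
    if idx.size == 0:
        return None
    # residual without atom k's contribution, restricted to those signals
    Ek = Xt[:, idx] - np.dot(D, A[:, idx]) + np.outer(D[:, k], A[k, idx])
    # best rank-1 approximation: new atom = top left singular vector
    U, s, Vt = np.linalg.svd(Ek, full_matrices=False)
    return U[:, 0], s[0] * Vt[0, :], idx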
Example #3
def test_fistaFlat():
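    # Note: `myfloat` (the float dtype under test, e.g. np.float64) and the
    # `Xtest1` wrapper, which evaluates and times the given expression, are
    # module-level helpers from the SPAMS test suite, defined elsewhere.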
    param = {'numThreads' : 1,'verbose' : True,
             'lambda1' : 0.05, 'it0' : 10, 'max_it' : 200,
             'L0' : 0.1, 'tol' : 1e-3, 'intercept' : False,
             'pos' : False}
    np.random.seed(0)
    m = 100
    n = 200
    X = np.asfortranarray(np.random.normal(size = (m,n)))
    X = np.asfortranarray(X - np.tile(np.mean(X,0),(X.shape[0],1)),dtype=myfloat)
    X = spams.normalize(X)
    Y = np.asfortranarray(np.random.normal(size = (m,1)))
    Y = np.asfortranarray(Y - np.tile(np.mean(Y,0),(Y.shape[0],1)),dtype=myfloat)
    Y = spams.normalize(Y)
    W0 = np.zeros((X.shape[1],Y.shape[1]),dtype=myfloat,order="FORTRAN")
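    # As the prints below show, each column of optim_info describes one of
    # the regression problems: row 0 is the final objective value, row 2 the
    # relative duality gap, and row 3 the number of iterations performed.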
    # Regression experiments 
    # 100 regression problems with the same design matrix X.
    print('\nVarious regression experiments')
    param['compute_gram'] = True
    print('\nFISTA + Regression l1')
    param['loss'] = 'square'
    param['regul'] = 'l1'
    # param.regul='group-lasso-l2';
    # param.size_group=10;
    (W, optim_info) = Xtest1('spams','spams.fistaFlat(Y,X,W0,True,**param)',locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' %(np.mean(optim_info[0,:],0),np.mean(optim_info[2,:],0),np.mean(optim_info[3,:],0)))
###
    print('\nISTA + Regression l1')
    param['ista'] = True
    (W, optim_info) = Xtest1('spams','spams.fistaFlat(Y,X,W0,True,**param)',locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f\n' %(np.mean(optim_info[0,:]),np.mean(optim_info[2,:]),np.mean(optim_info[3,:])))
##
    print('\nSubgradient Descent + Regression l1')
    param['ista'] = False
    param['subgrad'] = True
    param['a'] = 0.1
    param['b'] = 1000 # arbitrary parameters
    max_it = param['max_it']
    it0 = param['it0']
    param['max_it'] = 500
    param['it0'] = 50
    (W, optim_info) = Xtest1('spams','spams.fistaFlat(Y,X,W0,True,**param)',locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f\n' %(np.mean(optim_info[0,:]),np.mean(optim_info[2,:]),np.mean(optim_info[3,:])))
    param['subgrad'] = False
    param['max_it'] = max_it
    param['it0'] = it0

###
    print('\nFISTA + Regression l2')
    param['regul'] = 'l2'
    (W, optim_info) = Xtest1('spams','spams.fistaFlat(Y,X,W0,True,**param)',locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f\n' %(np.mean(optim_info[0,:]),np.mean(optim_info[2,:]),np.mean(optim_info[3,:])))
###
    print('\nFISTA + Regression l2 + sparse feature matrix')
    param['regul'] = 'l2'
    (W, optim_info) = Xtest1('spams','spams.fistaFlat(Y,ssp.csc_matrix(X),W0,True,**param)',locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[2,:]),np.mean(optim_info[3,:])))
###########

    print('\nFISTA + Regression Elastic-Net')
    param['regul'] = 'elastic-net'
    param['lambda2'] = 0.1
    (W, optim_info) = Xtest1('spams','spams.fistaFlat(Y,X,W0,True,**param)',locals())
    print('mean loss: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[3,:])))

    print('\nFISTA + Group Lasso L2')
    param['regul'] = 'group-lasso-l2'
    param['size_group'] = 2
    (W, optim_info) = Xtest1('spams','spams.fistaFlat(Y,X,W0,True,**param)',locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' %(np.mean(optim_info[0,:],0),np.mean(optim_info[2,:],0),np.mean(optim_info[3,:],0)))
    
    print('\nFISTA + Group Lasso L2 with variable size of groups')
    param['regul'] = 'group-lasso-l2'
    param2 = param.copy()
    param2['groups'] = np.array(np.random.randint(1,6,X.shape[1]),dtype = np.int32)
    param2['lambda1'] *= 10
    # use param2, which actually carries the variable-size groups
    (W, optim_info) = Xtest1('spams','spams.fistaFlat(Y,X,W0,True,**param2)',locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' %(np.mean(optim_info[0,:],0),np.mean(optim_info[2,:],0),np.mean(optim_info[3,:],0)))

    print('\nFISTA + Trace Norm')
    param['regul'] = 'trace-norm-vec'
    param['size_group'] = 5
    (W, optim_info) = Xtest1('spams','spams.fistaFlat(Y,X,W0,True,**param)',locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[2,:],0),np.mean(optim_info[3,:])))
    
####    
   
    print('\nFISTA + Regression Fused-Lasso')
    param['regul'] = 'fused-lasso'
    param['lambda2'] = 0.1
    param['lambda3'] = 0.1
    (W, optim_info) = Xtest1('spams','spams.fistaFlat(Y,X,W0,True,**param)',locals())
    print('mean loss: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[3,:])))

    print('\nFISTA + Regression no regularization')
    param['regul'] = 'none'
    (W, optim_info) = Xtest1('spams','spams.fistaFlat(Y,X,W0,True,**param)',locals())
    print('mean loss: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[3,:])))
    
    
    print('\nFISTA + Regression l1 with intercept')
    param['intercept'] = True
    param['regul'] = 'l1'
    x1 = np.asfortranarray(np.concatenate((X,np.ones((X.shape[0],1))),1),dtype=myfloat)
    W01 = np.asfortranarray(np.concatenate((W0,np.zeros((1,W0.shape[1]))),0),dtype=myfloat)
    (W, optim_info) = Xtest1('spams','spams.fistaFlat(Y,x1,W01,True,**param)',locals()) # adds a column of ones to X for the intercept
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[2,:]),np.mean(optim_info[3,:])))

    print('\nFISTA + Regression l1 with intercept + non-negative')
    param['pos'] = True
    param['regul'] = 'l1'
    x1 = np.asfortranarray(np.concatenate((X,np.ones((X.shape[0],1))),1),dtype=myfloat)
    W01 = np.asfortranarray(np.concatenate((W0,np.zeros((1,W0.shape[1]))),0),dtype=myfloat)
    (W, optim_info) = Xtest1('spams','spams.fistaFlat(Y,x1,W01,True,**param)',locals())
    print('mean loss: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[3,:])))
    param['pos'] = False
    param['intercept'] = False

    print('\nISTA + Regression l0')
    param['regul'] = 'l0'
    (W, optim_info) = Xtest1('spams','spams.fistaFlat(Y,X,W0,True,**param)',locals())
    print('mean loss: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[3,:])))
    
# Classification
    
    print('\nOne classification experiment')
#*    Y = 2 * double(randn(100,1) > 0)-1
    Y = np.asfortranarray(2 * np.asarray(np.random.normal(size = (100,1)) > 0,dtype=myfloat) - 1)
    print('\nFISTA + Logistic l1')
    param['regul'] = 'l1'
    param['loss'] = 'logistic'
    param['lambda1'] = 0.01
    (W, optim_info) = Xtest1('spams','spams.fistaFlat(Y,X,W0,True,**param)',locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[2,:]),np.mean(optim_info[3,:])))
# can be used of course with other regularization functions, intercept,...
    param['regul'] = 'l1'
    param['loss'] = 'weighted-logistic'
    param['lambda1'] = 0.01
    (W, optim_info) = Xtest1('spams','spams.fistaFlat(Y,X,W0,True,**param)',locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[2,:]),np.mean(optim_info[3,:])))
# can be used of course with other regularization functions, intercept,...
    
    print('\nFISTA + Logistic l1 + sparse matrix')
    param['loss'] = 'logistic'
    (W, optim_info) = Xtest1('spams','spams.fistaFlat(Y,ssp.csc_matrix(X),W0,True,**param)',locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[2,:]),np.mean(optim_info[3,:])))
# can be used of course with other regularization functions, intercept,...
    

# Multi-Class classification
    Y = np.asfortranarray(np.ceil(5 * np.random.random(size = (100,1000))) - 1,dtype=myfloat)
    param['loss'] = 'multi-logistic'
    print('\nFISTA + Multi-Class Logistic l1')
    nclasses = int(np.max(Y)) + 1
    W0 = np.zeros((X.shape[1],nclasses * Y.shape[1]),dtype=myfloat,order="F")
    (W, optim_info) = Xtest1('spams','spams.fistaFlat(Y,X,W0,True,**param)',locals())

    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[2,:]),np.mean(optim_info[3,:])))
# can be used of course with other regularization functions, intercept,...
    
    
# Multi-Task regression
    Y = np.asfortranarray(np.random.normal(size = (100,100)),dtype=myfloat)
    Y = np.asfortranarray(Y - np.tile(np.mean(Y,0),(Y.shape[0],1)),dtype=myfloat)
    Y = spams.normalize(Y)
    param['compute_gram'] = False
    W0 = np.zeros((X.shape[1],Y.shape[1]),dtype=myfloat,order="F")
    param['loss'] = 'square'
    print('\nFISTA + Regression l1l2')
    param['regul'] = 'l1l2'
    (W, optim_info) = Xtest1('spams','spams.fistaFlat(Y,X,W0,True,**param)',locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[2,:]),np.mean(optim_info[3,:])))
    
    print('\nFISTA + Regression l1linf')
    param['regul'] = 'l1linf'
    (W, optim_info) = Xtest1('spams','spams.fistaFlat(Y,X,W0,True,**param)',locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[2,:]),np.mean(optim_info[3,:])))
    
    
    print('\nFISTA + Regression l1l2 + l1')
    param['regul'] = 'l1l2+l1'
    param['lambda2'] = 0.1
    (W, optim_info) = Xtest1('spams','spams.fistaFlat(Y,X,W0,True,**param)',locals())
    print('mean loss: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[3,:])))
    
    
    print('\nFISTA + Regression l1linf + l1')
    param['regul'] = 'l1linf+l1'
    param['lambda2'] = 0.1
    (W, optim_info) = Xtest1('spams','spams.fistaFlat(Y,X,W0,True,**param)',locals())
    print('mean loss: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[3,:])))
    
    
    print('\nFISTA + Regression l1linf + row + columns')
    param['regul'] = 'l1linf-row-column'
    param['lambda2'] = 0.1
    (W, optim_info) = Xtest1('spams','spams.fistaFlat(Y,X,W0,True,**param)',locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[2,:]),np.mean(optim_info[3,:])))
    
# Multi-Task Classification
    
    print('\nFISTA + Logistic + l1l2')
    param['regul'] = 'l1l2'
    param['loss'] = 'logistic'
#*    Y = 2*double(randn(100,100) > 0)-1
    Y = np.asfortranarray(2 * np.asarray(np.random.normal(size = (100,100)) > 0,dtype=myfloat) - 1)
    (W, optim_info) = Xtest1('spams','spams.fistaFlat(Y,X,W0,True,**param)',locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[2,:]),np.mean(optim_info[3,:])))
# Multi-Class + Multi-Task Regularization
    
    
    print('\nFISTA + Multi-Class Logistic l1l2')
#*    Y = double(ceil(5*rand(100,1000))-1)
    Y = np.asfortranarray(np.ceil(5 * np.random.random(size = (100,1000))) - 1,dtype=myfloat)
    param['loss'] = 'multi-logistic'
    param['regul'] = 'l1l2'
    # note: the labels are kept integer-valued here; normalizing Y would
    # break the multi-logistic loss
    nclasses = int(np.max(Y)) + 1
    W0 = np.zeros((X.shape[1],nclasses * Y.shape[1]),dtype=myfloat,order="F")
    (W, optim_info) = Xtest1('spams','spams.fistaFlat(Y,X,W0,True,**param)',locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[2,:]),np.mean(optim_info[3,:])))
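
For readers who only need a single lasso solve, a self-contained reduction of the experiment above (a sketch, not part of the original test) looks like this:

import numpy as np
import spams

np.random.seed(0)
X = spams.normalize(np.asfortranarray(np.random.normal(size=(100, 200))))
Y = spams.normalize(np.asfortranarray(np.random.normal(size=(100, 1))))
W0 = np.zeros((X.shape[1], Y.shape[1]), dtype=np.float64, order="F")

# FISTA on 0.5*||Y - X*W||^2 + lambda1*||W||_1
W = spams.fistaFlat(Y, X, W0, loss='square', regul='l1',
                    lambda1=0.05, max_it=200, tol=1e-3)
print('nonzero coefficients:', int((W != 0).sum()))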
Example #4
def test_fistaTree():
    param = {'numThreads' : -1,'verbose' : False,
             'lambda1' : 0.001, 'it0' : 10, 'max_it' : 200,
             'L0' : 0.1, 'tol' : 1e-5, 'intercept' : False,
             'pos' : False}
    np.random.seed(0)
    m = 100
    n = 10
    X = np.asfortranarray(np.random.normal(size = (m,n)))
    X = np.asfortranarray(X - np.tile(np.mean(X,0),(X.shape[0],1)),dtype=myfloat)
    X = spams.normalize(X)
    Y = np.asfortranarray(np.random.normal(size = (m,m)))
    Y = np.asfortranarray(Y - np.tile(np.mean(Y,0),(Y.shape[0],1)),dtype=myfloat)
    Y = spams.normalize(Y)
    W0 = np.zeros((X.shape[1],Y.shape[1]),dtype=myfloat,order="FORTRAN")
    own_variables =  np.array([0,0,3,5,6,6,8,9],dtype=np.int32)
    N_own_variables =  np.array([0,3,2,1,0,2,1,1],dtype=np.int32)
    eta_g = np.array([1,1,1,2,2,2,2.5,2.5],dtype=myfloat)
    groups = np.asfortranarray([[0, 0, 0, 0, 0, 0, 0, 0],
              [1, 0, 0, 0, 0, 0, 0, 0],
              [0, 1, 0, 0, 0, 0, 0, 0],
              [0, 1, 0, 0, 0, 0, 0, 0],
              [1, 0, 0, 0, 0, 0, 0, 0],
              [0, 0, 0, 0, 1, 0, 0, 0],
              [0, 0, 0, 0, 1, 0, 0, 0],
              [0, 0, 0, 0, 0, 0, 1, 0]],dtype=bool)
    groups = ssp.csc_matrix(groups,dtype=bool)
    tree = {'eta_g': eta_g,'groups' : groups,'own_variables' : own_variables,
            'N_own_variables' : N_own_variables}
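    # Reading the tree structure: eta_g gives one weight per group; `groups`
    # is a sparse ngroups x ngroups matrix whose nonzeros encode which group
    # is a child of which; own_variables[i] is the index of the first
    # variable owned directly by group i, and N_own_variables[i] how many
    # consecutive variables it owns (variables must be numbered so that each
    # group's own variables are contiguous). See the SPAMS docs for details.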
    print('\nVarious regression experiments')
    param['compute_gram'] = True

    print('\nFISTA + Regression tree-l2')
    param['loss'] = 'square'
    param['regul'] = 'tree-l2'
    (W, optim_info) = Xtest1('spams','spams.fistaTree(Y,X,W0,tree,True,**param)',locals())
    print('mean loss: %f, number of iterations: %f' %(np.mean(optim_info[0,:],0),np.mean(optim_info[3,:],0)))
###
    print('\nFISTA + Regression tree-linf')
    param['regul'] = 'tree-linf'
    (W, optim_info) = Xtest1('spams','spams.fistaTree(Y,X,W0,tree,True,**param)',locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' %(np.mean(optim_info[0,:],0),np.mean(optim_info[2,:]),np.mean(optim_info[3,:],0)))
###
# works also with non tree-structured regularization. tree is ignored
    print('\nFISTA + Regression Fused-Lasso')
    param['regul'] = 'fused-lasso'
    param['lambda2'] = 0.001
    param['lambda3'] = 0.001
    (W, optim_info) = Xtest1('spams','spams.fistaTree(Y,X,W0,tree,True,**param)',locals())
    print('mean loss: %f, number of iterations: %f' %(np.mean(optim_info[0,:],0),np.mean(optim_info[3,:],0)))
###
    print('\nISTA + Regression tree-l0')
    param['regul'] = 'tree-l0'
    (W, optim_info) = Xtest1('spams','spams.fistaTree(Y,X,W0,tree,True,**param)',locals())
    print('mean loss: %f, number of iterations: %f' %(np.mean(optim_info[0,:],0),np.mean(optim_info[3,:],0)))
###
    print('\nFISTA + Regression tree-l2 with intercept')
    param['intercept'] = True
    param['regul'] = 'tree-l2'
    x1 = np.asfortranarray(np.concatenate((X,np.ones((X.shape[0],1))),1),dtype=myfloat)
    W01 = np.asfortranarray(np.concatenate((W0,np.zeros((1,W0.shape[1]))),0),dtype=myfloat)
    (W, optim_info) = Xtest1('spams','spams.fistaTree(Y,x1,W01,tree,True,**param)',locals())
    print('mean loss: %f, number of iterations: %f' %(np.mean(optim_info[0,:],0),np.mean(optim_info[3,:],0)))
###
    param['intercept'] = False

#    Classification

    print('\nOne classification experiment')
    Y = np.asfortranarray(2 * np.asarray(np.random.normal(size = (100,Y.shape[1])) > 0,dtype=myfloat) - 1)
    print('\nFISTA + Logistic + tree-linf')
    param['regul'] = 'tree-linf'
    param['loss'] = 'logistic'
    param['lambda1'] = 0.001
    (W, optim_info) = Xtest1('spams','spams.fistaTree(Y,X,W0,tree,True,**param)',locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' %(np.mean(optim_info[0,:],0),np.mean(optim_info[2,:]),np.mean(optim_info[3,:],0)))
###
# can be used of course with other regularization functions, intercept,...

#  Multi-Class classification
    Y = np.asfortranarray(np.ceil(5 * np.random.random(size = (100,Y.shape[1]))) - 1,dtype=myfloat)
    param['loss'] = 'multi-logistic'
    param['regul'] = 'tree-l2'
    print('\nFISTA + Multi-Class Logistic + tree-l2')
    nclasses = int(np.max(Y)) + 1
    W0 = np.zeros((X.shape[1],nclasses * Y.shape[1]),dtype=myfloat,order="F")
    (W, optim_info) = Xtest1('spams','spams.fistaTree(Y,X,W0,tree,True,**param)',locals())
    print('mean loss: %f, number of iterations: %f' %(np.mean(optim_info[0,:],0),np.mean(optim_info[3,:],0)))
# can be used of course with other regularization functions, intercept,...

# Multi-Task regression
    Y = np.asfortranarray(np.random.normal(size = (100,100)))
    Y = np.asfortranarray(Y - np.tile(np.mean(Y,0),(Y.shape[0],1)),dtype=myfloat)
    Y = spams.normalize(Y)
    param['compute_gram'] = False
    param['verbose'] = True   # verbosity, False by default
    W0 = np.zeros((X.shape[1],Y.shape[1]),dtype=myfloat,order="F")
    param['loss'] = 'square'
    print('\nFISTA + Regression multi-task-tree')
    param['regul'] = 'multi-task-tree'
    param['lambda2'] = 0.001
    (W, optim_info) = Xtest1('spams','spams.fistaTree(Y,X,W0,tree,True,**param)',locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' %(np.mean(optim_info[0,:],0),np.mean(optim_info[2,:]),np.mean(optim_info[3,:],0)))

# Multi-Task Classification
    print('\nFISTA + Logistic + multi-task-tree')
    param['regul'] = 'multi-task-tree'
    param['lambda2'] = 0.001
    param['loss'] = 'logistic'
    Y = np.asfortranarray(2 * np.asarray(np.random.normal(size = (100,Y.shape[1])) > 0,dtype=myfloat) - 1)
    (W, optim_info) = Xtest1('spams','spams.fistaTree(Y,X,W0,tree,True,**param)',locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' %(np.mean(optim_info[0,:],0),np.mean(optim_info[2,:]),np.mean(optim_info[3,:],0)))

#  Multi-Class + Multi-Task Regularization
    param['verbose'] = False
    print('\nFISTA + Multi-Class Logistic +multi-task-tree')
    Y = np.asfortranarray(np.ceil(5 * np.random.random(size = (100,Y.shape[1]))) - 1,dtype=myfloat)
    param['loss'] = 'multi-logistic'
    param['regul'] = 'multi-task-tree'
    nclasses = int(np.max(Y)) + 1
    W0 = np.zeros((X.shape[1],nclasses * Y.shape[1]),dtype=myfloat,order="F")
    (W, optim_info) = Xtest1('spams','spams.fistaTree(Y,X,W0,tree,True,**param)',locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' %(np.mean(optim_info[0,:],0),np.mean(optim_info[2,:]),np.mean(optim_info[3,:],0)))
# can be used of course with other regularization functions, intercept,...

    print('\nFISTA + Multi-Class Logistic +multi-task-tree + sparse matrix')
    nclasses = int(np.max(Y)) + 1
    W0 = np.zeros((X.shape[1],nclasses * Y.shape[1]),dtype=myfloat,order="F")
    X2 = ssp.csc_matrix(X)
    (W, optim_info) = Xtest1('spams','spams.fistaTree(Y,X2,W0,tree,True,**param)',locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' %(np.mean(optim_info[0,:],0),np.mean(optim_info[2,:]),np.mean(optim_info[3,:],0)))

    return None
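
The same tree dictionary can also be fed to the proximal operator directly. A short sketch, assuming the tree structure and myfloat defined above (spams.proximalTree applies the prox of the chosen tree-structured penalty to each column of its input):

U = np.asfortranarray(np.random.normal(size=(10, 1)), dtype=myfloat)
# proximal operator of the tree-l2 penalty on a random 10-variable vector
V = spams.proximalTree(U, tree, False, lambda1=0.1, regul='tree-l2')
print(V.ravel())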
Example #5
def test_fistaGraph():
    np.random.seed(0)
    num_threads = -1 # all cores (-1 by default)
    verbose = False   # verbosity, false by default
    lambda1 = 0.1 # regularization term
    it0 = 1      # frequency for duality gap computations
    max_it = 100 # maximum number of iterations
    L0 = 0.1
    tol = 1e-5
    intercept = False
    pos = False

    eta_g = np.array([1, 1, 1, 1, 1],dtype=myfloat)

    groups = ssp.csc_matrix(np.array([[0, 0, 0, 1, 0],
                       [0, 0, 0, 0, 0],
                       [0, 0, 0, 0, 0],
                       [0, 0, 0, 0, 0],
                       [0, 0, 1, 0, 0]],dtype=bool),dtype=bool)

    groups_var = ssp.csc_matrix(np.array([[1, 0, 0, 0, 0],
                           [1, 0, 0, 0, 0],
                           [1, 0, 0, 0, 0],
                           [1, 1, 0, 0, 0],
                           [0, 1, 0, 1, 0],
                           [0, 1, 0, 1, 0],
                           [0, 1, 0, 0, 1],
                           [0, 0, 0, 0, 1],
                           [0, 0, 0, 0, 1],
                           [0, 0, 1, 0, 0]],dtype=bool),dtype=bool)

    graph = {'eta_g': eta_g,'groups' : groups,'groups_var' : groups_var}
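    # Reading the graph structure: eta_g holds one weight per group;
    # `groups` (ngroups x ngroups) encodes inclusion relations between the
    # 5 groups, and `groups_var` (nvariables x ngroups) marks which of the
    # 10 variables belong to each group -- column j is group j's support.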

    verbose = True
    X = np.asfortranarray(np.random.normal(size = (100,10)))
    X = np.asfortranarray(X - np.tile(np.mean(X,0),(X.shape[0],1)),dtype=myfloat)
    X = spams.normalize(X)
    Y = np.asfortranarray(np.random.normal(size = (100,1)))
    Y = np.asfortranarray(Y - np.tile(np.mean(Y,0),(Y.shape[0],1)),dtype=myfloat)
    Y = spams.normalize(Y)
    W0 = np.zeros((X.shape[1],Y.shape[1]),dtype=myfloat,order="FORTRAN")
    # Regression experiments 
    # 100 regression problems with the same design matrix X.
    print('\nVarious regression experiments')
    compute_gram = True
#
    print('\nFISTA + Regression graph')
    loss = 'square'
    regul = 'graph'
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y,X,W0,graph,True,numThreads = num_threads,verbose = verbose,
        lambda1 = lambda1,it0 = it0,max_it = max_it,L0 = L0,tol = tol,
        intercept = intercept,pos = pos,compute_gram = compute_gram,
        loss = loss,regul = regul)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[2,:]),t,np.mean(optim_info[3,:])))
#
    print('\nADMM + Regression graph')
    admm = True
    lin_admm = True
    c = 1
    delta = 1
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y,X,W0,graph,True,numThreads = num_threads,verbose = verbose,
        lambda1 = lambda1,it0 = it0,max_it = max_it,L0 = L0,tol = tol,
        intercept = intercept,pos = pos,compute_gram = compute_gram,
        loss = loss,regul = regul,admm = admm,lin_admm = lin_admm,c = c,delta = delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[2,:]),t,np.mean(optim_info[3,:])))
#
    admm = False
    max_it = 5
    it0 = 1
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y,X,W0,graph,True,numThreads = num_threads,verbose = verbose,
        lambda1 = lambda1,it0 = it0,max_it = max_it,L0 = L0,tol = tol,
        intercept = intercept,pos = pos,compute_gram = compute_gram,
        loss = loss,regul = regul,admm = admm,lin_admm = lin_admm,c = c,delta = delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[2,:]),t,np.mean(optim_info[3,:])))
#
#  works also with non graph-structured regularization. graph is ignored
    print('\nFISTA + Regression Fused-Lasso')
    regul = 'fused-lasso'
    lambda2 = 0.01
    lambda3 = 0.01
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y,X,W0,graph,True,numThreads = num_threads,verbose = verbose,
        lambda1 = lambda1,it0 = it0,max_it = max_it,L0 = L0,tol = tol,
        intercept = intercept,pos = pos,compute_gram = compute_gram,
        loss = loss,regul = regul,admm = admm,lin_admm = lin_admm,c = c,
        lambda2 = lambda2,lambda3 = lambda3,delta = delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, time: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),t,np.mean(optim_info[3,:])))
#
    print('\nFISTA + Regression graph with intercept')
    regul = 'graph'
    intercept = True
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y,X,W0,graph,True,numThreads = num_threads,verbose = verbose,
        lambda1 = lambda1,it0 = it0,max_it = max_it,L0 = L0,tol = tol,
        intercept = intercept,pos = pos,compute_gram = compute_gram,
        loss = loss,regul = regul,admm = admm,lin_admm = lin_admm,c = c,
        lambda2 = lambda2,lambda3 = lambda3,delta = delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[2,:]),t,np.mean(optim_info[3,:])))
    intercept = False

# Classification
    print('\nOne classification experiment')
    Y = np.asfortranarray( 2 * np.asfortranarray(np.random.normal(size = (100,Y.shape[1])) > 0,dtype = myfloat) -1)
    print('\nFISTA + Logistic + graph-linf')
    loss = 'logistic'
    regul = 'graph'
    lambda1 = 0.01
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y,X,W0,graph,True,numThreads = num_threads,verbose = verbose,
        lambda1 = lambda1,it0 = it0,max_it = max_it,L0 = L0,tol = tol,
        intercept = intercept,pos = pos,compute_gram = compute_gram,
        loss = loss,regul = regul,admm = admm,lin_admm = lin_admm,c = c,
        lambda2 = lambda2,lambda3 = lambda3,delta = delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[2,:]),t,np.mean(optim_info[3,:])))
#
# can be used of course with other regularization functions, intercept,...

# Multi-Class classification
    
    Y = np.asfortranarray(np.ceil(5 * np.random.random(size = (100,Y.shape[1]))) - 1,dtype=myfloat)
    loss = 'multi-logistic'
    regul = 'graph'
    print('\nFISTA + Multi-Class Logistic + graph')
    nclasses = int(np.max(Y)) + 1
    W0 = np.zeros((X.shape[1],nclasses * Y.shape[1]),dtype=myfloat,order="F")
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y,X,W0,graph,True,numThreads = num_threads,verbose = verbose,
        lambda1 = lambda1,it0 = it0,max_it = max_it,L0 = L0,tol = tol,
        intercept = intercept,pos = pos,compute_gram = compute_gram,
        loss = loss,regul = regul,admm = admm,lin_admm = lin_admm,c = c,
        lambda2 = lambda2,lambda3 = lambda3,delta = delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[2,:]),t,np.mean(optim_info[3,:])))
#
# can be used of course with other regularization functions, intercept,...
# Multi-Task regression
    Y = np.asfortranarray(np.random.normal(size = (100,Y.shape[1])))
    Y = np.asfortranarray(Y - np.tile(np.mean(Y,0),(Y.shape[0],1)),dtype=myfloat)
    Y = spams.normalize(Y)
    W0 = np.zeros((X.shape[1],Y.shape[1]),dtype=myfloat,order="F")
    compute_gram = False
    verbose = True
    loss = 'square'
    print('\nFISTA + Regression multi-task-graph')
    regul = 'multi-task-graph'
    lambda2 = 0.01
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y,X,W0,graph,True,numThreads = num_threads,verbose = verbose,
        lambda1 = lambda1,it0 = it0,max_it = max_it,L0 = L0,tol = tol,
        intercept = intercept,pos = pos,compute_gram = compute_gram,
        loss = loss,regul = regul,admm = admm,lin_admm = lin_admm,c = c,
        lambda2 = lambda2,lambda3 = lambda3,delta = delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[2,:]),t,np.mean(optim_info[3,:])))
#
# Multi-Task Classification
    print('\nFISTA + Logistic + multi-task-graph')
    regul = 'multi-task-graph'
    lambda2 = 0.01
    loss = 'logistic'
    Y = np.asfortranarray( 2 * np.asfortranarray(np.random.normal(size = (100,Y.shape[1])) > 0,dtype = myfloat) -1)
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y,X,W0,graph,True,numThreads = num_threads,verbose = verbose,
        lambda1 = lambda1,it0 = it0,max_it = max_it,L0 = L0,tol = tol,
        intercept = intercept,pos = pos,compute_gram = compute_gram,
        loss = loss,regul = regul,admm = admm,lin_admm = lin_admm,c = c,
        lambda2 = lambda2,lambda3 = lambda3,delta = delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[2,:]),t,np.mean(optim_info[3,:])))
# Multi-Class + Multi-Task Regularization
    verbose = False
    print('\nFISTA + Multi-Class Logistic + multi-task-graph')
    Y = np.asfortranarray(np.ceil(5 * np.random.random(size = (100,Y.shape[1]))) - 1,dtype=myfloat)
    loss = 'multi-logistic'
    regul = 'multi-task-graph'
    nclasses = int(np.max(Y)) + 1
    W0 = np.zeros((X.shape[1],nclasses * Y.shape[1]),dtype=myfloat,order="F")
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y,X,W0,graph,True,numThreads = num_threads,verbose = verbose,
        lambda1 = lambda1,it0 = it0,max_it = max_it,L0 = L0,tol = tol,
        intercept = intercept,pos = pos,compute_gram = compute_gram,
        loss = loss,regul = regul,admm = admm,lin_admm = lin_admm,c = c,
        lambda2 = lambda2,lambda3 = lambda3,delta = delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' %(np.mean(optim_info[0,:]),np.mean(optim_info[2,:]),t,np.mean(optim_info[3,:])))
# can be used of course with other regularization functions, intercept,...

    return None
Example #6
    def get_x_y_estimated_beta(self):
        """
        Reference:
        ---------
        http://spams-devel.gforge.inria.fr/doc-python/html/doc_spams006.html#toc23
        """
        shape = (4, 4, 1)
        num_samples = 10
        coefficient = 0.05

        num_ft = shape[0] * shape[1] * shape[2]
        X = np.random.random((num_samples, num_ft))
        beta = np.random.random((num_ft, 1))
        # y = dot(X, beta) + noise
        y = np.dot(X, beta) + np.random.random((num_samples, 1)) * 0.0001

        try:
            import spams
            # Normalization for X
            X = np.asfortranarray(X)
            X = np.asfortranarray(X - np.tile(
                                  np.mean(X, 0),
                                  (X.shape[0], 1)))
            X = spams.normalize(X)
            # Normalization for y
            y = np.asfortranarray(y)
            y = np.asfortranarray(y - np.tile(
                                  np.mean(y, 0),
                                  (y.shape[0], 1)))
            y = spams.normalize(y)
            weight0 = np.zeros((X.shape[1], y.shape[1]),
                               dtype=np.float64,
                               order="FORTRAN")
            param = {'numThreads': 1, 'verbose': True,
                 'lambda1': coefficient, 'it0': 10, 'max_it': 200,
                 'L0': 0.1, 'tol': 1e-3, 'intercept': False,
                 'pos': False}
            param['compute_gram'] = True
            param['loss'] = 'square'
            param['regul'] = 'l2'
            (weight_ridge, optim_info) = spams.fistaFlat(y,
                                                  X,
                                                  weight0,
                                                  True,
                                                  **param)
            param['regul'] = 'l1'
            (weight_l1, optim_info) = spams.fistaFlat(y,
                                                 X,
                                                 weight0,
                                                 True,
                                                 **param)
#            print "X = ", repr(X)
#            print "y = ", repr(y)
#            print "weight_ridge =", repr(weight_ridge)
#            print "weight_l1 =", repr(weight_l1)
        except ImportError:
            # TODO: Don't use print directly.
            print "Cannot import spams. Default values will be used."
            X = np.asarray([
           [ 0.26856766,  0.30620391,  0.26995615,  0.3806023 ,  0.41311465,
            -0.24685479,  0.34108499, -0.22786788, -0.2267594 ,  0.30325884,
            -0.00382229,  0.3503643 ,  0.21786749, -0.15275043, -0.24074157,
            -0.25639825],
           [-0.14305316, -0.19553497,  0.45250255, -0.17317269, -0.00304901,
             0.43838073,  0.01606735,  0.09267714,  0.47763275,  0.23234948,
             0.38694597,  0.72591941,  0.21028899,  0.42317021,  0.276003  ,
             0.42198486],
           [-0.08738645,  0.10795947,  0.45813373, -0.34232048,  0.43621128,
            -0.36984753,  0.16555311,  0.55188325, -0.48169657, -0.52844883,
             0.15140672,  0.06074575, -0.36873621,  0.23679974,  0.47195386,
            -0.09728514],
           [ 0.16461237,  0.30299873, -0.32108348, -0.53918274,  0.02287831,
             0.01105383, -0.11124968,  0.18629018,  0.30017151, -0.04217922,
            -0.46066699, -0.33612491, -0.52611772, -0.25397362, -0.27198468,
            -0.42883518],
           [ 0.4710195 ,  0.35047152, -0.07990029,  0.34911632,  0.07206932,
            -0.20270895, -0.0684226 , -0.18958745, -0.08433092,  0.14453963,
             0.28095469, -0.35894296,  0.11680455, -0.37598039, -0.28331446,
            -0.00825299],
           [-0.420528  , -0.74469306,  0.22732681,  0.34362884,  0.16006124,
            -0.29691759,  0.27029047, -0.31077084, -0.048071  ,  0.36495065,
             0.49364453, -0.16903801,  0.07577839, -0.36492748,  0.09448284,
            -0.37055486],
           [ 0.4232946 , -0.26373387, -0.01430445, -0.2353587 , -0.5005603 ,
            -0.35899458,  0.32702596, -0.38311949,  0.31862621, -0.31931012,
            -0.41836583, -0.02855145, -0.50315227, -0.34807958, -0.05252361,
             0.11551424],
           [-0.28443208,  0.07677476, -0.23720305,  0.11056299, -0.48742565,
             0.36772457, -0.56074202,  0.3145033 , -0.22811763,  0.36482173,
            -0.01786535, -0.02929555,  0.35635411,  0.45838473,  0.45853286,
             0.00159594],
           [-0.45779277,  0.10020579, -0.30873257,  0.28114072,  0.18120182,
             0.33333004,  0.17928387,  0.31572323,  0.32902088, -0.10396976,
            -0.33296829,  0.05277326,  0.27139148,  0.18653329,  0.06068255,
            -0.01942451],
           [ 0.06569833, -0.04065228, -0.44669538, -0.17501657, -0.29450165,
             0.32483427, -0.55889145, -0.34973144, -0.35647584, -0.41601239,
            -0.07926316, -0.26784983,  0.14952119,  0.19082353, -0.51309079,
             0.6416559 ]])
            y = np.asarray([
               [ 0.15809895],
               [ 0.69496971],
               [ 0.01214928],
               [-0.39826324],
               [-0.01682498],
               [-0.03372654],
               [-0.45148804],
               [ 0.21735376],
               [ 0.08795349],
               [-0.27022239]])
            weight_ridge = np.asarray([
               [ 0.038558  ],
               [ 0.12605106],
               [ 0.19115798],
               [ 0.07187217],
               [ 0.09472713],
               [ 0.14943554],
               [-0.01968095],
               [ 0.11695959],
               [ 0.15049031],
               [ 0.18930644],
               [ 0.26086626],
               [ 0.23243305],
               [ 0.17425178],
               [ 0.13200238],
               [ 0.11710994],
               [ 0.11272092]])
            weight_l1 = np.asarray([
               [ 0.        ],
               [ 0.02664519],
               [ 0.        ],
               [ 0.        ],
               [ 0.        ],
               [ 0.10357106],
               [ 0.        ],
               [ 0.2103012 ],
               [ 0.00399881],
               [ 0.10815184],
               [ 0.32221254],
               [ 0.49350083],
               [ 0.21351531],
               [ 0.        ],
               [ 0.        ],
               [ 0.        ]])

        ret_data = {}
        ret_data['X'] = X
        ret_data['y'] = y
        ret_data['weight_ridge'] = weight_ridge
        ret_data['weight_l1'] = weight_l1
        ret_data['coefficient'] = coefficient
        ret_data['shape'] = shape
        ret_data['num_samples'] = num_samples
        ret_data['num_ft'] = num_ft

        return ret_data
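
A short sketch of how the returned dictionary might be consumed (the summarize helper is illustrative, not part of the original class):

def summarize(data):
    # compare the sparsity of the two estimates built above
    l1_nnz = int((data['weight_l1'] != 0).sum())
    ridge_nnz = int((data['weight_ridge'] != 0).sum())
    print('l1 nonzeros: %d / %d' % (l1_nnz, data['num_ft']))
    print('ridge nonzeros: %d / %d' % (ridge_nnz, data['num_ft']))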
Example #7
def groupLasso_demo(signal_type, fig_start):
  X,Y,W_actual,groups = generate_data(signal_type)
  #Plotting the actual W
  plt.figure(0+fig_start)
  plt.plot(W_actual)
  plt.title("Original (D = 4096, number groups = 64, active groups = 8)")
  plt.savefig("W_actual_{}.png".format(signal_type) , dpi=300)
  ##### Applying Lasso Regression #####
  # L1 norm is the sum of absolute values of coefficients
  lasso_reg = linear_model.Lasso(alpha=0.5)
  lasso_reg.fit(X, Y)
  W_lasso_reg = lasso_reg.coef_
  ##### Debiasing step #####
  ba = np.argwhere(W_lasso_reg != 0) #Finding where the coefficients are not zero
  X_debiased = X[:, ba]
  W_lasso_reg_debiased = np.linalg.lstsq(X_debiased[:,:,0],Y,rcond=None) #Re-estimate the chosen coefficients using least squares
  W_lasso_reg_debiased_2 = np.zeros((4096))
  W_lasso_reg_debiased_2[ba] = W_lasso_reg_debiased[0]
  lasso_reg_mse = mean_squared_error(W_actual, W_lasso_reg_debiased_2)
  plt.figure(1+fig_start)
  plt.plot(W_lasso_reg_debiased_2)
  plt.title('Standard L1 (debiased 1, regularization param(L1 = 0.5), MSE = {:.4f})'.format(lasso_reg_mse))
  plt.savefig("W_lasso_reg_{}.png".format(signal_type), dpi=300)
  ##### Applying Group Lasso L2 regression #####
  # L2 norm is the square root of sum of squares of coefficients 
  # PNLL(W) = NLL(W) + regularization_parameter * Σ(groups)L2-norm
  group_lassoL2_reg = GroupLasso(
    groups=groups,
    group_reg=3,
    l1_reg=1,
    frobenius_lipschitz=True,
    scale_reg="inverse_group_size",
    subsampling_scheme=1,
    supress_warning=True,
    n_iter=1000,
    tol=1e-3,
  )
  group_lassoL2_reg.fit(X, Y)
  W_groupLassoL2_reg = group_lassoL2_reg.coef_
  ##### Debiasing step #####
  ba = np.argwhere(W_groupLassoL2_reg != 0) #Finding where the coefficients are not zero
  X_debiased = X[:, ba]
  W_group_lassoL2_reg_debiased = np.linalg.lstsq(X_debiased[:,:,0],Y,rcond=None) #Re-estimate the chosen coefficients using least squares
  W_group_lassoL2_reg_debiased_2 = np.zeros((4096))
  W_group_lassoL2_reg_debiased_2[ba] = W_group_lassoL2_reg_debiased[0]
  groupLassoL2_mse = mean_squared_error(W_actual, W_group_lassoL2_reg_debiased_2)
  plt.figure(2+fig_start)
  plt.plot(W_group_lassoL2_reg_debiased_2)
  plt.title('Block-L2 (debiased 1, regularization param(L2 = 3, L1=1), MSE = {:.4f})'.format(groupLassoL2_mse))
  plt.savefig("W_groupLassoL2_reg_{}.png".format(signal_type), dpi=300)
  ##### Applying Group Lasso Linf regression #####
  # To use spams library, it is necessary to convert data to fortran normalized arrays
  # visit http://spams-devel.gforge.inria.fr/ for the documentation of spams library
  # Linf is the supremum of all the coefficients
  # PNLL(W) = NLL(W) + regularization_parameter * Σ(groups)Linf-norm
  X_normalized = np.asfortranarray(X - np.tile(np.mean(X,0),(X.shape[0],1)),dtype=float)
  X_normalized = spams.normalize(X_normalized)
  Y_normalized = np.asfortranarray(Y - np.tile(np.mean(Y,0),(Y.shape[0],1)),dtype=float)
  Y_normalized = spams.normalize(Y_normalized)
  groups_modified = np.concatenate([[i] for i in groups]).reshape(-1, 1)
  W_initial = np.zeros((X_normalized.shape[1],Y_normalized.shape[1]),dtype=float,order="F")
  param = {'numThreads' : -1,'verbose' : True,
  'lambda2' : 3, 'lambda1' : 1, 'max_it' : 500,
  'L0' : 0.1, 'tol' : 1e-2, 'intercept' : False,
  'pos' : False, 'loss' : 'square'}
  param['regul'] = "group-lasso-linf"
  param2=param.copy()
  param['size_group'] = 64
  param2['groups'] = groups_modified
  (W_groupLassoLinf_reg, optim_info) = spams.fistaFlat(Y_normalized,X_normalized,W_initial,True,**param)
  ##### Debiasing step #####
  ba = np.argwhere(W_groupLassoLinf_reg != 0) #Finding where the coefficients are not zero
  X_debiased = X[:, ba[:,0]]
  W_groupLassoLinf_reg_debiased = np.linalg.lstsq(X_debiased,Y,rcond=None) #Re-estimate the chosen coefficients using least squares
  W_group_lassoLinf_reg_debiased_2 = np.zeros((4096))
  W_group_lassoLinf_reg_debiased_2[ba] = W_groupLassoLinf_reg_debiased[0]
  groupLassoLinf_mse = mean_squared_error(W_actual, W_group_lassoLinf_reg_debiased_2)
  plt.figure(3+fig_start)
  axes = plt.gca()
  plt.plot(W_group_lassoLinf_reg_debiased_2)
  plt.title('Block-Linf (debiased 1, regularization param(L2 = 3, L1=1), MSE = {:.4f})'.format(groupLassoLinf_mse))
  plt.savefig("W_groupLassoLinf_reg_{}.png".format(signal_type), dpi=300)
  plt.show()
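
generate_data is not shown in this example. A hypothetical reconstruction consistent with the plot titles (D = 4096 coefficients in 64 groups of 64, of which 8 are active) might look like:

def generate_data(signal_type, n_samples=1024, D=4096, n_groups=64,
                  active_groups=8, seed=0):
  rng = np.random.RandomState(seed)
  group_size = D // n_groups
  groups = np.repeat(np.arange(n_groups), group_size)  # group label per coefficient
  W = np.zeros(D)
  for g in rng.choice(n_groups, active_groups, replace=False):
    # constant blocks for one signal type, random within-group values otherwise
    W[groups == g] = 1.0 if signal_type == 'piecewise_constant' else rng.randn(group_size)
  X = rng.randn(n_samples, D)
  Y = X @ W + 0.01 * rng.randn(n_samples)
  return X, Y, W, groups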
Example #8
def test_fistaTree():
    param = {'numThreads': -1, 'verbose': False,
             'lambda1': 0.001, 'it0': 10, 'max_it': 200,
             'L0': 0.1, 'tol': 1e-5, 'intercept': False,
             'pos': False}
    np.random.seed(0)
    m = 100
    n = 10
    X = np.asfortranarray(np.random.normal(size=(m, n)))
    X = np.asfortranarray(
        X - np.tile(np.mean(X, 0), (X.shape[0], 1)), dtype=myfloat)
    X = spams.normalize(X)
    Y = np.asfortranarray(np.random.normal(size=(m, m)))
    Y = np.asfortranarray(
        Y - np.tile(np.mean(Y, 0), (Y.shape[0], 1)), dtype=myfloat)
    Y = spams.normalize(Y)
    W0 = np.zeros((X.shape[1], Y.shape[1]), dtype=myfloat, order="F")
    own_variables = np.array([0, 0, 3, 5, 6, 6, 8, 9], dtype=np.int32)
    N_own_variables = np.array([0, 3, 2, 1, 0, 2, 1, 1], dtype=np.int32)
    eta_g = np.array([1, 1, 1, 2, 2, 2, 2.5, 2.5], dtype=myfloat)
    groups = np.asfortranarray([[0, 0, 0, 0, 0, 0, 0, 0],
                                [1, 0, 0, 0, 0, 0, 0, 0],
                                [0, 1, 0, 0, 0, 0, 0, 0],
                                [0, 1, 0, 0, 0, 0, 0, 0],
                                [1, 0, 0, 0, 0, 0, 0, 0],
                                [0, 0, 0, 0, 1, 0, 0, 0],
                                [0, 0, 0, 0, 1, 0, 0, 0],
                                [0, 0, 0, 0, 0, 0, 1, 0]], dtype=bool)
    groups = ssp.csc_matrix(groups, dtype=bool)
    tree = {'eta_g': eta_g, 'groups': groups, 'own_variables': own_variables,
            'N_own_variables': N_own_variables}
    print('\nVarious regression experiments')
    param['compute_gram'] = True

    print('\nFISTA + Regression tree-l2')
    param['loss'] = 'square'
    param['regul'] = 'tree-l2'
    (W, optim_info) = Xtest1(
        'spams', 'spams.fistaTree(Y,X,W0,tree,True,**param)', locals())
    print('mean loss: %f, number of iterations: %f' %
          (np.mean(optim_info[0, :], 0), np.mean(optim_info[3, :], 0)))
###
    print('\nFISTA + Regression tree-linf')
    param['regul'] = 'tree-linf'
    (W, optim_info) = Xtest1(
        'spams', 'spams.fistaTree(Y,X,W0,tree,True,**param)', locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :], 0), np.mean(optim_info[2, :]), np.mean(optim_info[3, :], 0)))
###
# works also with non tree-structured regularization. tree is ignored
    print('\nFISTA + Regression Fused-Lasso')
    param['regul'] = 'fused-lasso'
    param['lambda2'] = 0.001
    param['lambda3'] = 0.001
    (W, optim_info) = Xtest1(
        'spams', 'spams.fistaTree(Y,X,W0,tree,True,**param)', locals())
    print('mean loss: %f, number of iterations: %f' %
          (np.mean(optim_info[0, :], 0), np.mean(optim_info[3, :], 0)))
###
    print('\nISTA + Regression tree-l0')
    param['regul'] = 'tree-l0'
    (W, optim_info) = Xtest1(
        'spams', 'spams.fistaTree(Y,X,W0,tree,True,**param)', locals())
    print('mean loss: %f, number of iterations: %f' %
          (np.mean(optim_info[0, :], 0), np.mean(optim_info[3, :], 0)))
###
    print('\nFISTA + Regression tree-l2 with intercept')
    param['intercept'] = True
    param['regul'] = 'tree-l2'
    x1 = np.asfortranarray(np.concatenate(
        (X, np.ones((X.shape[0], 1))), 1), dtype=myfloat)
    W01 = np.asfortranarray(np.concatenate(
        (W0, np.zeros((1, W0.shape[1]))), 0), dtype=myfloat)
    (W, optim_info) = Xtest1(
        'spams', 'spams.fistaTree(Y,x1,W01,tree,True,**param)', locals())
    print('mean loss: %f, number of iterations: %f' %
          (np.mean(optim_info[0, :], 0), np.mean(optim_info[3, :], 0)))
###
    param['intercept'] = False

#    Classification

    print('\nOne classification experiment')
    Y = np.asfortranarray(
        2 * np.asarray(np.random.normal(size=(100, Y.shape[1])) > 0, dtype=myfloat) - 1)
    print('\nFISTA + Logistic + tree-linf')
    param['regul'] = 'tree-linf'
    param['loss'] = 'logistic'
    param['lambda1'] = 0.001
    (W, optim_info) = Xtest1(
        'spams', 'spams.fistaTree(Y,X,W0,tree,True,**param)', locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :], 0), np.mean(optim_info[2, :]), np.mean(optim_info[3, :], 0)))
###
# can be used of course with other regularization functions, intercept,...

#  Multi-Class classification
    Y = np.asfortranarray(
        np.ceil(5 * np.random.random(size=(100, Y.shape[1]))) - 1, dtype=myfloat)
    param['loss'] = 'multi-logistic'
    param['regul'] = 'tree-l2'
    print('\nFISTA + Multi-Class Logistic + tree-l2')
    nclasses = np.max(Y[:])+1
    W0 = np.zeros((X.shape[1], int(nclasses) * Y.shape[1]),
                  dtype=myfloat, order="F")
    (W, optim_info) = Xtest1(
        'spams', 'spams.fistaTree(Y,X,W0,tree,True,**param)', locals())
    print('mean loss: %f, number of iterations: %f' %
          (np.mean(optim_info[0, :], 0), np.mean(optim_info[3, :], 0)))
# can be used of course with other regularization functions, intercept,...

# Multi-Task regression
    Y = np.asfortranarray(np.random.normal(size=(100, 100)))
    Y = np.asfortranarray(
        Y - np.tile(np.mean(Y, 0), (Y.shape[0], 1)), dtype=myfloat)
    Y = spams.normalize(Y)
    param['compute_gram'] = False
    param['verbose'] = True   # verbosity, False by default
    W0 = np.zeros((X.shape[1], Y.shape[1]), dtype=myfloat, order="F")
    param['loss'] = 'square'
    print('\nFISTA + Regression  multi-task-tree')
    param['regul'] = 'multi-task-tree'
    param['lambda2'] = 0.001
    (W, optim_info) = Xtest1(
        'spams', 'spams.fistaTree(Y,X,W0,tree,True,**param)', locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :], 0), np.mean(optim_info[2, :]), np.mean(optim_info[3, :], 0)))

# Multi-Task Classification
    print('\nFISTA + Logistic + multi-task-tree')
    param['regul'] = 'multi-task-tree'
    param['lambda2'] = 0.001
    param['loss'] = 'logistic'
    Y = np.asfortranarray(
        2 * np.asarray(np.random.normal(size=(100, Y.shape[1])) > 0, dtype=myfloat) - 1)
    (W, optim_info) = Xtest1(
        'spams', 'spams.fistaTree(Y,X,W0,tree,True,**param)', locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :], 0), np.mean(optim_info[2, :]), np.mean(optim_info[3, :], 0)))

#  Multi-Class + Multi-Task Regularization
    param['verbose'] = False
    print('\nFISTA + Multi-Class Logistic +multi-task-tree')
    Y = np.asfortranarray(
        np.ceil(5 * np.random.random(size=(100, Y.shape[1]))) - 1, dtype=myfloat)
    param['loss'] = 'multi-logistic'
    param['regul'] = 'multi-task-tree'
    nclasses = np.max(Y[:])+1
    W0 = np.zeros((X.shape[1], int(nclasses) * Y.shape[1]),
                  dtype=myfloat, order="F")
    (W, optim_info) = Xtest1(
        'spams', 'spams.fistaTree(Y,X,W0,tree,True,**param)', locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :], 0), np.mean(optim_info[2, :]), np.mean(optim_info[3, :], 0)))
# can be used of course with other regularization functions, intercept,...

    print('\nFISTA + Multi-Class Logistic +multi-task-tree + sparse matrix')
    nclasses = np.max(Y[:])+1
    W0 = np.zeros((X.shape[1], int(nclasses) * Y.shape[1]),
                  dtype=myfloat, order="F")
    X2 = ssp.csc_matrix(X)
    (W, optim_info) = Xtest1(
        'spams', 'spams.fistaTree(Y,X2,W0,tree,True,**param)', locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :], 0), np.mean(optim_info[2, :]), np.mean(optim_info[3, :], 0)))

    return None
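
Outside the Xtest1 string-expression harness, each of these calls reduces to a direct invocation inside the function, e.g. (a sketch using the names defined above):

(W, optim_info) = spams.fistaTree(Y, X2, W0, tree, True, **param)
print('final mean loss:', np.mean(optim_info[0, :]))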
Example #9
def test_fistaGraph():
    np.random.seed(0)
    num_threads = -1  # all cores (-1 by default)
    verbose = False   # verbosity, false by default
    lambda1 = 0.1  # regularization term
    it0 = 1      # frequency for duality gap computations
    max_it = 100  # maximum number of iterations
    L0 = 0.1
    tol = 1e-5
    intercept = False
    pos = False

    eta_g = np.array([1, 1, 1, 1, 1], dtype=myfloat)

    groups = ssp.csc_matrix(np.array([[0, 0, 0, 1, 0],
                                      [0, 0, 0, 0, 0],
                                      [0, 0, 0, 0, 0],
                                      [0, 0, 0, 0, 0],
                                      [0, 0, 1, 0, 0]], dtype=bool), dtype=bool)

    groups_var = ssp.csc_matrix(np.array([[1, 0, 0, 0, 0],
                                          [1, 0, 0, 0, 0],
                                          [1, 0, 0, 0, 0],
                                          [1, 1, 0, 0, 0],
                                          [0, 1, 0, 1, 0],
                                          [0, 1, 0, 1, 0],
                                          [0, 1, 0, 0, 1],
                                          [0, 0, 0, 0, 1],
                                          [0, 0, 0, 0, 1],
                                          [0, 0, 1, 0, 0]], dtype=bool), dtype=bool)

    graph = {'eta_g': eta_g, 'groups': groups, 'groups_var': groups_var}

    verbose = True
    X = np.asfortranarray(np.random.normal(size=(100, 10)))
    X = np.asfortranarray(
        X - np.tile(np.mean(X, 0), (X.shape[0], 1)), dtype=myfloat)
    X = spams.normalize(X)
    Y = np.asfortranarray(np.random.normal(size=(100, 1)))
    Y = np.asfortranarray(
        Y - np.tile(np.mean(Y, 0), (Y.shape[0], 1)), dtype=myfloat)
    Y = spams.normalize(Y)
    W0 = np.zeros((X.shape[1], Y.shape[1]), dtype=myfloat, order="F")
    # Regression experiments
    # 100 regression problems with the same design matrix X.
    print('\nVarious regression experiments')
    compute_gram = True
#
    print('\nFISTA + Regression graph')
    loss = 'square'
    regul = 'graph'
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y, X, W0, graph, True, numThreads=num_threads, verbose=verbose,
        lambda1=lambda1, it0=it0, max_it=max_it, L0=L0, tol=tol,
        intercept=intercept, pos=pos, compute_gram=compute_gram,
        loss=loss, regul=regul)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), t, np.mean(optim_info[3, :])))
#
    print('\nADMM + Regression graph')
    admm = True
    lin_admm = True
    c = 1
    delta = 1
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y, X, W0, graph, True, numThreads=num_threads, verbose=verbose,
        lambda1=lambda1, it0=it0, max_it=max_it, L0=L0, tol=tol,
        intercept=intercept, pos=pos, compute_gram=compute_gram,
        loss=loss, regul=regul, admm=admm, lin_admm=lin_admm, c=c, delta=delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), t, np.mean(optim_info[3, :])))
#
    admm = False
    max_it = 5
    it0 = 1
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y, X, W0, graph, True, numThreads=num_threads, verbose=verbose,
        lambda1=lambda1, it0=it0, max_it=max_it, L0=L0, tol=tol,
        intercept=intercept, pos=pos, compute_gram=compute_gram,
        loss=loss, regul=regul, admm=admm, lin_admm=lin_admm, c=c, delta=delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), t, np.mean(optim_info[3, :])))
#
#  works also with non graph-structured regularization. graph is ignored
    print('\nFISTA + Regression Fused-Lasso')
    regul = 'fused-lasso'
    lambda2 = 0.01
    lambda3 = 0.01
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y, X, W0, graph, True, numThreads=num_threads, verbose=verbose,
        lambda1=lambda1, it0=it0, max_it=max_it, L0=L0, tol=tol,
        intercept=intercept, pos=pos, compute_gram=compute_gram,
        loss=loss, regul=regul, admm=admm, lin_admm=lin_admm, c=c,
        lambda2=lambda2, lambda3=lambda3, delta=delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, time: %f, number of iterations: %f' %
          (np.mean(optim_info[0, :]), t, np.mean(optim_info[3, :])))
#
    print('\nFISTA + Regression graph with intercept')
    regul = 'graph'
    intercept = True
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y, X, W0, graph, True, numThreads=num_threads, verbose=verbose,
        lambda1=lambda1, it0=it0, max_it=max_it, L0=L0, tol=tol,
        intercept=intercept, pos=pos, compute_gram=compute_gram,
        loss=loss, regul=regul, admm=admm, lin_admm=lin_admm, c=c,
        lambda2=lambda2, lambda3=lambda3, delta=delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), t, np.mean(optim_info[3, :])))
    intercept = False

# Classification
    print('\nOne classification experiment')
    Y = np.asfortranarray(
        2 * np.asfortranarray(np.random.normal(size=(100, Y.shape[1])) > 0, dtype=myfloat) - 1)
    print('\nFISTA +  Logistic + graph-linf')
    loss = 'logistic'
    regul = 'graph'
    lambda1 = 0.01
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y, X, W0, graph, True, numThreads=num_threads, verbose=verbose,
        lambda1=lambda1, it0=it0, max_it=max_it, L0=L0, tol=tol,
        intercept=intercept, pos=pos, compute_gram=compute_gram,
        loss=loss, regul=regul, admm=admm, lin_admm=lin_admm, c=c,
        lambda2=lambda2, lambda3=lambda3, delta=delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), t, np.mean(optim_info[3, :])))
#
# can of course be used with other regularization functions, intercept, ...

# Multi-Class classification

    Y = np.asfortranarray(
        np.ceil(5 * np.random.random(size=(100, Y.shape[1]))) - 1, dtype=myfloat)
    loss = 'multi-logistic'
    regul = 'graph'
    print('\nFISTA + Multi-Class Logistic + graph')
    nclasses = np.max(Y) + 1
    W0 = np.zeros((X.shape[1], int(nclasses) * Y.shape[1]),
                  dtype=myfloat, order="F")
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y, X, W0, graph, True, numThreads=num_threads, verbose=verbose,
        lambda1=lambda1, it0=it0, max_it=max_it, L0=L0, tol=tol,
        intercept=intercept, pos=pos, compute_gram=compute_gram,
        loss=loss, regul=regul, admm=admm, lin_admm=lin_admm, c=c,
        lambda2=lambda2, lambda3=lambda3, delta=delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), t, np.mean(optim_info[3, :])))
#
# can of course be used with other regularization functions, intercept, ...
# Multi-Task regression
    Y = np.asfortranarray(np.random.normal(size=(100, Y.shape[1])))
    Y = np.asfortranarray(
        Y - np.tile(np.mean(Y, 0), (Y.shape[0], 1)), dtype=myfloat)
    Y = spams.normalize(Y)
    W0 = np.zeros((X.shape[1], Y.shape[1]), dtype=myfloat, order="F")
    compute_gram = False
    verbose = True
    loss = 'square'
    print('\nFISTA + Regression multi-task-graph')
    regul = 'multi-task-graph'
    lambda2 = 0.01
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y, X, W0, graph, True, numThreads=num_threads, verbose=verbose,
        lambda1=lambda1, it0=it0, max_it=max_it, L0=L0, tol=tol,
        intercept=intercept, pos=pos, compute_gram=compute_gram,
        loss=loss, regul=regul, admm=admm, lin_admm=lin_admm, c=c,
        lambda2=lambda2, lambda3=lambda3, delta=delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), t, np.mean(optim_info[3, :])))
#
# Multi-Task Classification
    print('\nFISTA + Logistic + multi-task-graph')
    regul = 'multi-task-graph'
    lambda2 = 0.01
    loss = 'logistic'
    Y = np.asfortranarray(
        2 * np.asfortranarray(np.random.normal(size=(100, Y.shape[1])) > 0, dtype=myfloat) - 1)
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y, X, W0, graph, True, numThreads=num_threads, verbose=verbose,
        lambda1=lambda1, it0=it0, max_it=max_it, L0=L0, tol=tol,
        intercept=intercept, pos=pos, compute_gram=compute_gram,
        loss=loss, regul=regul, admm=admm, lin_admm=lin_admm, c=c,
        lambda2=lambda2, lambda3=lambda3, delta=delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), t, np.mean(optim_info[3, :])))
# Multi-Class + Multi-Task Regularization
    verbose = False
    print('\nFISTA + Multi-Class Logistic + multi-task-graph')
    Y = np.asfortranarray(
        np.ceil(5 * np.random.random(size=(100, Y.shape[1]))) - 1, dtype=myfloat)
    loss = 'multi-logistic'
    regul = 'multi-task-graph'
    nclasses = np.max(Y) + 1
    W0 = np.zeros((X.shape[1], int(nclasses) * Y.shape[1]),
                  dtype=myfloat, order="F")
    tic = time.time()
    (W, optim_info) = spams.fistaGraph(
        Y, X, W0, graph, True, numThreads=num_threads, verbose=verbose,
        lambda1=lambda1, it0=it0, max_it=max_it, L0=L0, tol=tol,
        intercept=intercept, pos=pos, compute_gram=compute_gram,
        loss=loss, regul=regul, admm=admm, lin_admm=lin_admm, c=c,
        lambda2=lambda2, lambda3=lambda3, delta=delta)
    tac = time.time()
    t = tac - tic
    print('mean loss: %f, mean relative duality_gap: %f, time: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), t, np.mean(optim_info[3, :])))
# can of course be used with other regularization functions, intercept, ...

    return None
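
Every fistaGraph call above takes a graph argument that must be built beforehand. A minimal sketch of that structure, following the layout described in the SPAMS documentation (the group weights and memberships below are illustrative, not taken from the original script):

import numpy as np
import scipy.sparse as ssp

# 10 variables and 5 (possibly overlapping) groups; eta_g holds one weight per group.
eta_g = np.array([1, 1, 1, 1, 1], dtype=np.float64)
# groups encodes inclusion relations between the groups themselves (hierarchy).
groups = ssp.csc_matrix(np.array([[0, 0, 0, 1, 0],
                                  [0, 0, 0, 0, 0],
                                  [0, 0, 0, 0, 0],
                                  [0, 0, 0, 0, 0],
                                  [0, 0, 1, 0, 0]], dtype=bool), dtype=bool)
# groups_var[v, g] is True when variable v belongs to group g.
groups_var = ssp.csc_matrix(np.array([[1, 0, 0, 0, 0],
                                      [1, 0, 0, 0, 0],
                                      [1, 0, 0, 0, 0],
                                      [1, 1, 0, 0, 0],
                                      [0, 1, 0, 1, 0],
                                      [0, 1, 0, 1, 0],
                                      [0, 1, 0, 0, 0],
                                      [0, 0, 0, 0, 1],
                                      [0, 0, 0, 0, 1],
                                      [0, 0, 1, 0, 1]], dtype=bool), dtype=bool)
graph = {'eta_g': eta_g, 'groups': groups, 'groups_var': groups_var}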
Example No. 10
def test_fistaFlat():
    param = {'numThreads': -1, 'verbose': True,
             'lambda1': 0.05, 'it0': 10, 'max_it': 200,
             'L0': 0.1, 'tol': 1e-3, 'intercept': False,
             'pos': False}
    np.random.seed(0)
    m = 100
    n = 200
    X = np.asfortranarray(np.random.normal(size=(m, n)))
    X = np.asfortranarray(
        X - np.tile(np.mean(X, 0), (X.shape[0], 1)), dtype=myfloat)
    X = spams.normalize(X)
    Y = np.asfortranarray(np.random.normal(size=(m, 1)))
    Y = np.asfortranarray(
        Y - np.tile(np.mean(Y, 0), (Y.shape[0], 1)), dtype=myfloat)
    Y = spams.normalize(Y)
    W0 = np.zeros((X.shape[1], Y.shape[1]), dtype=myfloat, order="F")
    # Regression experiments
    # 100 regression problems with the same design matrix X.
    print('\nVarious regression experiments')
    param['compute_gram'] = True
    print('\nFISTA + Regression l1')
    param['loss'] = 'square'
    param['regul'] = 'l1'
    # param.regul='group-lasso-l2'
    # param.size_group=10
    (W, optim_info) = Xtest1('spams', 'spams.fistaFlat(Y,X,W0,True,**param)', locals())
# print "XX %s" %str(optim_info.shape)
# return None
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :], 0), np.mean(optim_info[2, :], 0), np.mean(optim_info[3, :], 0)))
###
    print('\nISTA + Regression l1')
    param['ista'] = True
    (W, optim_info) = Xtest1('spams', 'spams.fistaFlat(Y,X,W0,True,**param)', locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f\n' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), np.mean(optim_info[3, :])))
##
    print('\nSubgradient Descent + Regression l1')
    param['ista'] = False
    param['subgrad'] = True
    param['a'] = 0.1
    param['b'] = 1000  # arbitrary parameters
    max_it = param['max_it']
    it0 = param['it0']
    param['max_it'] = 500
    param['it0'] = 50
    (W, optim_info) = Xtest1('spams', 'spams.fistaFlat(Y,X,W0,True,**param)', locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f\n' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), np.mean(optim_info[3, :])))
    param['subgrad'] = False
    param['max_it'] = max_it
    param['it0'] = it0

###
    print('\nFISTA + Regression l2')
    param['regul'] = 'l2'
    (W, optim_info) = Xtest1('spams', 'spams.fistaFlat(Y,X,W0,True,**param)', locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f\n' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), np.mean(optim_info[3, :])))
###
    print('\nFISTA + Regression l2 + sparse feature matrix')
    param['regul'] = 'l2'
    (W, optim_info) = Xtest1(
        'spams', 'spams.fistaFlat(Y,ssp.csc_matrix(X),W0,True,**param)', locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), np.mean(optim_info[3, :])))
###########

    print('\nFISTA + Regression Elastic-Net')
    param['regul'] = 'elastic-net'
    param['lambda2'] = 0.1
    (W, optim_info) = Xtest1('spams', 'spams.fistaFlat(Y,X,W0,True,**param)', locals())
    print('mean loss: %f, number of iterations: %f' %
          (np.mean(optim_info[0, :]), np.mean(optim_info[3, :])))

    print('\nFISTA + Group Lasso L2')
    param['regul'] = 'group-lasso-l2'
    param['size_group'] = 2
    (W, optim_info) = Xtest1('spams', 'spams.fistaFlat(Y,X,W0,True,**param)', locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :], 0), np.mean(optim_info[2, :], 0), np.mean(optim_info[3, :], 0)))

    print('\nFISTA + Group Lasso L2 with variable size of groups')
    param['regul'] = 'group-lasso-l2'
    param2 = param.copy()
    param2['groups'] = np.array(np.random.randint(
        1, 5+1, X.shape[1]), dtype=np.int32)
    param2['lambda1'] *= 10
    (W, optim_info) = Xtest1('spams', 'spams.fistaFlat(Y,X,W0,True,**param2)', locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :], 0), np.mean(optim_info[2, :], 0), np.mean(optim_info[3, :], 0)))

    print('\nFISTA + Trace Norm')
    param['regul'] = 'trace-norm-vec'
    param['size_group'] = 5
    (W, optim_info) = Xtest1('spams', 'spams.fistaFlat(Y,X,W0,True,**param)', locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :], 0), np.mean(optim_info[3, :])))

####

    print('\nFISTA + Regression Fused-Lasso')
    param['regul'] = 'fused-lasso'
    param['lambda2'] = 0.1
    param['lambda3'] = 0.1
    (W, optim_info) = Xtest1('spams', 'spams.fistaFlat(Y,X,W0,True,**param)', locals())
    print('mean loss: %f, number of iterations: %f' %
          (np.mean(optim_info[0, :]), np.mean(optim_info[3, :])))

    print('\nFISTA + Regression no regularization')
    param['regul'] = 'none'
    (W, optim_info) = Xtest1('spams', 'spams.fistaFlat(Y,X,W0,True,**param)', locals())
    print('mean loss: %f, number of iterations: %f' %
          (np.mean(optim_info[0, :]), np.mean(optim_info[3, :])))

    print('\nFISTA + Regression l1 with intercept ')
    param['intercept'] = True
    param['regul'] = 'l1'
    x1 = np.asfortranarray(np.concatenate(
        (X, np.ones((X.shape[0], 1))), 1), dtype=myfloat)
    W01 = np.asfortranarray(np.concatenate(
        (W0, np.zeros((1, W0.shape[1]))), 0), dtype=myfloat)
    (W, optim_info) = Xtest1('spams', 'spams.fistaFlat(Y,x1,W01,True,**param)',
                             locals())  # a column of ones was appended to X for the intercept
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), np.mean(optim_info[3, :])))

    print('\nFISTA + Regression l1 with intercept+ non-negative ')
    param['pos'] = True
    param['regul'] = 'l1'
    x1 = np.asfortranarray(np.concatenate(
        (X, np.ones((X.shape[0], 1))), 1), dtype=myfloat)
    W01 = np.asfortranarray(np.concatenate(
        (W0, np.zeros((1, W0.shape[1]))), 0), dtype=myfloat)
    (W, optim_info) = Xtest1(
        'spams', 'spams.fistaFlat(Y,x1,W01,True,**param)', locals())
    print('mean loss: %f, number of iterations: %f' %
          (np.mean(optim_info[0, :]), np.mean(optim_info[3, :])))
    param['pos'] = False
    param['intercept'] = False

    print('\nISTA + Regression l0')
    param['regul'] = 'l0'
    (W, optim_info) = Xtest1('spams', 'spams.fistaFlat(Y,X,W0,True,**param)', locals())
    print('mean loss: %f, number of iterations: %f' %
          (np.mean(optim_info[0, :]), np.mean(optim_info[3, :])))

# Classification

    print('\nOne classification experiment')
# *    Y = 2 * double(randn(100,1) > 0)-1
    Y = np.asfortranarray(
        2 * np.asarray(np.random.normal(size=(100, 1)) > 0, dtype=myfloat) - 1)
    print('\nFISTA + Logistic l1')
    param['regul'] = 'l1'
    param['loss'] = 'logistic'
    param['lambda1'] = 0.01
    (W, optim_info) = Xtest1('spams', 'spams.fistaFlat(Y,X,W0,True,**param)', locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), np.mean(optim_info[3, :])))
# can of course be used with other regularization functions, intercept, ...
    param['regul'] = 'l1'
    param['loss'] = 'weighted-logistic'
    param['lambda1'] = 0.01
    (W, optim_info) = Xtest1('spams', 'spams.fistaFlat(Y,X,W0,True,**param)', locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), np.mean(optim_info[3, :])))
# can of course be used with other regularization functions, intercept, ...

    print('\nFISTA + Logistic l1 + sparse matrix')
    param['loss'] = 'logistic'
    (W, optim_info) = Xtest1(
        'spams', 'spams.fistaFlat(Y,ssp.csc_matrix(X),W0,True,**param)', locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), np.mean(optim_info[3, :])))
# can of course be used with other regularization functions, intercept, ...


# Multi-Class classification
    Y = np.asfortranarray(
        np.ceil(5 * np.random.random(size=(100, 1000))) - 1, dtype=myfloat)
    param['loss'] = 'multi-logistic'
    print('\nFISTA + Multi-Class Logistic l1')
    nclasses = np.max(Y[:])+1
    W0 = np.zeros((X.shape[1], int(nclasses) * Y.shape[1]),
                  dtype=myfloat, order="F")
    (W, optim_info) = Xtest1('spams', 'spams.fistaFlat(Y,X,W0,True,**param)', locals())

    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), np.mean(optim_info[3, :])))
# can of course be used with other regularization functions, intercept, ...


# Multi-Task regression
    Y = np.asfortranarray(np.random.normal(size=(100, 100)), dtype=myfloat)
    Y = np.asfortranarray(
        Y - np.tile(np.mean(Y, 0), (Y.shape[0], 1)), dtype=myfloat)
    Y = spams.normalize(Y)
    param['compute_gram'] = False
    W0 = np.zeros((X.shape[1], Y.shape[1]), dtype=myfloat, order="F")
    param['loss'] = 'square'
    print('\nFISTA + Regression l1l2 ')
    param['regul'] = 'l1l2'
    (W, optim_info) = Xtest1('spams', 'spams.fistaFlat(Y,X,W0,True,**param)', locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), np.mean(optim_info[3, :])))

    print('\nFISTA + Regression l1linf ')
    param['regul'] = 'l1linf'
    (W, optim_info) = Xtest1('spams', 'spams.fistaFlat(Y,X,W0,True,**param)', locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), np.mean(optim_info[3, :])))

    print('\nFISTA + Regression l1l2 + l1 ')
    param['regul'] = 'l1l2+l1'
    param['lambda2'] = 0.1
    (W, optim_info) = Xtest1('spams', 'spams.fistaFlat(Y,X,W0,True,**param)', locals())
    print('mean loss: %f, number of iterations: %f' %
          (np.mean(optim_info[0, :]), np.mean(optim_info[3, :])))

    print('\nFISTA + Regression l1linf + l1 ')
    param['regul'] = 'l1linf+l1'
    param['lambda2'] = 0.1
    (W, optim_info) = Xtest1('spams', 'spams.fistaFlat(Y,X,W0,True,**param)', locals())
    print('mean loss: %f, number of iterations: %f' %
          (np.mean(optim_info[0, :]), np.mean(optim_info[3, :])))

    print('\nFISTA + Regression l1linf + row + columns ')
    param['regul'] = 'l1linf-row-column'
    param['lambda2'] = 0.1
    (W, optim_info) = Xtest1('spams', 'spams.fistaFlat(Y,X,W0,True,**param)', locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), np.mean(optim_info[3, :])))

# Multi-Task Classification

    print('\nFISTA + Logistic + l1l2 ')
    param['regul'] = 'l1l2'
    param['loss'] = 'logistic'
# *    Y = 2*double(randn(100,100) > 0)-1
    Y = np.asfortranarray(
        2 * np.asarray(np.random.normal(size=(100, 100)) > 0, dtype=myfloat) - 1)
    (W, optim_info) = Xtest1('spams', 'spams.fistaFlat(Y,X,W0,True,**param)', locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), np.mean(optim_info[3, :])))
# Multi-Class + Multi-Task Regularization

    print('\nFISTA + Multi-Class Logistic l1l2 ')
# *    Y = double(ceil(5*rand(100,1000))-1)
    Y = np.asfortranarray(
        np.ceil(5 * np.random.random(size=(100, 1000))) - 1, dtype=myfloat)
    # labels must stay integer-coded in {0, ..., nclasses-1} for multi-logistic,
    # so Y is not normalized here
    param['loss'] = 'multi-logistic'
    param['regul'] = 'l1l2'
    nclasses = np.max(Y[:])+1
    W0 = np.zeros((X.shape[1], int(nclasses) * Y.shape[1]),
                  dtype=myfloat, order="F")
    (W, optim_info) = Xtest1('spams', 'spams.fistaFlat(Y,X,W0,True,**param)', locals())
    print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' % (
        np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), np.mean(optim_info[3, :])))
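
Every experiment above reads the same three rows out of optim_info. A small helper capturing the row layout these prints assume (row 0: loss, row 2: relative duality gap, row 3: iteration count):

import numpy as np

def summarize_optim_info(optim_info):
    # Row layout as used by the prints in these examples:
    # row 0 = loss, row 2 = relative duality gap, row 3 = number of iterations.
    return {'mean_loss': float(np.mean(optim_info[0, :])),
            'mean_rel_duality_gap': float(np.mean(optim_info[2, :])),
            'mean_iterations': float(np.mean(optim_info[3, :]))}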
Example No. 11
    def get_x_y_estimated_beta(self):
        """
        Reference:
        ---------
        http://spams-devel.gforge.inria.fr/doc-python/html/doc_spams006.html#toc23
        """
        shape = (4, 4, 1)
        num_samples = 10
        coefficient = 0.05

        num_ft = shape[0] * shape[1] * shape[2]
        X = np.random.random((num_samples, num_ft))
        beta = np.random.random((num_ft, 1))
        # y = dot(X, beta) + noise
        y = np.dot(X, beta) + np.random.random((num_samples, 1)) * 0.0001

        try:
            import spams
            # Normalization for X
            X = np.asfortranarray(X)
            X = np.asfortranarray(X - np.tile(
                                  np.mean(X, 0),
                                  (X.shape[0], 1)))
            X = spams.normalize(X)
            # Normalization for y
            y = np.asfortranarray(y)
            y = np.asfortranarray(y - np.tile(
                                  np.mean(y, 0),
                                  (y.shape[0], 1)))
            y = spams.normalize(y)
            weight0 = np.zeros((X.shape[1], y.shape[1]),
                               dtype=np.float64,
                               order="F")
            param = {'numThreads': 1, 'verbose': True,
                     'lambda1': coefficient, 'it0': 10, 'max_it': 200,
                     'L0': 0.1, 'tol': 1e-3, 'intercept': False,
                     'pos': False}
            param['compute_gram'] = True
            param['loss'] = 'square'
            param['regul'] = 'l2'
            (weight_ridge, optim_info) = spams.fistaFlat(y,
                                                         X,
                                                         weight0,
                                                         True,
                                                         **param)
            param['regul'] = 'l1'
            (weight_l1, optim_info) = spams.fistaFlat(y,
                                                      X,
                                                      weight0,
                                                      True,
                                                      **param)
#            print "X = ", repr(X)
#            print "y = ", repr(y)
#            print "weight_ridge =", repr(weight_ridge)
#            print "weight_l1 =", repr(weight_l1)
        except ImportError:
            # TODO: Don't use print directly.
            print("Cannot import spams. Default values will be used.")
            X = np.asarray([
           [ 0.26856766,  0.30620391,  0.26995615,  0.3806023 ,  0.41311465,
            -0.24685479,  0.34108499, -0.22786788, -0.2267594 ,  0.30325884,
            -0.00382229,  0.3503643 ,  0.21786749, -0.15275043, -0.24074157,
            -0.25639825],
           [-0.14305316, -0.19553497,  0.45250255, -0.17317269, -0.00304901,
             0.43838073,  0.01606735,  0.09267714,  0.47763275,  0.23234948,
             0.38694597,  0.72591941,  0.21028899,  0.42317021,  0.276003  ,
             0.42198486],
           [-0.08738645,  0.10795947,  0.45813373, -0.34232048,  0.43621128,
            -0.36984753,  0.16555311,  0.55188325, -0.48169657, -0.52844883,
             0.15140672,  0.06074575, -0.36873621,  0.23679974,  0.47195386,
            -0.09728514],
           [ 0.16461237,  0.30299873, -0.32108348, -0.53918274,  0.02287831,
             0.01105383, -0.11124968,  0.18629018,  0.30017151, -0.04217922,
            -0.46066699, -0.33612491, -0.52611772, -0.25397362, -0.27198468,
            -0.42883518],
           [ 0.4710195 ,  0.35047152, -0.07990029,  0.34911632,  0.07206932,
            -0.20270895, -0.0684226 , -0.18958745, -0.08433092,  0.14453963,
             0.28095469, -0.35894296,  0.11680455, -0.37598039, -0.28331446,
            -0.00825299],
           [-0.420528  , -0.74469306,  0.22732681,  0.34362884,  0.16006124,
            -0.29691759,  0.27029047, -0.31077084, -0.048071  ,  0.36495065,
             0.49364453, -0.16903801,  0.07577839, -0.36492748,  0.09448284,
            -0.37055486],
           [ 0.4232946 , -0.26373387, -0.01430445, -0.2353587 , -0.5005603 ,
            -0.35899458,  0.32702596, -0.38311949,  0.31862621, -0.31931012,
            -0.41836583, -0.02855145, -0.50315227, -0.34807958, -0.05252361,
             0.11551424],
           [-0.28443208,  0.07677476, -0.23720305,  0.11056299, -0.48742565,
             0.36772457, -0.56074202,  0.3145033 , -0.22811763,  0.36482173,
            -0.01786535, -0.02929555,  0.35635411,  0.45838473,  0.45853286,
             0.00159594],
           [-0.45779277,  0.10020579, -0.30873257,  0.28114072,  0.18120182,
             0.33333004,  0.17928387,  0.31572323,  0.32902088, -0.10396976,
            -0.33296829,  0.05277326,  0.27139148,  0.18653329,  0.06068255,
            -0.01942451],
           [ 0.06569833, -0.04065228, -0.44669538, -0.17501657, -0.29450165,
             0.32483427, -0.55889145, -0.34973144, -0.35647584, -0.41601239,
            -0.07926316, -0.26784983,  0.14952119,  0.19082353, -0.51309079,
             0.6416559 ]])
            y = np.asarray([
               [ 0.15809895],
               [ 0.69496971],
               [ 0.01214928],
               [-0.39826324],
               [-0.01682498],
               [-0.03372654],
               [-0.45148804],
               [ 0.21735376],
               [ 0.08795349],
               [-0.27022239]])
            weight_ridge = np.asarray([
               [ 0.038558  ],
               [ 0.12605106],
               [ 0.19115798],
               [ 0.07187217],
               [ 0.09472713],
               [ 0.14943554],
               [-0.01968095],
               [ 0.11695959],
               [ 0.15049031],
               [ 0.18930644],
               [ 0.26086626],
               [ 0.23243305],
               [ 0.17425178],
               [ 0.13200238],
               [ 0.11710994],
               [ 0.11272092]])
            weight_l1 = np.asarray([
               [ 0.        ],
               [ 0.02664519],
               [ 0.        ],
               [ 0.        ],
               [ 0.        ],
               [ 0.10357106],
               [ 0.        ],
               [ 0.2103012 ],
               [ 0.00399881],
               [ 0.10815184],
               [ 0.32221254],
               [ 0.49350083],
               [ 0.21351531],
               [ 0.        ],
               [ 0.        ],
               [ 0.        ]])

        ret_data = {}
        ret_data['X'] = X
        ret_data['y'] = y
        ret_data['weight_ridge'] = weight_ridge
        ret_data['weight_l1'] = weight_l1
        ret_data['coefficient'] = coefficient
        ret_data['shape'] = shape
        ret_data['num_samples'] = num_samples
        ret_data['num_ft'] = num_ft

        return ret_data
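
A hypothetical caller for the method above, checking that the returned ridge weights roughly reproduce y; the test-class context (self) and the variable names are assumptions:

import numpy as np

# Hypothetical usage from within the same test class:
data = self.get_x_y_estimated_beta()
X, y = data['X'], data['y']
residual = np.linalg.norm(y - np.dot(X, data['weight_ridge']))
print('ridge residual: %f' % residual)  # small, up to regularization bias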
Example No. 12
param = {'numThreads' : 1,'verbose' : True,
         'lambda1' : 0.05, 'it0' : 10, 'max_it' : 200,
         'L0' : 0.1, 'tol' : 1e-3, 'intercept' : False,
         'pos' : False}
np.random.seed(0)
m = 100;n = 200
X = np.asfortranarray(np.random.normal(size = (m,n)))

X = np.asfortranarray(X - np.tile(np.mean(X,0),(X.shape[0],1)))
##
X1 = X.reshape(m * n)
f = open('datax','w')
for x in X1:
    f.write("%f\n" % x)
f.close()
##
X = spams.normalize(X)
Y = np.asfortranarray(np.random.normal(size = (m,1)))
##
Y1 = Y.reshape(m)
f = open('datay','w')
for x in Y1:
    f.write("%f\n" % x)
f.close()
##
Y = np.asfortranarray(Y - np.tile(np.mean(Y,0),(Y.shape[0],1)))
Y = spams.normalize(Y)
W0 = np.zeros((X.shape[1],Y.shape[1]),dtype=np.float64,order="F")
param['compute_gram'] = True
param['verbose'] = True
param['loss'] = 'square'
param['regul'] = 'l1'
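
The snippet above dumps the matrices to 'datax' and 'datay', one value per line, then breaks off after setting up param. A sketch reading the files back, assuming the same m and n; note that X was saved after centering but before spams.normalize, and Y before centering:

import numpy as np

# reshape uses the same (C) order both ways, so the round trip is exact
X_back = np.loadtxt('datax').reshape(m, n)
Y_back = np.loadtxt('datay').reshape(m, 1)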
Example No. 13
param = {
    'numThreads': -1,
    'verbose': True,
    'lambda1': 0.05,
    'it0': 10,
    'max_it': 200,
    'L0': 0.1,
    'tol': 1e-3,
    'intercept': False,
    'pos': False
}

np.random.seed(0)

m = 100
n = 200

X = np.asfortranarray(np.random.normal(size=(m, n)))

X = np.asfortranarray(X - np.tile(np.mean(X, 0), (X.shape[0], 1)),
                      dtype=myfloat)

X = spams.normalize(X)

Y = np.asfortranarray(np.random.normal(size=(m, 1)))

Y = np.asfortranarray(Y - np.tile(np.mean(Y, 0), (Y.shape[0], 1)),
                      dtype=myfloat)

Y = spams.normalize(Y)

W0 = np.zeros((X.shape[1], Y.shape[1]), dtype=myfloat, order="F")

# Regression experiments

# 100 regression problems with the same design matrix X.

print('\nVarious regression experiments')
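
The fragment stops right after announcing the experiments. Following the pattern of Example No. 10, the first run would be FISTA with square loss and l1 regularization, along these lines (a sketch of the likely continuation, not the original text):

param['compute_gram'] = True
param['loss'] = 'square'
param['regul'] = 'l1'

(W, optim_info) = spams.fistaFlat(Y, X, W0, True, **param)
print('mean loss: %f, mean relative duality_gap: %f, number of iterations: %f' % (
    np.mean(optim_info[0, :]), np.mean(optim_info[2, :]), np.mean(optim_info[3, :])))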