# NOTE(review): whitespace-mangled Python 2 fragment (original newlines were
# collapsed to spaces; uses `print` statements and xrange). What the visible
# code does: computes an 80% train / next-10% dev split point on the sparse
# matrix Xtotal, converts Xtotal and Ytotal to CSC, builds a zero CSR column
# vector `vec`, trains via Lasso.lasso on the train slice (dev/test slices
# passed as validation sets), prints the error on the [trainsize:devsize]
# slice via Rfechado.erro, then prints "PIORES 10" and begins locating the 10
# smallest entries of W (sorted ascending, first 10). The chunk is TRUNCATED:
# the trailing `for cenas in indices:` has no body here — do not reformat or
# edit this fragment without the missing continuation.
#Xtotal,indices=Priberam_Ridge.tira_meta(Xtotal,indices) pdb.set_trace() trainsize=int(Xtotal.shape[0]*0.8) devsize=int(Xtotal.shape[0]*0.1)+trainsize Xtotal=Xtotal.tocsc() #Xtotal,indices=Priberam_Ridge.repara(Xtotal,indices) #train_index=xrange(trainsize) #dev_index=xrange(trainsize,devsize) #test_index=xrange(devsize,Xtotal.shape[0]) #Xtrain,Ytrain,Xtest,Ytest,Xdev,Ydev=Priberam_Ridge.separaXY(Xtotal,Ytotal) vec=sparse.csr_matrix([0 for i in xrange(Xtotal.shape[1])]) vec=vec.transpose() Ytotal=Ytotal.tocsc() W,F,lamb=Lasso.lasso(Xtotal[:trainsize,:],Ytotal[0:trainsize,0],vec,Xtotal[devsize:,:],Ytotal[devsize:,0],Xtotal[trainsize:devsize,:],Ytotal[trainsize:devsize,0],False,False) #W,F,lamb=Lasso.lasso(Xtrain,Ytrain.transpose(),vec,Xdev,Ydev.transpose(),Xtest,Ytest.transpose()) #print "ERRO:",Rfechado.erro(Xtest,Ytest,W) print "ERRO:", Rfechado.erro(Xtotal[trainsize:devsize,:],Ytotal[trainsize:devsize,:],W) ''' for k in indices: if W[indices[k]]==0: print k ''' print "PIORES 10" for coiso in sorted(np.array(W))[:10]: i=0 while W[i,0] != coiso[0]: i+=1 else: for cenas in indices:
# NOTE(review): whitespace-mangled Python 2 fragment (collapsed newlines;
# xrange, `print w_estimado`). What the visible code does: defines a list of
# lambda values `lambs` and their log10 x-coordinates `xg`, then for each
# lambda runs lasso.lasso on (Xtotal, Ytotal) and appends the first five
# weight coordinates to w0..w4 (a commented-out variant using grad.grad does
# the same). The chunk starts MID-SEQUENCE (w0, w1, w2 are created before
# this view) and ends INSIDE an unclosed ''' block that comments out a
# comparison plot — do not reformat or edit without the surrounding context.
w3=[] w4=[] lambs=[0.1,1,10,10e2,10e3,10e4,10e5,10e6,10e7,10e8,10e9,10e10,10e11] xg=[np.log10(x) for x in lambs] ''' for lamb in lambs: w_estimado,yy3,xx3=grad.grad(Xtotal,Ytotal,vec,Xtotal,Ytotal,Xtotal,Ytotal,max_iter,lamb) w0.append(w_estimado.todense()[0,0]) w1.append(w_estimado.todense()[1,0]) w2.append(w_estimado.todense()[2,0]) w3.append(w_estimado.todense()[3,0]) w4.append(w_estimado.todense()[4,0]) print w_estimado ''' for lamb in lambs: w_estimado,yy3,xx3=lasso.lasso(Xtotal,Ytotal,vec,Xtotal,Ytotal,Xtotal,Ytotal,max_iter,lamb) w0.append(w_estimado[0,0]) w1.append(w_estimado[1,0]) w2.append(w_estimado[2,0]) w3.append(w_estimado[3,0]) w4.append(w_estimado[4,0]) print w_estimado import pylab import matplotlib.pyplot as plt plt.figure(1) ''' plt.title("Comparacao entre os 3 metodos") plt.plot(xx1,yy1,"b",xx2,yy2,"g",xx3,yy3,"r",xx1,[custo for cenas in xrange(len(xx1))],"k--") pylab.ylim([custo-1e14,custo+4e14]) plt.show()
# Benchmark two ADMM lasso implementations ("Jonas'" and "Jakob's") on the
# same synthetic problem and compare their solutions with stem plots.
import time as t
import matplotlib.pyplot as plt

# Make the solver implementations under '../kode' importable.
sys.path.insert(0, '../kode')
import lass_admm as jakob
import Lasso as jonas

# Synthetic problem: A is (n/2) x n random, the true solution has a single
# non-zero entry, and b is the corresponding right-hand side.
n = 10000
p = int(n / 2)
A = np.random.rand(p, n)
x = np.zeros([n, 1])
x[0, 0] = 1
b = A.dot(x)

# Solver parameters, shared by both implementations.
rho = 1
lamda = 5
maxit = 1000
er = 10**(-9)
es = 10**(-9)

# Run and time the first implementation.
tic = t.time()
res_jonas = jonas.lasso(A, b, rho, lamda, maxit, er=er, es=es)
print("Jonas' in:", t.time() - tic)

# Run and time the second implementation.
tic = t.time()
res_jakob = jakob.lass_admm(A, b, rho, lamda, maxit, er=er, es=es)
print("Jakobs in:", t.time() - tic)

# Stem plot of the final iterate returned by each solver.
plt.stem(res_jonas[0][-1])
plt.figure(2)
plt.stem(res_jakob[0][-1])
plt.show()
# NOTE(review): whitespace-mangled Python 3 fragment (collapsed newlines).
# What the visible code does: for each problem size n in `logspace`, averages
# over q trials the time (result index 5) and iteration count (result index 6)
# returned by lasso.lasso on a random (n/2) x n problem whose true solution
# has one non-zero entry; appends the averages to time_opt / it_opt, saves
# [sizes, times, iterations] to 'data/Avg_iteration_time_lasso.npy', and
# starts setting up a plot. The chunk is TRUNCATED at both ends: it begins
# mid-list (the `logspace = [` opener and q/rho/lamda/maxit/er/es definitions
# are outside this view) and ends right after `fig, ax1 = plt.subplots()` —
# do not reformat or edit without the surrounding context.
10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 200, 300, 400, 500, 600, 700, 800, 900, 1000, 2000, 3000, 4000, 5000, 6000, 7000, 8000, 9000, 10000 ] for n in logspace: print("n:", n) time_opt_temp = 0 time_non_temp = 0 it_opt_temp = 0 it_non_temp = 0 for i in range(q): A = np.random.rand(int(n / 2), n) # Creation of random matrix x = np.zeros([n, 1]) # Creation og wanted solution x x[0, 0] = 1 b = A.dot(x) # Resulting vector b res_optimal = lasso.lasso(A, b, rho, lamda, maxit, er=er, es=es) #res_nonoptimal = l1.l1_admm_nonoptimized(A,b,rho,maxit,er =er,es = es,quiet = True) time_opt_temp += res_optimal[5] #time_non_temp += res_nonoptimal[5] it_opt_temp += res_optimal[6] #it_non_temp += res_nonoptimal[6] time_opt.append(time_opt_temp / q) it_opt.append(it_opt_temp / q) #time_non.append(time_non_temp/q) #it_non.append(it_non_temp/q) count = len(time_opt) data = np.array([logspace[:count], time_opt, it_opt]) np.save('data/Avg_iteration_time_lasso.npy', data) fig, ax1 = plt.subplots()