Example No. 1
# Activation functions for the encoder layers.
#act = ['softplus']
act = ['softplus', 'sigmoid', 'tanh']
#act = ['softplus', 'softplus', 'softplus', 'softplus']

test = autoencoder(units, act)

test.generate_encoder()
test.generate_decoder(act)

# A session is needed before the weights are inspected below.
ts = test.init_network()
#ba = batch.knn_batch(data, 5)
#ba.extend(batch.knn_batch(data, 8))
#ba.extend(batch.knn_batch(data, 15))

ba = batch.rand_batch(data, n_batch)
#ba = batch.seq_batch(data, n_batch)

# First-layer weights before pre-training.
print(ts.run(test.layers[0].W))

#test.pre_train(data)

#print(ts.run(test.layers[0].W))

# RBM pre-training; print part of its output and the updated first-layer weights.
p = test.pre_train_rbm(data, n_iters=100)
print(p[1])
print(ts.run(test.layers[0].W))

#test.train(data, batch=ba, display=True, n_iters=1000, noise=False, noise_level=0.25)
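This snippet assumes that units, data and n_batch, as well as the autoencoder class and the batch helpers, already exist. A minimal setup sketch in the spirit of Example No. 3 could look like the following; the dataset path, layer widths and batch size here are placeholders for illustration, not values from the original script.

import numpy as np
from autoencoder import autoencoder
from tools.data_manipulation import batch

# Hypothetical dataset path and sizes, for illustration only.
data = np.loadtxt("../datasets/example.dat").astype("float32")
n_batch = 100
units = [data.shape[1], 512, 64]   # input width followed by two hidden widths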
    
Example No. 2
# Keep either the whole dataset or only the samples of the requested class.
if options.class_label == 'all':
    data = arr
else:
    data = np.asarray([a for (a, l) in zip(arr, lab)
                       if l == int(options.class_label)]).astype("float32")

if options.rem_mean:
    # cumsum(...)[-1] / N is the per-feature mean; subtract it from the data.
    m_d = np.cumsum(data, axis=0)[-1] / data.shape[0]
    data = data - m_d

print(options)

auto = autoencoder(units,action)

auto.generate_encoder(euris=options.euris)
auto.generate_decoder(symmetric=options.symm)
if options.pre_train == 'rbm':
    auto.pre_train_rbm(data, n_iters=10, adapt_learn=True,
                       learning_rate=float(options.pre_learn_rate))
elif options.pre_train == 'standard':
    auto.pre_train(data)
#auto.pre_train_rbm(data, n_iters=10, learning_rate=float(options.pre_learn_rate), adapt_learn=True)

# Optional mini-batching: rand_batch draws random mini-batches of size n_batch.
if not options.batch:
    bat = None
else:
    from tools.data_manipulation.batch import rand_batch
    bat = rand_batch(data, int(options.n_batch))

# k (the dropout keep probability) is assumed to be defined elsewhere in the script.
auto.train(data, n_iters=int(options.iters), record_weight=True, w_file=options.w_file,
           use_dropout=options.drop_out, keep_prob=k, reg_weight=options.reg_w,
           reg_lambda=options.reg_lambda, model_name=options.model_name, batch=bat,
           display=False, noise=True, gradient=options.gradient,
           learning_rate=float(options.learn_rate))
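The options object is not built in this excerpt. Given the attribute names used above, one way it could be produced is an optparse parser along the following lines; the flag names and defaults are assumptions for illustration, not the original script's definitions.

from optparse import OptionParser

parser = OptionParser()
parser.add_option("--class-label", dest="class_label", default="all")
parser.add_option("--rem-mean", dest="rem_mean", default="")
parser.add_option("--pre-train", dest="pre_train", default="rbm")
parser.add_option("--pre-learn-rate", dest="pre_learn_rate", default="0.01")
parser.add_option("--batch", dest="batch", action="store_true", default=False)
parser.add_option("--n-batch", dest="n_batch", default="100")
parser.add_option("--iters", dest="iters", default="1000")
parser.add_option("--learn-rate", dest="learn_rate", default="0.001")
# The remaining attributes read above (w_file, drop_out, reg_w, reg_lambda,
# model_name, gradient, euris, symm) would be declared the same way.
(options, args) = parser.parse_args()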
Example No. 3
import autoencoder
import math
import numpy as np
import sys
from tools.data_manipulation import batch

data = np.loadtxt("../datasets/multi_pie.dat")

# Shift the data to be non-negative, then scale it into [0, 1].
data = data + abs(np.min(data))
data = data / np.max(data)
data = data.astype("float32")


bat = batch.rand_batch(data,1000)

#units = [data.shape[1],int(math.ceil(data.shape[1]*1.2))+5,int(max(math.ceil(data.shape[1]/4),int_dim+2)+3),
#         int(max(math.ceil(data.shape[1]/10),int_dim+1)),int_dim]

# Layer widths: 5600 inputs down to a bottleneck whose size comes from sys.argv[1].
units = [5600, 1100, 200, int(sys.argv[1])]

act = ['sigmoid', 'sigmoid', 'sigmoid']
#act = ['relu', 'relu', 'relu', 'relu']
auto = autoencoder.autoencoder(units, act)

auto.generate_encoder(euris=True)
auto.generate_decoder(symmetric=True)
#auto.pre_train(data,n_iters=5000)

session = auto.init_network()

ic, bc = auto.train(data, n_iters=5000, record_weight=False, w_file='./pie_weights_20',
                    use_dropout=True, keep_prob=0.5, reg_weight=False, reg_lambda=0.0,
                    model_name=sys.argv[2], batch=bat, display=False, noise=False,
                    gradient='adam', learning_rate=0.0000125)
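The shift-and-scale normalization above can be factored into a small helper. This is only a convenience sketch of the same two operations; the name normalize_nonneg is not from the original code.

def normalize_nonneg(x):
    # Shift x so its minimum is 0, then scale its maximum to 1 (as done above).
    x = x + abs(np.min(x))
    return (x / np.max(x)).astype("float32")

data = normalize_nonneg(np.loadtxt("../datasets/multi_pie.dat"))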
Example No. 4
#act = ['softplus']
act = ['softplus', 'sigmoid', 'tanh']
#act = ['softplus', 'softplus', 'softplus', 'softplus']

test = autoencoder(units, act)

test.generate_encoder()
test.generate_decoder(act)

ts = test.init_network()
#ba = batch.knn_batch(data, 5)
#ba.extend(batch.knn_batch(data, 8))
#ba.extend(batch.knn_batch(data, 15))

ba = batch.rand_batch(data, n_batch)
#ba = batch.seq_batch(data, n_batch)

#print(ts.run(test.layers[0].W))

#test.pre_train(data)

#print(ts.run(test.layers[0].W))

# Standard (non-RBM) pre-training, then fine-tuning on random mini-batches.
test.pre_train(data, n_iters=2000)

test.train(data, batch=ba, display=True, n_iters=1000, noise=False, noise_level=0.25)
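The tools.data_manipulation.batch module itself is not shown in these examples. Purely as an assumption about what rand_batch and seq_batch might do, random versus sequential mini-batch splitting could be sketched as below; the actual module may return a different structure.

import numpy as np

def rand_batch_sketch(data, n_batch):
    # Shuffle the row indices, then cut them into mini-batches of size n_batch.
    idx = np.random.permutation(data.shape[0])
    return [data[idx[i:i + n_batch]] for i in range(0, data.shape[0], n_batch)]

def seq_batch_sketch(data, n_batch):
    # Same split, but keeping the original row order.
    return [data[i:i + n_batch] for i in range(0, data.shape[0], n_batch)]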