# ---- Example 1 ----
"""
class RnnRbm(RnnRbm):
    def __init__(self, n_hidden=150, n_hidden_recurrent=100, lr=0.001, dt=0.3):
        v, v_sample, cost, monitor, params, updates_train, v_t,updates_generate = build_rnnrbm(2, n_hidden, n_hidden_recurrent)
        self.dt = dt
        gradient = T.grad(cost, params, consider_constant=[v_sample])
        updates_train.update(((p, p - lr * g) for p, g in zip(params, gradient)))
        self.train_function = theano.function([v], monitor, updates=updates_train)
        self.generate_function = theano.function([], v_t, updates=updates_generate)
    def train(self, files, batch_size=100, num_epochs=200):

"""
###############################
####  Setting for dataset  ####
###############################
dataset = XOR(size=30, type='seq')
train_data = dataset.get_batch_design(10)
print len(train_data)

########################
####  Pre training  ####
########################
### First layer ###

rbm = RnnRbm(nvis=2, nhid=4, nhid_recurrent=3)
# rbm = RnnRbm(n_hidden=4, n_hidden_recurrent=3)
print ''

# for param in rbm.get_params():
#     print '------    ' + str(param) + '    -----'
#     print param.get_value()
# ---- Example 2 ----
sys.path.append('/home/fujikawa/lib/python/other/pylearn2/pylearn2')
from pylearn2.models import mlp
from pylearn2.training_algorithms import sgd
from pylearn2.termination_criteria import EpochCounter
from pylearn2.train import Train
import numpy as np
from pylearn2.datasets.transformer_dataset import TransformerDataset
from pylearn2.models.autoencoder import DenoisingAutoencoder, HigherOrderContractiveAutoencoder
from pylearn2.corruption import BinomialCorruptor
import pylearn2.costs.autoencoder as cost_ae
from XOR import XOR

###############################
####  Setting for dataset  ####
###############################
dataset = XOR()

########################
####  Pre training  ####
########################
### First layer ###
dA_1 = DenoisingAutoencoder(BinomialCorruptor(corruption_level=.2), 2, 2, act_enc='sigmoid', act_dec='linear',
                 tied_weights=False)
train_1 = Train(
    dataset,
    dA_1,
    algorithm=sgd.SGD(learning_rate=.05, batch_size=10, termination_criterion=EpochCounter(30), cost=cost_ae.MeanSquaredReconstructionError(), monitoring_batches=5, monitoring_dataset=dataset)
)
train_1.main_loop()
dA_1_out = TransformerDataset(dataset, dA_1)
# ---- Example 3 ----
# coding: utf-8
import theano, sys
sys.path.append('/home/fujikawa/lib/python/other/pylearn2/pylearn2')
from pylearn2.models import mlp
from pylearn2.training_algorithms import sgd
from pylearn2.termination_criteria import EpochCounter
from pylearn2.train import Train
import numpy as np
from XOR import XOR

###############################
####  Setting for dataset  ####
###############################
dataset = XOR()

##########################
####  Setting for NN  ####
##########################
# create layers
hidden_layer = mlp.Sigmoid(layer_name='hidden', dim=3, irange=.1, init_bias=1.)
output_layer = mlp.Softmax(2, 'output', irange=.1)
layers = [hidden_layer, output_layer]
model = mlp.MLP(layers, nvis=2)

####################
####  Training  ####
####################
train = Train(
	dataset,
	model,
	algorithm=sgd.SGD(learning_rate=.05, batch_size=10, termination_criterion=EpochCounter(400))