Example #1
from datasets.simulation_data import SimulationData
from pylearn2.models.rbm import RBM
from pylearn2.train import Train
from pylearn2.training_algorithms.default import DefaultTrainingAlgorithm
from pylearn2.termination_criteria import EpochCounter


class RBMTraining:
	def __init__(self, data_path="./datasets/", save_path="training.pkl", simulation_data = None, identifier = 0):
		self.id = identifier
		self.data_path = data_path
		self.save_path = save_path
		if simulation_data is not None:
			self.sim_data = simulation_data
			self.save_data_loaded()
		else:
			self.sim_data = SimulationData(data_path)
			self.load_data()
		
	def load_data(self):
		self.sim_data.load_data()
		self.sim_data.preprocessor() 

		tmp = self.sim_data.split_train_test()
		self.datasets = {'train' : tmp[0], 'test' : tmp[1]}

		self.num_simulations = self.sim_data.num_simulations
		self.input_values = self.sim_data.input_values
		self.output_values = self.sim_data.output_values

	def set_structure(self, num_layers = 4, shape = 'linear'):
		self.vis = self.input_values
		self.hid = self.output_values
		return [self.vis, self.hid]
		
		   
	def get_model(self):
		self.model = RBM(nvis=self.vis, nhid=self.hid, irange=.05)
		return self.model
	   
	def set_training_criteria(self, 
							learning_rate=0.05,
							batch_size=10, 
							max_epochs=10):
		
		self.training_alg = DefaultTrainingAlgorithm(batch_size = batch_size, 
													monitoring_dataset = self.datasets, 
													termination_criterion = EpochCounter(max_epochs))
	
	def set_extensions(self, extensions=None):
		# Extensions are disabled here; a MonitorBasedSaveBest(channel_name='objective',
		# save_path='./training/training_monitor_best.pkl') could be plugged in instead.
		self.extensions = None
		
	def set_attributes(self, attributes):
		self.attributes = attributes

	def define_training_experiment(self, save_freq = 10):
		self.experiment = Train(dataset=self.datasets['train'], 
								model=self.model, 
								algorithm=self.training_alg, 
								save_path=self.save_path , 
								save_freq=save_freq, 
								allow_overwrite=True, 
								extensions=self.extensions)

	def train_experiment(self):
		self.experiment.main_loop()
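A minimal driving sketch for the class above, assuming the usual order of its own methods; paths and the identifier are placeholders.

# Hedged usage sketch for RBMTraining; file names and identifier are illustrative.
trainer = RBMTraining(data_path="./datasets/",
                      save_path="./training/rbm_10000.pkl",
                      identifier=10000)
trainer.set_structure()                    # visible/hidden sizes taken from the data
trainer.get_model()                        # builds the pylearn2 RBM
trainer.set_training_criteria(batch_size=10, max_epochs=10)
trainer.set_extensions()                   # extensions are disabled in this class
trainer.define_training_experiment(save_freq=10)
trainer.train_experiment()                 # runs Train.main_loop()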
Example #2
    def __init__(self, data_path='./datasets/', save_path='./training/'):
        super(Experiment, self).__init__()
        self.data_path = data_path
        self.save_path = save_path

        # Save the different experiments into an array
        self.experiments = []

        self.sim_data = SimulationData(data_path)
Example #3
	def __init__(self, data_path="./datasets/", save_path="training.pkl", simulation_data = None, identifier = 0):
		self.id = identifier
		self.data_path = data_path
		self.save_path = save_path
		if simulation_data is not None:
			self.sim_data = simulation_data
			self.save_data_loaded()
		else:
			self.sim_data = SimulationData(data_path)
			self.load_data()
Example #4
from datasets.simulation_data import SimulationData
from pylearn2.models.autoencoder import Autoencoder, DeepComposedAutoencoder
from pylearn2.costs.autoencoder import MeanSquaredReconstructionError
from pylearn2.train import Train
from pylearn2.training_algorithms.sgd import SGD, MonitorBasedLRAdjuster
from pylearn2.termination_criteria import EpochCounter


def train_model():
    # ninput and noutput are module-level globals consumed by get_structure() below.
    global ninput, noutput
    simdata = SimulationData(
        sim_path="../../javaDataCenter/generarDadesV1/CA_SDN_topo1/")
    simdata.load_data()
    simdata.preprocessor()
    dataset = simdata.get_matrix()

    structure = get_structure()
    layers = []
    for pair in structure:
        layers.append(get_autoencoder(pair))

    model = DeepComposedAutoencoder(layers)
    training_alg = SGD(learning_rate=1e-3,
                       cost=MeanSquaredReconstructionError(),
                       batch_size=1296,
                       monitoring_dataset=dataset,
                       termination_criterion=EpochCounter(max_epochs=50))
    extensions = [MonitorBasedLRAdjuster()]
    experiment = Train(dataset=dataset,
                       model=model,
                       algorithm=training_alg,
                       save_path='training2.pkl',
                       save_freq=10,
                       allow_overwrite=True,
                       extensions=extensions)
    experiment.main_loop()
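train_model() relies on two helpers that are not shown in this snippet. A plausible sketch, modeled on the set_structure and get_autoencoder methods of the EncoderTraining examples below; the use of the ninput/noutput globals is an assumption.

def get_structure(num_layers=4):
    # Hypothetical helper: interpolate layer widths between the module-level
    # ninput and noutput globals, as EncoderTraining.set_structure() does.
    structure = []
    lower = ninput
    for i in range(num_layers):
        upper = lower
        lower = ninput - (i + 1) * (ninput - noutput) // num_layers
        structure.append([upper, lower])
    return structure


def get_autoencoder(pair, act_function='tanh'):
    # Hypothetical helper: one pylearn2 Autoencoder per [nvis, nhid] pair.
    n_input, n_output = pair
    return Autoencoder(nvis=n_input, nhid=n_output,
                       act_enc=act_function, act_dec=act_function,
                       irange=0.05)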
Example #5
	def __init__(self, data_path='./datasets/', save_path='./training/'):
		super(Experiment, self).__init__()
		self.data_path = data_path
		self.save_path = save_path

		# Save the different experiments into an array
		self.experiments = []

		self.sim_data = SimulationData(data_path)
Example #6
	def __init__(self, data_path="./datasets/", save_path="training.pkl", simulation_data = None, identifier = 0):
		self.id = identifier
		self.data_path = data_path
		self.save_path = save_path
		if simulation_data is not None:
			self.sim_data = simulation_data
			self.save_data_loaded()
		else:
			self.sim_data = SimulationData(data_path)
			self.load_data()
Example #7
    def __init__(self,
                 data_path="./datasets/",
                 save_path="training.pkl",
                 simulation_data=None,
                 identifier=0,
                 preprocessor='uniform'):
        self.id = identifier
        self.data_path = data_path
        self.save_path = save_path
        if simulation_data is not None:
            self.sim_data = simulation_data
        else:
            self.sim_data = SimulationData(data_path)
        if not self.sim_data.is_loaded:
            self.sim_data.load_data()

        self.sim_data.preprocessor(kind=preprocessor)

        tmp = self.sim_data.split_train_test()
        self.datasets = {'train': tmp[0], 'test': tmp[1]}

        self.num_simulations = self.sim_data.num_simulations
        self.input_values = self.sim_data.input_values
        self.output_values = self.sim_data.output_values
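A short instantiation sketch for the constructor above, assuming it belongs to a trainer class such as the MLPTraining shown in a later example.

# Hedged sketch: build the trainer and inspect the loaded splits.
trainer = MLPTraining(data_path="./datasets/", preprocessor='uniform')
print(trainer.num_simulations)
print(trainer.datasets['train'].X.shape)   # training inputs (DenseDesignMatrix)
print(trainer.datasets['test'].y.shape)    # held-out targets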
Example #8
	def __init__(self, data_path="./datasets/", save_path="training.pkl", simulation_data = None, identifier = 0, preprocessor='uniform'):
		self.id = identifier
		self.data_path = data_path
		self.save_path = save_path
		if simulation_data is not None:
			self.sim_data = simulation_data
		else:
			self.sim_data = SimulationData(data_path)
		if not self.sim_data.is_loaded:
			self.sim_data.load_data()

		self.sim_data.preprocessor(kind = preprocessor)

		tmp = self.sim_data.split_train_test()
		self.datasets = {'train' : tmp[0], 'test' : tmp[1]}

		self.num_simulations = self.sim_data.num_simulations
		self.input_values = self.sim_data.input_values
		self.output_values = self.sim_data.output_values
Example #9
def train_model():
	global ninput, noutput
	simdata = SimulationData(sim_path="../../javaDataCenter/generarDadesV1/CA_SDN_topo1/")
	simdata.load_data()
	simdata.preprocessor() 
	dataset = simdata.get_matrix()
	
	structure = get_structure()
	layers = []
	for pair in structure:
		layers.append(get_autoencoder(pair))
	  
	model = DeepComposedAutoencoder(layers)
	training_alg = SGD(learning_rate=1e-3,
					cost=MeanSquaredReconstructionError(),
					batch_size=1296,
					monitoring_dataset=dataset,
					termination_criterion=EpochCounter(max_epochs=50))
	extensions = [MonitorBasedLRAdjuster()]
	experiment = Train(dataset=dataset,
					model=model,
					algorithm=training_alg,
					save_path='training2.pkl',
					save_freq=10,
					allow_overwrite=True,
					extensions=extensions)
	experiment.main_loop()
Example #10
import theano
from numpy import mean, square
import numpy as np
from pylearn2.utils import serial
from datasets.simulation_data import SimulationData
import matplotlib.pyplot as plt
import scipy

sim = SimulationData()
sim.load_data()
sim.preprocessor()
[train, test] = sim.split_train_test()

dataset = sim.data

x = np.array([])
y = np.array([])
plt.figure()

# Histogram of the first sample's input features.
plt.hist(sim.data.X[0])
plt.show()
# for i in range(len(sim.data.X)):
# 	plt.hist(sim.data.X[i])
# 	x = np.append(x, sim.data.X[i])
# 	y = np.append(y, sim.data.X[i])


# plt.hist(x)
# plt.show()
Example #11
import theano
import numpy as np
import matplotlib.pyplot as plt
import scipy.stats
from datasets.simulation_data import SimulationData
from pylearn2.models.mlp import MLP, Linear, Sigmoid, Tanh
from pylearn2.costs.mlp import Default
from pylearn2.train import Train
from pylearn2.training_algorithms.sgd import SGD
from pylearn2.termination_criteria import EpochCounter
from pylearn2.utils import serial


class MLPTraining:
    def __init__(self,
                 data_path="./datasets/",
                 save_path="training.pkl",
                 simulation_data=None,
                 identifier=0,
                 preprocessor='uniform'):
        self.id = identifier
        self.data_path = data_path
        self.save_path = save_path
        if simulation_data is not None:
            self.sim_data = simulation_data
        else:
            self.sim_data = SimulationData(data_path)
        if not self.sim_data.is_loaded:
            self.sim_data.load_data()

        self.sim_data.preprocessor(kind=preprocessor)

        tmp = self.sim_data.split_train_test()
        self.datasets = {'train': tmp[0], 'test': tmp[1]}

        self.num_simulations = self.sim_data.num_simulations
        self.input_values = self.sim_data.input_values
        self.output_values = self.sim_data.output_values

    def set_structure(self, num_layers=4, shape='linear'):
        structure = []

        lower_number = self.input_values
        for i in range(num_layers):
            upper_number = lower_number
            lower_number = self.input_values - (i + 1) * (
                self.input_values - self.output_values) / num_layers
            structure.append([upper_number, lower_number])

        self.structure = structure
        return structure

    def get_structure(self):
        return self.structure

    def get_Linear_Layer(self, structure, i=0):
        n_input, n_output = structure
        config = {
            'dim': n_output,
            'layer_name': ("l%d" % i),
            'irange': .5,
            'use_abs_loss': False,
            'use_bias': False,
        }
        return Linear(**config)

    def get_Sigmoid_Layer(self, structure, i=0):
        n_input, n_output = structure
        config = {
            'dim': n_output,
            'layer_name': ("s%d" % i),
            'irange': 0.05,
        }
        return Sigmoid(**config)

    def get_Tanh_Layer(self, structure, i=0):
        n_input, n_output = structure
        config = {
            'dim': n_output,
            'layer_name': ("t%d" % i),
            'irange': 0.05,
        }
        return Tanh(**config)

    def get_layers(self, act_function='linear'):
        self.layers = []
        i = 0
        for pair in self.structure:
            i += 1
            if (act_function == 'linear'):
                self.layers.append(self.get_Linear_Layer(structure=pair, i=i))
            if (act_function == 'sigmoid'):
                self.layers.append(self.get_Sigmoid_Layer(structure=pair, i=i))
            if (act_function == 'tanh'):
                self.layers.append(self.get_Tanh_Layer(structure=pair, i=i))
        return self.layers

    def get_model(self, batch_size):
        vis = self.structure[0][0]
        self.model = MLP(layers=self.layers,
                         nvis=vis,
                         batch_size=batch_size,
                         layer_name=None)
        return self.model

    def set_training_criteria(self,
                              learning_rate=0.05,
                              cost=Default(),
                              batch_size=10,
                              max_epochs=10):

        self.training_alg = SGD(learning_rate=learning_rate,
                                cost=cost,
                                batch_size=batch_size,
                                monitoring_dataset=self.datasets,
                                termination_criterion=EpochCounter(max_epochs))

    def set_extensions(self, extensions):
        # A MonitorBasedSaveBest(channel_name='objective',
        # save_path='./training/training_monitor_best.pkl') extension could be passed here.
        self.extensions = extensions

    def set_attributes(self, attributes):
        self.attributes = attributes

    def define_training_experiment(self, save_freq=10):
        self.experiment = Train(dataset=self.datasets['train'],
                                model=self.model,
                                algorithm=self.training_alg,
                                save_path=self.save_path,
                                save_freq=save_freq,
                                allow_overwrite=True,
                                extensions=self.extensions)

    def train_experiment(self):
        self.experiment.main_loop()
        self.save_model()

    def save_model(self):
        # Train pickles the model to save_path; reload it here so self.model
        # points at the trained version on disk.
        self.model = serial.load(self.save_path)

    def predict(self, test=None, X=None, y=None):
        if test is not None:
            x_test = test.X
            y_test = test.y
        else:
            x_test = X
            y_test = y

        X = self.model.get_input_space().make_theano_batch()
        Y = self.model.fprop(X)
        f = theano.function([X], Y)

        y_pred = f(x_test)

        if y_test is not None:
            MSE = np.mean(np.square(y_test - y_pred))
            print "MSE:", MSE
            var = np.mean(np.square(y_test))
            print "Var:", var
            self.plot_prediction(y_test, y_pred)
        else:
            return y_pred

    def plot_prediction(self, y_test, y_pred):
        m = int(np.sqrt(self.output_values)) + 1
        f, axarr = plt.subplots(m, m)

        r = []  # per-output R^2 values
        f = 0   # row index into the axarr grid
        c = 0   # column index into the axarr grid
        for i in range(self.output_values):
            x = np.array([])
            y = np.array([])
            for j in range(len(y_test)):
                x = np.append(x, y_test[j][i])
                y = np.append(y, y_pred[j][i])

            slope, intercept, r_value, p_value, std_err = scipy.stats.linregress(
                x, y)
            r.append(r_value**2)
            axarr[f, c].plot(x, y, 'ro')
            c += 1
            if (c == m):
                c = 0
                f += 1

        plt.show()
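A usage sketch for MLPTraining, following the call order of the test script in Example #16; the steps after get_layers are an assumption based on the class's remaining methods, and paths/identifier are placeholders.

trainer = MLPTraining(save_path='./training/training_linear_regressor_10002.pkl',
                      identifier=10002,
                      preprocessor='uniform')
trainer.set_structure(num_layers=1)
trainer.get_layers(act_function='linear')
trainer.get_model(batch_size=10)
trainer.set_training_criteria(learning_rate=0.1, batch_size=10, max_epochs=10)
trainer.set_extensions(None)
trainer.define_training_experiment(save_freq=10)
trainer.train_experiment()                       # trains, then reloads the saved pickle
trainer.predict(test=trainer.datasets['test'])   # prints MSE/Var and plots the fits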
Example #12
from datasets.simulation_data import SimulationData
from pylearn2.models.autoencoder import Autoencoder, DeepComposedAutoencoder
from pylearn2.costs.autoencoder import MeanSquaredReconstructionError
from pylearn2.train import Train
from pylearn2.train_extensions.best_params import MonitorBasedSaveBest
from pylearn2.training_algorithms.sgd import SGD
from pylearn2.termination_criteria import EpochCounter


class EncoderTraining:
	def __init__(self, data_path="./datasets/", save_path="training.pkl", simulation_data = None, identifier = 0):
		self.id = identifier
		self.data_path = data_path
		self.save_path = save_path
		if simulation_data is not None:
			self.sim_data = simulation_data
			self.save_data_loaded()
		else:
			self.sim_data = SimulationData(data_path)
			self.load_data()
		
	def load_data(self):
		self.sim_data.load_data()
		self.sim_data.preprocessor()
		self.save_data_loaded()

	def save_data_loaded(self):
		self.data_matrix = self.sim_data.get_matrix()
		self.num_simulations = self.sim_data.num_simulations
		self.input_values = self.sim_data.input_values
		self.output_values = self.sim_data.output_values

	def set_structure(self, num_layers = 4, shape = 'linear'):
		structure = []

		lower_number = self.input_values
		for i in range(num_layers):
			upper_number = lower_number
			lower_number = self.input_values-(i+1)*(self.input_values-self.output_values)/num_layers
			structure.append([upper_number, lower_number])
		
		self.structure = structure
		return structure
		
	def get_structure(self):
		return self.structure
		
	def get_autoencoder(self, structure, encoder='sigmoid'):
		n_input, n_output = structure
		config = {
			'nvis': n_input,
			'nhid': n_output,
			'act_enc': encoder,
			'act_dec': encoder,
			"irange" : 0.05,
			}
		return Autoencoder(**config)
		
	def get_layers(self, encoder='tanh'):
		self.layers = []
		for pair in self.structure:
			self.layers.append(self.get_autoencoder(structure = pair, encoder=encoder))
		return self.layers
		   
	def get_model(self):
		self.model = DeepComposedAutoencoder(self.layers)
		return self.model
	   
	def set_training_criteria(self, 
							learning_rate=0.05, 
							cost=MeanSquaredReconstructionError(), 
							batch_size=10, 
							max_epochs=10):
		dataset = self.data_matrix
		self.training_alg = SGD(learning_rate = learning_rate, 
								cost = cost, 
								batch_size = batch_size, 
								monitoring_dataset = dataset, 
								termination_criterion = EpochCounter(max_epochs))
	
	def set_extensions(self, extensions=None):
		self.extensions = [MonitorBasedSaveBest(channel_name='objective',
												save_path = './training/training_monitor_best.pkl')]
		
	def set_attributes(self, attributes):
		self.attributes = attributes

	def define_training_experiment(self, save_freq = 10):
		self.experiment = Train(dataset=self.data_matrix, 
								model=self.model, 
								algorithm=self.training_alg, 
								save_path=self.save_path , 
								save_freq=save_freq, 
								allow_overwrite=True, 
								extensions=self.extensions)

	def train_experiment(self):
		self.experiment.main_loop()
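The Experiment class in a later example drives this trainer; a condensed sketch of that sequence (identifier and save path are placeholders; note this version's get_layers takes an encoder keyword).

trainer = EncoderTraining(data_path='./datasets/',
                          save_path='./training/training_encoder_1000.pkl',
                          identifier=1000)
trainer.set_structure(num_layers=4)
trainer.get_layers(encoder='tanh')
trainer.get_model()
trainer.set_training_criteria(learning_rate=0.05, batch_size=10, max_epochs=10)
trainer.set_extensions()                 # installs MonitorBasedSaveBest
trainer.define_training_experiment()
trainer.train_experiment()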
Example #13
class EncoderTraining:
	def __init__(self, data_path="./datasets/", save_path="training.pkl", simulation_data = None, identifier = 0):
		self.id = identifier
		self.data_path = data_path
		self.save_path = save_path
		if simulation_data is not None:
			self.sim_data = simulation_data
			self.save_data_loaded()
		else:
			self.sim_data = SimulationData(data_path)
			self.load_data()
		
	def load_data(self):
		self.sim_data.load_data()
		self.sim_data.preprocessor()

		tmp = self.sim_data.split_train_test()
		self.datasets = {'train' : tmp[0], 'test' : tmp[1]}

		self.num_simulations = self.sim_data.num_simulations
		self.input_values = self.sim_data.input_values
		self.output_values = self.sim_data.output_values

	def set_structure(self, num_layers = 4, shape = 'linear'):
		structure = []

		lower_number = self.input_values
		for i in range(num_layers):
			upper_number = lower_number
			lower_number = self.input_values-(i+1)*(self.input_values-self.output_values)/num_layers
			structure.append([upper_number, lower_number])
		
		self.structure = structure
		return structure
		
	def get_structure(self):
		return self.structure
		
	def get_autoencoder(self, structure, act_function='sigmoid'):
		n_input, n_output = structure
		config = {
			'nvis': n_input,
			'nhid': n_output,
			'act_enc': act_function,
			'act_dec': act_function,
			"irange" : 0.05,
			}
		return Autoencoder(**config)
		
	def get_layers(self, act_function='tanh'):
		self.layers = []
		for pair in self.structure:
			self.layers.append(self.get_autoencoder(structure = pair, act_function=act_function))
		return self.layers
		   
	def get_model(self):
		self.model = DeepComposedAutoencoder(self.layers)
		return self.model
	   
	def set_training_criteria(self, 
							learning_rate=0.05, 
							cost=MeanSquaredReconstructionError(), 
							batch_size=10, 
							max_epochs=10):
		
		self.training_alg = SGD(learning_rate = learning_rate, 
								cost = cost, 
								batch_size = batch_size, 
								monitoring_dataset = self.datasets, 
								termination_criterion = EpochCounter(max_epochs))
	
	def set_extensions(self, extensions=None):
		self.extensions = [MonitorBasedSaveBest(channel_name='test_objective',
												save_path = './training/training_monitor_best.pkl')]
		
	def set_attributes(self, attributes):
		self.attributes = attributes

	def define_training_experiment(self, save_freq = 10):
		self.experiment = Train(dataset=self.datasets['train'], 
								model=self.model, 
								algorithm=self.training_alg, 
								save_path=self.save_path , 
								save_freq=save_freq, 
								allow_overwrite=True, 
								extensions=self.extensions)

	def train_experiment(self):
		self.experiment.main_loop()

	def computeMSE(self):
		# Reload the checkpoint written by MonitorBasedSaveBest and measure the
		# error on the held-out split.
		model = serial.load('./training/training_monitor_best.pkl')
		X = model.get_input_space().make_theano_batch()
		Y = model.encode(X)
		f = theano.function([X], Y)
		x_test = self.datasets['test'].X
		y_test = self.datasets['test'].y
		y_pred = f(x_test)

		MSE = mean(square(y_test - y_pred))
		print MSE
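computeMSE re-loads the checkpoint written by MonitorBasedSaveBest; the same evaluation can also be run standalone. A sketch mirroring the method above and the loader script of Example #19.

import theano
from numpy import mean, square
from pylearn2.utils import serial
from datasets.simulation_data import SimulationData

sim = SimulationData()
sim.load_data()
sim.preprocessor()
train, test = sim.split_train_test()

model = serial.load('./training/training_monitor_best.pkl')
X = model.get_input_space().make_theano_batch()
f = theano.function([X], model.encode(X))   # encode(), as in computeMSE above
y_pred = f(test.X)
print(mean(square(test.y - y_pred)))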
Example #14
class MLPTraining:
	def __init__(self, data_path="./datasets/", save_path="training.pkl", simulation_data = None, identifier = 0, preprocessor='uniform'):
		self.id = identifier
		self.data_path = data_path
		self.save_path = save_path
		if simulation_data is not None:
			self.sim_data = simulation_data
		else:
			self.sim_data = SimulationData(data_path)
		if not self.sim_data.is_loaded:
			self.sim_data.load_data()

		self.sim_data.preprocessor(kind = preprocessor)

		tmp = self.sim_data.split_train_test()
		self.datasets = {'train' : tmp[0], 'test' : tmp[1]}

		self.num_simulations = self.sim_data.num_simulations
		self.input_values = self.sim_data.input_values
		self.output_values = self.sim_data.output_values

	def set_structure(self, num_layers = 4, shape = 'linear'):
		structure = []

		lower_number = self.input_values
		for i in range(num_layers):
			upper_number = lower_number
			lower_number = self.input_values-(i+1)*(self.input_values-self.output_values)/num_layers
			structure.append([upper_number, lower_number])
		
		self.structure = structure
		return structure
		
	def get_structure(self):
		return self.structure
		
	def get_Linear_Layer(self, structure, i = 0):
		n_input, n_output = structure
		config = {
			'dim': n_output,
			'layer_name': ("l%d" % i),
			'irange': .5,
			'use_abs_loss': False,
			'use_bias': False,
			}
		return Linear(**config)

	def get_Sigmoid_Layer(self, structure, i = 0):
		n_input, n_output = structure
		config = {
			'dim': n_output,
			'layer_name': ("s%d" % i),
			'irange' : 0.05,
			}
		return Sigmoid(**config)

	def get_Tanh_Layer(self, structure, i = 0):
		n_input, n_output = structure
		config = {
			'dim': n_output,
			'layer_name': ("t%d" % i),
			'irange' : 0.05,
			}
		return Tanh(**config)
		
	def get_layers(self, act_function='linear'):
		self.layers = []
		i = 0
		for pair in self.structure:
			i += 1
			if(act_function == 'linear'):
				self.layers.append(self.get_Linear_Layer(structure = pair, i = i))
			if(act_function == 'sigmoid'):
				self.layers.append(self.get_Sigmoid_Layer(structure = pair, i = i))
			if(act_function == 'tanh'):
				self.layers.append(self.get_Tanh_Layer(structure = pair, i = i))
		return self.layers
		   
	def get_model(self, batch_size):
		vis = self.structure[0][0]
		self.model = MLP(layers = self.layers, nvis = vis, batch_size = batch_size, layer_name = None)
		return self.model
	   
	def set_training_criteria(self, 
							learning_rate=0.05, 
							cost=Default(), 
							batch_size=10, 
							max_epochs=10):
		
		self.training_alg = SGD(learning_rate = learning_rate, 
								cost = cost, 
								batch_size = batch_size, 
								monitoring_dataset = self.datasets, 
								termination_criterion = EpochCounter(max_epochs))
	
	def set_extensions(self, extensions):
		# A MonitorBasedSaveBest(channel_name='objective',
		# save_path='./training/training_monitor_best.pkl') extension could be passed here.
		self.extensions = extensions
		
	def set_attributes(self, attributes):
		self.attributes = attributes

	def define_training_experiment(self, save_freq = 10):
		self.experiment = Train(dataset=self.datasets['train'], 
								model=self.model, 
								algorithm=self.training_alg, 
								save_path=self.save_path , 
								save_freq=save_freq, 
								allow_overwrite=True, 
								extensions=self.extensions)

	def train_experiment(self):
		self.experiment.main_loop()
		self.save_model()

	def save_model(self):
		self.model = serial.load(self.save_path)
		
	def predict(self, test=None, X=None, y=None):
		if test is not None:
			x_test = test.X
			y_test = test.y
		else:
			x_test = X
			y_test = y

		X = self.model.get_input_space().make_theano_batch()
		Y = self.model.fprop(X)
		f = theano.function([X], Y)

		y_pred = f(x_test)

		if y_test is not None:
			MSE = np.mean(np.square(y_test - y_pred))
			print "MSE:", MSE
			var = np.mean(np.square(y_test))
			print "Var:", var
			self.plot_prediction(y_test, y_pred)
		else:
			return y_pred

	def plot_prediction(self, y_test, y_pred):
		m = int(np.sqrt(self.output_values)) + 1
		f, axarr = plt.subplots(m,m)

		r = []  # per-output R^2 values
		f = 0   # row index into the axarr grid
		c = 0   # column index into the axarr grid
		for i in range(self.output_values):
			x = np.array([])
			y = np.array([])
			for j in range(len(y_test)):
				x = np.append(x, y_test[j][i])
				y = np.append(y, y_pred[j][i])

			slope, intercept, r_value, p_value, std_err = scipy.stats.linregress(x, y)
			r.append(r_value**2)
			axarr[f,c].plot(x, y, 'ro')
			c += 1
			if (c==m):
				c = 0
				f += 1

		plt.show()
Example #15
from datasets.simulation_data import SimulationData
import numpy as np
from numpy import mean, square
import matplotlib.pyplot as plt

s = SimulationData()
s.load_data()
tmp = s.split_train_test()

x_train = tmp[0].X
y_train = tmp[0].y
x_test = tmp[1].X
y_test = tmp[1].y
"""
Delete the columns of X (DenseDesignMatrix) which value is always 0
"""
X = x_train
Xt = x_test
for i in range(36):
	X = np.delete(X, i*36, 1)
	Xt = np.delete(Xt, i*36, 1)

"""
Compute:
		coeficients = inv(X'*X)*X'*y
"""
A = np.dot(np.transpose(X),X)
B = np.dot(np.linalg.inv(A),np.transpose(X))
coef = np.dot(B, y_train)

"""
Example #16
from numpy import mean, square
import numpy as np
from pylearn2.utils import serial
import matplotlib.pyplot as plt
import scipy

from datasets.simulation_data import SimulationData
# MLPTraining is the trainer class from Example #11; import it from that module.

## Test MLP Training
identifier = 10002
num_layers = 1
learning_rate = 0.1
activation_function = 'linear'
batch_size = 10
epochs = 10
save_path = './training/training_linear_regressor_%d.pkl' % (identifier)

sim = SimulationData()
sim.load_data()
#sim.remove_input_zeros()
sim.preprocessor('uniform')

# Create the experiment
experiment = MLPTraining(save_path=save_path,
                         simulation_data=sim,
                         identifier=identifier,
                         preprocessor=None)

print experiment.sim_data.data.X.shape

# Set up the experiment
experiment.set_structure(num_layers=num_layers)
experiment.get_layers(act_function=activation_function)
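The snippet stops after get_layers; a hedged continuation of the same setup, based on the remaining MLPTraining methods (save_freq is illustrative).

experiment.get_model(batch_size=batch_size)
experiment.set_training_criteria(learning_rate=learning_rate,
                                 batch_size=batch_size,
                                 max_epochs=epochs)
experiment.set_extensions(None)
experiment.define_training_experiment(save_freq=10)
experiment.train_experiment()
experiment.predict(test=experiment.datasets['test'])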
Example #17
class RBMTraining:
    def __init__(self,
                 data_path="./datasets/",
                 save_path="training.pkl",
                 simulation_data=None,
                 identifier=0):
        self.id = identifier
        self.data_path = data_path
        self.save_path = save_path
        if simulation_data is not None:
            self.sim_data = simulation_data
            self.save_data_loaded()
        else:
            self.sim_data = SimulationData(data_path)
            self.load_data()

    def load_data(self):
        self.sim_data.load_data()
        self.sim_data.preprocessor()

        tmp = self.sim_data.split_train_test()
        self.datasets = {'train': tmp[0], 'test': tmp[1]}

        self.num_simulations = self.sim_data.num_simulations
        self.input_values = self.sim_data.input_values
        self.output_values = self.sim_data.output_values

    def set_structure(self, num_layers=4, shape='linear'):
        self.vis = self.input_values
        self.hid = self.output_values
        return [self.vis, self.hid]

    def get_model(self):
        self.model = RBM(nvis=self.vis, nhid=self.hid, irange=.05)
        return self.model

    def set_training_criteria(self,
                              learning_rate=0.05,
                              batch_size=10,
                              max_epochs=10):

        self.training_alg = DefaultTrainingAlgorithm(
            batch_size=batch_size,
            monitoring_dataset=self.datasets,
            termination_criterion=EpochCounter(max_epochs))

    def set_extensions(self, extensions=None):
        # Extensions are disabled here; a MonitorBasedSaveBest(channel_name='objective',
        # save_path='./training/training_monitor_best.pkl') could be plugged in instead.
        self.extensions = None

    def set_attributes(self, attributes):
        self.attributes = attributes

    def define_training_experiment(self, save_freq=10):
        self.experiment = Train(dataset=self.datasets['train'],
                                model=self.model,
                                algorithm=self.training_alg,
                                save_path=self.save_path,
                                save_freq=save_freq,
                                allow_overwrite=True,
                                extensions=self.extensions)

    def train_experiment(self):
        self.experiment.main_loop()
Example #18
class Experiment(object):
    """docstring for Experiment"""
    def __init__(self, data_path='./datasets/', save_path='./training/'):
        super(Experiment, self).__init__()
        self.data_path = data_path
        self.save_path = save_path

        # Save the different experiments into an array
        self.experiments = []

        self.sim_data = SimulationData(data_path)

    def load_data(self):
        """
		Load data and store it once to be accessible for each experiment
		that is going to be run
		"""
        self.sim_data.load_data()
        self.sim_data.preprocessor()
        self.sim_data.save_data()

    def set_experiments(self, attributes=None):
        if attributes is None:
            self.experiments_arguments = generator()
        else:
            self.experiments_arguments = attributes

        i = 1000
        # for arg in self.experiments_arguments:
        # 	set_single_experiment(attributes = arg, id = i)
        #	i = i + 1
        self.set_single_experiment(attributes=self.experiments_arguments[869],
                                   identifier=i)

    def set_single_experiment(self,
                              num_layers=4,
                              learning_rate=0.05,
                              activation_function='tanh',
                              batch_size=10,
                              epochs=10,
                              attributes=None,
                              identifier=0):
        """
		Possible values for the inputs:
		- num_layers = 3 to 7
		- learning_rate = from 0.05 to 0.45 with jumps of 0.05
		- activation_function = tanh, logistic, sigmoideal
		- batch_size = 5 to 20 with jumps of 5
		- epochs = 5 to 20 with jumps of 5 epochs
		"""
        # Fall back to the supplied attribute tuple when an argument is omitted.
        if num_layers is None: num_layers = attributes[0]
        if learning_rate is None: learning_rate = attributes[1]
        if activation_function is None: activation_function = attributes[2]
        if batch_size is None: batch_size = attributes[3]
        if epochs is None: epochs = attributes[4]

        save_path = self.save_path + 'training_encoder_%d.pkl' % (identifier)
        experiment = EncoderTraining(data_path=self.data_path,
                                     save_path=save_path,
                                     simulation_data=self.sim_data,
                                     identifier=identifier)

        experiment.set_attributes(attributes)
        # Set up the experiment
        experiment.set_structure(num_layers=num_layers)
        experiment.get_layers(act_function=activation_function)
        experiment.get_model()
        experiment.set_training_criteria(learning_rate=learning_rate,
                                         batch_size=batch_size,
                                         max_epochs=epochs)
        experiment.set_extensions()
        experiment.define_training_experiment()

        self.experiments.append(experiment)

    def run_experiments(self):
        i = 0
        for exp in self.experiments:
            print("Running experiment ", i, ":")
            i = i + 1
            exp.train_experiment()
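A driving sketch for the Experiment class above; with no arguments, set_experiments() relies on a generator() helper (not shown here) to enumerate argument tuples.

exp = Experiment(data_path='./datasets/', save_path='./training/')
exp.load_data()          # load, preprocess and cache the simulation data once
exp.set_experiments()    # builds EncoderTraining instances from the argument tuples
exp.run_experiments()    # trains each configured experiment in turn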
Example #19
import theano
from numpy import mean, square
import numpy as np
from pylearn2.utils import serial
from datasets.simulation_data import SimulationData
import matplotlib.pyplot as plt
import scipy

sim = SimulationData()
sim.load_data()

[train, test] = sim.split_train_test()

model = serial.load('./training/training_encoder_10001.pkl')
X = model.get_input_space().make_theano_batch()
Y = model.fprop(X)
f = theano.function([X], Y)
x_test = test.X
y_test = test.y
y_pred = f(x_test)

MSE = mean(square(y_test - y_pred))
print "MSE:", MSE
var = mean(square(y_test))
print "Var:", var


f, axarr = plt.subplots(8, 8)
r = []
f = 0   # reused below as the row index into axarr
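The script breaks off while setting up the 8x8 plotting grid; a completion sketch that follows the plot_prediction method of the MLP examples, assuming the number of output columns fits the grid.

import scipy.stats

c = 0
for i in range(y_test.shape[1]):            # one scatter plot per output variable
    x = y_test[:, i]
    y = y_pred[:, i]
    slope, intercept, r_value, p_value, std_err = scipy.stats.linregress(x, y)
    r.append(r_value ** 2)                  # keep the per-output R^2
    axarr[f, c].plot(x, y, 'ro')
    c += 1
    if c == 8:
        c = 0
        f += 1
plt.show()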
Example #20
class Experiment(object):
	"""docstring for Experiment"""
	def __init__(self, data_path='./datasets/', save_path='./training/'):
		super(Experiment, self).__init__()
		self.data_path = data_path
		self.save_path = save_path

		# Save the different experiments into an array
		self.experiments = []

		self.sim_data = SimulationData(data_path)

	def load_data(self):
		"""
		Load data and store it once to be accessible for each experiment
		that is going to be run
		"""
		self.sim_data.load_data()
		self.sim_data.preprocessor()
		self.sim_data.save_data()

	def set_experiments(self, attributes=None):
		if attributes is None:
			self.experiments_arguments = generator()
		else:
			self.experiments_arguments = attributes

		i = 1000
		# for arg in self.experiments_arguments:
		# 	set_single_experiment(attributes = arg, id = i)
		#	i = i + 1
		self.set_single_experiment(attributes=self.experiments_arguments[869], identifier = i)
		
	def set_single_experiment(self, 
							num_layers=4,
							learning_rate=0.05,
							activation_function='tanh',
							batch_size = 10,
							epochs = 10,
							attributes = None,
							identifier = 0):
		"""
		Possible values for the inputs:
		- num_layers = 3 to 7
		- learning_rate = from 0.05 to 0.45 with jumps of 0.05
		- activation_function = tanh, logistic, sigmoideal
		- batch_size = 5 to 20 with jumps of 5
		- epochs = 5 to 20 with jumps of 5 epochs
		"""
		# Fall back to the supplied attribute tuple when an argument is omitted.
		if num_layers is None: num_layers = attributes[0]
		if learning_rate is None: learning_rate = attributes[1]
		if activation_function is None: activation_function = attributes[2]
		if batch_size is None: batch_size = attributes[3]
		if epochs is None: epochs = attributes[4]

		save_path = self.save_path+'training_encoder_%d.pkl' % (identifier)
		experiment = EncoderTraining(data_path = self.data_path, 
									save_path = save_path, 
									simulation_data = self.sim_data,
									identifier = identifier)

		experiment.set_attributes(attributes)
		# Set up the experiment
		experiment.set_structure(num_layers = num_layers)
		experiment.get_layers(act_function = activation_function)
		experiment.get_model()
		experiment.set_training_criteria(learning_rate = learning_rate,
								batch_size = batch_size,
								max_epochs = epochs)
		experiment.set_extensions()
		experiment.define_training_experiment()

		self.experiments.append(experiment)

	def run_experiments(self):
		i = 0
		for exp in self.experiments:
			print ("Running experiment ", i, ":")
			i = i + 1
			exp.train_experiment()
Example #21
from datasets.simulation_data import SimulationData
import numpy as np
from numpy import mean, square
import matplotlib.pyplot as plt

s = SimulationData()
s.load_data()
tmp = s.split_train_test()

x_train = tmp[0].X
y_train = tmp[0].y
x_test = tmp[1].X
y_test = tmp[1].y
"""
Delete the columns of X (DenseDesignMatrix) which value is always 0
"""
X = x_train
Xt = x_test
for i in range(36):
    X = np.delete(X, i * 36, 1)
    Xt = np.delete(Xt, i * 36, 1)
"""
Compute:
		coeficients = inv(X'*X)*X'*y
"""
A = np.dot(np.transpose(X), X)
B = np.dot(np.linalg.inv(A), np.transpose(X))
coef = np.dot(B, y_train)
"""
Predict the from the test set of data
"""
Example #22
from numpy import mean, square
import numpy as np
from pylearn2.utils import serial
import matplotlib.pyplot as plt
import scipy

from datasets.simulation_data import SimulationData
# MLPTraining is the trainer class from Example #14; import it from that module.

## Test MLP Training
identifier = 10002
num_layers = 1
learning_rate = 0.1
activation_function = 'linear'
batch_size = 10
epochs = 10
save_path = './training/training_linear_regressor_%d.pkl' % (identifier)

sim = SimulationData()
sim.load_data()
#sim.remove_input_zeros()
sim.preprocessor('uniform')

# Create the experiment
experiment = MLPTraining(save_path = save_path,
						simulation_data = sim,
						identifier = identifier,
						preprocessor = None)

print experiment.sim_data.data.X.shape

# Set up the experiment
experiment.set_structure(num_layers = num_layers)
experiment.get_layers(act_function=activation_function)