def learn(self):
    """Train-loop stub for a random baseline: emits N(0, 1) outputs.

    For each of ``Params.MAX_EPOCH`` epochs, generates a random output
    vector per training pattern and records the epoch MSE in
    ``self.train_losses``.  When ``self.validation_set`` is present, also
    records per-epoch validation MSE (``self.validation_losses``) and MEE
    (``self.validation_accuracies``); otherwise both stay ``None``.
    No model state is learned — this establishes a chance-level reference.
    """
    self.train_losses = []
    self.validation_losses = None if self.validation_set is None else []
    self.validation_accuracies = None if self.validation_set is None else []
    xtrain = self.train_set
    ytrain = self.train_labels
    for _epoch in range(Params.MAX_EPOCH):  # renamed: original `i` was shadowed by the inner loop
        loss = Statistics.MSELoss()
        # zip() kept so truncation semantics match paired inputs/labels;
        # the input pattern itself is ignored by the random baseline.
        for _x, y in zip(xtrain, ytrain):
            o = [np.random.normal(0, 1) for _ in range(self.outputDim)]
            loss.update(o, y)
        self.train_losses.append(loss.get())
        if self.validation_set is not None:
            loss = Statistics.MSELoss()
            accuracy = Statistics.MEELoss()
            for _x, y in zip(self.validation_set, self.validation_labels):
                o = [np.random.normal(0, 1) for _ in range(self.outputDim)]
                loss.update(o, y)
                accuracy.update(o, y)
            self.validation_losses.append(loss.get())
            self.validation_accuracies.append(accuracy.get())
def learn(self):
    """Train the network with per-pattern backpropagation.

    Builds the network graph, then runs ``self.params.MAX_EPOCH`` epochs.
    Each epoch: optionally decays the learning rate, optionally shuffles
    the training pairs (minibatch mode), fires the network on each
    pattern, accumulates backpropagation deltas, and applies weight
    updates either every ``MINIBATCH_SAMPLE`` patterns or once per epoch
    (batch mode).  Records per-epoch training MSE in
    ``self.train_losses`` and, when a validation set exists, validation
    MSE and MEE in ``self.validation_losses`` /
    ``self.validation_accuracies`` (both ``None`` otherwise).
    """
    self.buildGraph()
    self.train_losses = []
    self.validation_losses = None if self.validation_set is None else []
    self.validation_accuracies = None if self.validation_set is None else []
    epoch = 0
    xtrain = self.train_set
    ytrain = self.train_labels
    while (epoch < self.params.MAX_EPOCH):
        if self.params.ETA_DECAY:
            # Linear learning-rate decay from ETA_RANGE[1] down to
            # ETA_RANGE[0] over the first ETA_DECREASING_PERIOD fraction
            # of MAX_EPOCH; constant at ETA_RANGE[0] afterwards.
            # NOTE(review): this mutates self.params.ETA in place —
            # presumably read by update_weights; confirm.
            if epoch >= self.params.ETA_DECREASING_PERIOD * self.params.MAX_EPOCH:
                self.params.ETA = self.params.ETA_RANGE[0]
            else:
                self.params.ETA = self.params.ETA_RANGE[1] - (
                    self.params.ETA_RANGE[1] - self.params.ETA_RANGE[0]
                ) * (epoch / (self.params.MAX_EPOCH * self.params.ETA_DECREASING_PERIOD))
        if self.params.MINIBATCH:
            # Reshuffle the training pairs together each epoch so
            # minibatches differ between epochs.
            xtrain, ytrain = parallel_shuffle(xtrain, ytrain)
        loss = Statistics.MSELoss()
        # Cached before the epoch's updates; used elsewhere —
        # presumably for weight regularization. TODO confirm.
        self.normalization_factor = self.sum_weights()
        # i is 1-based so the modulo test below fires on full minibatches.
        for i, x, y in zip(range(1, len(xtrain) + 1), xtrain, ytrain):
            self.fire_network(x)
            self.update_backpropagation(y)
            # Output layer is the last entry of lista_neuroni; read after
            # backprop has accumulated deltas but before any weight update.
            loss.update(
                [neuron.getValue() for neuron in self.lista_neuroni[-1]], y)
            if self.params.MINIBATCH and i % self.params.MINIBATCH_SAMPLE == 0:
                # Apply the accumulated deltas for this full minibatch.
                self.update_weights(self.params.MINIBATCH_SAMPLE,
                                    end_epoch=(i == len(xtrain)))
        if self.params.MINIBATCH:
            # Flush the trailing partial minibatch.
            # NOTE(review): when len(xtrain) divides MINIBATCH_SAMPLE
            # evenly this calls update_weights(0) right after the in-loop
            # end_epoch update — verify update_weights tolerates a
            # zero-sized batch.
            self.update_weights(len(xtrain) % self.params.MINIBATCH_SAMPLE)
        else:
            # Batch mode: single update with deltas from the whole epoch.
            self.update_weights(len(xtrain))
        self.train_losses.append(loss.get())
        if self.validation_set is not None:
            # Forward-only pass over the validation set; MSE for the loss
            # curve, MEE as the "accuracy" metric (regression task).
            loss = Statistics.MSELoss()
            accuracy = Statistics.MEELoss()
            for x, y in zip(self.validation_set, self.validation_labels):
                self.fire_network(x)
                loss.update([
                    neuron.getValue() for neuron in self.lista_neuroni[-1]
                ], y)
                accuracy.update([
                    neuron.getValue() for neuron in self.lista_neuroni[-1]
                ], y)
            self.validation_losses.append(loss.get())
            self.validation_accuracies.append(accuracy.get())
        epoch += 1