    def startTraining(self):
        iterations_sum = 0
        loss_record_train = [] # each epoch
        loss_record_valid = []
        latest_loss = 99999
        max_iteration = math.inf

        if self.max_iteration is not None:
            max_iteration = self.max_iteration

        console.log('training', 'start training session...')
        console.log('training', str(len(self.input_data))+' training datasets. '+str(len(self.input_data_valid))+' validation datasets.')
        console.log('training', 'input shape: '+str(self.input_data.shape)+', output shape: '+str(self.output_data.shape))
        console.log('training', 'valid input shape: '+str(self.input_data_valid.shape)+', valid output shape: '+str(self.output_data_valid.shape))
        console.log('training', 'mini batch size: '+str(self.mini_batch_size))
        while iterations_sum <= max_iteration and latest_loss > self.target_loss:
            # Draw a random mini-batch for this iteration
            this_batch_input, this_batch_output = util.get_mini_batch_ramdom([self.input_data, self.output_data], self.mini_batch_size)

            # Record train/valid loss once per epoch
            if iterations_sum % self.iterations_each_epoch == 0:
                # Periodically back up the network to disk
                if (iterations_sum // self.iterations_each_epoch) % self.backup_intervel == 0:
                    nnio.save_neuralnet(self.neuralnet, self.backup_path+self.neuralnet.name)
                self.neuralnet.new_dropout(self.forward_config['dropout_keep'])
                train_loss = np.mean(self.loss_function(self.neuralnet.forward(this_batch_input, self.forward_config), this_batch_output))
                valid_loss = np.mean(self.loss_function(self.neuralnet.forward(self.input_data_valid), self.output_data_valid))
                loss_record_train.append(train_loss)
                loss_record_valid.append(valid_loss)
                if (iterations_sum // self.iterations_each_epoch) % self.verbose_interval == 0 and self.verbose:
                    self.dumpLog(iterations_sum // self.iterations_each_epoch, loss_record_train[-1], loss_record_valid[-1])

            self.neuralnet.forward(this_batch_input, self.forward_config)
            self.neuralnet.backward(this_batch_output, self.loss_function, self.backward_config)

            iterations_sum += 1
            latest_loss = loss_record_train[-1]

        return loss_record_train, loss_record_valid

    def dumpLog(self, epochs, batch_train_loss, valid_loss):
        console.log('training', 'epochs: '+str(epochs)+', batch_train_loss: '+str(batch_train_loss)+', valid_loss: '+str(valid_loss))
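A short usage sketch for the records returned above (hypothetical: it assumes a training-session object exposing this startTraining method has already been constructed, and that subplot index 1 is valid for a (2, 1) Figure; the placeholder loss values only stand in for a real call such as loss_train, loss_valid = session.startTraining()):

import numpy as np
import nodenet.interface.graph as graph

# loss_train, loss_valid = session.startTraining()  # session construction not shown in this excerpt
loss_train = [0.90, 0.41, 0.23, 0.15]   # placeholder values for illustration only
loss_valid = [0.95, 0.48, 0.30, 0.22]

fig = graph.Figure((2, 1))
epochs = np.arange(len(loss_train))
fig.plot_2D(epochs, np.array(loss_train), 0, 'training loss per epoch')
fig.plot_2D(epochs, np.array(loss_valid), 1, 'validation loss per epoch')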
Example #2
    def startTraining(self):
        iterations_sum = 0
        loss_record_train = []  # each epoch
        loss_record_valid = []
        latest_loss = 99999
        max_iteration = np.inf

        if self.max_iteration is not None:
            max_iteration = self.max_iteration

        console.log('training', 'start training session...')
        console.log(
            'training',
            str(len(self.input_data)) + ' training datasets. ' +
            str(len(self.input_data_valid)) + ' validation datasets.')
        while (iterations_sum <= max_iteration
               and latest_loss > self.target_loss):
            # Record train/valid loss once per epoch
            if iterations_sum % self.iterations_each_epoch == 0:
                train_loss = np.mean(
                    self.loss_function(self.neuralnet.forward(self.input_data),
                                       self.output_data))
                valid_loss = np.mean(
                    self.loss_function(
                        self.neuralnet.forward(self.input_data_valid),
                        self.output_data_valid))
                loss_record_train.append(train_loss)
                loss_record_valid.append(valid_loss)
                if (iterations_sum // self.iterations_each_epoch
                    ) % self.verbose_interval == 0 and self.verbose:
                    console.log(
                        'training', 'epochs: ' +
                        str(iterations_sum // self.iterations_each_epoch) +
                        ', train_loss: ' + str(loss_record_train[-1]) +
                        ', valid_loss: ' + str(loss_record_valid[-1]))

            this_batch_input, this_batch_output = self.getRandomMiniBatch()
            self.neuralnet.forward(this_batch_input, trace=True)
            self.neuralnet.backward(this_batch_output, self.loss_function,
                                    self.learning_algorithm,
                                    self.learning_configuration)
            iterations_sum += 1
            latest_loss = loss_record_train[-1]

        return loss_record_train, loss_record_valid
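Both variants draw a random mini-batch every iteration (util.get_mini_batch_ramdom in the first, self.getRandomMiniBatch in the second). Purely as an illustration of that step, not the library's actual implementation, the sampling can be sketched in NumPy as:

import numpy as np

def random_mini_batch(inputs, outputs, batch_size):
    # Pick `batch_size` row indices without replacement and slice both arrays
    # with the same indices so each input stays paired with its output.
    indices = np.random.choice(len(inputs), size=batch_size, replace=False)
    return inputs[indices], outputs[indices]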
import nodenet.neuralnets as nn
import nodenet.layers as layers
import nodenet.functions as f
import nodenet.trainingsessions as sessions
import nodenet.interface.graph as graph
import nodenet.utilities as util
import nodenet.interface.console as console
import nodenet.io as nnio
import nodenet.variables as var

console.logo()

# Graphing test 1
fig = graph.Figure((2, 1))
datasets = util.get_sin_1x1_datasets(2000, noise=0.1)
datasets = util.cut_dataset_by_ratio_ramdom([datasets[0], datasets[1]])
console.log('tester', 'graphing test 1...', msg_color='Green')
fig.plot_2D(datasets[0].flatten(), datasets[1].flatten(), 0, 'graph of sin(x) and training result')
console.log('tester', 'graphing 1 passed.', msg_color='Red')

# NeuralNet test
console.log('tester', 'fullyconnectednet test...', msg_color='Green')
neuralnet = nn.SimpleContainer()
layers = [
    layers.Nodes1D(1, f.linear),
    layers.FullyConnected1D(1, 16),
    layers.Nodes1D(16, f.tanh),
    layers.FullyConnected1D(16, 16),
    layers.Nodes1D(16, f.tanh),
    layers.FullyConnected1D(16, 1),
    layers.Nodes1D(1, f.linear),
]
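# The list above describes a 1-16-16-1 fully connected network with tanh
# hidden activations and linear input/output nodes, sized for the noisy
# sin(x) fit prepared above. Note that assigning the list to `layers`
# shadows the nodenet.layers module imported at the top of this script.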
import nodenet.neuralnets as nn
import nodenet.layers as layers
import nodenet.functions as f
import nodenet.trainingsessions as sessions
import nodenet.interface.graph as graph
import nodenet.utilities.datagenerator as datagen
import nodenet.utilities.commons as util
import nodenet.interface.console as console
import nodenet.io as nnio
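# This second tester script targets a different layout of the package than the
# one above: data generation lives in nodenet.utilities.datagenerator, common
# helpers in nodenet.utilities.commons, the validation split uses
# cut_dataset_segment_to_validation instead of cut_dataset_by_ratio_ramdom,
# and the Nodes1D layers are replaced by NodesVector.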

# Graphing test 1
console.logo()
fig = graph.Figure((2, 1))
datasets = datagen.sin_1x1(1000)
datasets = util.cut_dataset_segment_to_validation([datasets[0], datasets[1]])
console.log('tester', 'graphing test 1...')
fig.plot_2D(datasets[0].flatten(), datasets[1].flatten(), 0,
            'graph of sin(x) and training result')
console.log('tester', 'graphing 1 passed.')

# NeuralNet test
console.log('tester', 'neuralnet test...')
neuralnet = nn.SimpleContainer()
layers = [
    layers.NodesVector(1, f.linear),
    layers.FullyConnected1D(1, 8),
    layers.NodesVector(8, f.tanh),
    layers.FullyConnected1D(8, 8),
    layers.NodesVector(8, f.tanh),
    layers.FullyConnected1D(8, 1),
    layers.NodesVector(1, f.linear),