Example #1
 def do_train_sgd(self, data, label, num_data, batch_size,
                  learning_rate_array):
     return sgd_train(model=self,
                      data=data,
                      label=label,
                      num_data=num_data,
                      batch_size=batch_size,
                      learning_rate_array=learning_rate_array)
 def do_log_layer_training_only(self, data, label, num_data, batch_size, learning_rate_array, num_epochs=1):
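     # Use the output layer's cost and update only its parameters.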
     self.cost = self.logLayer.cost
     for i in range(num_epochs):
         self.logger.info('Epoch %d' % i)
         monitor_results = sgd_train(
             model=self, data=data, label=label,
             num_data=num_data, batch_size=batch_size,
             learning_rate_array=learning_rate_array,
             params=self.logLayer.params)
         for monitor_index, monitor in enumerate(self.monitors):
             print("\t%10s: %f" % (monitor.name, monitor_results[monitor_index]))
 def do_fine_tuning(self, data, label, num_data, batch_size, learning_rate_array, num_epochs=1):
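     # Fine-tune all parameters of the stack against the output-layer cost.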
     monitor_results = None
     self.cost = self.logLayer.cost
     for i in range(num_epochs):
         self.logger.info('epoch %d' % i)
         monitor_results = sgd_train(
             model=self, data=data, label=label,
             num_data=num_data, batch_size=batch_size,
             learning_rate_array=learning_rate_array)
         for monitor_index, monitor in enumerate(self.monitors):
             self.logger.info("\t%10s: %f" % (monitor.name, monitor_results[monitor_index]))
     return monitor_results
 def do_pretraining(self, data, num_data, batch_size, learning_rate_array, num_epochs=1):
     for i in range(len(self.dA_layers)):
         # Greedy pre-training: train each denoising auto-encoder in turn
         self.logger.info("Pre-training layer %d" % i)
         self.cost = self.dA_layers[i].cost
         for j in range(num_epochs):
             self.logger.info("Epoch %d" % j)
             monitor_results = sgd_train(
                 model=self, data=data, label=None,
                 num_data=num_data, batch_size=batch_size,
                 learning_rate_array=learning_rate_array,
                 params=self.dA_layers[i].params,
                 monitors=self.pretrain_monitor_array[i])
             for monitor_index, monitor in enumerate(self.pretrain_monitor_array[i]):
                 self.logger.info("\t%10s: %f" % (monitor.name, monitor_results[monitor_index]))
     return
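Taken together, the methods in this example implement the classic stacked-denoising-autoencoder recipe: greedy layer-wise pre-training, supervised training of the output layer alone, then fine-tuning of the whole stack. The driver below is a minimal sketch of that call order; the model object, the data arrays, and the flat 0.01 learning rate are illustrative assumptions, not part of the original snippet.

 import numpy as np

 def train_sda(model, train_x, train_y, num_data, batch_size=20):
     # One learning rate per model parameter, as sgd_train expects
     # (assumes the model exposes its parameter list as `model.params`).
     lr = np.full(len(model.params), 0.01, dtype='float32')
     # 1. Greedy layer-wise pre-training of each denoising auto-encoder.
     model.do_pretraining(train_x, num_data, batch_size, lr, num_epochs=15)
     # 2. Train only the logistic output layer on the labels.
     model.do_log_layer_training_only(train_x, train_y, num_data,
                                      batch_size, lr, num_epochs=10)
     # 3. Fine-tune the whole stack end to end.
     return model.do_fine_tuning(train_x, train_y, num_data,
                                 batch_size, lr, num_epochs=30)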
Example #5
 def do_training_sgd(self,
                     data,
                     label,
                     num_data,
                     batch_size,
                     learning_rate_array,
                     num_epochs=1,
                     prevalence=None):
     """
     Use Stochastic Gradient Descent to train the model
     :param data: Training Data Tensor
     :param label: Corresponding Class Tensor
     :param num_data: The total amount of data
     :param batch_size: The size per batch
     :param learning_rate_array: Learning Rate for each parameter
     :param num_epochs: Epochs
     :param prevalence: inverse of Percentage of each class in the training samples
     :return: None
     """
     if prevalence is not None:
         self.logRegressionLayer.prevalence = prevalence
     for i in range(num_epochs):
         self.logger.info("Epoch %d" % i)
         monitor_results = sgd_train(
             model=self,
             data=data,
             label=label,
             num_data=num_data,
             batch_size=batch_size,
             learning_rate_array=learning_rate_array)
         for monitor_index, monitor in enumerate(self.monitors):
             self.logger.info(
                 "\t%10s: %f" %
                 (monitor.name, monitor_results[monitor_index]))
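     # Reset to uniform class weights so later use of the model is unweighted.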
     self.logRegressionLayer.prevalence = np.ones(
         (self.logRegressionLayer.W.get_value().shape[1], ),
         dtype=theano.config.floatX)
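The prevalence argument makes it possible to rebalance an imbalanced training set, and the reset to np.ones at the end restores an unweighted cost once training finishes. Below is a hedged usage sketch that derives the weights from inverse class frequencies; `model`, `train_x`, `train_y`, and the learning rates are assumptions for illustration, not part of the original snippet.

 import numpy as np

 # Weight each class by the inverse of its frequency so rare classes
 # contribute proportionally more to the training cost. Assumes
 # `train_y` is an integer label vector in which every class occurs.
 num_classes = int(train_y.max()) + 1
 counts = np.bincount(train_y, minlength=num_classes).astype('float32')
 prevalence = counts.sum() / (num_classes * counts)

 lr = np.full(len(model.params), 0.01, dtype='float32')
 model.do_training_sgd(train_x, train_y,
                       num_data=len(train_y),
                       batch_size=50,
                       learning_rate_array=lr,
                       num_epochs=25,
                       prevalence=prevalence)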