Example #1
0
 def learning_updates(self):
     """Compute the parameter updates applied during training.

     Gradients of ``self.cost`` are taken with respect to every network
     parameter and handed to the configured optimizer.
     """
     trainable = self.network.parameters
     grads = T.grad(self.cost, trainable)
     return optimize_updates(trainable, grads, self.config)
Example #2
0
 def optimization_updates(self, params, gradients):
     """Return optimizer update rules for *params* given their *gradients*.

     Side effect: auxiliary parameters created by the optimizer are
     appended to ``self.network.free_parameters``.
     """
     updates, aux_params = optimize_updates(params, gradients, self.config)
     self.network.free_parameters.extend(aux_params)
     logging.info("Added %d free parameters for optimization" % len(aux_params))
     return updates
Example #3
0
 def optimization_updates(self, params, gradients):
     """Build update rules from the optimizer selected by ``self.config``.

     Optimizer-internal (free) parameters are recorded on the network so
     they can be tracked alongside the model's own parameters.
     """
     result = optimize_updates(params, gradients, self.config)
     updates, created = result
     self.network.free_parameters.extend(created)
     logging.info("Added %d free parameters for optimization" % len(created))
     return updates
Example #4
0
 def learning_updates(self):
     """Build and return the training-time parameter updates.

     Differentiates ``self.cost`` w.r.t. all network parameters, then
     registers any optimizer-created free parameters on the network.
     """
     weights = self.network.parameters
     grads = T.grad(self.cost, weights)
     updates, freshly_added = optimize_updates(weights, grads, self.config)
     self.network.free_parameters.extend(freshly_added)
     logging.info("Added %d free parameters for optimization" % len(freshly_added))
     return updates
Example #5
0
 def learning_updates(self):
     """Produce the update rules applied at each training step.

     Parameters listed in ``self.config.freeze_params`` are excluded
     before gradient computation, so they stay fixed during training.
     """
     trainable = self.network.parameters
     frozen = self.config.freeze_params
     if frozen:
         logging.info("freeze parameters: %s" % ", ".join(map(str, frozen)))
         trainable = [p for p in trainable if p not in frozen]
     grads = T.grad(self.cost, trainable)
     updates, extra_params = optimize_updates(trainable, grads, self.config)
     self.network.free_parameters.extend(extra_params)
     logging.info("Added %d free parameters for optimization" % len(extra_params))
     return updates
Example #6
0
 def learning_updates(self):
     """Return the symbolic updates used while training the network.

     Any parameter present in ``self.config.freeze_params`` is dropped
     before gradients are taken; the optimizer's newly created free
     parameters are appended to ``self.network.free_parameters``.
     """
     params = self.network.parameters
     if self.config.freeze_params:
         # Skip gradient computation for frozen parameters.
         logging.info("freeze parameters: %s" %
                      ", ".join(map(str, self.config.freeze_params)))
         params = [each for each in params
                   if each not in self.config.freeze_params]
     gradients = T.grad(self.cost, params)
     result, newly_free = optimize_updates(params, gradients, self.config)
     self.network.free_parameters.extend(newly_free)
     logging.info("Added %d free parameters for optimization" %
                  len(newly_free))
     return result