def __init__(self,
              input_dim,
              target_tensor=2,
              clip_value=None,
              input_tensor=None):
     """
     Initialize the regressor.

     Parameters:
         input_dim - dimensionality of the input
         target_tensor - an int tensor dimensionality (converted to a
             variable named "k" via dim_to_var) or a pre-built tensor
             variable used as-is
         clip_value - optional clipping value (stored; presumably used
             for gradient clipping elsewhere — TODO confirm)
         input_tensor - optional input specification forwarded to the
             base class
     """
     # isinstance() is the idiomatic type test (PEP 8) instead of
     # comparing type(x) == int.
     self.target_tensor = dim_to_var(
         target_tensor,
         "k") if isinstance(target_tensor, int) else target_tensor
     self.clip_value = clip_value
     super(NeuralRegressor, self).__init__(input_dim,
                                           input_tensor=input_tensor)
# Example 2
# 0
def optimize_function(params, config=None):
    """
    Compile a Theano function that applies optimizer updates from gradients.

    Parameters:
        params - parameters to update
        config - optional training configuration
    Returns:
        a compiled function taking one gradient input per parameter and
        applying the corresponding updates
    """
    # One symbolic gradient input per parameter, matching its rank.
    gradient_inputs = []
    for param in params:
        gradient_inputs.append(dim_to_var(param.ndim))
    updates, _ = optimize_updates(params, gradient_inputs, config)
    return theano.function(gradient_inputs, [], updates=updates)
# Example 3
# 0
 def setup_variables(self):
     """
     Set up the input variable and wire it through as this layer's output.

     Uses self.input_tensor when provided: an int is treated as a tensor
     dimensionality and converted via dim_to_var; anything else is used
     as a pre-built tensor variable. Otherwise a fresh matrix variable
     'x' is created.
     """
     # Explicit None check: a pre-built tensor variable may not support
     # boolean conversion, and an int dimensionality of 0 would be
     # falsy and silently skipped by a bare truthiness test.
     if self.input_tensor is not None:
         if isinstance(self.input_tensor, int):
             x = dim_to_var(self.input_tensor, name="x")
         else:
             x = self.input_tensor
     else:
         x = T.matrix('x')
     self.input_variables.append(x)
     self._output = x
     self._test_output = x
# Example 4
# 0
 def setup_variables(self):
     """
     Set up the input variable and wire it through as this layer's output.

     Uses self.input_tensor when provided: an int is treated as a tensor
     dimensionality and converted via dim_to_var; anything else is used
     as a pre-built tensor variable. Otherwise a fresh matrix variable
     'x' is created.
     """
     # Explicit None check: a pre-built tensor variable may not support
     # boolean conversion, and an int dimensionality of 0 would be
     # falsy and silently skipped by a bare truthiness test.
     if self.input_tensor is not None:
         if isinstance(self.input_tensor, int):
             x = dim_to_var(self.input_tensor, name="x")
         else:
             x = self.input_tensor
     else:
         x = T.matrix('x')
     self.input_variables.append(x)
     self._output = x
     self._test_output = x
# Example 5
# 0
 def __init__(self, input_dim, target_tensor=2, clip_value=None, input_tensor=None):
     """
     Initialize the regressor.

     Parameters:
         input_dim - dimensionality of the input
         target_tensor - an int tensor dimensionality (converted to a
             variable named "k" via dim_to_var) or a pre-built tensor
             variable used as-is
         clip_value - optional clipping value (stored; presumably used
             for gradient clipping elsewhere — TODO confirm)
         input_tensor - optional input specification forwarded to the base class
     """
     # isinstance() is the idiomatic type test (PEP 8) instead of type() comparison.
     self.target_tensor = dim_to_var(target_tensor, "k") if isinstance(target_tensor, int) else target_tensor
     self.clip_value = clip_value
     super(NeuralRegressor, self).__init__(input_dim, input_tensor=input_tensor)