Example #1
 def add_param(self, param, name="", constraints=True,
               custom_update=None, custom_update_normalized=False, custom_update_exp_average=0,
               custom_update_condition=None, custom_update_accumulate_batches=None, live_update=None):
   """
   :type param: theano.SharedVariable
   :type name: str
   :rtype: theano.SharedVariable
   """
   param = super(Layer, self).add_param(param, name)
   param.live_update = live_update
   if custom_update:
     # Handled in Device and Updater.
     param.custom_update = custom_update
     param.custom_update_normalized = custom_update_normalized
     param.custom_update_exp_average = custom_update_exp_average
     param.custom_update_condition = custom_update_condition
     param.custom_update_accumulate_batches = custom_update_accumulate_batches
   if constraints:
     # Add the regularization terms configured in self.attrs to self.constraints.
     if 'L1' in self.attrs and self.attrs['L1'] > 0:
       # L1 weight decay: sum of absolute parameter values.
       self.constraints += T.constant(self.attrs['L1'], name="L1", dtype='floatX') * abs(param).sum()
     if 'L2' in self.attrs and self.attrs['L2'] > 0:
       # L2 weight decay: sum of squared parameter values.
       self.constraints += T.constant(self.attrs['L2'], name="L2", dtype='floatX') * (param**2).sum()
     if self.attrs.get('L2_eye', 0) > 0:
       # L2 penalty towards a (tiled) identity matrix for 2D params, plain L2 otherwise.
       L2_eye = T.constant(self.attrs['L2_eye'], name="L2_eye", dtype='floatX')
       if param.ndim == 2:
         eye = tiled_eye(param.shape[0], param.shape[1], dtype=param.dtype)
         self.constraints += L2_eye * ((param - eye)**2).sum()
       else:  # standard L2
         self.constraints += L2_eye * (param**2).sum()
     if 'varreg' in self.attrs and self.attrs['varreg'] > 0:
       # Variance regularization: penalize deviation of the parameter's std dev from 1/sum(shape).
       self.constraints += self.attrs['varreg'] * (1.0 * T.sqrt(T.var(param)) - 1.0 / numpy.sum(param.get_value().shape))**2
   return param
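
The constraint terms above are plain Theano expressions accumulated on self.constraints. The following is a minimal standalone sketch, not taken from the Layer class, of how the L1/L2 terms are built; attrs and W are hypothetical stand-ins for self.attrs and a layer parameter.

import numpy
import theano
import theano.tensor as T

floatX = theano.config.floatX
attrs = {'L1': 0.0, 'L2': 0.01}  # hypothetical layer attributes
W = theano.shared(numpy.zeros((3, 3), dtype=floatX), name='W')  # hypothetical parameter

constraints = T.constant(0, dtype=floatX)
if attrs.get('L1', 0) > 0:
  # L1 term, as in the L1 branch above.
  constraints += T.constant(attrs['L1'], dtype=floatX) * abs(W).sum()
if attrs.get('L2', 0) > 0:
  # L2 term, as in the L2 branch above.
  constraints += T.constant(attrs['L2'], dtype=floatX) * (W ** 2).sum()

print(theano.function([], constraints)())  # 0.0 for an all-zero W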
Example #2
 def add_param(self, param, name="", constraints=True,
               custom_update=None, custom_update_normalized=False, custom_update_exp_average=0,
               custom_update_condition=None, custom_update_accumulate_batches=None):
   """
   :type param: theano.SharedVariable
   :type name: str
   :rtype: theano.SharedVariable
   """
   param = super(Layer, self).add_param(param, name)
   if custom_update:
     # Handled in Device and Updater.
     param.custom_update = custom_update
     param.custom_update_normalized = custom_update_normalized
     param.custom_update_exp_average = custom_update_exp_average
     param.custom_update_condition = custom_update_condition
     param.custom_update_accumulate_batches = custom_update_accumulate_batches
   if constraints:
     if 'L1' in self.attrs and self.attrs['L1'] > 0:
       self.constraints += T.constant(self.attrs['L1'], name="L1", dtype='floatX') * abs(param).sum()
     if 'L2' in self.attrs and self.attrs['L2'] > 0:
       self.constraints += T.constant(self.attrs['L2'], name="L2", dtype='floatX') * (param**2).sum()
     if self.attrs.get('L2_eye', 0) > 0:
       L2_eye = T.constant(self.attrs['L2_eye'], name="L2_eye", dtype='floatX')
       if param.ndim == 2:
         eye = tiled_eye(param.shape[0], param.shape[1], dtype=param.dtype)
         self.constraints += L2_eye * ((param - eye)**2).sum()
       else:  # standard L2
         self.constraints += L2_eye * (param**2).sum()
     if 'varreg' in self.attrs and self.attrs['varreg'] > 0:
       self.constraints += self.attrs['varreg'] * (1.0 * T.sqrt(T.var(param)) - 1.0 / numpy.sum(param.get_value().shape))**2
   return param
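
A side note on the L2_eye branch: for a square 2D parameter the penalty pulls the matrix towards the identity. Below is a hedged standalone sketch using T.eye as a simplified stand-in for the tiled_eye helper (which also handles non-square shapes); W and the 0.01 weight are made up for illustration.

import numpy
import theano
import theano.tensor as T

floatX = theano.config.floatX
W = theano.shared(numpy.random.randn(4, 4).astype(floatX), name='W')  # hypothetical square parameter

L2_eye = T.constant(0.01, dtype=floatX)
# T.eye as a simplified stand-in for the tiled_eye helper used above.
eye = T.eye(W.shape[0], W.shape[1], dtype=W.dtype)
penalty = L2_eye * ((W - eye) ** 2).sum()

print(theano.function([], penalty)())  # small when W is close to the identity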
Example #3
 def add_param(self, param, name="", constraints=True):
   """
   :type param: theano.SharedVariable
   :type name: str
   :rtype: theano.SharedVariable
   """
   param = super(Layer, self).add_param(param, name)
   if constraints:
     if 'L1' in self.attrs and self.attrs['L1'] > 0:
       self.constraints += T.constant(self.attrs['L1'], name="L1", dtype='floatX') * abs(param).sum()
     if 'L2' in self.attrs and self.attrs['L2'] > 0:
       self.constraints += T.constant(self.attrs['L2'], name="L2", dtype='floatX') * (param**2).sum()
     if self.attrs.get('L2_eye', 0) > 0:
       L2_eye = T.constant(self.attrs['L2_eye'], name="L2_eye", dtype='floatX')
       if param.ndim == 2:
         eye = tiled_eye(param.shape[0], param.shape[1], dtype=param.dtype)
         self.constraints += L2_eye * ((param - eye)**2).sum()
       else:  # standard L2
         self.constraints += L2_eye * (param**2).sum()
     if 'varreg' in self.attrs and self.attrs['varreg'] > 0:
       self.constraints += self.attrs['varreg'] * (1.0 * T.sqrt(T.var(param)) - 1.0 / numpy.sum(param.get_value().shape))**2
   return param
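
In all three variants, add_param only accumulates penalty terms on self.constraints. The sketch below shows how such a constraints term would typically be folded into the training objective; loss, x, W and the learning rate are hypothetical placeholders, not part of the Layer class above.

import numpy
import theano
import theano.tensor as T

floatX = theano.config.floatX
W = theano.shared(numpy.ones((2, 2), dtype=floatX), name='W')  # hypothetical parameter
x = T.matrix('x')

loss = (T.dot(x, W) ** 2).sum()                                # hypothetical task loss
constraints = T.constant(0.01, dtype=floatX) * (W ** 2).sum()  # e.g. an L2 term as above
total = loss + constraints                                     # constraints enter the objective additively

grad_W = T.grad(total, W)
lr = numpy.asarray(0.1, dtype=floatX)
train = theano.function([x], total, updates=[(W, W - lr * grad_W)])  # plain SGD step
print(train(numpy.ones((1, 2), dtype=floatX)))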