Example #1
    def __init__(self,
                 units,
                 activation=None,
                 use_bias=False,
                 kernel_initializer='glorot_uniform',
                 bias_initializer='zeros',
                 kernel_regularizer=None,
                 bias_regularizer=None,
                 activity_regularizer=None,
                 kernel_constraint=unit_norm(),
                 bias_constraint=None,
                 **kwargs):
        """
        Initialize like Dense.

        *****************************
        """
        # explicit call to parent constructor
        Dense.__init__(self,
                       units,
                       activation=activation,
                       use_bias=use_bias,
                       kernel_initializer=kernel_initializer,
                       bias_initializer=bias_initializer,
                       kernel_regularizer=kernel_regularizer,
                       bias_regularizer=bias_regularizer,
                       activity_regularizer=activity_regularizer,
                       kernel_constraint=kernel_constraint,
                       bias_constraint=bias_constraint,
                       **kwargs)
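
For context, a minimal self-contained version of this pattern (the class name ConstrainedDense is hypothetical; standalone keras imports are assumed):

from keras.layers import Dense
from keras.constraints import unit_norm

class ConstrainedDense(Dense):
    # Same signature as Dense, but the kernel is constrained to unit norm by default.
    def __init__(self, units, kernel_constraint=unit_norm(), **kwargs):
        Dense.__init__(self, units, kernel_constraint=kernel_constraint, **kwargs)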
Example #2
    def __init__(self,
                 alpha=1,  # NB: the check below rejects this default; callers must pass alpha < 1
                 **kwargs):
        # Weight decay (an L2 regularizer) is important because we want to keep the trace of the W matrix
        # (this layer's weight matrix, i.e. the label-flip confusion matrix) low.
        # This is because the paper proves that tr(Q*) <= tr(Q), where Q* is the Q that best represents
        # the label-flip noise.

        # Legacy Keras 1 API: output_dim/bias/init correspond to units/use_bias/kernel_initializer in Keras 2.
        Dense.__init__(self, output_dim=-1, bias=False, trainable=False, init='identity', **kwargs)
        if alpha >= 1:
            raise ValueError("OutlierNoise Layer: alpha must be < 1 "
                             "(theoretically alpha = (outliers labelled as outlier, i.e. class K+1) / (total outliers))")
        self.alpha = alpha
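
A rough Keras 2 equivalent of the frozen, identity-initialized layer above, as a sketch (FrozenIdentityDense and num_classes are hypothetical names; output_dim=-1 in the original appears to be filled in elsewhere):

from keras.layers import Dense
from keras.initializers import Identity

class FrozenIdentityDense(Dense):
    # Square, bias-free layer whose kernel starts (and, being non-trainable, stays) at the identity.
    def __init__(self, num_classes, **kwargs):
        Dense.__init__(self, num_classes,
                       use_bias=False,
                       trainable=False,
                       kernel_initializer=Identity(),
                       **kwargs)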
Example #3
    def __init__(self, alpha=1, **kwargs):  # NB: the check below rejects the default alpha=1

        Dense.__init__(self,
                       output_dim=-1,
                       bias=False,
                       trainable=False,
                       init='identity',
                       **kwargs)
        if alpha >= 1:
            raise ValueError(
                "OutlierNoise Layer: alpha must be < 1 "
                "(theoretically alpha = (outliers labelled as outlier, i.e. class K+1) / (total outliers))"
            )
        self.alpha = alpha
Example #4
    def __init__(self,
                 weight_decay=0.1,
                 W_learning_rate_multiplier=None,
                 b_learning_rate_multiplier=None,
                 **kwargs):
        # Weight decay (an L2 regularizer) is important because we want to keep the trace of the W matrix
        # (this layer's weight matrix, i.e. the label-flip confusion matrix) low.
        # This is because the paper proves that tr(Q*) <= tr(Q), where Q* is the Q that best represents
        # the label-flip noise.

        Dense.__init__(self, output_dim=-1, bias=False,
                       b_learning_rate_multiplier=b_learning_rate_multiplier,
                       # W_learning_rate_multiplier=W_learning_rate_multiplier,
                       W_regularizer=l2(weight_decay),
                       W_constraint=stochastic2(),
                       init='identity',
                       **kwargs)
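
stochastic2 is not a built-in Keras constraint. A hedged guess at its intent, given that the weights model a confusion matrix (this Stochastic class is an assumption, not the author's code): project each row of W onto the probability simplex, approximately, by clipping and renormalizing.

from keras import backend as K
from keras.constraints import Constraint

class Stochastic(Constraint):
    # Keeps W (approximately) row-stochastic: non-negative rows summing to 1,
    # as expected of a confusion matrix.
    def __call__(self, w):
        w = K.maximum(w, 0.)
        return w / (K.sum(w, axis=-1, keepdims=True) + K.epsilon())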
Example #5
    def __init__(self,
                 task_features,
                 use_task_bias,
                 use_task_gain,
                 units,
                 activation=None,
                 use_bias=False,  # default False, since it can be achieved by task-specific gains
                 kernel_initializer='glorot_uniform',
                 bias_initializer='zeros',
                 kernel_regularizer=None,
                 bias_regularizer=None,
                 activity_regularizer=None,
                 kernel_constraint=None,
                 bias_constraint=None,
                 **kwargs):
        self.current_task_bias = None
        self.current_task = None
        self.current_task_gain = None
        # Mirror Dense's own handling of the legacy input_dim kwarg.
        if 'input_shape' not in kwargs and 'input_dim' in kwargs:
            kwargs['input_shape'] = (kwargs.pop('input_dim'),)
        Dense.__init__(self,
                       units,
                       activation=activation,
                       use_bias=use_bias,
                       kernel_initializer=kernel_initializer,
                       bias_initializer=bias_initializer,
                       kernel_regularizer=kernel_regularizer,
                       bias_regularizer=bias_regularizer,
                       activity_regularizer=activity_regularizer,
                       kernel_constraint=kernel_constraint,
                       bias_constraint=bias_constraint,
                       **kwargs)
        self.task_features = task_features
        self.use_task_bias = use_task_bias
        self.use_task_gain = use_task_gain
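
The fields current_task / current_task_gain / current_task_bias suggest the layer later applies a per-task affine transform in call(). A hedged sketch of how that might look (this call() body is an assumption, not the author's implementation):

from keras.layers import Dense

class TaskDense(Dense):  # hypothetical name for the class this __init__ belongs to
    def call(self, inputs):
        outputs = Dense.call(self, inputs)
        # current_task_gain / current_task_bias would be set when switching tasks.
        if self.use_task_gain and self.current_task_gain is not None:
            outputs = outputs * self.current_task_gain   # per-task multiplicative gain
        if self.use_task_bias and self.current_task_bias is not None:
            outputs = outputs + self.current_task_bias   # per-task additive bias
        return outputs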
Example #6
    def __init__(self, units, prob=0.5, drop_bias=False, drop_noise_shape=None, **kwargs):
        DropConnect.__init__(self, prob=prob, drop_bias=drop_bias, drop_noise_shape=drop_noise_shape)
        Dense.__init__(self, units, **kwargs)

        if self.needs_drop:
            self.uses_learning_phase = True
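
DropConnect here looks like a mixin combined with Dense through multiple inheritance. A minimal sketch of what such a mixin might provide (this DropConnect class is an assumption; only needs_drop is implied by the snippet):

from keras import backend as K

class DropConnect(object):
    def __init__(self, prob=0.5, drop_bias=False, drop_noise_shape=None):
        self.prob = prob
        self.drop_bias = drop_bias
        self.drop_noise_shape = drop_noise_shape
        self.needs_drop = prob > 0.  # referenced by the subclass above

    def drop(self, w):
        # DropConnect: randomly zero individual weights at training time only.
        return K.in_train_phase(K.dropout(w, self.prob, self.drop_noise_shape), w)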