Code Example #1
File: convlayers.py Project: evander-dacosta/nuronet
    def __init__(self,
                 rank,
                 filters,
                 kernel_size,
                 strides=1,
                 padding='valid',
                 dilation_rate=1,
                 kernel_factory="xavier_uniform",
                 bias_factory='zeros',
                 activation="linear",
                 kernel_regulariser=None,
                 bias_regulariser=None,
                 **kwargs):
        self.rank = rank
        self.filters = filters
        # Scalar or tuple arguments are normalised to length-`rank` tuples,
        # e.g. kernel_size=3 with rank=2 becomes (3, 3).
        self.kernel_size = normalize_tuple(kernel_size, rank, 'kernel_size')
        self.strides = normalize_tuple(strides, rank, 'strides')
        self.padding = normalize_padding(padding)
        self.dilation_rate = normalize_tuple(dilation_rate, rank,
                                             'dilation_rate')
        # String identifiers are resolved to activation, weight-factory
        # and regulariser objects.
        self.activation = get_activation(activation)
        self.kernel_factory = get_weightfactory(kernel_factory)
        self.kernel_regulariser = get_regulariser(kernel_regulariser)
        self.bias_factory = get_weightfactory(bias_factory)
        self.bias_regulariser = get_regulariser(bias_regulariser)
        super(Conv, self).__init__(**kwargs)
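Based on the constructor signature above, a Conv layer could be built roughly as follows. This is a hypothetical usage sketch, not code from the project: the import path is an assumption, as is constructing Conv directly with an explicit rank rather than through a rank-specific subclass; only identifiers that appear as defaults in the signature are used.

    # Hypothetical usage sketch; the import path is an assumption.
    from nuronet import Conv

    conv = Conv(rank=2,                  # 2-D convolution
                filters=64,              # number of output feature maps
                kernel_size=3,           # normalised to (3, 3) by normalize_tuple
                strides=1,               # normalised to (1, 1)
                padding='valid',
                activation='linear',
                kernel_factory='xavier_uniform')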
Code Example #2
    def __init__(self, n, activation="tanh", h_activation="hard_sigmoid",
                 weight_factory="xavier_uniform", h_factory="orthogonal",
                 w_regulariser=None, h_regulariser=None, b_regulariser=None,
                 w_dropout=0., h_dropout=0., **kwargs):
        self.n = n
        # Resolve string identifiers to weight-factory, activation and
        # regulariser objects.
        self.w_factory = get_weightfactory(weight_factory)
        self.h_factory = get_weightfactory(h_factory)
        self.activation = get_activation(activation)
        self.h_activation = get_activation(h_activation)
        self.w_regulariser = get_regulariser(w_regulariser)
        self.b_regulariser = get_regulariser(b_regulariser)
        self.h_regulariser = get_regulariser(h_regulariser)
        # Clip the dropout fractions into [0, 1].
        self.dropout_w = min(1., max(0., w_dropout))
        self.dropout_h = min(1., max(0., h_dropout))

        # Two recurrent state tensors, each of shape (batch_size, n).
        self.state_spec = [InputDetail(shape=(None, self.n)),
                           InputDetail(shape=(None, self.n))]
        super(GRULayer, self).__init__(**kwargs)
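A GRULayer could be constructed in the same style. Again a hypothetical sketch: the import path is assumed, and only identifiers that appear as defaults in the signature above are used.

    # Hypothetical usage sketch; the import path is an assumption.
    from nuronet import GRULayer

    gru = GRULayer(n=128,                    # number of units
                   activation='tanh',
                   h_activation='hard_sigmoid',
                   weight_factory='xavier_uniform',
                   h_factory='orthogonal',
                   w_dropout=0.2,            # clipped into [0, 1] by the constructor
                   h_dropout=0.2)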
Code Example #3
    def __init__(self, n, weight_factory='xavier_uniform',
                 h_factory='orthogonal', activation='tanh',
                 w_regulariser=None, h_regulariser=None, b_regulariser=None,
                 w_dropout=0., h_dropout=0., **kwargs):
        self.n = n
        self.w_factory = get_weightfactory(weight_factory)
        self.h_factory = get_weightfactory(h_factory)
        self.activation = get_activation(activation)
        self.w_regulariser = get_regulariser(w_regulariser)
        self.b_regulariser = get_regulariser(b_regulariser)
        self.h_regulariser = get_regulariser(h_regulariser)
        self.dropout_w = min(1., max(0., w_dropout))
        self.dropout_h = min(1., max(0., h_dropout))
        # A single recurrent state tensor of shape (batch_size, n).
        self.state_spec = InputDetail(shape=(None, self.n))
        Recurrent.__init__(self, **kwargs)
Code Example #4
File: convlayers.py Project: evander-dacosta/nuronet
    def __init__(self,
                 n,
                 weight_factory='xavier_uniform',
                 activation='linear',
                 weights=None,
                 w_regulariser=None,
                 b_regulariser=None,
                 input_shape=None,
                 **kwargs):
        self.weightFactory = get_weightfactory(weight_factory)
        self.activation = get_activation(activation)
        self.w_regulariser = get_regulariser(w_regulariser)
        self.b_regulariser = get_regulariser(b_regulariser)

        # Record the input dimensionality when an explicit input_shape is given.
        if input_shape is not None:
            self.input_dim = input_shape[1]
        else:
            self.input_dim = None

        self.n = n
        # Forward input_shape to the base Layer through kwargs.
        if input_shape is not None:
            kwargs['input_shape'] = input_shape
        Layer.__init__(self, **kwargs)
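The class that defines this constructor is not named in the snippet; the sketch below uses DenseLayer purely as a placeholder name, and the import path is likewise an assumption.

    # Hypothetical usage sketch; DenseLayer is a placeholder name for the class
    # that defines this constructor, and the import path is an assumption.
    from nuronet import DenseLayer

    dense = DenseLayer(n=10,                    # output dimensionality
                       weight_factory='xavier_uniform',
                       activation='linear',
                       input_shape=(None, 784)) # self.input_dim becomes input_shape[1] == 784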
Code Example #5
File: layer.py Project: evander-dacosta/nuronet
    def __init__(self, activation, **kwargs):
        # Resolve the activation identifier; everything else is left to the base Layer.
        self.activation = get_activation(activation)
        Layer.__init__(self, **kwargs)
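All five constructors follow the same pattern: string identifiers (or ready-made callables) are resolved into concrete activation, weight-factory and regulariser objects by get_activation, get_weightfactory and get_regulariser before being stored on the instance. The sketch below shows how such a resolver is commonly written; it illustrates the pattern only and is not nuronet's actual implementation.

    # Illustrative sketch of an identifier resolver; not nuronet's actual code.
    import math

    _ACTIVATIONS = {
        'linear': lambda x: x,
        'tanh': math.tanh,
    }

    def get_activation(identifier):
        if callable(identifier):          # already a function: pass it through
            return identifier
        return _ACTIVATIONS[identifier]   # otherwise look up a registered name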