Example #1
 def test_conv_lstm_2d_config(self):
     # Build a complete ConvLSTM2D config dict, deserialize it with
     # from_dict, and check the result field-by-field against the dict.
     config_dict = {
         'filters': 20,
         'kernel_size': 3,
         'strides': [1, 1],
         'padding': 'valid',
         'data_format': None,
         'dilation_rate': [1, 1],
         'activation': 'tanh',
         'recurrent_activation': 'hard_sigmoid',
         'use_bias': True,
         'kernel_initializer': GlorotNormalInitializerConfig().to_schema(),
         'recurrent_initializer': OrthogonalInitializerConfig().to_schema(),
         'bias_initializer': ZerosInitializerConfig().to_schema(),
         'unit_forget_bias': True,
         'kernel_regularizer': None,
         'recurrent_regularizer': None,
         'bias_regularizer': None,
         'activity_regularizer': None,
         'kernel_constraint': None,
         'recurrent_constraint': None,
         'bias_constraint': None,
         'return_sequences': False,
         'go_backwards': False,
         'stateful': False,
         'dropout': 0.,
         'recurrent_dropout': 0.
     }
     config = ConvLSTM2DConfig.from_dict(config_dict)
     assert_equal_layers(config, config_dict)
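
For context, the round trip this test exercises can be reproduced outside the test class, reusing the config_dict built above. This is only a sketch: the import path is an assumption about the polyaxon-schemas package layout and may differ in your installed version.

 # Hedged sketch of the from_dict round trip; the import path is an
 # assumption, adjust it to match your polyaxon-schemas version.
 from polyaxon_schemas.layers.convolutional_recurrent import ConvLSTM2DConfig

 config = ConvLSTM2DConfig.from_dict(config_dict)  # dict -> config object
 assert config.filters == 20                       # fields become attributes
 assert config.padding == 'valid'
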
Example #2
 def __init__(self,
              units,
              activation='tanh',
              recurrent_activation='hard_sigmoid',
              use_bias=True,
              kernel_initializer=GlorotUniformInitializerConfig(),
              recurrent_initializer=OrthogonalInitializerConfig(),
              bias_initializer=ZerosInitializerConfig(),
              kernel_regularizer=None,
              recurrent_regularizer=None,
              bias_regularizer=None,
              activity_regularizer=None,
              kernel_constraint=None,
              recurrent_constraint=None,
              bias_constraint=None,
              dropout=0.,
              recurrent_dropout=0.,
              **kwargs):
     # Store the Keras-style GRU hyperparameters; the initializer
     # arguments default to config objects, not raw Keras initializers.
     super(GRUConfig, self).__init__(**kwargs)
     self.units = units
     self.activation = activation
     self.recurrent_activation = recurrent_activation
     self.use_bias = use_bias
     self.kernel_initializer = kernel_initializer
     self.recurrent_initializer = recurrent_initializer
     self.bias_initializer = bias_initializer
     self.kernel_regularizer = kernel_regularizer
     self.recurrent_regularizer = recurrent_regularizer
     self.bias_regularizer = bias_regularizer
     self.activity_regularizer = activity_regularizer
     self.kernel_constraint = kernel_constraint
     self.recurrent_constraint = recurrent_constraint
     self.bias_constraint = bias_constraint
     self.dropout = dropout
     self.recurrent_dropout = recurrent_dropout
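
Given that constructor, building a config programmatically is straightforward: only units is required, and everything else falls back to the defaults shown above. The usage sketch below assumes the import path.

 # Usage sketch: construct a GRUConfig, overriding a couple of defaults.
 # The import path is an assumption about the package layout.
 from polyaxon_schemas.layers.recurrent import GRUConfig

 gru = GRUConfig(units=64, dropout=0.2)
 print(gru.units, gru.activation, gru.dropout)  # 64 tanh 0.2
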
Example #3
 def test_simple_rnn_config(self):
     # Round-trip a SimpleRNN config dict through SimpleRNNConfig and
     # verify the deserialized config matches the original values.
     config_dict = {
         'units': 3,
         'activation': 'tanh',
         'use_bias': True,
         'kernel_initializer': GlorotUniformInitializerConfig().to_schema(),
         'recurrent_initializer': OrthogonalInitializerConfig().to_schema(),
         'bias_initializer': ZerosInitializerConfig().to_schema(),
         'kernel_regularizer': None,
         'recurrent_regularizer': None,
         'bias_regularizer': None,
         'activity_regularizer': None,
         'kernel_constraint': None,
         'recurrent_constraint': None,
         'bias_constraint': None,
         'dropout': 0.,
         'recurrent_dropout': 0.,
     }
     config = SimpleRNNConfig.from_dict(config_dict)
     assert_equal_layers(config, config_dict)
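
Both tests call an assert_equal_layers helper that is not shown on this page. A plausible minimal version, assuming the config objects expose a to_dict() method (an assumption these snippets do not confirm), could look like this:

 # Hypothetical sketch of the helper used in the tests above; the
 # project's real implementation may differ. Assumes config.to_dict().
 def assert_equal_layers(config, config_dict):
     result = config.to_dict()
     for key, expected in config_dict.items():
         assert result.get(key) == expected, (key, result.get(key), expected)
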