Example #1
 def __init__(self,
              axis=-1,
              momentum=0.99,
              epsilon=1e-3,
              center=True,
              scale=True,
              beta_initializer=ZerosInitializerConfig(),
              gamma_initializer=OnesInitializerConfig(),
              moving_mean_initializer=ZerosInitializerConfig(),
              moving_variance_initializer=OnesInitializerConfig(),
              beta_regularizer=None,
              gamma_regularizer=None,
              beta_constraint=None,
              gamma_constraint=None,
              **kwargs):
     super(BatchNormalizationConfig, self).__init__(**kwargs)
     self.axis = axis
     self.momentum = momentum
     self.epsilon = epsilon
     self.center = center
     self.scale = scale
     self.beta_initializer = beta_initializer
     self.gamma_initializer = gamma_initializer
     self.moving_mean_initializer = moving_mean_initializer
     self.moving_variance_initializer = moving_variance_initializer
     self.beta_regularizer = beta_regularizer
     self.gamma_regularizer = gamma_regularizer
     self.beta_constraint = beta_constraint
     self.gamma_constraint = gamma_constraint
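Every parameter of this constructor has a default, so the config can be built with no arguments and individual fields overridden by keyword. A minimal usage sketch; the import path is an assumption, adjust it to wherever BatchNormalizationConfig actually lives:

 # hypothetical import path -- adjust to the real module layout
 from project.layers.normalizations import BatchNormalizationConfig

 # a bare call takes all the defaults above; override only what differs
 bn = BatchNormalizationConfig(momentum=0.9, epsilon=1e-5)
 assert bn.center and bn.scale  # untouched defaults survive
 assert bn.momentum == 0.9      # the override is stored verbatim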
Example #2
 def assert_separable_conv(conv_class, dim):
     config_dict = {
         'filters': 30,
         'kernel_size': 10,
         'strides': [1, 1] if dim == 2 else [1, 1, 1],
         'padding': 'valid',
         'data_format': None,
         'depth_multiplier': 1,
         'activation': None,
         'use_bias': True,
         'depthwise_initializer': GlorotNormalInitializerConfig().to_schema(),
         'pointwise_initializer': GlorotNormalInitializerConfig().to_schema(),
         'bias_initializer': ZerosInitializerConfig().to_schema(),
         'depthwise_regularizer': None,
         'pointwise_regularizer': None,
         'bias_regularizer': None,
         'activity_regularizer': None,
         'depthwise_constraint': None,
         'pointwise_constraint': None,
         'bias_constraint': None
     }
     config = conv_class.from_dict(config_dict)
     assert_equal_layers(config, config_dict)
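Every test in this listing leans on an assert_equal_layers helper that is not shown. A plausible sketch, under the assumption that from_dict maps each dict key onto a config attribute of the same name:

 def assert_equal_layers(config, config_dict):
     # assumption: each key corresponds 1:1 to an attribute set in
     # __init__ (or consumed by the base config class via **kwargs)
     for key, expected in config_dict.items():
         assert getattr(config, key) == expected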
Example #3
 def __init__(self,
              filters,
              kernel_size,
              strides=(1, 1),
              padding='valid',
              data_format=None,
              activation=None,
              use_bias=True,
              kernel_initializer=GlorotUniformInitializerConfig(),
              bias_initializer=ZerosInitializerConfig(),
              kernel_regularizer=None,
              bias_regularizer=None,
              activity_regularizer=None,
              kernel_constraint=None,
              bias_constraint=None,
              **kwargs):
     super(LocallyConnected2DConfig, self).__init__(**kwargs)
     self.filters = filters
     self.kernel_size = kernel_size
     self.strides = strides
     self.padding = padding
     self.data_format = data_format
     self.activation = activation
     self.use_bias = use_bias
     self.kernel_initializer = kernel_initializer
     self.bias_initializer = bias_initializer
     self.kernel_regularizer = kernel_regularizer
     self.bias_regularizer = bias_regularizer
     self.activity_regularizer = activity_regularizer
     self.kernel_constraint = kernel_constraint
     self.bias_constraint = bias_constraint
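Unlike Example #1, filters and kernel_size are required here; everything else falls back to the defaults shown. A construction sketch with an assumed import path:

 # hypothetical import path
 from project.layers.local import LocallyConnected2DConfig

 cfg = LocallyConnected2DConfig(filters=32, kernel_size=(3, 3), strides=(2, 2))
 assert cfg.padding == 'valid'  # default kept when not overridden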
Example #4
 def test_conv_lstm_2d_config(self):
     config_dict = {
         'filters': 20,
         'kernel_size': 3,
         'strides': [1, 1],
         'padding': 'valid',
         'data_format': None,
         'dilation_rate': [1, 1],
         'activation': 'tanh',
         'recurrent_activation': 'hard_sigmoid',
         'use_bias': True,
         'kernel_initializer': GlorotNormalInitializerConfig().to_schema(),
         'recurrent_initializer': OrthogonalInitializerConfig().to_schema(),
         'bias_initializer': ZerosInitializerConfig().to_schema(),
         'unit_forget_bias': True,
         'kernel_regularizer': None,
         'recurrent_regularizer': None,
         'bias_regularizer': None,
         'activity_regularizer': None,
         'kernel_constraint': None,
         'recurrent_constraint': None,
         'bias_constraint': None,
         'return_sequences': False,
         'go_backwards': False,
         'stateful': False,
         'dropout': 0.,
         'recurrent_dropout': 0.
     }
     config = ConvLSTM2DConfig.from_dict(config_dict)
     assert_equal_layers(config, config_dict)
Example #5
 def __init__(self,
              units,
              activation='tanh',
              recurrent_activation='hard_sigmoid',
              use_bias=True,
              kernel_initializer=GlorotUniformInitializerConfig(),
              recurrent_initializer=OrthogonalInitializerConfig(),
              bias_initializer=ZerosInitializerConfig(),
              kernel_regularizer=None,
              recurrent_regularizer=None,
              bias_regularizer=None,
              activity_regularizer=None,
              kernel_constraint=None,
              recurrent_constraint=None,
              bias_constraint=None,
              dropout=0.,
              recurrent_dropout=0.,
              **kwargs):
     super(GRUConfig, self).__init__(**kwargs)
     self.units = units
     self.activation = activation
     self.recurrent_activation = recurrent_activation
     self.use_bias = use_bias
     self.kernel_initializer = kernel_initializer
     self.recurrent_initializer = recurrent_initializer
     self.bias_initializer = bias_initializer
     self.kernel_regularizer = kernel_regularizer
     self.recurrent_regularizer = recurrent_regularizer
     self.bias_regularizer = bias_regularizer
     self.activity_regularizer = activity_regularizer
     self.kernel_constraint = kernel_constraint
     self.recurrent_constraint = recurrent_constraint
     self.bias_constraint = bias_constraint
     self.dropout = dropout
     self.recurrent_dropout = recurrent_dropout
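Here units is the only required argument; the tanh/hard_sigmoid activations and the orthogonal recurrent initializer mirror the classic Keras GRU signature. A construction sketch with an assumed import path:

 # hypothetical import path
 from project.layers.recurrent import GRUConfig

 gru = GRUConfig(units=64, dropout=0.2, recurrent_dropout=0.2)
 assert gru.recurrent_activation == 'hard_sigmoid'  # default kept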
Example #6
 def test_prelu_config(self):
     config_dict = {
         'alpha_initializer': ZerosInitializerConfig().to_schema(),
         'alpha_regularizer': None,
         'alpha_constraint': None,
         'shared_axes': None
     }
     config = PReLUConfig.from_dict(config_dict)
     assert_equal_layers(config, config_dict)
Example #7
 def test_batch_normalization_config(self):
     config_dict = {
         'axis': -1,
         'momentum': 0.99,
         'epsilon': 1e-3,
         'center': True,
         'scale': True,
         'beta_initializer': ZerosInitializerConfig().to_schema(),
         'gamma_initializer': OnesInitializerConfig().to_schema(),
         'moving_mean_initializer': ZerosInitializerConfig().to_schema(),
         'moving_variance_initializer': OnesInitializerConfig().to_schema(),
         'beta_regularizer': None,
         'gamma_regularizer': None,
         'beta_constraint': None,
         'gamma_constraint': None,
     }
     config = BatchNormalizationConfig.from_dict(config_dict)
     assert_equal_layers(config, config_dict)
Example #8
 def __init__(self,
              alpha_initializer=ZerosInitializerConfig(),
              alpha_regularizer=None,
              alpha_constraint=None,
              shared_axes=None,
              **kwargs):
     super(PReLUConfig, self).__init__(**kwargs)
     self.alpha_initializer = alpha_initializer
     self.alpha_regularizer = alpha_regularizer
     self.alpha_constraint = alpha_constraint
     self.shared_axes = shared_axes
Example #9
 def test_dense_config(self):
     config_dict = {
         'units': 12,
         'activation': 'elu',
         'use_bias': True,
         'kernel_initializer': GlorotNormalInitializerConfig().to_schema(),
         'bias_initializer': ZerosInitializerConfig().to_schema(),
         'kernel_regularizer': None,
         'bias_regularizer': None,
         'activity_regularizer': None,
         'kernel_constraint': None,
         'bias_constraint': None,
         'inbound_nodes': [['layer_1', 0, 1], ['layer_2', 1, 1]]
     }
     config = DenseConfig.from_dict(config_dict)
     assert_equal_layers(config, config_dict)
Example #10
 def test_simple_rnn_config(self):
     config_dict = {
         'units': 3,
         'activation': 'tanh',
         'use_bias': True,
         'kernel_initializer': GlorotUniformInitializerConfig().to_schema(),
         'recurrent_initializer': OrthogonalInitializerConfig().to_schema(),
         'bias_initializer': ZerosInitializerConfig().to_schema(),
         'kernel_regularizer': None,
         'recurrent_regularizer': None,
         'bias_regularizer': None,
         'activity_regularizer': None,
         'kernel_constraint': None,
         'recurrent_constraint': None,
         'bias_constraint': None,
         'dropout': 0.,
         'recurrent_dropout': 0.,
     }
     config = SimpleRNNConfig.from_dict(config_dict)
     assert_equal_layers(config, config_dict)
Example #11
 def assert_local_config(local_class, dim):
     config_dict = {
         'filters': 20,
         'kernel_size': 3,
         'strides': 1 if dim == 1 else [1, 1],
         'padding': 'valid',
         'data_format': None,
         'activation': None,
         'use_bias': True,
         'kernel_initializer': GlorotUniformInitializerConfig().to_schema(),
         'bias_initializer': ZerosInitializerConfig().to_schema(),
         'kernel_regularizer': None,
         'bias_regularizer': None,
         'activity_regularizer': None,
         'kernel_constraint': None,
         'bias_constraint': None,
     }
     if dim > 1:
         config_dict['data_format'] = None
     config = local_class.from_dict(config_dict)
     assert_equal_layers(config, config_dict)
Example #12
 def assert_conv_config(conv_class, dim):
     config_dict = {
         'filters': 30,
         'kernel_size': 3,
         'strides': 1 if dim == 1 else [1, 1] if dim == 2 else [1, 1, 1],
         'padding': 'valid',
         'activation': 'relu',
         'dilation_rate': 1 if dim == 1 else [1, 1] if dim == 2 else [1, 1, 1],
         'use_bias': True,
         'kernel_initializer': GlorotNormalInitializerConfig().to_schema(),
         'bias_initializer': ZerosInitializerConfig().to_schema(),
         'kernel_regularizer': L1L2RegularizerConfig().to_schema(),
         'bias_regularizer': None,
         'activity_regularizer': L1RegularizerConfig().to_schema(),
         'kernel_constraint': MaxNormConfig().to_schema(),
         'bias_constraint': None,
         'inbound_nodes': [['layer_1', 0, 1], ['layer_2', 1, 1]]
     }
     if dim > 1:
         config_dict['data_format'] = None
     config = conv_class.from_dict(config_dict)
     assert_equal_layers(config, config_dict)
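A caller would exercise this helper once per supported dimensionality, which is what the three-way strides/dilation_rate branching above anticipates. The concrete class names below are assumptions; substitute the project's actual 1D/2D/3D conv configs:

 def test_conv_configs():
     # hypothetical class names
     assert_conv_config(Conv1DConfig, dim=1)
     assert_conv_config(Conv2DConfig, dim=2)
     assert_conv_config(Conv3DConfig, dim=3)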
Example #13
 def __init__(self,
              units,
              activation=None,
              use_bias=True,
              kernel_initializer=GlorotNormalInitializerConfig(),
              bias_initializer=ZerosInitializerConfig(),
              kernel_regularizer=None,
              bias_regularizer=None,
              activity_regularizer=None,
              kernel_constraint=None,
              bias_constraint=None,
              **kwargs):
     super(DenseConfig, self).__init__(**kwargs)
     self.units = units
     self.activation = activation
     self.use_bias = use_bias
     self.kernel_initializer = kernel_initializer
     self.bias_initializer = bias_initializer
     self.kernel_regularizer = kernel_regularizer
     self.bias_regularizer = bias_regularizer
     self.activity_regularizer = activity_regularizer
     self.kernel_constraint = kernel_constraint
     self.bias_constraint = bias_constraint
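The default kernel_initializer here is GlorotNormalInitializerConfig, which is exactly what the dict in Example #9 supplies explicitly. A direct-construction sketch with an assumed import path:

 # hypothetical import path
 from project.layers.core import DenseConfig

 dense = DenseConfig(units=12, activation='elu')
 assert dense.use_bias                   # default
 assert dense.kernel_constraint is None  # default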