Example #1
def test_return_non_default_batch_norm_params_keras_override(self):
    conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
        }
      }
      initializer {
        truncated_normal_initializer {
        }
      }
      batch_norm {
        decay: 0.7
        center: false
        scale: true
        epsilon: 0.03
      }
    """
    conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
    text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
    keras_config = hyperparams_builder.KerasLayerHyperparams(
        conv_hyperparams_proto)

    self.assertTrue(keras_config.use_batch_norm())
    batch_norm_params = keras_config.batch_norm_params(momentum=0.4)
    self.assertAlmostEqual(batch_norm_params['momentum'], 0.4)
    self.assertAlmostEqual(batch_norm_params['epsilon'], 0.03)
    self.assertFalse(batch_norm_params['center'])
    self.assertTrue(batch_norm_params['scale'])
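
As the assertions show, keyword arguments passed to batch_norm_params() override the values parsed from the proto (momentum 0.4 wins over the configured decay of 0.7). A minimal sketch of how the resulting dict could be consumed, assuming it contains only Keras-compatible keys; this is an illustration, not part of the original test:

# Sketch only: momentum/epsilon/center/scale are all valid
# tf.keras.layers.BatchNormalization keyword arguments.
batch_norm_layer = tf.keras.layers.BatchNormalization(
    **keras_config.batch_norm_params(momentum=0.4))
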
Example #2
def test_use_relu_6_activation_keras(self):
    conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
        }
      }
      initializer {
        truncated_normal_initializer {
        }
      }
      activation: RELU_6
    """
    conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
    text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
    keras_config = hyperparams_builder.KerasLayerHyperparams(
        conv_hyperparams_proto)
    self.assertEqual(keras_config.params()['activation'], tf.nn.relu6)
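
Because params() returns plain keyword arguments (here including activation=tf.nn.relu6, with kernel_initializer and kernel_regularizer appearing in the other examples), a typical way to consume the config is to splat it into a layer constructor. A minimal sketch, assuming params() yields only arguments accepted by tf.keras.layers.Conv2D:

# Sketch only: filters/kernel_size/padding are arbitrary illustrative values.
conv = tf.keras.layers.Conv2D(filters=32, kernel_size=3, padding='same',
                              **keras_config.params())
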
Example #3
def test_do_not_use_batch_norm_if_default_keras(self):
    conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
        }
      }
      initializer {
        truncated_normal_initializer {
        }
      }
    """
    conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
    text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
    keras_config = hyperparams_builder.KerasLayerHyperparams(
        conv_hyperparams_proto)
    self.assertFalse(keras_config.use_batch_norm())
    self.assertEqual(keras_config.batch_norm_params(), {})
Example #4
def test_variance_in_range_with_random_normal_initializer_keras(self):
    conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
        }
      }
      initializer {
        random_normal_initializer {
          mean: 0.0
          stddev: 0.8
        }
      }
    """
    conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
    text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
    keras_config = hyperparams_builder.KerasLayerHyperparams(
        conv_hyperparams_proto)
    initializer = keras_config.params()['kernel_initializer']
    self._assert_variance_in_range(initializer,
                                   shape=[100, 40],
                                   variance=0.64,
                                   tol=1e-1)
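
The helper _assert_variance_in_range is referenced here (and again in Example #6) but not defined in these snippets. A hypothetical sketch of such a helper, as an assumption rather than the original implementation: it samples the Keras initializer once and checks that the empirical variance is within tol of the expected value, which for a random normal initializer with stddev 0.8 is 0.8 ** 2 = 0.64.

def _assert_variance_in_range(self, initializer, shape, variance, tol=1e-2):
    # Hypothetical helper: draw one sample from the initializer and compare
    # the empirical variance of its values against the expected variance.
    with self.test_session() as sess:
        values = sess.run(initializer(shape, dtype=tf.float32))
    self.assertAllClose(np.var(values), variance, rtol=0.0, atol=tol)
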
Example #5
def test_return_l2_regularizer_weights_keras(self):
    conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
          weight: 0.42
        }
      }
      initializer {
        truncated_normal_initializer {
        }
      }
    """
    conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
    text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
    keras_config = hyperparams_builder.KerasLayerHyperparams(
        conv_hyperparams_proto)

    regularizer = keras_config.params()['kernel_regularizer']
    weights = np.array([1., -1, 4., 2.])
    with self.test_session() as sess:
        result = sess.run(regularizer(tf.constant(weights)))
    self.assertAllClose(np.power(weights, 2).sum() / 2.0 * 0.42, result)
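
The expected value in the final assertion follows the tf.nn.l2_loss convention (sum of squares divided by two) scaled by the configured weight of 0.42; for these weights that is 0.5 * 0.42 * (1 + 1 + 16 + 4) = 4.62. Spelled out as a worked value (not part of the original test):

expected = 0.5 * 0.42 * np.sum(np.power(weights, 2))  # 0.5 * 0.42 * 22.0 = 4.62
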
Example #6
def test_variance_in_range_with_variance_scaling_initializer_uniform_keras(
        self):
    conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
        }
      }
      initializer {
        variance_scaling_initializer {
          factor: 2.0
          mode: FAN_IN
          uniform: true
        }
      }
    """
    conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
    text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
    keras_config = hyperparams_builder.KerasLayerHyperparams(
        conv_hyperparams_proto)
    initializer = keras_config.params()['kernel_initializer']
    self._assert_variance_in_range(initializer,
                                   shape=[100, 40],
                                   variance=2. / 100.)
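
With mode FAN_IN, a variance-scaling initializer targets a variance of factor / fan_in; for a [100, 40] weight matrix the fan-in is 100, hence the expected variance of 2. / 100. above. A roughly equivalent stand-alone Keras initializer, offered as an illustration and an assumption about what the builder produces rather than its actual output:

# Sketch only: uniform variance scaling with scale=2.0 and fan-in mode.
equivalent_initializer = tf.keras.initializers.VarianceScaling(
    scale=2.0, mode='fan_in', distribution='uniform')
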