Example 1: exercising optimizer_v1.Adam in a model-training test, including the decay and amsgrad variants.
  def test_adam(self):
    with self.cached_session():
      self._test_optimizer(optimizer_v1.Adam())
      # Accuracy seems dependent on the seed initialization.
      # TODO(b/121051441): fix test flakiness.
      self._test_optimizer(optimizer_v1.Adam(decay=1e-3), target=0.73)
      self._test_optimizer(optimizer_v1.Adam(amsgrad=True))
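The `_test_optimizer` helper above is private to the Keras test class. A minimal standalone sketch of the same idea follows; the synthetic two-blob dataset, model shape, and the `check_optimizer` name are illustrative assumptions, not the real helper.

import numpy as np
import tensorflow as tf

def check_optimizer(optimizer, target=0.75):
  # Two well-separated Gaussian blobs: any reasonable optimizer
  # should exceed `target` accuracy within a few epochs.
  np.random.seed(1337)
  x = np.concatenate([np.random.normal(-1.0, 0.5, (100, 3)),
                      np.random.normal(1.0, 0.5, (100, 3))])
  y = tf.keras.utils.to_categorical([0] * 100 + [1] * 100, 2)
  model = tf.keras.Sequential([
      tf.keras.layers.Dense(5, activation='relu', input_shape=(3,)),
      tf.keras.layers.Dense(2, activation='softmax'),
  ])
  model.compile(optimizer=optimizer,
                loss='categorical_crossentropy',
                metrics=['accuracy'])
  history = model.fit(x, y, epochs=10, batch_size=16, verbose=0)
  assert history.history['accuracy'][-1] >= target

check_optimizer(tf.keras.optimizers.Adam())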
Example 2: transferring weights from a v1 Adam optimizer to a v2 Adam optimizer before any minimize step.
  def testSetWeightsFromV1AdamWithoutMinimize(self):
    keras_v1_adam = optimizer_v1.Adam()
    keras_v2_adam = adam.Adam()
    keras_v2_adam.set_weights(keras_v1_adam.get_weights())
    keras_v1_iteration = keras_v1_adam.iterations
    keras_v2_iteration = keras_v2_adam.iterations
    self.evaluate(tf.compat.v1.global_variables_initializer())
    self.assertEqual(
        self.evaluate(keras_v1_iteration), self.evaluate(keras_v2_iteration))
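Since neither optimizer has taken a minimize step, the weight transfer here is nearly a no-op; the assertion pins down that after `set_weights` the v2 optimizer's iteration counter still matches the v1 counter, i.e. that accepting a v1 weight list does not desynchronize the step count.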
Example 3: saving and reloading a model compiled with optimizer_v1.Adam, including into pre-created h5py files.
  def test_model_saving_to_pre_created_h5py_file(self):
    saved_model_dir = self._save_model_dir()
    save_format = test_utils.get_save_format()
    with tf.Graph().as_default(), self.cached_session():
      inputs = keras.Input(shape=(3,))
      x = keras.layers.Dense(2)(inputs)
      outputs = keras.layers.Dense(3)(x)

      model = keras.Model(inputs, outputs)
      model.compile(
          loss=keras.losses.MSE,
          optimizer=optimizer_v1.Adam(),
          metrics=[
              keras.metrics.categorical_accuracy,
              keras.metrics.CategoricalAccuracy()
          ])
      x = np.random.random((1, 3))
      y = np.random.random((1, 3))
      model.train_on_batch(x, y)

      out = model.predict(x)

      keras.models.save_model(model, saved_model_dir, save_format=save_format)
      loaded_model = keras.models.load_model(saved_model_dir)
      out1 = loaded_model.predict(x)
      self.assertAllClose(out, out1, atol=1e-05)
      if save_format in ['tf', 'tensorflow']:
        return

      # Test h5 format specifically
      fd, fname = tempfile.mkstemp('.h5')
      with h5py.File(fname, mode='r+') as h5file:
        keras.models.save_model(model, h5file)
        loaded_model = keras.models.load_model(h5file)
        out2 = loaded_model.predict(x)
      self.assertAllClose(out, out2, atol=1e-05)

      # Test non-default options in h5
      with h5py.File(
          '_', driver='core', mode='w', backing_store=False) as h5file:
        keras.models.save_model(model, h5file)
        loaded_model = keras.models.load_model(h5file)
        out2 = loaded_model.predict(x)
      self.assertAllClose(out, out2, atol=1e-05)

      # Cleanup
      os.close(fd)
      os.remove(fname)
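A design note on the last h5py case: `driver='core'` with `backing_store=False` keeps the whole HDF5 file in memory and never flushes it to disk, so the filename ('_') is only a placeholder. Together the two h5py blocks verify that `save_model`/`load_model` accept an already-open `h5py.File` handle regardless of where it is backed.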
Example 4: numeric equivalence between the v1 and v2 Adam implementations with amsgrad enabled.
  def testNumericEquivalenceForAmsgrad(self):
    if tf.executing_eagerly():
      self.skipTest(
          'v1 optimizer does not run in eager mode')
    np.random.seed(1331)
    with test_utils.use_gpu():
      train_samples = 20
      input_dim = 3
      num_classes = 2
      (x, y), _ = test_utils.get_test_data(
          train_samples=train_samples,
          test_samples=10,
          input_shape=(input_dim,),
          num_classes=num_classes)
      y = np_utils.to_categorical(y)

      num_hidden = 5
      model_k_v1 = test_utils.get_small_sequential_mlp(
          num_hidden=num_hidden, num_classes=num_classes, input_dim=input_dim)
      model_k_v2 = test_utils.get_small_sequential_mlp(
          num_hidden=num_hidden, num_classes=num_classes, input_dim=input_dim)
      model_k_v2.set_weights(model_k_v1.get_weights())

      opt_k_v1 = optimizer_v1.Adam(amsgrad=True)
      opt_k_v2 = adam.Adam(amsgrad=True)

      model_k_v1.compile(
          opt_k_v1,
          loss='categorical_crossentropy',
          metrics=[],
          run_eagerly=test_utils.should_run_eagerly())
      model_k_v2.compile(
          opt_k_v2,
          loss='categorical_crossentropy',
          metrics=[],
          run_eagerly=test_utils.should_run_eagerly())

      hist_k_v1 = model_k_v1.fit(x, y, batch_size=5, epochs=10, shuffle=False)
      hist_k_v2 = model_k_v2.fit(x, y, batch_size=5, epochs=10, shuffle=False)

      self.assertAllClose(model_k_v1.get_weights(), model_k_v2.get_weights())
      self.assertAllClose(opt_k_v1.get_weights(), opt_k_v2.get_weights())
      self.assertAllClose(hist_k_v1.history['loss'], hist_k_v2.history['loss'])
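The equivalence check works because the run is deterministic: the numpy seed is fixed, both models start from identical weights via `set_weights`, and `shuffle=False` keeps the batch order the same, so both optimizers see the same gradient sequence and `assertAllClose` can compare the final weights, optimizer slots, and loss curves directly.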
Example 5: negative clipvalue or clipnorm arguments raise a ValueError.
  def test_negative_clipvalue_or_clipnorm(self):
    with self.assertRaises(ValueError):
      _ = optimizer_v1.SGD(lr=0.01, clipvalue=-0.5)
    with self.assertRaises(ValueError):
      _ = optimizer_v1.Adam(clipnorm=-2.0)
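For contrast, a minimal sketch of the valid, positive forms of these arguments (assuming the same `optimizer_v1` import used throughout these tests):

# clipvalue clips each gradient element into [-0.5, 0.5];
# clipnorm rescales any gradient whose L2 norm exceeds 2.0.
sgd = optimizer_v1.SGD(lr=0.01, clipvalue=0.5)
adam_clipped = optimizer_v1.Adam(clipnorm=2.0)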
Example 6: compatibility check between optimizer_v1.Adam and its v2 counterpart.
  def testAdamCompatibility(self):
    opt_v1 = optimizer_v1.Adam()
    opt_v2 = adam.Adam()
    self._testOptimizersCompatibility(opt_v1, opt_v2)
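As a rough migration sketch (based on the public TF2 API, not part of the test suite), the v2 counterpart used throughout these examples is constructed as below, with the v1 `lr` argument renamed to `learning_rate`:

import tensorflow as tf

# v1: optimizer_v1.Adam(lr=0.001, amsgrad=True)
opt_v2 = tf.keras.optimizers.Adam(learning_rate=0.001, amsgrad=True)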