Example #1
def test_adam(self):
    with self.cached_session():
        self._test_optimizer(optimizer_v1.Adam())
        # Accuracy seems dependent on the seed initialization.
        # TODO(b/121051441): fix test flakiness.
        self._test_optimizer(optimizer_v1.Adam(decay=1e-3), target=0.73)
        self._test_optimizer(optimizer_v1.Adam(amsgrad=True))
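
The test drives the internal `optimizer_v1.Adam` wrapper, including its `decay` and `amsgrad` options. A minimal sketch of the same configurations against the public `tf.keras` API; the schedule-based decay below is an assumed equivalent of the v1 `decay` argument, which implements lr = lr0 / (1 + decay * step):

import tensorflow as tf

# Plain Adam and the AMSGrad variant, as in the test above.
opt_plain = tf.keras.optimizers.Adam()
opt_amsgrad = tf.keras.optimizers.Adam(amsgrad=True)

# Inverse-time decay with decay_steps=1 reproduces the v1-style
# per-step decay formula lr0 / (1 + decay_rate * step).
lr = tf.keras.optimizers.schedules.InverseTimeDecay(
    initial_learning_rate=0.001, decay_steps=1, decay_rate=1e-3)
opt_decay = tf.keras.optimizers.Adam(learning_rate=lr)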
Example #2
def test_keras_optimizer_warning(self):
  graph = ops.Graph()
  with graph.as_default(), self.session(graph):
    model = keras.models.Sequential()
    model.add(keras.layers.Dense(2, input_shape=(3,)))
    model.add(keras.layers.Dense(3))
    model.compile(loss='mse', optimizer=optimizer_v1.Adam(), metrics=['acc'])
    if not ops.executing_eagerly_outside_functions():
      model._make_train_function()
    temp_dir = self.get_temp_dir()
    prefix = os.path.join(temp_dir, 'ckpt')
    with test.mock.patch.object(logging, 'warning') as mock_log:
      model.save_weights(prefix)
      self.assertRegex(str(mock_log.call_args), 'Keras optimizer')
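
The key pattern here is patching `logging.warning` and asserting on the captured call. Reduced to a self-contained illustration; the warning text below is a stand-in, not the message TensorFlow actually emits:

import logging
from unittest import mock

with mock.patch.object(logging, 'warning') as mock_log:
    # Any code under test that calls logging.warning(...) is captured
    # by the mock instead of being written to the log.
    logging.warning('Keras optimizer state found')  # stand-in for save_weights
    assert 'Keras optimizer' in str(mock_log.call_args)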
Example #3
def test_model_saving_to_pre_created_h5py_file(self):
    saved_model_dir = self._save_model_dir()
    save_format = testing_utils.get_save_format()
    with ops.Graph().as_default(), self.cached_session():
        inputs = keras.Input(shape=(3,))
        x = keras.layers.Dense(2)(inputs)
        outputs = keras.layers.Dense(3)(x)

        model = keras.Model(inputs, outputs)
        model.compile(loss=keras.losses.MSE,
                      optimizer=optimizer_v1.Adam(),
                      metrics=[
                          keras.metrics.categorical_accuracy,
                          keras.metrics.CategoricalAccuracy()
                      ])
        x = np.random.random((1, 3))
        y = np.random.random((1, 3))
        model.train_on_batch(x, y)

        out = model.predict(x)

        keras.models.save_model(model,
                                saved_model_dir,
                                save_format=save_format)
        loaded_model = keras.models.load_model(saved_model_dir)
        out1 = loaded_model.predict(x)
        self.assertAllClose(out, out1, atol=1e-05)
        if save_format in ['tf', 'tensorflow']:
            return

        # Test h5 format specifically.
        fd, fname = tempfile.mkstemp('.h5')
        with h5py.File(fname, mode='r+') as h5file:
            keras.models.save_model(model, h5file)
            loaded_model = keras.models.load_model(h5file)
            out2 = loaded_model.predict(x)
        self.assertAllClose(out, out2, atol=1e-05)

        # Test non-default options in h5.
        with h5py.File('_', driver='core', mode='w',
                       backing_store=False) as h5file:
            keras.models.save_model(model, h5file)
            loaded_model = keras.models.load_model(h5file)
            out2 = loaded_model.predict(x)
        self.assertAllClose(out, out2, atol=1e-05)

        # Cleanup.
        os.close(fd)
        os.remove(fname)
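
Besides a file path, `keras.models.save_model` and `load_model` accept an already-open `h5py.File`, which the last block above uses to write into a purely in-memory file. A condensed sketch of that round trip, assuming the public `tf.keras` namespace and HDF5-based saving:

import h5py
import numpy as np
import tensorflow as tf

inputs = tf.keras.Input(shape=(3,))
outputs = tf.keras.layers.Dense(3)(tf.keras.layers.Dense(2)(inputs))
model = tf.keras.Model(inputs, outputs)
model.compile(loss='mse', optimizer='adam')

x = np.random.random((1, 3))
before = model.predict(x)

# The 'core' driver with backing_store=False keeps the HDF5 file
# entirely in memory; the name is never written to disk.
with h5py.File('unused_name.h5', driver='core', mode='w',
               backing_store=False) as f:
    tf.keras.models.save_model(model, f)
    restored = tf.keras.models.load_model(f)

np.testing.assert_allclose(before, restored.predict(x), atol=1e-5)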
Example #4
def testNumericEquivalenceForAmsgrad(self):
  if context.executing_eagerly():
    self.skipTest('v1 optimizer does not run in eager mode')
  np.random.seed(1331)
  with testing_utils.use_gpu():
    train_samples = 20
    input_dim = 3
    num_classes = 2
    (x, y), _ = testing_utils.get_test_data(
        train_samples=train_samples,
        test_samples=10,
        input_shape=(input_dim,),
        num_classes=num_classes)
    y = np_utils.to_categorical(y)

    num_hidden = 5
    model_k_v1 = testing_utils.get_small_sequential_mlp(
        num_hidden=num_hidden, num_classes=num_classes, input_dim=input_dim)
    model_k_v2 = testing_utils.get_small_sequential_mlp(
        num_hidden=num_hidden, num_classes=num_classes, input_dim=input_dim)
    model_k_v2.set_weights(model_k_v1.get_weights())

    opt_k_v1 = optimizer_v1.Adam(amsgrad=True)
    opt_k_v2 = adam.Adam(amsgrad=True)

    model_k_v1.compile(
        opt_k_v1,
        loss='categorical_crossentropy',
        metrics=[],
        run_eagerly=testing_utils.should_run_eagerly())
    model_k_v2.compile(
        opt_k_v2,
        loss='categorical_crossentropy',
        metrics=[],
        run_eagerly=testing_utils.should_run_eagerly())

    hist_k_v1 = model_k_v1.fit(x, y, batch_size=5, epochs=10, shuffle=False)
    hist_k_v2 = model_k_v2.fit(x, y, batch_size=5, epochs=10, shuffle=False)

    self.assertAllClose(model_k_v1.get_weights(), model_k_v2.get_weights())
    self.assertAllClose(opt_k_v1.get_weights(), opt_k_v2.get_weights())
    self.assertAllClose(hist_k_v1.history['loss'], hist_k_v2.history['loss'])
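
`optimizer_v1` and `testing_utils` are internal modules, but the shape of the equivalence check transfers directly: clone the initial weights, train both models with `shuffle=False`, then compare weights and loss curves. A sketch using two independent public v2 Adam instances; the model sizes and data below are placeholders mirroring the test:

import numpy as np
import tensorflow as tf

np.random.seed(1331)
tf.random.set_seed(1331)

def small_mlp():
    return tf.keras.Sequential([
        tf.keras.Input(shape=(3,)),
        tf.keras.layers.Dense(5, activation='relu'),
        tf.keras.layers.Dense(2, activation='softmax'),
    ])

model_a, model_b = small_mlp(), small_mlp()
model_b.set_weights(model_a.get_weights())  # identical starting point

for m in (model_a, model_b):
    m.compile(tf.keras.optimizers.Adam(amsgrad=True),
              loss='categorical_crossentropy')

x = np.random.random((20, 3)).astype('float32')
y = tf.keras.utils.to_categorical(np.random.randint(2, size=(20,)), 2)

hist_a = model_a.fit(x, y, batch_size=5, epochs=10, shuffle=False, verbose=0)
hist_b = model_b.fit(x, y, batch_size=5, epochs=10, shuffle=False, verbose=0)

# Identical init + identical optimizer + fixed data order should
# yield matching trajectories (up to floating-point tolerance).
for w_a, w_b in zip(model_a.get_weights(), model_b.get_weights()):
    np.testing.assert_allclose(w_a, w_b, rtol=1e-5)
np.testing.assert_allclose(hist_a.history['loss'], hist_b.history['loss'],
                           rtol=1e-5)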
Example #5
def test_negative_clipvalue_or_clipnorm(self):
    with self.assertRaises(ValueError):
        _ = optimizer_v1.SGD(lr=0.01, clipvalue=-0.5)
    with self.assertRaises(ValueError):
        _ = optimizer_v1.Adam(clipnorm=-2.0)
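
The public v2 optimizers perform the same validation: `clipvalue` and `clipnorm` must be non-negative, and a negative value is rejected with a `ValueError` at construction time (assumed behavior of current `tf.keras` releases):

import tensorflow as tf

for bad_kwargs in ({'clipvalue': -0.5}, {'clipnorm': -2.0}):
    try:
        tf.keras.optimizers.SGD(learning_rate=0.01, **bad_kwargs)
    except ValueError as err:
        # Construction fails before any training step is taken.
        print('rejected:', err)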
Example #6
def testAdamCompatibility(self):
  opt_v1 = optimizer_v1.Adam()
  opt_v2 = adam.Adam()
  self._testOptimizersCompatibility(opt_v1, opt_v2)
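
`_testOptimizersCompatibility` is a private helper of this test file whose body is not shown. A rough stand-in for the idea (hypothetical, not the helper's actual implementation) is to apply one identical gradient step through two optimizer instances and compare the updated variables:

import numpy as np
import tensorflow as tf

var_a = tf.Variable([1.0, 2.0])
var_b = tf.Variable([1.0, 2.0])
grad = tf.constant([0.1, 0.1])

# Two independently constructed Adam optimizers with default
# hyperparameters should produce the same single-step update.
tf.keras.optimizers.Adam().apply_gradients([(grad, var_a)])
tf.keras.optimizers.Adam().apply_gradients([(grad, var_b)])

np.testing.assert_allclose(var_a.numpy(), var_b.numpy())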