Exemplo n.º 1
0
def _configure_optimizer(learning_rate, opt_type='adam'):
    if opt_type == 'adadelta':
        optimizer = training.AdadeltaOptimizer(learning_rate,
                                               rho=FLAGS.adadelta_rho,
                                               epsilon=FLAGS.opt_epsilon)
    elif opt_type == 'adagrad':
        optimizer = training.AdagradOptimizer(
            learning_rate,
            initial_accumulator_value=FLAGS.adagrad_initial_accumulator_value)
    elif opt_type == 'adam':
        optimizer = training.AdamOptimizer(learning_rate, )
    elif opt_type == 'ftrl':
        optimizer = training.FtrlOptimizer(
            learning_rate,
            learning_rate_power=FLAGS.ftrl_learning_rate_power,
            initial_accumulator_value=FLAGS.ftrl_initial_accumulator_value,
            l1_regularization_strength=FLAGS.ftrl_l1,
            l2_regularization_strength=FLAGS.ftrl_l2)
    elif opt_type == 'momentum':
        optimizer = training.MomentumOptimizer(learning_rate,
                                               momentum=FLAGS.momentum,
                                               name='Momentum')
    elif opt_type == 'rmsprop':
        optimizer = training.RMSPropOptimizer(learning_rate,
                                              decay=FLAGS.rmsprop_decay,
                                              momentum=FLAGS.rmsprop_momentum,
                                              epsilon=FLAGS.opt_epsilon)
    elif opt_type == 'sgd':
        optimizer = training.GradientDescentOptimizer(learning_rate)
    else:
        raise ValueError('Optimizer [%s] was not recognized', FLAGS.optimizer)
    return optimizer
Exemplo n.º 2
0
    def test_saving_with_dir_not_created(self):
        """Weights round-trip through a checkpoint prefix whose parent
        directory does not yet exist, for both the 'tf' and 'h5' formats."""
        if h5py is None:
            self.skipTest('h5py required to run this test')

        temp_dir = self.get_temp_dir()
        self.addCleanup(shutil.rmtree, temp_dir)
        for save_format in ['tf', 'h5']:
            with self.cached_session():
                features = np.random.random((1000, 32)).astype(np.float32)
                targets = np.random.random((1000, 10)).astype(np.float32)

                model = keras.models.Sequential([
                    keras.layers.Dense(10, activation='softmax'),
                    keras.layers.Dense(10, activation='softmax'),
                ])
                model.compile(
                    optimizer=training_module.RMSPropOptimizer(0.001),
                    loss='categorical_crossentropy',
                    metrics=['accuracy'])
                model.fit(features, targets)

                # The per-format subdirectory has not been created yet;
                # save_weights must handle that.
                ckpt_prefix = os.path.join(temp_dir, save_format, 'ckpt')
                model.save_weights(ckpt_prefix, save_format=save_format)
                model.load_weights(ckpt_prefix)
Exemplo n.º 3
0
def model_fn(features, labels, mode, params):
    """TPUEstimator model_fn: single-matrix softmax classifier.

    Args:
        features: input batch; matmul'd with a [1000, 10] weight matrix.
        labels: integer class ids for sparse softmax cross-entropy.
        mode: a `model_fn_lib.ModeKeys` value.
        params: TPUEstimator params dict; unused.

    Returns:
        A `TPUEstimatorSpec` for TRAIN or EVAL; implicitly None for any
        other mode (unchanged from the original behavior).
    """
    del params  # unused
    with variable_scope.variable_scope('m', reuse=variable_scope.AUTO_REUSE):
        w = variable_scope.get_variable('W', shape=[1000, 10])
    logits = math_ops.matmul(features, w)
    loss = losses.sparse_softmax_cross_entropy(labels=labels, logits=logits)

    if mode == model_fn_lib.ModeKeys.TRAIN:
        optimizer = training.RMSPropOptimizer(learning_rate=0.01)
        # Wrap so gradients are aggregated across TPU shards.
        optimizer = tpu_optimizer.CrossShardOptimizer(optimizer)
        train_op = optimizer.minimize(loss, training.get_global_step())
        return tpu_estimator.TPUEstimatorSpec(
            mode=model_fn_lib.ModeKeys.TRAIN,
            loss=loss,
            train_op=train_op,
        )
    elif mode == model_fn_lib.ModeKeys.EVAL:

        def metric_fn(labels, logits):
            # recall_at_k expects int64 labels.
            labels = math_ops.cast(labels, dtypes.int64)
            logging.info('LABELS %s %s', labels, logits)
            return {
                'recall@1': metrics_lib.recall_at_k(labels, logits, 1),
                'recall@5': metrics_lib.recall_at_k(labels, logits, 5),
            }

        # Fix: the original recomputed `loss` here with a second, identical
        # sparse_softmax_cross_entropy call, adding a redundant op to the
        # graph; the value computed above is reused instead.
        eval_metrics = (metric_fn, [labels, logits])
        return tpu_estimator.TPUEstimatorSpec(mode=model_fn_lib.ModeKeys.EVAL,
                                              loss=loss,
                                              eval_metrics=eval_metrics)
Exemplo n.º 4
0
    def _weight_loading_test_template(self, make_model_fn):
        """Checkpoint round-trip test shared across model-building callables.

        Trains a model built by `make_model_fn`, saves its weights in the
        TF checkpoint format, scrambles the variables, and verifies that
        reloading restores the saved-state predictions — both into the same
        instance and into freshly constructed models (restore-on-create and
        incremental restore paths).
        """
        with self.cached_session():
            model = make_model_fn()
            model.compile(loss='mse',
                          optimizer=training_module.RMSPropOptimizer(0.1),
                          metrics=['acc',
                                   keras.metrics.CategoricalAccuracy()])
            temp_dir = self.get_temp_dir()
            prefix = os.path.join(temp_dir, 'ckpt')
            train_x = np.random.random((3, 2))
            train_y = np.random.random((3, ))
            x = constant_op.constant(train_x, dtype=dtypes.float32)

            # Checkpoint after one training step; `ref_y_before_train` is the
            # prediction from exactly the weights stored in that checkpoint,
            # `ref_y_after_train` is the prediction after one more step.
            model.train_on_batch(train_x, train_y)
            model.save_weights(prefix, save_format='tf')
            ref_y_before_train = model.predict(train_x)
            model.train_on_batch(train_x, train_y)
            ref_y_after_train = model.predict(train_x)
            # Overwrite every variable with random values so that a
            # successful restore is observable.
            for v in model.variables:
                self.evaluate(
                    v.assign(
                        random_ops.random_normal(shape=array_ops.shape(v))))

            self.addCleanup(shutil.rmtree, temp_dir)

            # Restoring into the same instance must recover the checkpointed
            # predictions.
            model.load_weights(prefix)
            self.assertAllClose(ref_y_before_train, self.evaluate(model(x)))

            # Test restore-on-create if this is a subclassed Model (graph Networks
            # will have already created their variables).
            load_model = make_model_fn()
            load_model.load_weights(prefix)
            self.assertAllClose(ref_y_before_train,
                                self.evaluate(load_model(x)))
            load_model = make_model_fn()
            load_model.load_weights(prefix)
            # We need to run some of the restore ops for predict(), but not all
            # variables have been created yet (optimizer slot variables). Tests
            # incremental restore.
            load_model.predict(train_x)
            load_model.compile(
                loss='mse',
                optimizer=training_module.RMSPropOptimizer(0.1),
                metrics=['acc', keras.metrics.CategoricalAccuracy()])
            load_model.train_on_batch(train_x, train_y)
            # One training step from the restored checkpoint must reproduce
            # the original post-train predictions.
            self.assertAllClose(ref_y_after_train,
                                self.evaluate(load_model(x)))
Exemplo n.º 5
0
    def test_saving_with_tf_optimizer(self):
        """save_keras_model/load_keras_model round-trip for a model compiled
        with a TF optimizer: predictions and further updates must match."""
        with self.cached_session():
            model = keras.models.Sequential([
                keras.layers.Dense(2, input_shape=(3, )),
                keras.layers.Dense(3),
            ])
            model.compile(loss='mse',
                          optimizer=training_module.RMSPropOptimizer(0.1),
                          metrics=['acc'])

            inputs = np.random.random((1, 3))
            targets = np.random.random((1, 3))
            model.train_on_batch(inputs, targets)
            model.train_on_batch(inputs, targets)
            expected = model.predict(inputs)

            temp_saved_model = self._save_model_dir()
            output_path = keras_saved_model.save_keras_model(
                model, temp_saved_model)
            loaded_model = keras_saved_model.load_keras_model(output_path)
            loaded_model.compile(
                loss='mse',
                optimizer=training_module.RMSPropOptimizer(0.1),
                metrics=['acc'])
            self.assertAllClose(expected, loaded_model.predict(inputs),
                                atol=1e-05)

            # New updates must be identical across both models.
            inputs = np.random.random((1, 3))
            targets = np.random.random((1, 3))
            self.assertAllClose(model.train_on_batch(inputs, targets),
                                loaded_model.train_on_batch(inputs, targets),
                                atol=1e-05)

            expected = model.predict(inputs)
            self.assertAllClose(expected, loaded_model.predict(inputs),
                                atol=1e-05)

            # Saving and loading a second time must also round-trip.
            temp_saved_model2 = self._save_model_dir('saved_model_2')
            output_path2 = keras_saved_model.save_keras_model(
                loaded_model, temp_saved_model2)
            loaded_model = keras_saved_model.load_keras_model(output_path2)
            self.assertAllClose(expected, loaded_model.predict(inputs),
                                atol=1e-05)
Exemplo n.º 6
0
  def test_saving_with_tf_optimizer(self):
    """save_model/load_model round-trip for a model compiled with a TF
    optimizer: predictions and further training updates must match."""
    with self.test_session():
      model = keras.models.Sequential([
          keras.layers.Dense(2, input_shape=(3,)),
          keras.layers.Dense(3),
      ])
      model.compile(
          loss='mse',
          optimizer=training_module.RMSPropOptimizer(0.1),
          metrics=['acc'])

      inputs = np.random.random((1, 3))
      targets = np.random.random((1, 3))
      model.train_on_batch(inputs, targets)
      expected = model.predict(inputs)

      temp_dir = self.get_temp_dir()
      self.addCleanup(shutil.rmtree, temp_dir)
      temp_saved_model = os.path.join(temp_dir, 'saved_model')

      keras_saved_model.save_model(model, temp_saved_model)
      loaded_model = keras_saved_model.load_model(temp_saved_model)
      loaded_model.compile(
          loss='mse',
          optimizer=training_module.RMSPropOptimizer(0.1),
          metrics=['acc'])
      self.assertAllClose(expected, loaded_model.predict(inputs), atol=1e-05)

      # New updates must be identical across both models.
      inputs = np.random.random((1, 3))
      targets = np.random.random((1, 3))
      self.assertAllClose(model.train_on_batch(inputs, targets),
                          loaded_model.train_on_batch(inputs, targets),
                          atol=1e-05)

      expected = model.predict(inputs)
      self.assertAllClose(expected, loaded_model.predict(inputs), atol=1e-05)

      # Saving and loading a second time must also round-trip.
      keras_saved_model.save_model(loaded_model, temp_saved_model)
      loaded_model = keras_saved_model.load_model(temp_saved_model)
      self.assertAllClose(expected, loaded_model.predict(inputs), atol=1e-05)
    def test_saving_with_tf_optimizer(self):
        """export_saved_model/load_from_saved_model round-trip for a model
        compiled with a TF optimizer."""
        model = keras.models.Sequential([
            keras.layers.Dense(2, input_shape=(3, )),
            keras.layers.Dense(3),
        ])
        model.compile(loss='mse',
                      optimizer=training_module.RMSPropOptimizer(0.1),
                      metrics=['acc'])

        inputs = np.random.random((1, 3))
        targets = np.random.random((1, 3))
        model.train_on_batch(inputs, targets)
        expected = model.predict(inputs)

        saved_model_dir = self._save_model_dir()
        keras_saved_model.export_saved_model(model, saved_model_dir)
        loaded_model = keras_saved_model.load_from_saved_model(saved_model_dir)
        loaded_model.compile(loss='mse',
                             optimizer=training_module.RMSPropOptimizer(0.1),
                             metrics=['acc'],
                             run_eagerly=testing_utils.should_run_eagerly(),
                             experimental_run_tf_function=testing_utils.
                             should_run_tf_function())
        self.assertAllClose(expected, loaded_model.predict(inputs),
                            atol=1e-05)

        # New updates must be identical across both models.
        inputs = np.random.random((1, 3))
        targets = np.random.random((1, 3))
        self.assertAllClose(model.train_on_batch(inputs, targets),
                            loaded_model.train_on_batch(inputs, targets),
                            atol=1e-05)

        expected = model.predict(inputs)
        self.assertAllClose(expected, loaded_model.predict(inputs),
                            atol=1e-05)

        # Saving and loading a second time must also round-trip.
        saved_model_dir2 = self._save_model_dir('saved_model_2')
        keras_saved_model.export_saved_model(loaded_model, saved_model_dir2)
        loaded_model = keras_saved_model.load_from_saved_model(
            saved_model_dir2)
        self.assertAllClose(expected, loaded_model.predict(inputs),
                            atol=1e-05)
Exemplo n.º 8
0
  def test_no_default_session(self):
    """Weights save/load works even when no default session is installed."""
    with ops.Graph().as_default():
      self.assertFalse(ops.get_default_session())
      features = np.random.random((1000, 32)).astype(np.float32)
      targets = np.random.random((1000, 10)).astype(np.float32)

      model = keras.models.Sequential([
          keras.layers.Dense(10, activation='softmax'),
          keras.layers.Dense(10, activation='softmax'),
      ])
      model.compile(optimizer=training_module.RMSPropOptimizer(0.001),
                    loss='categorical_crossentropy',
                    metrics=['accuracy'])
      model.fit(features, targets)

      weights_path = os.path.join(self.get_temp_dir(), 'weights', 'ckpt')
      model.save_weights(weights_path)
      model.load_weights(weights_path)