Example #1
    def test_tpu_estimator_simple_lifecycle(self, use_tpu):
        config = tf.contrib.tpu.RunConfig(master="", tf_random_seed=42)
        estimator = TPUEstimator(
            head=tu.head(),
            subnetwork_generator=SimpleGenerator(
                [_DNNBuilder("dnn", use_tpu=use_tpu)]),
            max_iteration_steps=200,
            mixture_weight_initializer=tf.zeros_initializer(),
            use_bias=True,
            model_dir=self.test_subdirectory,
            config=config,
            use_tpu=use_tpu,
            train_batch_size=64 if use_tpu else 0)
        max_steps = 300

        xor_features = [[1., 0.], [0., 0.], [0., 1.], [1., 1.]]
        xor_labels = [[1.], [0.], [1.], [0.]]
        train_input_fn = tu.dummy_input_fn(xor_features, xor_labels)

        # Train.
        estimator.train(input_fn=train_input_fn,
                        steps=None,
                        max_steps=max_steps,
                        hooks=None)

        # Evaluate.
        eval_results = estimator.evaluate(input_fn=train_input_fn,
                                          steps=10,
                                          hooks=None)

        # Predict.
        # TODO: skip predictions on TF versions 1.11 and 1.12 since
        # some TPU hooks seem to be failing on predict.
        predictions = []
        tf_version = LooseVersion(tf.VERSION)
        if (tf_version != LooseVersion("1.11.0")
                and tf_version != LooseVersion("1.12.0")):
            predictions = estimator.predict(
                input_fn=tu.dataset_input_fn(features=[0., 0.], labels=None))

        # Export SavedModel.
        def serving_input_fn():
            """Input fn for serving export, starting from serialized example."""
            serialized_example = tf.placeholder(
                dtype=tf.string,
                shape=(None,),  # rank-1: a variable-length batch of examples
                name="serialized_example")
            return tf.estimator.export.ServingInputReceiver(
                features={"x": tf.constant([[0., 0.]], name="serving_x")},
                receiver_tensors=serialized_example)

        # Prefer export_saved_model where available (newer TF releases) and
        # fall back to the deprecated export_savedmodel on older ones.
        export_saved_model_fn = getattr(estimator, "export_saved_model", None)
        if not callable(export_saved_model_fn):
            export_saved_model_fn = estimator.export_savedmodel
        export_saved_model_fn(export_dir_base=estimator.model_dir,
                              serving_input_receiver_fn=serving_input_fn)

        self.assertAlmostEqual(0.32416, eval_results["loss"], places=3)
        self.assertEqual(max_steps, eval_results["global_step"])
        for prediction in predictions:
            self.assertIsNotNone(prediction["predictions"])
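
The training and prediction inputs come from test utilities (`tu`) that this page does not show. A minimal sketch of what those helpers might look like, assuming they wrap the in-memory XOR arrays in a `tf.data` pipeline; the names match the calls above, but the bodies are guesses, not the project's actual code (Example #3's variant also takes a `return_dataset` flag, omitted here):

    import tensorflow as tf

    def dummy_input_fn(features, labels):
        """Returns an input_fn that repeats one in-memory batch forever."""
        def input_fn(params=None):  # TPUEstimator passes a `params` dict.
            dataset = tf.data.Dataset.from_tensors(
                ({"x": tf.constant(features)}, tf.constant(labels)))
            return dataset.repeat()
        return input_fn

    def dataset_input_fn(features, labels=None):
        """Returns an input_fn yielding a single feature row for predict()."""
        del labels  # Unused for predict(); kept to mirror the calls above.
        def input_fn(params=None):
            return tf.data.Dataset.from_tensors({"x": tf.constant([features])})
        return input_fn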
Example #2
    def test_tpu_estimator_simple_lifecycle(self, use_tpu,
                                            subnetwork_generator, want_loss):
        config = tf_compat.v1.estimator.tpu.RunConfig(master="",
                                                      tf_random_seed=42)
        estimator = TPUEstimator(head=tf.contrib.estimator.regression_head(
            loss_reduction=tf_compat.v1.losses.Reduction.SUM_OVER_BATCH_SIZE),
                                 subnetwork_generator=subnetwork_generator,
                                 max_iteration_steps=10,
                                 model_dir=self.test_subdirectory,
                                 config=config,
                                 use_tpu=use_tpu,
                                 train_batch_size=64 if use_tpu else 0)
        max_steps = 30

        xor_features = [[1., 0.], [0., 0.], [0., 1.], [1., 1.]]
        xor_labels = [[1.], [0.], [1.], [0.]]
        train_input_fn = tu.dummy_input_fn(xor_features, xor_labels)

        # Train.
        estimator.train(input_fn=train_input_fn,
                        steps=None,
                        max_steps=max_steps,
                        hooks=None)

        # Evaluate.
        eval_results = estimator.evaluate(input_fn=train_input_fn,
                                          steps=1,
                                          hooks=None)

        # Predict.
        predictions = estimator.predict(
            input_fn=tu.dataset_input_fn(features=[0., 0.], labels=None))

        # Export SavedModel.
        def serving_input_fn():
            """Input fn for serving export, starting from serialized example."""
            serialized_example = tf.placeholder(
                dtype=tf.string,
                shape=(None,),  # rank-1: a variable-length batch of examples
                name="serialized_example")
            return tf.estimator.export.ServingInputReceiver(
                features={"x": tf.constant([[0., 0.]], name="serving_x")},
                receiver_tensors=serialized_example)

        export_saved_model_fn = getattr(estimator, "export_saved_model", None)
        if not callable(export_saved_model_fn):
            export_saved_model_fn = estimator.export_savedmodel
        export_saved_model_fn(export_dir_base=estimator.model_dir,
                              serving_input_receiver_fn=serving_input_fn)

        self.assertAlmostEqual(want_loss, eval_results["loss"], places=2)
        self.assertEqual(max_steps, eval_results["global_step"])
        self.assertEqual(2, eval_results["iteration"])
        for prediction in predictions:
            self.assertIsNotNone(prediction["predictions"])
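
Both export methods return the timestamped export directory, which the test discards. For a manual smoke test the export can be loaded back; a minimal sketch, assuming the TF 1.x `tf.contrib.predictor` API is available in this environment (the exact input and output keys depend on the signature the head generates, so they are looked up rather than hard-coded):

    export_dir = export_saved_model_fn(
        export_dir_base=estimator.model_dir,
        serving_input_receiver_fn=serving_input_fn)

    predict_fn = tf.contrib.predictor.from_saved_model(export_dir)
    print(predict_fn.feed_tensors)   # input keys expected by the signature
    print(predict_fn.fetch_tensors)  # output keys exposed by the head
    # serving_input_fn above hard-codes the "x" feature, so the serialized
    # example payload is never actually parsed; an empty string suffices.
    input_key = next(iter(predict_fn.feed_tensors))
    print(predict_fn({input_key: [b""]}))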
Example #3
    def test_tpu_estimator_simple_lifecycle(self, use_tpu,
                                            subnetwork_generator, want_loss):
        config = tf.compat.v1.estimator.tpu.RunConfig(master="",
                                                      tf_random_seed=42)
        estimator = TPUEstimator(
            # TODO: Add test with estimator Head v2.
            head=make_regression_head(use_tpu),
            subnetwork_generator=subnetwork_generator,
            max_iteration_steps=10,
            model_dir=self.test_subdirectory,
            config=config,
            use_tpu=use_tpu,
            train_batch_size=64 if use_tpu else 0)
        max_steps = 30

        xor_features = [[1., 0.], [0., 0.], [0., 1.], [1., 1.]]
        xor_labels = [[1.], [0.], [1.], [0.]]
        train_input_fn = tu.dummy_input_fn(xor_features, xor_labels)

        # Train.
        estimator.train(input_fn=train_input_fn,
                        steps=None,
                        max_steps=max_steps,
                        hooks=None)

        # Evaluate.
        eval_results = estimator.evaluate(input_fn=train_input_fn,
                                          steps=1,
                                          hooks=None)

        # Predict.
        predictions = estimator.predict(input_fn=tu.dataset_input_fn(
            features=[0., 0.], return_dataset=True))
        # We need to iterate over all the predictions before moving on, otherwise
        # the TPU will not be shut down.
        for prediction in predictions:
            self.assertIsNotNone(prediction["predictions"])

        # Export SavedModel.
        def serving_input_fn():
            """Input fn for serving export, starting from serialized example."""
            serialized_example = tf.compat.v1.placeholder(
                dtype=tf.string,
                shape=(None,),  # rank-1: a variable-length batch of examples
                name="serialized_example")
            return tf.estimator.export.ServingInputReceiver(
                features={"x": tf.constant([[0., 0.]], name="serving_x")},
                receiver_tensors=serialized_example)

        estimator.export_saved_model(
            export_dir_base=estimator.model_dir,
            serving_input_receiver_fn=serving_input_fn)

        self.assertAlmostEqual(want_loss, eval_results["loss"], places=2)
        self.assertEqual(max_steps, eval_results["global_step"])
        self.assertEqual(2, eval_results["iteration"])
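
The `make_regression_head` helper used in Example #3 is defined elsewhere in the test module. A plausible sketch, assuming it returns a v1 regression head configured with the `SUM_OVER_BATCH_SIZE` loss reduction that TPU training expects; per the TODO in the example, the Head v2 variant is not yet covered:

    import tensorflow as tf

    def make_regression_head(use_tpu):
        """Hypothetical helper: builds a regression head usable on TPU.

        TPUs do not support the default SUM_BY_NONZERO_WEIGHTS reduction, so
        SUM_OVER_BATCH_SIZE is used for both the TPU and CPU paths.
        """
        del use_tpu  # Assumption: the same head works on both paths here.
        return tf.contrib.estimator.regression_head(
            loss_reduction=tf.compat.v1.losses.Reduction.SUM_OVER_BATCH_SIZE)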