Example 1
import os

import tensorflow as tf  # TF 1.x API (tf.gfile, Estimator)

# data_loader and setup_estimator come from the surrounding project and are
# not shown here.


def run_evaluation_loop(hub_module,
                        hub_module_signature,
                        work_dir,
                        tpu_name,
                        save_checkpoints_steps,
                        optimization_params,
                        data_params):
  """Runs evaluation loop."""
  data_params["dataset"] = data_loader.get_dataset_instance(data_params)
  estimator = setup_estimator(hub_module,
                              hub_module_signature,
                              work_dir,
                              tpu_name,
                              save_checkpoints_steps,
                              optimization_params,
                              data_params)
  input_fn = data_loader.build_data_pipeline(data_params, mode="eval")

  with tf.gfile.Open(os.path.join(work_dir, "result_file.txt"), "w") as f:
    # Use a name other than `f` so the open result-file handle is not shadowed.
    all_checkpoints = set([".".join(fname.split(".")[:-1])
                           for fname in tf.gfile.ListDirectory(work_dir)
                           if fname.startswith("model.ckpt")])
    # Sort checkpoints by the global step.
    all_checkpoints = sorted(all_checkpoints,
                             key=lambda x: int(x.split("-")[-1]))
    # For efficiency reasons we evaluate only the last checkpoint.
    for ckpt in all_checkpoints[-1:]:
      ckpt = os.path.join(work_dir, ckpt)
      res = estimator.evaluate(input_fn,
                               steps=(data_params["dataset"].get_num_samples(
                                   data_params["dataset_eval_split_name"]) //
                                      data_params["batch_size_eval"]),
                               checkpoint_path=ckpt)
      f.write("Accuracy at step {}: {}\n".format(res["global_step"],
                                                 res["accuracy"]))
Example 2
def run_training_loop(hub_module, hub_module_signature, work_dir, tpu_name,
                      save_checkpoints_steps, optimization_params,
                      data_params):
    """Runs training loop."""
    estimator = setup_estimator(hub_module, hub_module_signature, work_dir,
                                tpu_name, save_checkpoints_steps,
                                optimization_params, data_params)
    input_fn = data_loader.build_data_pipeline(data_params, mode="train")

    # TPUs require the max number of steps to be specified explicitly.
    estimator.train(input_fn, max_steps=optimization_params["max_steps"])
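
Because both loops take the same argument list and share work_dir, a typical experiment trains first and then evaluates the checkpoints left behind. A hypothetical sketch, reusing the assumed example arguments from Example 1:

common_kwargs = dict(
    hub_module="https://tfhub.dev/google/imagenet/resnet_v2_50/feature_vector/3",
    hub_module_signature="image_feature_vector",  # assumed signature name
    work_dir="/tmp/finetune_work_dir",
    tpu_name=None,
    save_checkpoints_steps=500,
    optimization_params={"max_steps": 10000},
    data_params=data_params,            # same assumed dict as in Example 1
)

run_training_loop(**common_kwargs)      # writes model.ckpt-* files into work_dir
run_evaluation_loop(**common_kwargs)    # reads the newest checkpoint and logs accuracy
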
Example 3
def test_build_data_pipeline(self):
    input_fn = data_loader.build_data_pipeline(
        test_utils.get_data_params(), mode="eval")
    data = input_fn({"batch_size": 32}).make_one_shot_iterator().get_next()
    self.assertIsInstance(data["image"], tf.Tensor)
    self.assertIsInstance(data["label"], tf.Tensor)