time_callback.on_train_end() eval_result = None train_result = None if not flags_obj.skip_eval: eval_result = [ test_loss.result().numpy(), test_accuracy.result().numpy() ] train_result = [ train_loss.numpy(), training_accuracy.result().numpy() ] stats = build_stats(train_result, eval_result, time_callback) return stats def main(_): model_helpers.apply_clean(flags.FLAGS) with logger.benchmark_context(flags.FLAGS): return run(flags.FLAGS) if __name__ == '__main__': logging.set_verbosity(logging.INFO) common.define_keras_flags() ctl_common.define_ctl_flags() flags.adopt_module_key_flags(ctl_common) absl_app.run(main)
def setUpClass(cls):  # pylint: disable=invalid-name
  """One-time test-class setup: delegate to the base class, then register flags.

  Calls define_imagenet_flags / define_ctl_flags so the flags these tests
  read exist before any test method parses or touches them.
  """
  # Base-class setUpClass must run first so any framework-level setup
  # happens before flag registration.
  super(CtlImagenetTest, cls).setUpClass()
  imagenet_main.define_imagenet_flags()
  ctl_common.define_ctl_flags()