def inference(self, data_reader, inference_batch_size=64, output_path=None, category_name=None):
    # Run inference over the test split without shuffling.
    pad_id = 0
    eval_batches = data_reader.gen_mini_batches('test', inference_batch_size, pad_id, shuffle=False)
    Trainer._test_sess(self, eval_batches)
def train_and_evaluate(self, data_reader, evaluator, epochs=1, eposides=1, save_dir=None,
                       summary_dir=None, save_summary_steps=10, batch_size=32):
    # Initialize TensorFlow variables on the first call, then delegate training to the Trainer.
    if not self.initialized:
        self.session.run(tf.global_variables_initializer())
    Trainer._train_and_evaluate(self, data_reader, evaluator, epochs=epochs, eposides=eposides,
                                save_dir=save_dir, summary_dir=summary_dir,
                                save_summary_steps=save_summary_steps, batch_size=batch_size)
def evaluate(self, batch_generator, evaluator):
    Trainer._evaluate(self, batch_generator, evaluator)
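# Hedged usage sketch: the class name `SomeModel`, the `DataReader`/`Evaluator`
# constructors, and the directory paths below are illustrative assumptions,
# not the repo's actual API; only the three methods above are from the source.
#
#     model = SomeModel(config)
#     data_reader = DataReader(config)
#     evaluator = Evaluator()
#     model.train_and_evaluate(data_reader, evaluator, epochs=10,
#                              save_dir='./checkpoints', summary_dir='./summaries',
#                              batch_size=32)
#     model.inference(data_reader, inference_batch_size=64)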