def run(self, train_set, valid_set=None, test_set=None, train_size=None, controllers=None):
    """Train to completion.

    Accepts either pre-split sets or a single ``Dataset`` as *train_set*,
    in which case the splits and size are pulled from it. After each
    training step, every controller (if any) is polled via ``invoke()``;
    if any of them requests a stop, training ends early. Wall time is
    reported when done.
    """
    if isinstance(train_set, Dataset):
        # A whole Dataset was handed in — expand it into its parts.
        ds = train_set
        train_set = ds.train_set()
        valid_set = ds.valid_set()
        test_set = ds.test_set()
        train_size = ds.train_size()
    timer = Timer()
    for _ in self.train(train_set, valid_set=valid_set, test_set=test_set, train_size=train_size):
        if not controllers:
            continue
        # Poll every controller each step (all of them run, even after
        # one has already asked to stop), then honor any stop request.
        stop_requested = False
        for ctrl in controllers:
            if hasattr(ctrl, 'invoke') and ctrl.invoke():
                stop_requested = True
        if stop_requested:
            break
    timer.report()
# Fix: without action="store_true" any value given on the command line
# (even the string "False") is truthy, so the flag could never be used
# meaningfully. As a proper boolean flag, its presence enables it.
ap.add_argument("--random_glimpse", action="store_true", default=False)
args = ap.parse_args()

mnist = MiniBatches(MnistDataset(), batch_size=1)

model_path = args.model
network = get_network(model_path,
                      std=args.variance,
                      disable_reinforce=args.disable_reinforce,
                      random_glimpse=args.random_glimpse)

# Trainer hyper-parameters; L2 penalties are fixed, the rest come from CLI.
trainer_conf = TrainerConfig()
trainer_conf.learning_rate = args.learning_rate
trainer_conf.weight_l2 = 0.0001
trainer_conf.hidden_l2 = 0.0001
trainer_conf.method = args.method
trainer_conf.disable_reinforce = args.disable_reinforce
trainer_conf.disable_backprop = args.disable_backprop

trainer = AttentionTrainer(network, network.layers[0], config=trainer_conf)
trainer_conf.report()

timer = Timer()
# Iterate the training generator directly — wrapping it in list() forced
# the entire run to complete before the loop even started and kept every
# yielded value alive for nothing.
for _ in trainer.train(mnist.train_set(), mnist.valid_set(), mnist.test_set()):
    pass
timer.end()

network.save_params(model_path)
timer.report()