Example no. 1
0
    def train(self):
        """
        Run the full training loop, calling train_epoch once per epoch.

        Also performs learning-rate annealing: at the start of every epoch
        the schedulers for both the base model and the discriminator are
        stepped, and after each epoch the model is evaluated on the
        validation set before training resumes.
        """
        logger.info("train_loader length: {}".format(len(self.train_loader)))
        logger.info("target_loader length: {}".format(len(self.target_loader)))
        shortest = min(len(self.train_loader), len(self.target_loader))

        # Trim to a whole number of batches, then drop one extra iteration
        # (matches the original loop-bound convention).
        self.iters_per_epoch = shortest - shortest % self.batch_size - 1

        logger.info("iters_per_epoch :{}".format(self.iters_per_epoch))
        self.max_epoch = args.max_epoch
        for epoch in tqdm.trange(self.epoch, args.max_epoch, desc='Train'):
            self.epoch = epoch
            # Anneal the learning rate of both optimizers per their schedules.
            self.optim = step_scheduler(
                self.optim, self.epoch, base_lr_schedule, "base model")
            self.optimD = step_scheduler(
                self.optimD, self.epoch, dis_lr_schedule,
                "discriminater model")

            # One epoch of training with both networks in train mode.
            self.model.train()
            self.netD.train()
            self.train_epoch()

            # Validate in eval mode, then switch back to training mode.
            self.model.eval()
            self.validate()
            self.model.train()
Example no. 2
0
    def train(self):
        """
        Run the full training loop, calling train_epoch once per epoch.

        Also performs learning-rate annealing: the optimizer's scheduler is
        stepped every 8th epoch (skipping epoch 0). The loop ends either
        when the derived epoch budget is exhausted or as soon as
        self.iteration reaches self.max_iter.
        """
        logger.info("train_loader length: {}".format(len(self.train_loader)))
        logger.info("target_loader length: {}".format(len(self.target_loader)))
        shortest = min(len(self.train_loader), len(self.target_loader))

        # Trim to a whole number of batches, then drop one extra iteration
        # (matches the original loop-bound convention).
        self.iters_per_epoch = shortest - shortest % self.batch_size - 1

        logger.info("iters_per_epoch :{}".format(self.iters_per_epoch))
        # Enough epochs to cover max_iter iterations at iters_per_epoch each.
        self.max_epoch = int(math.ceil(self.max_iter / self.iters_per_epoch))
        for epoch in tqdm.trange(self.epoch, self.max_epoch, desc='Train'):
            self.epoch = epoch
            # Anneal the learning rate every 8th epoch (not at epoch 0).
            if self.epoch > 0 and self.epoch % 8 == 0:
                self.optim = step_scheduler(self.optim, self.epoch)

            self.train_epoch()
            # Stop as soon as the global iteration budget is spent.
            if self.iteration >= self.max_iter:
                break