Example 1
    def train(self, epochs, optimizer, save=True, savename='', best_accuracy=0, soft_labels=False):
        """Performs training."""
        error_history = []
        if soft_labels:
            # Keep a frozen copy of the model to generate soft labels.
            model_copy = copy.deepcopy(self.model)
            self.model_copy = model_copy.cuda()
            self.model_copy.eval()
        else:
            self.model_copy = None

        if self.args.cuda:
            self.model = self.model.cuda()

        for idx in range(epochs):
            epoch_idx = idx + 1
            # print('Epoch: %d' % (epoch_idx))

            # optimizer = utils.step_lr_CIFAR(epoch_idx, self.args.lr, self.args.lr_decay_every,
            #                                 self.args.lr_decay_factor, optimizer)
            optimizer = utils.step_lr(epoch_idx, self.args.lr, self.args.lr_decay_every,
                                      self.args.lr_decay_factor, optimizer)
            if self.args.train_bn:
                self.model.train()
            else:
                self.model.train_nobn()
            self.do_epoch(epoch_idx, optimizer)
            errors = self.eval(self.pruner.current_dataset_idx)
            error_history.append(errors)
            accuracy = 100 - errors[0]  # Top-1 accuracy.

            # Save performance history and stats.
            with open(savename + '.json', 'w') as fout:
                json.dump({
                    'error_history': error_history,
                    'args': vars(self.args),
                }, fout)

            # Save best model, if required.
            if save and accuracy > best_accuracy:
                print('Best model so far, Accuracy: %0.2f%% -> %0.2f%%' %
                      (best_accuracy, accuracy))
                best_accuracy = accuracy
                self.save_model(epoch_idx, best_accuracy, errors, savename)

        print('Finished finetuning...')
        print('Best error/accuracy: %0.2f%%, %0.2f%%' %
              (100 - best_accuracy, best_accuracy))
        print('-' * 16)
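This and the following examples all delegate scheduling to utils.step_lr, whose body is not shown on this page. A minimal sketch, assuming it implements plain step decay (scale the base rate by lr_decay_factor once per lr_decay_every epochs) and writes the result into the optimizer in place:

    def step_lr(epoch, base_lr, lr_decay_every, lr_decay_factor, optimizer):
        """Step decay: base_lr * lr_decay_factor ** (completed decay periods)."""
        new_lr = base_lr * (lr_decay_factor ** ((epoch - 1) // lr_decay_every))
        for param_group in optimizer.param_groups:
            param_group['lr'] = new_lr
        return optimizer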
Example 2
    def train(self,
              epochs,
              optimizer,
              save=True,
              savename='',
              best_accuracy=0):
        """Performs training."""
        error_history = []

        if self.args.cuda:
            self.model = self.model.cuda()

        for idx in range(epochs):
            epoch_idx = idx + 1
            print('Epoch: %d' % (epoch_idx))

            optimizer = utils.step_lr(epoch_idx, self.args.lr,
                                      self.args.lr_decay_every,
                                      self.args.lr_decay_factor, optimizer)
            self.model.train()
            self.do_epoch(epoch_idx, optimizer)
            errors = self.eval()
            error_history.append(errors)
            accuracy = 100 - errors[0]  # Top-1 accuracy.

            # Save performance history and stats.
            with open(savename + '.json', 'w') as fout:
                json.dump(
                    {
                        'error_history': error_history,
                        'args': vars(self.args),
                    }, fout)

            # Save best model, if required.
            if save and accuracy > best_accuracy:
                print('Best model so far, Accuracy: %0.2f%% -> %0.2f%%' %
                      (best_accuracy, accuracy))
                best_accuracy = accuracy
                self.save_model(epoch_idx, best_accuracy, errors, savename)

        if isinstance(self.model, nn.DataParallel):
            self.model = self.model.module
        print('Finished finetuning...')
        print('Best error/accuracy: %0.2f%%, %0.2f%%' %
              (100 - best_accuracy, best_accuracy))
        print('-' * 16)
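Example 2 additionally unwraps nn.DataParallel before returning. The pattern in isolation, with an illustrative net: unwrapping recovers the plain module, so a saved state_dict's keys are not prefixed with 'module.'.

    import torch.nn as nn

    net = nn.Sequential(nn.Linear(8, 8), nn.ReLU(), nn.Linear(8, 2))
    net = nn.DataParallel(net)    # replicate across visible GPUs for training
    # ... train ...
    if isinstance(net, nn.DataParallel):
        net = net.module          # unwrap before checkpointing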
Example 3

    def update_lr(self, epoch_idx):
        """Update learning rate of every optimizer."""
        for optimizer, init_lr, decay_every in zip(self.optimizers, self.lrs,
                                                   self.decay_every):
            optimizer = utils.step_lr(epoch_idx, init_lr, decay_every,
                                      self.args.lr_decay_factor, optimizer)
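update_lr assumes the owning object keeps three parallel lists: one optimizer per parameter group, that group's initial rate, and its decay interval. A hypothetical standalone version of the same loop, reusing the step_lr sketch above (all modules and values are illustrative):

    import torch.nn as nn
    import torch.optim as optim

    shared, classifier = nn.Linear(8, 8), nn.Linear(8, 2)
    optimizers = [optim.SGD(shared.parameters(), lr=1e-2),
                  optim.SGD(classifier.parameters(), lr=1e-1)]
    lrs = [1e-2, 1e-1]          # initial rate for each optimizer
    decay_every = [30, 15]      # decay interval for each optimizer

    for epoch_idx in range(1, 101):
        for opt, init_lr, dec in zip(optimizers, lrs, decay_every):
            step_lr(epoch_idx, init_lr, dec, 0.1, opt)  # 0.1: assumed factor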
Example 4
    def train(self,
              epochs,
              optimizer,
              save=True,
              savename='',
              best_accuracy=0):
        """Performs training."""
        error_history = []

        if self.args.cuda:
            self.model = self.model.cuda()

        self.md = ModelData(self.args.train_path, self.train_data_loader,
                            self.test_data_loader)
        # ULMFiT-style hyperparameters: BPTT length, embedding size,
        # hidden size, and number of LSTM layers.
        bptt, em_sz, nh, nl = 70, 400, 1150, 3
        opt_fn = partial(optim.Adam, betas=(0.8, 0.99))
        # Per-layer dropout probabilities.
        dps = np.array([0.4, 0.5, 0.05, 0.3, 0.4]) * 1.0
        self.modell = TextModel(to_gpu(self.model.model))
        self.learn = RNN_Learner(self.md, self.modell, opt_fn=opt_fn)
        self.learn.reg_fn = partial(seq2seq_reg, alpha=2, beta=1)
        self.learn.clip = 25.
        self.learn.metrics = [metrics.accuracy]
        # Discriminative learning rates: each earlier layer group's rate
        # is a further factor of lrm smaller than the group above it.
        lr = 0.01
        lrm = 2.6
        self.lrs = np.array(
            [lr / (lrm**4), lr / (lrm**3), lr / (lrm**2), lr / lrm, lr])
        self.wd = 1e-6
        self.learn.unfreeze()
        # self.learn.fit(self.lrs, 3, wds=self.wd)
        self.model.shared = self.model.model[0]
        self.model.classifier = self.model.model[1]
        optimizer = self.learn.get_layer_opt(self.lrs, self.wd).opt
        self.loptimizer = self.learn.get_layer_opt(self.lrs, self.wd)

        set_trainable(children(self.model.shared), True)
        set_trainable(children(self.model.classifier), True)

        for idx in range(epochs):
            epoch_idx = idx + 1
            print('Epoch: %d' % (epoch_idx))

            optimizer = utils.step_lr(epoch_idx, self.args.lr,
                                      self.args.lr_decay_every,
                                      self.args.lr_decay_factor, optimizer)
            self.model.train()
            self.do_epoch(epoch_idx, optimizer)
            errors, _, _, _ = self.eval()
            self.eval_all()
            error_history.append(errors)
            accuracy = 100 - errors[0]  # Top-1 accuracy.

            # Save performance history and stats.
            with open(savename + '.json', 'w') as fout:
                json.dump(
                    {
                        'error_history': error_history,
                        'args': vars(self.args),
                    }, fout)

            # Save best model, if required.
            if save and accuracy > best_accuracy:
                print('Best model so far, Accuracy: %0.2f%% -> %0.2f%%' %
                      (best_accuracy, accuracy))
                best_accuracy = accuracy
                self.save_model(epoch_idx, best_accuracy, errors, savename)

        if isinstance(self.model, nn.DataParallel):
            self.model = self.model.module
        print('Finished finetuning...')
        print('Best error/accuracy: %0.2f%%, %0.2f%%' %
              (100 - best_accuracy, best_accuracy))
        print('-' * 16)
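Example 4 builds its rate array with the ULMFiT-style rule lr / lrm**k, lrm = 2.6, so each earlier layer group trains roughly 2.6x slower than the one above it. Evaluating the array it constructs:

    import numpy as np

    lr, lrm = 0.01, 2.6
    lrs = np.array([lr / lrm**4, lr / lrm**3, lr / lrm**2, lr / lrm, lr])
    print(lrs)  # ~ [0.000219 0.000569 0.001479 0.003846 0.01]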