def __init__(self,
             model,
             optimizer,
             train_loader,
             test_loader,
             statspath,
             criterion,
             writer,
             scheduler=None,
             batch_scheduler=False,
             L1lambda=0):
    self.model = model
    self.scheduler = scheduler
    self.criterion = criterion
    self.batch_scheduler = batch_scheduler
    self.optimizer = optimizer
    self.stats = ModelStats(model, statspath)
    self.train = Train(model, train_loader, optimizer, self.stats,
                       self.scheduler if self.batch_scheduler else None,
                       L1lambda, criterion)
    self.test = Test(model, test_loader, self.stats, writer,
                     self.scheduler, criterion)
    self.misclass = Misclass(model, test_loader, self.stats)
    self.test_loader = test_loader
    torch.backends.cudnn.benchmark = True
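
The "self.scheduler if self.batch_scheduler else None" idiom recurs throughout
these examples: the scheduler is passed into Train only when it must be stepped
once per batch (e.g. OneCycleLR); per-epoch schedulers are held back and stepped
in run() instead. A minimal sketch of the two wirings, using a stand-in model
(nothing below comes from the EVA repos):

import torch
import torch.nn as nn
from torch.optim.lr_scheduler import OneCycleLR, StepLR

model = nn.Linear(10, 2)  # stand-in model
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

# Per-batch scheduler: pass it through so Train calls step() every batch.
steps_per_epoch, epochs = 100, 10
batch_sched = OneCycleLR(optimizer, max_lr=0.1,
                         total_steps=steps_per_epoch * epochs)

# Per-epoch scheduler: Train receives None; run() calls step() once per epoch.
epoch_sched = StepLR(optimizer, step_size=5, gamma=0.1)
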
Code example #2
class ModelTrainer:
    def __init__(self,
                 model,
                 optimizer,
                 train_loader,
                 test_loader,
                 statspath,
                 scheduler=None,
                 batch_scheduler=False,
                 L1lambda=0):
        self.model = model
        self.scheduler = scheduler
        self.batch_scheduler = batch_scheduler
        self.optimizer = optimizer
        self.stats = ModelStats(model, statspath)
        self.train = Train(
            model, train_loader, optimizer, self.stats,
            self.scheduler if (self.scheduler and self.batch_scheduler) else None,
            L1lambda)
        self.test = Test(model, test_loader, self.stats)

    def run(self, epochs=10):
        pbar = tqdm_notebook(range(1, epochs + 1), desc="Epochs")
        for epoch in pbar:
            self.train.run()
            self.test.run()
            self.stats.next_epoch(
                self.scheduler.get_last_lr()[0] if self.scheduler else 0)
            pbar.write(self.stats.get_epoch_desc())
            if self.scheduler and not self.batch_scheduler:
                self.scheduler.step()
            if self.scheduler:
                pbar.write(
                    f"Learning Rate = {self.scheduler.get_last_lr()[0]:0.6f}")
        # save stats for later lookup
        self.stats.save()
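
A hypothetical driver for this trainer; Net, the DataLoaders, and the
Train/Test/ModelStats helpers are defined elsewhere in the EVA repos, and the
names and values below are illustrative only:

model = Net().to(device)
optimizer = torch.optim.SGD(model.parameters(), lr=0.01, momentum=0.9)
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=6, gamma=0.1)

trainer = ModelTrainer(model, optimizer, train_loader, test_loader,
                       "stats.json", scheduler=scheduler,
                       batch_scheduler=False, L1lambda=1e-5)
trainer.run(epochs=20)  # train, test, log the LR, then save stats
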
Code example #3
File: eva4modeltrainer.py  Project: Lakshman511/EVA
def __init__(self, model, optimizer, train_loader, test_loader, statspath,
             scheduler=None, batch_scheduler=False, L1lambda=0):
    self.model = model
    self.scheduler = scheduler
    self.batch_scheduler = batch_scheduler
    self.optimizer = optimizer
    self.stats = ModelStats(model, statspath)
    self.train = Train(model, train_loader, optimizer, self.stats,
                       self.scheduler if (self.scheduler and self.batch_scheduler) else None,
                       L1lambda)
    self.test = Test(model, test_loader, self.stats)
Code example #4
class ModelTrainer:
    def __init__(self,
                 model,
                 optimizer,
                 train_loader,
                 test_loader,
                 statspath,
                 scheduler=None,
                 batch_scheduler=False,
                 criterion1=None,
                 criterion2=None,
                 L1lambda=0):
        self.tb = SummaryWriter()
        self.model = model

        #x = torch.rand(1,3,128,128)
        #self.tb.add_graph(self.model, x.to(self.model.device), x.to(self.model.device))
        self.scheduler = scheduler
        self.batch_scheduler = batch_scheduler
        self.optimizer = optimizer
        self.stats = ModelStats(model, statspath)
        self.criterion1 = criterion1
        self.criterion2 = criterion2
        self.train = Train(model,
                           train_loader,
                           optimizer,
                           self.stats,
                           self.scheduler if self.batch_scheduler else None,
                           criterion1=criterion1,
                           criterion2=criterion2,
                           L1lambda=L1lambda,
                           tb=self.tb)
        self.test = Test(model,
                         test_loader,
                         self.stats,
                         self.scheduler,
                         criterion1=criterion1,
                         criterion2=criterion2,
                         tb=self.tb)

    def run(self, epochs=10):
        pbar = tqdm_notebook(range(1, epochs + 1), desc="Epochs")
        for epoch in pbar:
            gc.collect()
            self.train.run()
            self.test.run()
            lr = self.optimizer.param_groups[0]['lr']
            self.stats.next_epochmaskdepth(lr)
            pbar.write(self.stats.get_epoch_desc())
            # TODO: make this more readable and allow for other schedulers
            if self.scheduler and not self.batch_scheduler and not isinstance(
                    self.scheduler,
                    torch.optim.lr_scheduler.ReduceLROnPlateau):
                self.scheduler.step()
                print(self.scheduler.get_last_lr())
            pbar.write(f"Learning Rate = {lr:0.6f}")
        # Close the TensorBoard writer after all epochs, not inside the loop,
        # so later epochs can still log to it.
        self.tb.close()
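
The isinstance check above exists because ReduceLROnPlateau is stepped with a
metric rather than unconditionally. A self-contained sketch of that contract
(the loss values are fabricated placeholders):

import torch
import torch.nn as nn

model = nn.Linear(10, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
plateau = torch.optim.lr_scheduler.ReduceLROnPlateau(
    optimizer, mode='min', factor=0.1, patience=3)

for epoch in range(10):
    val_loss = 1.0 / (epoch + 1)  # placeholder for the real test loss
    plateau.step(val_loss)        # reduces the LR only when val_loss stalls
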
Code example #5
File: eva4modeltrainer.py  Project: sasi-0453/EVA4
class ModelTrainer:
    def __init__(self,
                 statsmanager,
                 model,
                 optimizer,
                 train_loader,
                 test_loader,
                 statspath,
                 scheduler=None,
                 batch_scheduler=False,
                 L1lambda=0):
        self.model = model
        self.statsmanager = statsmanager
        self.scheduler = scheduler
        self.batch_scheduler = batch_scheduler
        self.optimizer = optimizer
        self.stats = ModelStats(model, statspath)
        self.train = Train(statsmanager, model, train_loader, optimizer,
                           self.stats,
                           self.scheduler if self.batch_scheduler else None,
                           L1lambda)
        self.test = Test(model, test_loader, self.stats, statsmanager,
                         self.scheduler)

    def run(self, epochs=10):
        pbar = tqdm_notebook(range(1, epochs + 1), desc="Epochs")
        for epoch in pbar:
            self.train.run()
            self.test.run()
            lr = self.optimizer.param_groups[0]['lr']
            self.stats.next_epoch(lr)
            pbar.write(self.stats.get_epoch_desc())
            #self.statsmanager.append_lr(lr)
            self.statsmanager.append_train_loss(self.stats.avg_train_loss[-1])
            self.statsmanager.append_test_loss(self.stats.avg_test_loss[-1])
            self.statsmanager.append_test_accuracy(100 *
                                                   self.stats.test_acc[-1])
            self.statsmanager.append_train_accuracy(100 *
                                                    self.stats.train_acc[-1])
            if len(self.statsmanager.data['lr']) == 0:
                self.statsmanager.append_lr(self.stats.batch_lr[0])
            else:
                self.statsmanager.append_lr(self.stats.lr[-1])
            # TODO: make this more readable and allow for other schedulers
            if self.scheduler and not self.batch_scheduler and not isinstance(
                    self.scheduler,
                    torch.optim.lr_scheduler.ReduceLROnPlateau):
                self.scheduler.step()
            pbar.write(f"Learning Rate = {lr:0.6f}")
        # save stats for later lookup
        self.stats.save()
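
The statsmanager consumed above is project code not shown on this page. A
minimal dict-backed stand-in consistent with the calls made in run() (an
assumption, not the actual EVA4 implementation) could look like:

class StatsManager:
    """Accumulates per-epoch metrics in lists keyed by name."""
    def __init__(self):
        self.data = {'lr': [], 'train_loss': [], 'test_loss': [],
                     'train_acc': [], 'test_acc': []}

    def append_lr(self, lr):
        self.data['lr'].append(lr)

    def append_train_loss(self, loss):
        self.data['train_loss'].append(loss)

    def append_test_loss(self, loss):
        self.data['test_loss'].append(loss)

    def append_train_accuracy(self, acc):
        self.data['train_acc'].append(acc)

    def append_test_accuracy(self, acc):
        self.data['test_acc'].append(acc)
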
Code example #6
class ModelTrainer:
    def __init__(self,
                 model,
                 optimizer,
                 train_loader,
                 test_loader,
                 statspath,
                 criterion,
                 writer,
                 scheduler=None,
                 batch_scheduler=False,
                 L1lambda=0):
        self.model = model
        self.scheduler = scheduler
        self.criterion = criterion
        self.batch_scheduler = batch_scheduler
        self.optimizer = optimizer
        self.stats = ModelStats(model, statspath)
        self.train = Train(model, train_loader, optimizer, self.stats,
                           self.scheduler if self.batch_scheduler else None,
                           L1lambda, criterion)
        self.test = Test(model, test_loader, self.stats, writer,
                         self.scheduler, criterion)
        self.misclass = Misclass(model, test_loader, self.stats)
        self.test_loader = test_loader
        torch.backends.cudnn.benchmark = True

    def run(self, epochs=10):
        pbar = tqdm_notebook(range(1, epochs + 1), desc="Epochs")
        for epoch in pbar:
            self.train.run()
            self.test.run()
            lr = self.optimizer.param_groups[0]['lr']
            self.stats.next_epoch(lr)
            pbar.write(self.stats.get_epoch_desc())
            # TODO: make this more readable and allow for other schedulers
            if self.scheduler and not self.batch_scheduler and not isinstance(
                    self.scheduler,
                    torch.optim.lr_scheduler.ReduceLROnPlateau):
                self.scheduler.step()
            pbar.write(f"Learning Rate = {lr:0.6f}")
            print("printing results")
            printing_results(self.model, self.test_loader, epoch)

        # save stats for later lookup
        self.stats.save()
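
This variant additionally takes an explicit loss criterion and a TensorBoard
writer. A hypothetical instantiation (everything not defined in the snippet is
assumed to exist elsewhere):

import torch.nn as nn
from torch.utils.tensorboard import SummaryWriter

criterion = nn.CrossEntropyLoss()
writer = SummaryWriter()

trainer = ModelTrainer(model, optimizer, train_loader, test_loader,
                       "stats.json", criterion, writer,
                       scheduler=scheduler, batch_scheduler=False)
trainer.run(epochs=24)
writer.close()
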
Code example #7
    def __init__(self,
                 model,
                 optimizer,
                 train_loader,
                 test_loader,
                 statspath,
                 scheduler=None,
                 batch_scheduler=False,
                 criterion1=None,
                 criterion2=None,
                 L1lambda=0):
        self.tb = SummaryWriter()
        self.model = model

        #x = torch.rand(1,3,128,128)
        #self.tb.add_graph(self.model, x.to(self.model.device), x.to(self.model.device))
        self.scheduler = scheduler
        self.batch_scheduler = batch_scheduler
        self.optimizer = optimizer
        self.stats = ModelStats(model, statspath)
        self.criterion1 = criterion1
        self.criterion2 = criterion2
        self.train = Train(model,
                           train_loader,
                           optimizer,
                           self.stats,
                           self.scheduler if self.batch_scheduler else None,
                           criterion1=criterion1,
                           criterion2=criterion2,
                           L1lambda=L1lambda,
                           tb=self.tb)
        self.test = Test(model,
                         test_loader,
                         self.stats,
                         self.scheduler,
                         criterion1=criterion1,
                         criterion2=criterion2,
                         tb=self.tb)