def __init__(self, model, optimizer, train_loader, test_loader, statspath,
             criterion, writer, scheduler=None, batch_scheduler=False, L1lambda=0):
    """Assemble the train/test/misclassification harness around *model*.

    Args:
        model: the network to train and evaluate.
        optimizer: optimizer driving the weight updates.
        train_loader: DataLoader yielding training batches.
        test_loader: DataLoader yielding evaluation batches.
        statspath: destination path handed to ModelStats for metric dumps.
        criterion: loss function shared by Train and Test.
        writer: summary/log writer forwarded to Test.
        scheduler: optional LR scheduler.
        batch_scheduler: when True, the scheduler is stepped per batch
            (inside Train) rather than per epoch.
        L1lambda: coefficient for L1 regularization (0 disables it).
    """
    self.model = model
    self.scheduler = scheduler
    self.criterion = criterion
    self.batch_scheduler = batch_scheduler
    self.optimizer = optimizer
    self.stats = ModelStats(model, statspath)

    # Only hand the scheduler to Train when it is meant to step per batch;
    # otherwise Train receives None and stepping is left to Test.
    per_batch_sched = self.scheduler if self.batch_scheduler else None
    self.train = Train(model, train_loader, optimizer, self.stats,
                       per_batch_sched, L1lambda, criterion)
    self.test = Test(model, test_loader, self.stats, writer,
                     self.scheduler, criterion)
    self.misclass = Misclass(model, test_loader, self.stats)
    self.test_loader = test_loader

    # Let cuDNN autotune convolution kernels for the (fixed) input shapes.
    torch.backends.cudnn.benchmark = True
def __init__(self, model, optimizer, train_loader, test_loader, statspath,
             scheduler=None, batch_scheduler=False, L1lambda=0):
    """Build the training and testing helpers for *model*.

    Args:
        model: the network to train and evaluate.
        optimizer: optimizer driving the weight updates.
        train_loader: DataLoader yielding training batches.
        test_loader: DataLoader yielding evaluation batches.
        statspath: destination path handed to ModelStats for metric dumps.
        scheduler: optional LR scheduler.
        batch_scheduler: when True (and a scheduler is supplied), the
            scheduler is stepped per batch inside Train.
        L1lambda: coefficient for L1 regularization (0 disables it).
    """
    self.model = model
    self.scheduler = scheduler
    self.batch_scheduler = batch_scheduler
    self.optimizer = optimizer
    self.stats = ModelStats(model, statspath)

    # Train steps the scheduler per batch only when one exists AND
    # batch-level stepping was requested; otherwise it gets None.
    if self.scheduler and self.batch_scheduler:
        per_batch_sched = self.scheduler
    else:
        per_batch_sched = None
    self.train = Train(model, train_loader, optimizer, self.stats,
                       per_batch_sched, L1lambda)
    self.test = Test(model, test_loader, self.stats)
def __init__(self, model, optimizer, train_loader, test_loader, statspath,
             scheduler=None, batch_scheduler=False, criterion1=None,
             criterion2=None, L1lambda=0):
    """Set up the dual-criterion train/test harness with TensorBoard logging.

    Creates a SummaryWriter and threads it through both Train and Test so
    they can log to the same TensorBoard run.

    Args:
        model: the network to train and evaluate.
        optimizer: optimizer driving the weight updates.
        train_loader: DataLoader yielding training batches.
        test_loader: DataLoader yielding evaluation batches.
        statspath: destination path handed to ModelStats for metric dumps.
        scheduler: optional LR scheduler.
        batch_scheduler: when True, the scheduler is stepped per batch
            (inside Train) rather than per epoch.
        criterion1: first loss function, forwarded to Train and Test.
        criterion2: second loss function, forwarded to Train and Test.
        L1lambda: coefficient for L1 regularization (0 disables it).
    """
    # Shared TensorBoard writer for both the training and testing loops.
    self.tb = SummaryWriter()
    self.model = model
    self.scheduler = scheduler
    self.batch_scheduler = batch_scheduler
    self.optimizer = optimizer
    self.stats = ModelStats(model, statspath)
    self.criterion1 = criterion1
    self.criterion2 = criterion2

    # Per-batch schedulers step inside Train; otherwise Train gets None
    # and the scheduler (if any) is driven from Test.
    per_batch_sched = self.scheduler if self.batch_scheduler else None
    self.train = Train(model, train_loader, optimizer, self.stats,
                       per_batch_sched,
                       criterion1=criterion1, criterion2=criterion2,
                       L1lambda=L1lambda, tb=self.tb)
    self.test = Test(model, test_loader, self.stats, self.scheduler,
                     criterion1=criterion1, criterion2=criterion2,
                     tb=self.tb)