Example #1
def getDevsetAccuracy(self, model=None):
    """Returns the accuracy of `model` averaged over the dev-set batches."""
    accSum = 0
    for xy in self.dev:
        xy = to_var_gpu(xy)  # move the (x, y) batch to the GPU
        accSum += self.batchAccuracy(*xy, model=model)
    acc = accSum / len(self.dev)
    return acc
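
All five examples lean on a to_var_gpu helper that is not shown here. A minimal sketch of what it presumably does, assuming the pre-0.4 PyTorch Variable API implied by the volatile=True call in Example #5 (the tuple-handling and the fallback to CPU are assumptions, not the repository's actual code):

import torch
from torch.autograd import Variable

def to_var_gpu(xs, volatile=False):
    # Hypothetical sketch: wrap a tensor (or tuple of tensors) in a Variable
    # and move it to the GPU when one is available.
    if isinstance(xs, (tuple, list)):
        return tuple(to_var_gpu(x, volatile=volatile) for x in xs)
    var = Variable(xs, volatile=volatile)
    return var.cuda() if torch.cuda.is_available() else var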
Example #2
def train(self, numEpochs=100):
    """The main training loop (also called by subclasses)."""
    for epoch in range(self.epoch, numEpochs):
        self.lr_scheduler.step(epoch)  # set this epoch's learning rate
        self.epoch = epoch             # record progress so training can resume
        for i in range(self.numBatchesPerEpoch):
            trainData = to_var_gpu(next(self.train_iter))
            self.step(*trainData)                          # one optimization step
            self.logStuff(i, epoch, numEpochs, trainData)  # periodic logging
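
The loop draws batches with next(self.train_iter) and never restarts the iterator, so train_iter must cycle through the data indefinitely. One hypothetical way to build such an iterator from an ordinary DataLoader (loopOver is an assumed name, not taken from the repository):

def loopOver(dataloader):
    # Yield batches forever, restarting the loader after each full pass,
    # so that next() never raises StopIteration.
    while True:
        for batch in dataloader:
            yield batch

# e.g. self.train_iter = loopOver(train_loader)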
Example #3
def constSWA(self, numEpochs=100, lr=1e-4):
    """Runs Stochastic Weight Averaging for numEpochs epochs at a constant lr."""
    self.SWAupdates = 0
    self.SWA = copy.deepcopy(self.CNN)  # running weight average, seeded with the current model
    ## Set the new constant learning rate
    new_lr_lambda = lambda epoch: lr / self.hypers['base_lr']
    self.lr_scheduler = optim.lr_scheduler.LambdaLR(
        self.optimizer, new_lr_lambda)

    # Fix the final epoch up front: self.epoch is reassigned inside the loop,
    # so recomputing numEpochs + self.epoch each iteration would drift upward.
    finalEpoch = self.epoch + numEpochs
    for epoch in range(self.epoch, finalEpoch):
        self.lr_scheduler.step(epoch)
        self.epoch = epoch
        for i in range(self.numBatchesPerEpoch):
            trainData = to_var_gpu(next(self.train_iter))
            self.step(*trainData)
            self.swaLogStuff(i, epoch)
            self.logStuff(i, epoch, finalEpoch, trainData)
        self.updateSWA()  # fold the current weights into the running average
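
constSWA calls self.updateSWA() once per epoch, but that method is not shown. The standard SWA update keeps an equal-weighted running average of the network parameters; a minimal sketch under the assumption that self.SWA mirrors self.CNN's parameter layout (not the repository's actual implementation):

def updateSWA(self):
    # Running average: new_avg = (avg * n + current) / (n + 1)
    n = self.SWAupdates
    for swaP, p in zip(self.SWA.parameters(), self.CNN.parameters()):
        swaP.data.mul_(n / (n + 1.0)).add_(p.data / (n + 1.0))
    self.SWAupdates += 1

Because only the weights are averaged, the BatchNorm running statistics of the averaged model are stale afterwards; that is what updateBatchNorm in Example #5 repairs.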
Example #4
def step(self, x_unlab, *_):
    x_unlab = to_var_gpu(x_unlab)
    # Critic updates: n_critic steps per generator step, as in WGAN training
    for _ in range(self.hypers['n_critic']):
        self.d_optimizer.zero_grad()
        z = self.getNoise(self.hypers["ul_BS"])
        x_fake = self.G(z).detach()  # detach so the critic update doesn't backprop into G
        # Wasserstein estimate: the critic should score real samples above fakes
        wass_loss = self.D(x_fake).mean() - self.D(x_unlab).mean()
        d_loss = wass_loss + self.grad_penalty(x_unlab, x_fake)  # WGAN-GP regularizer
        d_loss.backward()
        self.d_optimizer.step()

    # Generator update: maximize the critic's score on generated samples
    self.g_optimizer.zero_grad()
    z = self.getNoise(self.hypers["ul_BS"])
    x_fake = self.G(z)
    g_loss = -self.D(x_fake).mean()
    g_loss.backward()
    self.g_optimizer.step()
    return d_loss, g_loss
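
The grad_penalty term is the gradient penalty of WGAN-GP (Gulrajani et al., 2017), which pushes the critic's gradient norm toward 1 at random interpolates between real and fake samples. A sketch using the modern tensor API (the method name matches the call above, but the body, the lamda weight of 10, and the NCHW input shape are assumptions):

def grad_penalty(self, x_real, x_fake, lamda=10):
    # Sample one random interpolation coefficient per example (assumes NCHW inputs)
    eps = torch.rand(x_real.size(0), 1, 1, 1).type_as(x_real)
    interp = (eps * x_real + (1 - eps) * x_fake).detach().requires_grad_(True)
    d_out = self.D(interp)
    grads = torch.autograd.grad(outputs=d_out, inputs=interp,
                                grad_outputs=torch.ones_like(d_out),
                                create_graph=True)[0]
    # Penalize deviation of each example's gradient norm from 1
    gradNorm = grads.view(grads.size(0), -1).norm(2, dim=1)
    return lamda * ((gradNorm - 1) ** 2).mean()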
Example #5
def updateBatchNorm(self, model):
    """Recomputes BatchNorm running statistics via forward passes over training data."""
    model.train()  # train mode so BatchNorm layers update their running stats
    for _ in range(self.numBatchesPerEpoch):
        tensors = next(self.train_iter)
        trainData = to_var_gpu(tensors, volatile=True)  # no gradients needed (pre-0.4 PyTorch)
        out = model(self.getLabeledXYonly(trainData)[0])
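
Weight averaging invalidates the BatchNorm buffers, since the statistics accumulated during training describe the individual models rather than their average; the forward passes above recompute them for the averaged network. Current PyTorch (>= 1.6) ships this exact step as a utility, so an equivalent call today would be (train_loader and swa_model are placeholder names):

from torch.optim.swa_utils import update_bn

# Refresh BatchNorm running stats with one pass over the training data
update_bn(train_loader, swa_model)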