def evalloop(self):
    self.tt.tick("testing")
    tt = ticktock("-")
    totaltestbats = len(self.dataloader)
    self.model.eval()
    outs = []
    with torch.no_grad():
        for i, batch in enumerate(self.dataloader):
            # normalize every batch to a sequence and move it to the target device
            batch = (batch,) if not q.issequence(batch) else batch
            batch = [batch_e.to(self._device) for batch_e in batch]
            if self.transform_batch_inp is not None:
                batch = self.transform_batch_inp(*batch)
            batch_reset(self.model)
            modelouts = self.model(*batch)
            if self.transform_batch_out is not None:
                modelouts = self.transform_batch_out(modelouts)
            tt.live("eval - [{}/{}]".format(i + 1, totaltestbats))
            outs.append(modelouts)
    ttmsg = "eval done"
    tt.stoplive()
    tt.tock(ttmsg)
    self.tt.tock("tested")
    # concatenate per-batch outputs into one tensor along the batch dimension
    out = torch.cat(outs, 0)
    return out
def __init__(self, model):
    super(eval, self).__init__()
    self.model = model
    self._device = torch.device("cpu")
    self.transform_batch_inp = None
    self.transform_batch_out = None
    self.dataloader = None
    self.tt = ticktock("eval")
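# Hypothetical usage sketch (not part of the original code): it assumes the
# `eval` class above is in scope together with the helpers it relies on
# (torch, q.issequence, batch_reset, ticktock). `_toy_model` and `_toy_loader`
# are placeholder names introduced here for illustration only.
import torch
from torch.utils.data import DataLoader, TensorDataset

_toy_model = torch.nn.Linear(4, 2)                              # stand-in for a real network
_toy_loader = DataLoader(TensorDataset(torch.randn(16, 4)), batch_size=4)

_runner = eval(_toy_model)                                      # the `eval` class above (shadows the builtin)
_runner.dataloader = _toy_loader
_preds = _runner.evalloop()                                     # (16, 2) tensor: outputs concatenated over batches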
def testloop(self, epoch=None):
    if epoch is None:
        self.tt.tick("testing")
    tt = ticktock("-")
    self.model.eval()
    self.do_callbacks(self.START_TEST)
    self.losses.push_and_reset()
    totalbats = len(self.dataloader)
    for i, _batch in enumerate(self.dataloader):
        self.do_callbacks(self.START_BATCH)
        # normalize every batch to a sequence and move it to the target device
        _batch = (_batch,) if not q.issequence(_batch) else _batch
        _batch = [batch_e.to(self._device) for batch_e in _batch]
        if self.transform_batch_inp is not None:
            batch = self.transform_batch_inp(*_batch)
        else:
            batch = _batch
        # by convention the gold target is the last element of the batch,
        # unless no_gold is set, in which case the whole batch is model input
        if self.no_gold:
            batch_in = batch
            gold = None
        else:
            batch_in = batch[:-1]
            gold = batch[-1]
        batch_reset(self.model)
        modelouts = self.model(*batch_in)
        modelout2loss = modelouts
        if self.transform_batch_out is not None:
            modelout2loss = self.transform_batch_out(modelouts)
        if self.transform_batch_gold is not None:
            gold = self.transform_batch_gold(gold)
        losses = self.losses(modelout2loss, gold)
        epochmsg = ""
        if epoch is not None:
            curepoch, maxepoch = epoch
            epochmsg = "Epoch {}/{} -".format(curepoch, maxepoch)
        tt.live("{} - {}[{}/{}]: {}".format(self._name, epochmsg, i + 1, totalbats, self.losses.pp()))
        self.do_callbacks(self.END_BATCH)
    # losses = self.losses.get_agg_errors()
    tt.stoplive()
    ttmsg = "{}: {}".format(self._name, self.losses.pp())
    self.do_callbacks(self.END_TEST)
    if epoch is None:
        tt.tock(ttmsg)
        self.tt.tock("tested")
    return ttmsg
def __init__(self, model, **kw):
    super(tester, self).__init__(**kw)
    self.model = model
    self.losses = None
    self.transform_batch_inp = None
    self.transform_batch_out = None
    self.transform_batch_gold = None
    self.dataloader = None
    self.tt = ticktock(self._name)
    self._device = torch.device("cpu")
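# Hypothetical configuration sketch (not part of the original code): it assumes
# `t` is a fully constructed tester instance whose base class supplies the
# callback constants, `_name`, `no_gold` and the loss-aggregation machinery used
# in testloop. The lambdas only illustrate the hook contract: transform_batch_inp
# receives the unpacked batch and returns the sequence testloop splits into model
# inputs plus (last element) gold; transform_batch_gold post-processes gold
# before the losses are computed.
t.transform_batch_inp = lambda x, y: (x.float(), y.long())
t.transform_batch_gold = lambda gold: gold.view(-1)
summary = t.testloop()          # one pass over t.dataloader, returns the loss summary string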
def trainloop(self):
    if self.max_epochs == 0:
        self.tt.msg("skipping training")
        return
    self.stop_training = False
    self.tt.tick("training")
    tt = ticktock("-")
    while not self.stop_training:
        tt.tick()
        ttmsg = self.do_epoch(tt=tt)
        tt.tock(ttmsg)
    self.tt.tock("trained")
def __init__(self, model, **kw):
    super(trainer, self).__init__(**kw)
    self.model = model
    self.losses = None
    self.max_epochs = None
    self.current_epoch = 0
    self.stop_training = None
    self._device = torch.device("cpu")
    self.optim = None
    self.transform_batch_inp = None
    self.transform_batch_out = None
    self.transform_batch_gold = None
    self.dataloader = None
    self.tt = ticktock("trainer")
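# Hypothetical training setup sketch (not part of the original code): it assumes
# the trainer class above is complete in the surrounding library (in particular
# that do_epoch() exists and sets stop_training once max_epochs is reached), and
# that `_train_loader` yields (input, gold) batches and `_loss_aggregator`
# matches the losses interface used during an epoch. All underscored names are
# placeholders introduced here for illustration.
_net = torch.nn.Linear(4, 2)
_tr = trainer(_net)
_tr.optim = torch.optim.Adam(_net.parameters(), lr=1e-3)
_tr.dataloader = _train_loader
_tr.losses = _loss_aggregator
_tr.max_epochs = 10
_tr.trainloop()                 # repeats do_epoch() until stop_training is set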