def __init__(self, gamma: float = 2., alpha: float = 0.25, pad_idx: int = 0, scales=None, ratios=None, reg_loss=F.smooth_l1_loss):
    """Store loss hyperparameters and anchor-geometry defaults.

    Args:
        gamma: focusing exponent (focal-loss style parameter — presumably
            RetinaNet-like; confirm against the enclosing class).
        alpha: class-balance weight paired with ``gamma``.
        pad_idx: target index treated as padding.
        scales: anchor scales; defaults to ``[1, 2**(-1/3), 2**(-2/3)]``.
        ratios: anchor aspect ratios; defaults to ``[1/2, 1, 2]``.
        reg_loss: box-regression loss callable (default: smooth L1).
    """
    super().__init__()
    self.gamma = gamma
    self.alpha = alpha
    self.pad_idx = pad_idx
    self.reg_loss = reg_loss
    # Mutable defaults are created per-instance; None means "use the standard
    # three sub-octave scales and three aspect ratios".
    self.scales = scales if scales is not None else [1, 2 ** (-1 / 3), 2 ** (-2 / 3)]
    self.ratios = ratios if ratios is not None else [1 / 2, 1, 2]
def before_fit(self):
    """Set up summary-writer logging state before training begins.

    Only the rank-0 process runs the logger, and logging is disabled when
    the learner is in an lr-finder or prediction-gathering pass.
    """
    on_main_rank = rank_distrib() == 0
    special_pass = hasattr(self.learn, 'lr_finder') or hasattr(self, "gather_preds")
    self.run = on_main_rank and not special_pass
    if not self.run:
        return
    self.writer = SummaryWriter(log_dir=self.log_dir)
    # Default the tracked training metrics to the learner's loss function,
    # and derive display names from them unless names were given explicitly.
    metrics = listify(ifnone(self.train_metrics, self.learn.loss_func))
    self.train_metrics = metrics
    names = listify(ifnone(self.train_metric_names, [self.get_name(m) for m in metrics]))
    self.train_metric_names = ['train_' + n for n in names]
    # Per-metric smoothing state, initialized to (0, 0) pairs.
    self.smooth_dict = {n: (0, 0) for n in self.train_metric_names}