def __call__(self, force_pt=False, *input, **kwargs):
    """Dispatch between the PyTorch path and the NeuralModule path.

    When positional tensors are supplied (or ``force_pt`` is set), run the
    module's ``forward`` with gradients disabled and return its result;
    otherwise fall back to ``NeuralModule.__call__`` with the keyword
    arguments only.
    """
    # A non-empty *input tuple is truthy, which matches len(input) > 0.
    if input or force_pt:
        # Inference-style invocation: no autograd bookkeeping needed.
        with t.no_grad():
            return self.forward(*input, **kwargs)
    return NeuralModule.__call__(self, **kwargs)
def __call__(self, force_pt=False, *input, **kwargs):
    """Detokenize a batch of token-id sequences.

    In the PyTorch path (positional inputs present or ``force_pt`` set),
    reads the id tensor from ``kwargs["indices"]``, truncates each row at
    the first occurrence of id 1 (presumably the end-of-sequence id — TODO
    confirm against the tokenizer), and returns one detokenized string per
    row.  Otherwise defers to ``NeuralModule.__call__``.
    """
    if not (input or force_pt):
        return NeuralModule.__call__(self, **kwargs)
    # Tensor of shape (batch, time) holding token ids.
    id_rows = kwargs["indices"].detach().cpu().numpy().tolist()
    texts = []
    for row in id_rows:
        # Keep everything strictly before the first id 1, if any.
        kept = row[: row.index(1)] if 1 in row else row
        texts.append(self._detokenizer(kept))
    return texts
def __call__(self, force_pt=False, *input, **kwargs):
    """Map a batch of class-index sequences to class labels.

    In the PyTorch path (positional inputs present or ``force_pt`` set),
    reads the index tensor from ``kwargs["indices"]``, transposes it to
    put the batch dimension first, truncates each row at the first
    occurrence of id 1 (presumably the end-of-sequence id — TODO confirm),
    and maps the surviving ids through ``self._ids2classes``.  The batch of
    label lists is returned wrapped in an outer single-element list.
    Otherwise defers to ``NeuralModule.__call__``.
    """
    if not (input or force_pt):
        return NeuralModule.__call__(self, **kwargs)
    inds = kwargs["indices"]
    # NOTE(review): transpose_ mutates the detached view in place; the
    # incoming tensor appears to be (time, batch) — confirm with callers.
    rows = inds.detach().transpose_(1, 0).cpu().numpy().tolist()
    decoded = []
    for row in rows:
        kept = row[: row.index(1)] if 1 in row else row
        decoded.append([self._ids2classes[i] for i in kept])
    return [decoded]
def __call__(self, *input, force_pt=False, **kwargs):
    """Route the call to either the PyTorch or the NeuralModule machinery.

    Positional tensor inputs (or an explicit ``force_pt=True``) select the
    standard ``nn.Module.__call__`` path; otherwise the call is handled by
    ``NeuralModule.__call__`` with keyword arguments only.
    """
    use_pt = bool(input) or force_pt
    if not use_pt:
        return NeuralModule.__call__(self, **kwargs)
    return nn.Module.__call__(self, *input, **kwargs)
def __call__(self, force_pt=False, *input, **kwargs):
    """Evaluate the wrapped loss function or defer to NeuralModule.

    With ``force_pt=True`` the keyword arguments are fed straight into
    ``self._loss_function``; otherwise ``NeuralModule.__call__`` handles
    the call.

    NOTE(review): unlike the sibling ``__call__`` implementations, any
    positional ``*input`` is ignored here (it neither triggers the PyTorch
    path nor reaches the loss function) — confirm this is intended.
    """
    if not force_pt:
        return NeuralModule.__call__(self, **kwargs)
    return self._loss_function(**kwargs)