def __init__(self):
    super().__init__()
    QLEN = 20
    KMAX = 2
    NFILTERS = 32
    MINGRAM = 1
    MAXGRAM = 3
    self.simmat = modeling_util.SimmatModule()
    self.ngrams = torch.nn.ModuleList()
    self.rbf_bank = None
    # One PACRR convolution module per n-gram size from MINGRAM to MAXGRAM,
    # each keeping the top KMAX match signals per query term
    for ng in range(MINGRAM, MAXGRAM + 1):
        ng = modeling_util.PACRRConvMax2dModule(ng, NFILTERS, k=KMAX, channels=self.CHANNELS)
        self.ngrams.append(ng)
    qvalue_size = len(self.ngrams) * KMAX
    # Combine the BERT [CLS] vector with the flattened PACRR query signals
    self.linear1 = torch.nn.Linear(self.BERT_SIZE + QLEN * qvalue_size, 32)
    self.linear2 = torch.nn.Linear(32, 32)
    self.linear3 = torch.nn.Linear(32, 1)
def __init__(self, args):
    super().__init__()
    self.args = args
    # QLEN = 20 in the original; take the query length from the command-line arguments instead
    QLEN = self.args.maxlen
    KMAX = 1  # Original was 2, which causes an unknown bug
    NFILTERS = 32
    MINGRAM = 1
    MAXGRAM = 3
    self.simmat = modeling_util.SimmatModule()
    self.ngrams = torch.nn.ModuleList()
    self.rbf_bank = None
    # One PACRR convolution module per n-gram size from MINGRAM to MAXGRAM,
    # each keeping the top KMAX match signals per query term
    for ng in range(MINGRAM, MAXGRAM + 1):
        ng = modeling_util.PACRRConvMax2dModule(ng, NFILTERS, k=KMAX, channels=self.CHANNELS)
        self.ngrams.append(ng)
    qvalue_size = len(self.ngrams) * KMAX
    # Combine the BERT [CLS] vector with the flattened PACRR query signals
    self.linear1 = torch.nn.Linear(self.BERT_SIZE + QLEN * qvalue_size, 32)
    self.linear2 = torch.nn.Linear(32, 32)
    self.linear3 = torch.nn.Linear(32, 1)
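A minimal usage sketch of the modified constructor, not taken from the source: it assumes the method above belongs to a CEDR-style PACRR ranker class (called CedrPacrrRanker here, the name used in the original CEDR code) and that args only needs a maxlen attribute, since that is the only field the constructor reads.

from argparse import Namespace
from modeling import CedrPacrrRanker  # assumed module path, as in the CEDR repository

args = Namespace(maxlen=20)       # query length; sets the width of the PACRR feature block
model = CedrPacrrRanker(args)     # builds 3 n-gram conv modules (MINGRAM..MAXGRAM), each with k=KMAX=1
# With KMAX = 1 and three n-gram sizes, qvalue_size = 3, so linear1 expects
# self.BERT_SIZE + args.maxlen * 3 input features.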