Exemplo n.º 1
0
    def __init__(self, config_path):
        """Build the ranking head: attention over the query/doc match signal
        followed by a small scoring stack.

        Args:
            config_path: forwarded unchanged to the base ranker's constructor.
        """
        super().__init__(config_path)

        self.topk = 20  # number of top match positions folded by `linear`
        # Attention over ATTEN_SIZE-wide vectors (size inherited from the base
        # class config) — combine + transform step.
        self.attention = modeling_util.Attention(self.ATTEN_SIZE)
        self.linear = torch.nn.Linear(self.topk, 1)  # fold top-k scores into one
        self.out = torch.nn.Linear(1, 1)             # final scalar relevance score
Exemplo n.º 2
0
 def __init__(self,QLEN):
     """Set up attention over BERT-sized vectors plus the top-k scoring layers.

     Args:
         QLEN: forwarded unchanged to the base ranker's constructor.
     """
     super().__init__(QLEN)

     self.BERT_SIZE = 768  # hidden width of the underlying BERT encoder
     self.topk = 20        # number of top match positions folded by `linear`

     self.attention = modeling_util.Attention(self.BERT_SIZE)
     self.linear = torch.nn.Linear(self.topk, 1)
     # 13-dimensional feature vector mapped to the final score
     # (presumably hand-crafted features — confirm against the forward pass).
     self.out = torch.nn.Linear(13, 1)
Exemplo n.º 3
0
    def __init__(self, config):
        """Initialise the attention + scoring layers; widths (ATTEN_SIZE,
        BERT_SIZE) are provided by the base class.

        Args:
            config: forwarded unchanged to the base ranker's constructor.
        """
        super().__init__(config)

        self.topk = 20  # number of top match positions folded by `linear`

        # combine + transform over ATTEN_SIZE-wide inputs
        self.attention = modeling_util.Attention(self.ATTEN_SIZE)
        self.linear = torch.nn.Linear(self.topk, 1)
        # input width is BERT_SIZE plus 13 extra scalar features
        self.out = torch.nn.Linear(self.BERT_SIZE + 13, 1)
Exemplo n.º 4
0
    def __init__(self, config):
        """Build attention/transform layers over the joint BERT + entity
        representation, plus the top-k scoring stack.

        Args:
            config: forwarded unchanged to the base ranker's constructor.
        """
        super().__init__(config)

        self.BERT_SIZE = 768    # BERT encoder hidden width
        self.ENTITY_SIZE = 100  # entity-embedding width
        self.topk = 20          # number of top match positions folded by `linear`

        # width of the concatenated BERT + entity vector
        joint_size = self.BERT_SIZE + self.ENTITY_SIZE
        self.attention = modeling_util.Attention(joint_size)
        self.transform = torch.nn.Linear(joint_size, joint_size)  # combine + transform
        self.linear = torch.nn.Linear(self.topk, 1)
        self.dropout = torch.nn.Dropout(0.1)
        # input width is BERT_SIZE plus 13 extra scalar features
        self.out = torch.nn.Linear(self.BERT_SIZE + 13, 1)