Example 1
class SelfAttention(t.nn.Module):
    def __init__(self, input_dim, hidden_size):
        super(SelfAttention, self).__init__()
        self.input_dim = input_dim
        self.hidden_size = hidden_size
        # Project both inputs into a shared hidden space before scoring
        self.linear_query = CustomLinear(input_dim, hidden_size, bias=True, bn=True, act=t.nn.ReLU())
        self.linear_passage = CustomLinear(input_dim, hidden_size, bias=True, bn=True, act=t.nn.ReLU())
        # Element-wise sigmoid gate over the attended representation
        self.gate = CustomLinear(input_dim, input_dim, bias=False, bn=True, act=t.nn.Sigmoid())
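Every snippet on this page depends on a CustomLinear helper whose definition is not shown. Judging from the keyword arguments used above, it plausibly chains a Linear layer, optional batch normalization, and an optional activation; the following reconstruction is a guess at those internals (the default values for bias, bn, and act are assumptions):

import torch as t

class CustomLinear(t.nn.Module):
    # Hypothetical reconstruction: Linear -> optional BatchNorm1d -> optional activation
    def __init__(self, input_dim, output_dim, bias=True, bn=False, act=None):
        super(CustomLinear, self).__init__()
        self.linear = t.nn.Linear(input_dim, output_dim, bias=bias)
        self.bn = t.nn.BatchNorm1d(output_dim) if bn else None
        self.act = act

    def forward(self, x):
        y = self.linear(x)
        if self.bn is not None:
            # BatchNorm1d normalizes over (batch, features); fold any extra dims first
            shape = y.shape
            y = self.bn(y.reshape(-1, shape[-1])).reshape(shape)
        if self.act is not None:
            y = self.act(y)
        return y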
Example 2
class MultiHeadSelfAttention(t.nn.Module):
    def __init__(self, input_dim, hidden_size, num_head, output_dim):
        super(MultiHeadSelfAttention, self).__init__()
        self.num_head = num_head
        self.hidden_size = hidden_size
        self.passage_linear = CustomLinear(input_dim, hidden_size)
        self.query_linear = CustomLinear(input_dim, hidden_size)
        # The concatenated head outputs (num_head * hidden_size) map to output_dim
        self.output_linear = CustomLinear(num_head * hidden_size, output_dim)
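Only the constructor is shown; the shapes suggest each head produces a hidden_size vector per position and the heads are concatenated before output_linear. A minimal sketch of a forward pass consistent with that wiring (the bmm/softmax formulation, and the way heads are derived, are assumptions rather than the repo's actual code):

    def forward(self, x):
        # x: (batch, seq_len, input_dim)
        q = self.query_linear(x)                          # (batch, seq_len, hidden_size)
        p = self.passage_linear(x)                        # (batch, seq_len, hidden_size)
        scores = t.bmm(q, p.transpose(1, 2))              # (batch, seq_len, seq_len)
        weights = t.nn.functional.softmax(scores, dim=-1)
        head = t.bmm(weights, p)                          # (batch, seq_len, hidden_size)
        # Placeholder: real heads would use distinct projections per head
        heads = t.cat([head] * self.num_head, dim=-1)     # (batch, seq_len, num_head * hidden_size)
        return self.output_linear(heads)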
Example 3
class AttentionPooling(t.nn.Module):
    def __init__(self, input_dim, hidden_size):
        super(AttentionPooling, self).__init__()
        # Two-layer scorer: tanh projection, then a single attention logit per position
        self.linear1 = CustomLinear(input_dim, hidden_size, act=t.nn.Tanh())
        self.linear2 = CustomLinear(hidden_size, 1, bias=False, bn=False, act=None)
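This is the standard attention-pooling pattern: score every position, softmax the scores, and take the weighted sum as a fixed-size summary of the sequence. A hedged sketch of the matching forward pass (the optional mask handling is an assumption):

    def forward(self, x, mask=None):
        # x: (batch, seq_len, input_dim)
        scores = self.linear2(self.linear1(x)).squeeze(-1)      # (batch, seq_len)
        if mask is not None:
            scores = scores.masked_fill(mask == 0, float('-inf'))
        weights = t.nn.functional.softmax(scores, dim=-1)       # (batch, seq_len)
        return t.bmm(weights.unsqueeze(1), x).squeeze(1)        # (batch, input_dim)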
Example 4
class DotAttention(t.nn.Module):
    def __init__(self, query_dim, passage_dim, hidden_size):
        super(DotAttention, self).__init__()
        self.query_dim = query_dim
        self.passage_dim = passage_dim
        self.hidden_size = hidden_size
        # Project query and passage into a shared space for dot-product scoring
        self.linear_query = CustomLinear(query_dim, hidden_size, True, True, t.nn.ReLU())
        self.linear_passage = CustomLinear(passage_dim, hidden_size, True, True, t.nn.ReLU())
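A hedged sketch of what the forward pass plausibly does: score each query position against each passage position in the shared hidden space, then return an attention-weighted sum (whether that sum is taken over the raw or the projected passage is an assumption):

    def forward(self, query, passage):
        # query: (batch, lq, query_dim); passage: (batch, lp, passage_dim)
        q = self.linear_query(query)                  # (batch, lq, hidden_size)
        p = self.linear_passage(passage)              # (batch, lp, hidden_size)
        scores = t.bmm(q, p.transpose(1, 2))          # (batch, lq, lp)
        weights = t.nn.functional.softmax(scores, dim=-1)
        return t.bmm(weights, passage)                # (batch, lq, passage_dim)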
Example 5
class EncoderFact(t.nn.Module):
    def __init__(self, embedding_dim, hidden_size, num_head, output_dim):
        super(EncoderFact, self).__init__()
        # Transformer-style encoder block: multi-head self-attention plus feed-forward
        self.multiheadselfattention = MultiHeadSelfAttention(
            input_dim=embedding_dim,
            hidden_size=hidden_size,
            num_head=num_head,
            output_dim=output_dim)
        self.layernorm = LayerNorm(output_dim)
        self.feedforward1 = CustomLinear(output_dim, output_dim)
        self.feedforward2 = CustomLinear(output_dim, output_dim)
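The layers mirror a Transformer encoder block. A minimal sketch of a plausible forward pass under the usual residual-plus-LayerNorm pattern (reusing the single layernorm module for both sub-layers, and requiring embedding_dim == output_dim for the first residual, are assumptions):

    def forward(self, x):
        # Self-attention sub-layer with residual connection
        attended = self.multiheadselfattention(x)
        x = self.layernorm(x + attended)
        # Two-layer feed-forward sub-layer with residual connection
        ff = self.feedforward2(self.feedforward1(x))
        return self.layernorm(x + ff)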
Example 6
File: gate.py Project: fendaq/tc
class Gate(t.nn.Module):
    def __init__(self, input_dim):
        super(Gate, self).__init__()
        # Nonlinear transform of the input
        self.nonlinear_input = CustomLinear(input_dim, input_dim, bias=True, bn=True, act=t.nn.ReLU())
        # Sigmoid gate that scales the transform element-wise
        self.gate = CustomLinear(input_dim, input_dim, bias=False, bn=True, act=t.nn.Sigmoid())
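This follows the common element-wise gating pattern: a sigmoid over the input decides how much of the transformed input passes through. A one-line sketch of the likely forward pass (an assumption; the snippet shows only the constructor):

    def forward(self, x):
        # Sigmoid gate in [0, 1] scales the ReLU-transformed input element-wise
        return self.gate(x) * self.nonlinear_input(x)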
Example 7
class Fusion(t.nn.Module):
    def __init__(self, fact_dim, accusation_dim, hidden_size):
        super(Fusion, self).__init__()
        # Attention of accusations over facts, followed by a gated nonlinear fusion
        self.fact_accusation_attention = DotAttention(accusation_dim, fact_dim, hidden_size)
        self.fusion_nonlinear = CustomLinear(fact_dim, fact_dim, bias=True, bn=True, act=t.nn.ReLU())
        self.gate = CustomLinear(fact_dim, fact_dim, bias=False, bn=True, act=t.nn.Sigmoid())
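Given the DotAttention signature above (query_dim=accusation_dim, passage_dim=fact_dim), the accusation representation queries the facts, and the attended facts feed the gated nonlinearity. A hedged sketch of one plausible combination (the exact fusion rule is an assumption):

    def forward(self, fact, accusation):
        # Accusations attend over facts; the result has fact_dim features
        attended = self.fact_accusation_attention(accusation, fact)
        # Gated nonlinear fusion of the attended facts
        return self.gate(attended) * self.fusion_nonlinear(attended)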
Example 8
class DotAttentionGated(t.nn.Module):
    def __init__(self, query_dim, passage_dim, hidden_size):
        super(DotAttentionGated, self).__init__()
        self.query_dim = query_dim
        self.passage_dim = passage_dim
        self.hidden_size = hidden_size
        self.linear_query = CustomLinear(query_dim, hidden_size, True, True, t.nn.ReLU())
        self.linear_passage = CustomLinear(passage_dim, hidden_size, True, True, t.nn.ReLU())
        # Sigmoid gate sized for the shared hidden space
        self.gate = CustomLinear(hidden_size, hidden_size, bias=False, bn=True, act=t.nn.Sigmoid())
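This variant extends DotAttention with a gate whose dimensions match the hidden space, suggesting the gate acts on the attended hidden vectors rather than on raw passage features. A hedged sketch of the matching forward pass (the formulation is an assumption):

    def forward(self, query, passage):
        q = self.linear_query(query)                  # (batch, lq, hidden_size)
        p = self.linear_passage(passage)              # (batch, lp, hidden_size)
        scores = t.bmm(q, p.transpose(1, 2))          # (batch, lq, lp)
        weights = t.nn.functional.softmax(scores, dim=-1)
        attended = t.bmm(weights, p)                  # (batch, lq, hidden_size)
        # Element-wise sigmoid gate over the attended hidden representation
        return self.gate(attended) * attended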