Example #1
    def __init__(self, embedding_matrix, opt):
        super(MGANDPL, self).__init__(opt)

        # Frozen pretrained word embeddings.
        self.embed = nn.Embedding.from_pretrained(torch.tensor(embedding_matrix, dtype=torch.float))
        # Bidirectional encoders for the context and the aspect (outputs: 2 * hidden_dim).
        self.ctx_lstm = DynamicLSTM(opt.embed_dim, opt.hidden_dim, num_layers=1, batch_first=True, bidirectional=True)
        self.asp_lstm = DynamicLSTM(opt.embed_dim, opt.hidden_dim, num_layers=1, batch_first=True, bidirectional=True)
        # Bilinear weights for aspect-to-context and context-to-aspect attention.
        self.w_a2c = nn.Parameter(torch.Tensor(2 * opt.hidden_dim, 2 * opt.hidden_dim))
        self.w_c2a = nn.Parameter(torch.Tensor(2 * opt.hidden_dim, 2 * opt.hidden_dim))
        self.alignment = AlignmentMatrix(opt)
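
These constructors all lean on shared layer classes (DynamicLSTM, Attention, NoQueryAttention, SqueezeEmbedding, AlignmentMatrix) and an opt option object. A minimal sketch of how such a model might be instantiated; the opt field values, the vocabulary size, and the assumption that MGANDPL and its layers are importable are all hypothetical:

import types
import numpy as np

# Hypothetical option namespace; field names inferred from the snippets above.
opt = types.SimpleNamespace(embed_dim=300, hidden_dim=150)

# Hypothetical pretrained embedding table: one row per vocabulary entry.
embedding_matrix = np.random.randn(5000, opt.embed_dim).astype('float32')

model = MGANDPL(embedding_matrix, opt)  # assumes MGANDPL and its layer classes are importable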
Example #2
    def __init__(self, embedding_matrix, opt):
        super(AOADPL, self).__init__(opt)

        # Frozen pretrained word embeddings.
        self.embed = nn.Embedding.from_pretrained(
            torch.tensor(embedding_matrix, dtype=torch.float))
        # Bidirectional encoders for the context and the aspect (outputs: 2 * hidden_dim).
        self.ctx_lstm = DynamicLSTM(opt.embed_dim,
                                    opt.hidden_dim,
                                    num_layers=1,
                                    batch_first=True,
                                    bidirectional=True)
        self.asp_lstm = DynamicLSTM(opt.embed_dim,
                                    opt.hidden_dim,
                                    num_layers=1,
                                    batch_first=True,
                                    bidirectional=True)
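
For context, a minimal sketch of the attention-over-attention interaction that gives this model its name, following the standard AOA formulation rather than this repository's forward pass; ctx_out and asp_out stand for the outputs of ctx_lstm and asp_lstm:

import torch

def aoa_weights(ctx_out, asp_out):
    """Standard attention-over-attention; ctx_out: (B, n, 2h), asp_out: (B, m, 2h)."""
    interaction = torch.matmul(ctx_out, asp_out.transpose(1, 2))   # (B, n, m)
    alpha = torch.softmax(interaction, dim=1)    # column-wise: context attention per aspect word
    beta = torch.softmax(interaction, dim=2)     # row-wise: aspect attention per context word
    beta_avg = beta.mean(dim=1, keepdim=True)    # (B, 1, m) aspect-level attention
    gamma = torch.matmul(alpha, beta_avg.transpose(1, 2))  # (B, n, 1) final context weights
    return gamma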
Example #3
    def __init__(self, opt, fixed_dim):
        super(FixedSALLayer, self).__init__(opt)

        # Unidirectional aggregation LSTM; input and hidden widths are both fixed_dim.
        self.agg_lstm = DynamicLSTM(fixed_dim,
                                    fixed_dim,
                                    num_layers=1,
                                    batch_first=True)
Example #4
    def __init__(self, opt, times_hidden=1):
        super(NormalSALLayer, self).__init__(opt)

        # Aggregation LSTM whose input width scales with times_hidden
        # (e.g. times_hidden=2 for the output of a bidirectional encoder).
        self.agg_lstm = DynamicLSTM(times_hidden * opt.hidden_dim,
                                    opt.hidden_dim,
                                    num_layers=1,
                                    batch_first=True)
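
Examples #3 and #4 are thin wrappers around DynamicLSTM, whose implementation is not shown. A minimal sketch of what a length-aware LSTM wrapper like this typically does, assuming it follows the common pack/run/unpack pattern (the class below is a hypothetical stand-in, not the repository's code):

import torch.nn as nn

class DynamicLSTMSketch(nn.Module):
    """Hypothetical stand-in for DynamicLSTM: an nn.LSTM that respects
    per-sequence lengths so padded steps never reach the recurrence."""

    def __init__(self, input_size, hidden_size, num_layers=1,
                 batch_first=True, bidirectional=False):
        super().__init__()
        self.lstm = nn.LSTM(input_size, hidden_size, num_layers=num_layers,
                            batch_first=batch_first, bidirectional=bidirectional)

    def forward(self, x, x_len):
        # x: (B, T, input_size); x_len: 1-D tensor of true sequence lengths.
        packed = nn.utils.rnn.pack_padded_sequence(
            x, x_len.cpu(), batch_first=True, enforce_sorted=False)
        out, (h_n, c_n) = self.lstm(packed)
        out, _ = nn.utils.rnn.pad_packed_sequence(out, batch_first=True)
        return out, (h_n, c_n)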
Example #5
    def __init__(self, embedding_matrix, opt):
        super(IANDPL, self).__init__(opt)

        # Frozen pretrained word embeddings.
        self.embed = nn.Embedding.from_pretrained(
            torch.tensor(embedding_matrix, dtype=torch.float))
        # Separate unidirectional encoders for the context and the aspect.
        self.lstm_context = DynamicLSTM(opt.embed_dim,
                                        opt.hidden_dim,
                                        num_layers=1,
                                        batch_first=True)
        self.lstm_aspect = DynamicLSTM(opt.embed_dim,
                                       opt.hidden_dim,
                                       num_layers=1,
                                       batch_first=True)
        # Interactive attention: the context attends to the aspect and vice
        # versa, both with bilinear scoring.
        self.attention_context = Attention(opt.hidden_dim,
                                           score_function='bi_linear')
        self.attention_aspect = Attention(opt.hidden_dim,
                                          score_function='bi_linear')
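
The interactive attention here relies on score_function='bi_linear'. A minimal sketch of a bilinear attention module as commonly implemented (hypothetical code; the real Attention class may differ in details such as head count or output handling):

import torch
import torch.nn as nn

class BiLinearAttentionSketch(nn.Module):
    """Hypothetical bilinear attention: score(k, q) = k W q^T."""

    def __init__(self, hidden_dim):
        super().__init__()
        self.weight = nn.Parameter(torch.empty(hidden_dim, hidden_dim))
        nn.init.xavier_uniform_(self.weight)

    def forward(self, k, q):
        # k: (B, n, d) keys; q: (B, 1, d) query (e.g. mean-pooled aspect states).
        score = torch.matmul(torch.matmul(k, self.weight), q.transpose(1, 2))  # (B, n, 1)
        attn = torch.softmax(score, dim=1)
        return torch.matmul(attn.transpose(1, 2), k), attn  # weighted sum, weights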
Example #6
    def __init__(self, embedding_matrix, opt):
        super(RAMDPL, self).__init__(opt)

        # Frozen pretrained word embeddings.
        self.embed = nn.Embedding.from_pretrained(torch.tensor(embedding_matrix, dtype=torch.float))
        # Bidirectional encoder whose states form the memory (2 * hidden_dim per slot).
        self.bi_lstm_context = DynamicLSTM(opt.embed_dim, opt.hidden_dim, num_layers=1, batch_first=True,
                                           bidirectional=True)
        # Per-slot attention scorer over 2 * hidden_dim + 1 memory features
        # (state plus a scalar position feature) and 2 * embed_dim of aspect-side features.
        self.att_linear = nn.Linear(opt.hidden_dim * 2 + 1 + opt.embed_dim * 2, 1)
        # Updates a hidden_dim-sized episode vector from the attended memory at each hop.
        self.gru_cell = nn.GRUCell(opt.hidden_dim * 2 + 1, opt.hidden_dim)
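
The scorer and GRU cell above only make sense inside a multi-hop loop. A minimal sketch of the recurrent-attention pattern they suggest; the hop count, the exact feature layout fed to att_linear, and the shapes of aspect and et are assumptions, not the repository's forward pass:

import torch
import torch.nn.functional as F

def ram_hops_sketch(memory, aspect, et, att_linear, gru_cell, hops=3):
    """Hypothetical RAM-style loop: each hop scores every memory slot against
    the aspect and the current episode, pools the memory with those weights,
    and lets the GRU cell refine the episode.
    memory: (B, n, 2h + 1) position-weighted states; aspect, et: (B, d)."""
    n = memory.size(1)
    for _ in range(hops):
        feats = torch.cat([memory,
                           et.unsqueeze(1).expand(-1, n, -1),
                           aspect.unsqueeze(1).expand(-1, n, -1)], dim=-1)
        alpha = F.softmax(att_linear(feats), dim=1)                # (B, n, 1)
        i_t = torch.bmm(alpha.transpose(1, 2), memory).squeeze(1)  # (B, 2h + 1)
        et = gru_cell(i_t, et)                                     # refined episode
    return et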
Example #7
    def __init__(self, embedding_matrix, opt):
        super(TNETDPL, self).__init__(opt)

        # Frozen pretrained word embeddings.
        self.embed = nn.Embedding.from_pretrained(
            torch.tensor(embedding_matrix, dtype=torch.float))

        # Bidirectional encoders for the context (lstm1) and the aspect (lstm2).
        self.lstm1 = DynamicLSTM(opt.embed_dim,
                                 opt.hidden_dim,
                                 num_layers=1,
                                 batch_first=True,
                                 bidirectional=True)
        self.lstm2 = DynamicLSTM(opt.embed_dim,
                                 opt.hidden_dim,
                                 num_layers=1,
                                 batch_first=True,
                                 bidirectional=True)
        # Length-preserving convolution (kernel 3, padding 1) producing 50 feature maps.
        self.convs3 = nn.Conv1d(2 * opt.hidden_dim, 50, 3, padding=1)
        # Fuses a context state with its aspect summary: (4 * hidden_dim) -> (2 * hidden_dim).
        self.fc1 = nn.Linear(4 * opt.hidden_dim, 2 * opt.hidden_dim)
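
fc1's 4 * hidden_dim -> 2 * hidden_dim shape matches the target-specific transformation used in TNet-style models: each context state is concatenated with an attention-weighted summary of the aspect states and projected back down. A minimal sketch of that step (standard TNet formulation, not necessarily this repository's exact code):

import torch
import torch.nn.functional as F

def tnet_tst_sketch(ctx, asp, fc1):
    """Hypothetical target-specific transformation: attend each context state
    over the aspect states, fuse the pair through fc1, then tanh.
    ctx: (B, n, 2h); asp: (B, m, 2h)."""
    alpha = F.softmax(torch.bmm(ctx, asp.transpose(1, 2)), dim=2)  # (B, n, m)
    r = torch.bmm(alpha, asp)                                      # (B, n, 2h) aspect summary per token
    return torch.tanh(fc1(torch.cat([ctx, r], dim=-1)))            # (B, n, 2h)

convs3 would then slide over the transformed sequence (after a transpose to (B, 2 * hidden_dim, n)) to extract 50 salient feature maps, which the kernel 3 / padding 1 combination keeps length-preserving.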
Example #8
    def __init__(self, embedding_matrix, opt):
        super(BaselineDPL, self).__init__(opt)

        # Frozen pretrained word embeddings.
        self.embed = nn.Embedding.from_pretrained(
            torch.tensor(embedding_matrix, dtype=torch.float))
        # Trims each batch down to its longest unpadded sequence.
        self.squeeze_embed = SqueezeEmbedding()
        # Input width is embed_dim * 2: a token embedding concatenated with an
        # equally sized aspect vector.
        self.slice_lstm = DynamicLSTM(opt.embed_dim * 2,
                                      opt.hidden_dim,
                                      num_layers=1,
                                      batch_first=True)
Example #9
    def __init__(self, embedding_matrix, opt):
        super(ATAEDPL, self).__init__(opt)

        # Frozen pretrained word embeddings.
        self.embed = nn.Embedding.from_pretrained(
            torch.tensor(embedding_matrix, dtype=torch.float))
        # Trims each batch down to its longest unpadded sequence.
        self.squeeze_embed = SqueezeEmbedding()
        # ATAE-style input: each token embedding is concatenated with the aspect
        # embedding, hence the embed_dim * 2 input width.
        self.lstm = DynamicLSTM(opt.embed_dim * 2,
                                opt.hidden_dim,
                                num_layers=1,
                                batch_first=True)
        # Attention over [h_t; aspect] features (hidden_dim + embed_dim),
        # with no separate query vector.
        self.attention = NoQueryAttention(opt.hidden_dim + opt.embed_dim,
                                          score_function='bi_linear')
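
The embed_dim * 2 LSTM input and the hidden_dim + embed_dim attention input both follow from the ATAE-LSTM recipe of appending the aspect embedding everywhere. A minimal sketch of the input construction (hypothetical helper, assuming a mean-pooled aspect):

import torch

def atae_input_sketch(word_emb, aspect_emb):
    """Hypothetical ATAE-style input: append the pooled aspect vector to every
    token embedding. word_emb: (B, n, e); aspect_emb: (B, m, e)."""
    aspect_pooled = aspect_emb.mean(dim=1, keepdim=True)       # (B, 1, e)
    aspect_tiled = aspect_pooled.expand(-1, word_emb.size(1), -1)
    return torch.cat([word_emb, aspect_tiled], dim=-1)         # (B, n, 2e)

The attention input size then works out the same way: NoQueryAttention scores [h_t; aspect], i.e. hidden_dim + embed_dim features per position.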