Example #1
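Only the constructors are shown below; the surrounding class definitions, `forward` methods, and imports are omitted. All snippets rely on `torch.nn` (imported as `nn`) and a project-local `attention` module providing `LinearSeqAttn` (and, in Example #4, `BilinearSeqAttn`).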
    def __init__(self, embed_dim, model_dim, rnn_num_layer, dropout,
                 column_maxlen):
        super(CondsNumPredictor, self).__init__()

        self.model_dim = model_dim
        self.column_maxlen = column_maxlen

        # Bidirectional LSTM over column-name tokens; with hidden_size of
        # model_dim // 2 per direction, each step yields model_dim features.
        self.column_rnn = nn.LSTM(
            input_size=embed_dim,
            hidden_size=model_dim // 2,
            num_layers=rnn_num_layer,
            batch_first=True,
            dropout=dropout,
            bidirectional=True,
        )
        # Self-attention pooling over the encoded column tokens, followed by
        # projections of the pooled summary (by their names, a hidden and a
        # cell state, e.g. to initialize the question encoder).
        self.column_seq_attn = attention.LinearSeqAttn(model_dim)
        self.column_to_hidden_state = nn.Linear(model_dim, 2 * model_dim)
        self.column_to_cell_state = nn.Linear(model_dim, 2 * model_dim)

        # Bidirectional LSTM over question tokens, pooled with the same kind
        # of linear sequence attention.
        self.question_rnn = nn.LSTM(
            input_size=embed_dim,
            hidden_size=model_dim // 2,
            num_layers=rnn_num_layer,
            batch_first=True,
            dropout=dropout,
            bidirectional=True,
        )
        self.question_seq_attn = attention.LinearSeqAttn(model_dim)

        # Classification head over the number of WHERE conditions
        # (column_maxlen + 1 classes: 0 .. column_maxlen).
        self.mlp = nn.Sequential(nn.Linear(model_dim, model_dim), nn.Tanh(),
                                 nn.Linear(model_dim, column_maxlen + 1))
Example #2
    def __init__(self,
                 embed_dim,
                 model_dim,
                 rnn_num_layer,
                 dropout,
                 column_attention=None):
        super(CondsColPredictor, self).__init__()
        self.column_attention = column_attention

        # Bidirectional LSTM over question tokens (model_dim features per step).
        self.question_rnn = nn.LSTM(
            input_size=embed_dim,
            hidden_size=model_dim // 2,
            num_layers=rnn_num_layer,
            batch_first=True,
            dropout=dropout,
            bidirectional=True,
        )

        # With column attention enabled, a learned linear map is used to attend
        # over question tokens (by its name, conditioned on the column);
        # otherwise plain linear sequence attention pools the question.
        if column_attention:
            self.linear_attn = nn.Linear(model_dim, model_dim)
        else:
            self.seq_attn = attention.LinearSeqAttn(model_dim)

        # Bidirectional LSTM over column-name tokens.
        self.column_rnn = nn.LSTM(
            input_size=embed_dim,
            hidden_size=model_dim // 2,
            num_layers=rnn_num_layer,
            batch_first=True,
            dropout=dropout,
            bidirectional=True,
        )

        # Question and column summaries are projected into a shared space; the
        # head emits a single score (one logit per column).
        self.linear_question = nn.Linear(model_dim, model_dim)
        self.linear_column = nn.Linear(model_dim, model_dim)
        self.mlp = nn.Sequential(nn.ReLU(), nn.Linear(model_dim, 1))
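Both constructor branches can be exercised by toggling `column_attention`; the values below are again illustrative:

# Column-attention variant (creates linear_attn) vs. plain pooling (seq_attn).
col_with_attn = CondsColPredictor(embed_dim=300, model_dim=100,
                                  rnn_num_layer=2, dropout=0.3,
                                  column_attention=True)
col_plain = CondsColPredictor(embed_dim=300, model_dim=100,
                              rnn_num_layer=2, dropout=0.3)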
Example #3
    def __init__(self, embed_dim, model_dim, rnn_num_layer, dropout,
                 agg_count):
        super(AggPredictor, self).__init__()

        # Bidirectional LSTM over question tokens, pooled into a single vector
        # by linear sequence attention.
        self.question_rnn = nn.LSTM(
            input_size=embed_dim,
            hidden_size=model_dim // 2,
            num_layers=rnn_num_layer,
            batch_first=True,
            dropout=dropout,
            bidirectional=True,
        )
        self.seq_attn = attention.LinearSeqAttn(model_dim)
        # Classification head over the agg_count aggregation operators.
        self.mlp = nn.Sequential(nn.Linear(model_dim, model_dim), nn.Tanh(),
                                 nn.Linear(model_dim, agg_count))
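A construction sketch; `agg_count` sizes the output layer, and the value 6 below is only an assumption (the number of aggregation operators in a WikiSQL-style setup), not taken from the project:

# agg_count logits, one per aggregation operator; 6 is illustrative.
agg_pred = AggPredictor(embed_dim=300, model_dim=100, rnn_num_layer=2,
                        dropout=0.3, agg_count=6)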
Example #4
    def __init__(
        self,
        token_embedder,
        lang_code="en",
        aligned_query_embedding=False,
        answer_maxlen=None,
        model_dim=128,
        dropout=0.3,
    ):
        super(DrQA, self).__init__(token_embedder)

        self.lang_code = lang_code
        self.aligned_query_embedding = aligned_query_embedding
        self.answer_maxlen = answer_maxlen
        self.token_embedder = token_embedder
        self.dropout = nn.Dropout(p=dropout)

        # When aligned query embeddings are enabled, they are concatenated to
        # the context embeddings, so the paragraph encoder's input widens.
        context_embed_dim, query_embed_dim = token_embedder.get_embed_dim()
        if self.aligned_query_embedding:
            context_embed_dim += query_embed_dim

        # Three-layer bidirectional LSTM encoders for the paragraph and the
        # query; each produces model_dim * 2 features per token.
        self.paragraph_rnn = nn.LSTM(
            input_size=context_embed_dim,
            hidden_size=model_dim,
            num_layers=3,
            dropout=dropout,
            bidirectional=True,
            batch_first=True,
        )

        self.query_rnn = nn.LSTM(
            input_size=query_embed_dim,
            hidden_size=model_dim,
            num_layers=3,
            dropout=dropout,
            bidirectional=True,
            batch_first=True,
        )

        # Self-attention pooling of the query into a single vector.
        self.query_att = attention.LinearSeqAttn(model_dim * 2)

        # Bilinear attentions score every paragraph position against the
        # pooled query to predict the answer span's start and end.
        self.start_attn = attention.BilinearSeqAttn(model_dim * 2,
                                                    model_dim * 2)
        self.end_attn = attention.BilinearSeqAttn(model_dim * 2, model_dim * 2)

        self.criterion = nn.CrossEntropyLoss()
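The constructor only requires the embedder to expose `get_embed_dim()` returning `(context_embed_dim, query_embed_dim)`. The stub below is hypothetical and assumes DrQA's (unshown) base class accepts the embedder as its single constructor argument:

# Hypothetical embedder stub, only to illustrate the get_embed_dim() contract.
class DummyTokenEmbedder:
    def get_embed_dim(self):
        return 300, 300  # (context_embed_dim, query_embed_dim); illustrative

reader = DrQA(DummyTokenEmbedder(), lang_code="en",
              aligned_query_embedding=True, model_dim=128, dropout=0.3)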
Example #5
    def __init__(self, embed_dim, model_dim, rnn_num_layer, dropout,
                 column_maxlen, token_maxlen):
        super(CondsValuePointer, self).__init__()

        self.model_dim = model_dim
        self.column_maxlen = column_maxlen
        self.token_maxlen = token_maxlen

        # Bidirectional LSTM over question tokens plus linear sequence
        # attention for pooling.
        self.question_rnn = nn.LSTM(
            input_size=embed_dim,
            hidden_size=model_dim // 2,
            num_layers=rnn_num_layer,
            batch_first=True,
            dropout=dropout,
            bidirectional=True,
        )
        self.seq_attn = attention.LinearSeqAttn(model_dim)

        # Bidirectional LSTM over column-name tokens.
        self.column_rnn = nn.LSTM(
            input_size=embed_dim,
            hidden_size=model_dim // 2,
            num_layers=rnn_num_layer,
            batch_first=True,
            dropout=dropout,
            bidirectional=True,
        )

        # Unidirectional decoder whose input width is token_maxlen, which
        # suggests it consumes one-hot/probability vectors over question
        # positions at each step (pointer-network style).
        self.decoder = nn.LSTM(
            input_size=self.token_maxlen,
            hidden_size=model_dim,
            num_layers=rnn_num_layer,
            batch_first=True,
            dropout=dropout,
        )

        # Projections combined by the scoring head, which emits one logit per
        # question position at each decoding step.
        self.linear_column = nn.Linear(model_dim, model_dim)
        self.linear_conds = nn.Linear(model_dim, model_dim)
        self.linear_question = nn.Linear(model_dim, model_dim)
        self.mlp = nn.Sequential(nn.ReLU(), nn.Linear(model_dim, 1))
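A construction sketch; `token_maxlen` bounds the decoder's per-step input width, and all values below are illustrative:

# Illustrative values only; token_maxlen must cover the longest question.
value_pointer = CondsValuePointer(embed_dim=300, model_dim=100,
                                  rnn_num_layer=2, dropout=0.3,
                                  column_maxlen=4, token_maxlen=200)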