Example #1
import torch
from torch.nn.init import xavier_normal_


class OutputModule(torch.nn.Module):
    def __init__(self, memory_size: int, answer_size: int, num_labels: int):
        super(OutputModule, self).__init__()
        self.memory_size = memory_size
        self.answer_size = answer_size

        # Project the answer vector into the memory space before combining.
        self.combination = torch.nn.Linear(answer_size, memory_size)
        # initalise_weights is a project-local helper; see the sketch below.
        initalise_weights(xavier_normal_, self.combination)
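Every example on this page calls a project-local `initalise_weights` helper (spelled this way in the source) whose definition is not shown. A minimal sketch, assuming it simply applies the given initialiser to each weight matrix of the module:

import torch


def initalise_weights(init_fn, module: torch.nn.Module) -> None:
    # Apply init_fn to every parameter with two or more dimensions.
    # Biases and other 1-D parameters keep their defaults, since
    # initialisers such as xavier_normal_ require at least 2-D tensors.
    for param in module.parameters():
        if param.dim() >= 2:
            init_fn(param)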
Example #2
import torch
from torch.nn.init import xavier_normal_
from allennlp.modules import Seq2VecEncoder, TextFieldEmbedder


class AnswerModule(torch.nn.Module):
    def __init__(self,
                 word_embeddings: TextFieldEmbedder,
                 encoder: Seq2VecEncoder,
                 embedding_dropout: float = 0.5,
                 encoder_dropout: float = 0.5):
        super(AnswerModule, self).__init__()

        self.word_embeddings = word_embeddings
        self.encoder = encoder
        # Separate dropout applied after embedding and after encoding.
        self.embedding_dropout = torch.nn.Dropout(embedding_dropout)
        self.encoder_dropout = torch.nn.Dropout(encoder_dropout)

        initalise_weights(xavier_normal_, self.encoder)
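For context, a hypothetical usage sketch, assuming an AllenNLP setup; the embedder, encoder, vocabulary size, and dimensions below are illustrative, not taken from the source:

import torch
from allennlp.modules.seq2vec_encoders import PytorchSeq2VecWrapper
from allennlp.modules.text_field_embedders import BasicTextFieldEmbedder
from allennlp.modules.token_embedders import Embedding

# Illustrative components: a 100-dimensional embedding layer and a GRU
# wrapped as a Seq2VecEncoder.
embedder = BasicTextFieldEmbedder(
    {"tokens": Embedding(num_embeddings=10000, embedding_dim=100)})
encoder = PytorchSeq2VecWrapper(
    torch.nn.GRU(100, 100, batch_first=True))

answer_module = AnswerModule(word_embeddings=embedder, encoder=encoder)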
Example #3
import torch
from torch.nn.init import xavier_normal_
from allennlp.modules import Seq2VecEncoder, TextFieldEmbedder


class InputModule(torch.nn.Module):
    def __init__(self,
                 word_embeddings: TextFieldEmbedder,
                 sentence_encoder: Seq2VecEncoder,
                 document_encoder: Seq2VecEncoder,
                 embedding_dropout: float = 0.5,
                 encoder_dropout: float = 0.5):
        super(InputModule, self).__init__()

        self.word_embeddings = word_embeddings
        # Two-level encoding: one encoder for tokens within a sentence,
        # one for the resulting sequence of sentence vectors.
        self.sentence_encoder = sentence_encoder
        self.document_encoder = document_encoder

        self.embedding_dropout = torch.nn.Dropout(embedding_dropout)
        self.encoder_dropout = torch.nn.Dropout(encoder_dropout)

        initalise_weights(xavier_normal_, self.sentence_encoder)
        initalise_weights(xavier_normal_, self.document_encoder)
Example #4
import torch
from torch.nn.init import xavier_normal_


class MemoryModule(torch.nn.Module):
    def __init__(self, hidden_dim: int, num_hops: int, dropout: float = 0.5):
        super(MemoryModule, self).__init__()

        self.hidden_dim = hidden_dim
        # AttentionGRU is a project-local module, not shown on this page.
        self.attention_gru = AttentionGRU(hidden_dim, hidden_dim)

        # Two-layer gating network: maps 8 * hidden_dim input features
        # to a single scalar score.
        self.gate_nn = torch.nn.Sequential(
            torch.nn.Linear(8 * hidden_dim, hidden_dim),
            torch.nn.Tanh(),
            torch.nn.Linear(hidden_dim, 1, bias=False),
        )

        # One memory-update network per hop, cloned so that each hop
        # trains its own weights (see the clone_module sketch below).
        memory = torch.nn.Sequential(
            torch.nn.Linear(4 * hidden_dim, hidden_dim),
            torch.nn.ReLU(inplace=True),
        )
        self.memories = clone_module(memory, num_hops)

        self.dropout = torch.nn.Dropout(dropout)

        initalise_weights(xavier_normal_, self.attention_gru)
        initalise_weights(xavier_normal_, self.gate_nn)
        for memory in self.memories:
            initalise_weights(xavier_normal_, memory)
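`clone_module` is also project-local. A minimal sketch, assuming it mirrors the common "clones" helper: deep-copy the module once per hop and register the copies in a ModuleList:

import copy

import torch


def clone_module(module: torch.nn.Module, n: int) -> torch.nn.ModuleList:
    # Deep-copy the module n times so each hop gets independent weights;
    # ModuleList registers every copy as a submodule.
    return torch.nn.ModuleList([copy.deepcopy(module) for _ in range(n)])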