Example #1
    def __init__(self,
                 config,
                 d_in_vocabs,
                 d_out_vocab,
                 device=None,
                 **kwargs):
        super(DeepTyper, self).__init__(config, d_in_vocabs, d_out_vocab)
        self.device = device

        self.lstm_inner = RNNFull(
            config.d_model,
            config.bidirectional,
            config.num_layers,
            config.dropout,
            config.bptt_len,
            device,
        )

        self.lstm_outer = RNNFull(
            config.d_model,
            config.bidirectional,
            config.num_layers,
            config.dropout,
            config.bptt_len,
            device,
        )

        self.layer_norm = LayerNorm(config.d_model)
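
The lstm_inner / lstm_outer pair follows a DeepTyper-style two-pass encoding: a first sequence pass, layer normalization, then a second pass over the normalized states. RNNFull and LayerNorm are project classes not defined in this snippet (RNNFull's constructor appears in Example #4 below); a minimal, self-contained sketch of the same two-pass idea using plain torch.nn modules, assuming d_model is even, could look like this:

    import torch
    import torch.nn as nn

    class TwoPassEncoder(nn.Module):
        """Hypothetical sketch of a DeepTyper-style inner/outer encoder."""

        def __init__(self, d_model, num_layers=1, dropout=0.0):
            super().__init__()
            # Halve the per-direction hidden size so the bidirectional output is d_model wide.
            self.inner = nn.LSTM(d_model, d_model // 2, num_layers=num_layers,
                                 dropout=dropout, bidirectional=True, batch_first=True)
            self.outer = nn.LSTM(d_model, d_model // 2, num_layers=num_layers,
                                 dropout=dropout, bidirectional=True, batch_first=True)
            self.norm = nn.LayerNorm(d_model)

        def forward(self, x):                    # x: (batch, seq, d_model)
            h, _ = self.inner(x)
            h, _ = self.outer(self.norm(h))
            return h                             # (batch, seq, d_model)
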
Example #2
    def __init__(self,
                 config,
                 d_in_vocabs,
                 d_out_vocab,
                 device=None,
                 **kwargs):
        super(RNN, self).__init__(config.d_model,
                                  d_out_vocab,
                                  softmax_type=config.softmax_type)
        self.device = device
        self.hidden = None

        if config.dot_product_embedding:
            self.embeddings = DotProductEmbedding(d_in_vocabs,
                                                  config.d_model,
                                                  type="sum")
        else:
            self.embeddings = MultiEmbedding(d_in_vocabs,
                                             config.d_model,
                                             type="sum")

        # if this class is used with SequenceIterator then batch_first = False
        self.batch_first = False
        self.lstm = nn.LSTM(
            config.d_model,
            config.d_model if not config.bidirectional else config.d_model // 2,
            num_layers=config.num_layers,
            dropout=config.dropout,
            bidirectional=config.bidirectional,
            batch_first=self.batch_first,
        )

        self.layer_norm = LayerNorm(config.d_model)
        self.bptt_len = config.bptt_len
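
This RNN variant keeps its own hidden state (self.hidden) for truncated BPTT and sums one embedding per input vocabulary into a single d_model-wide vector before the sequence-first LSTM. MultiEmbedding's constructor is shown in Example #3; a small, self-contained usage sketch of the same sum-then-LSTM flow using plain nn.Embedding (names and sizes here are illustrative, not from the source):

    import torch
    import torch.nn as nn

    d_model, vocab_a, vocab_b = 64, 1000, 50
    emb_a = nn.Embedding(vocab_a, d_model)       # e.g. token ids
    emb_b = nn.Embedding(vocab_b, d_model)       # e.g. a second input field
    lstm = nn.LSTM(d_model, d_model // 2, bidirectional=True, batch_first=False)

    seq_len, batch = 35, 8                       # one bptt_len-sized chunk
    xa = torch.randint(vocab_a, (seq_len, batch))
    xb = torch.randint(vocab_b, (seq_len, batch))
    x = emb_a(xa) + emb_b(xb)                    # "sum" combination of the embeddings
    out, hidden = lstm(x)                        # out: (seq_len, batch, d_model)
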
Example #3
    def __init__(self, d_in_vocabs, d_embedding, scaled=False, type="concat"):
        super(MultiEmbedding, self).__init__()
        if not isinstance(d_in_vocabs, list):
            d_in_vocabs = [d_in_vocabs]

        self.factor = len(d_in_vocabs) if type == "concat" else 1
        # emb_cls = nn.Embedding if not delta_grad else EmbeddingWithGrad
        self.embeddings = nn.ModuleList(
            [
                nn.Embedding(d_vocab, d_embedding // self.factor)
                for d_vocab in d_in_vocabs
            ]
        )

        self.norm = LayerNorm(d_embedding)
        self.s_factor = math.sqrt(d_embedding // len(d_in_vocabs)) if scaled else 1
        self.type = type
        self.d_in_vocabs = d_in_vocabs
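
With type="concat" the embedding width d_embedding is split evenly across the input vocabularies; with type="sum" each embedding gets the full width. The forward pass is not part of this example; a hedged, self-contained sketch of what the constructor implies (using torch's built-in nn.LayerNorm in place of the project's LayerNorm, and an illustrative function name):

    import torch
    import torch.nn as nn

    def multi_embed(embeddings, inputs, type="concat", s_factor=1.0, norm=None):
        """Hypothetical forward for MultiEmbedding: embed each field, then concat or sum."""
        parts = [emb(x) for emb, x in zip(embeddings, inputs)]
        out = torch.cat(parts, dim=-1) if type == "concat" else sum(parts)
        out = out * s_factor                     # optional sqrt(d) scaling
        return norm(out) if norm is not None else out

    embs = nn.ModuleList([nn.Embedding(100, 16), nn.Embedding(20, 16)])
    ids = [torch.randint(100, (4, 7)), torch.randint(20, (4, 7))]
    summed = multi_embed(embs, ids, type="sum")  # shape (4, 7, 16)
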
Example #4
    def __init__(self, d_model, bidirectional, num_layers, dropout, bptt_len,
                 device):
        super(RNNFull, self).__init__()

        self.batch_first = True
        self.lstm = nn.LSTM(
            d_model,
            d_model if not bidirectional else d_model // 2,
            num_layers=num_layers,
            dropout=dropout,
            bidirectional=bidirectional,
            batch_first=self.batch_first,
        )
        self.layer_norm = LayerNorm(d_model)

        self.d_model = d_model
        self.bptt_len = bptt_len
        self.device = device

        self.hidden = None
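
The hidden-size expression keeps the LSTM output width at d_model whichever way bidirectional is set: with bidirectional=True each direction gets d_model // 2 and the two directions are concatenated back to d_model (so d_model is assumed to be even). A quick self-contained check using nothing beyond nn.LSTM:

    import torch
    import torch.nn as nn

    d_model = 128
    lstm = nn.LSTM(d_model, d_model // 2, num_layers=2,
                   bidirectional=True, batch_first=True)
    x = torch.randn(4, 10, d_model)              # (batch, seq, d_model)
    out, (h, c) = lstm(x)
    assert out.shape == (4, 10, d_model)         # both directions concatenated back to d_model
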
Example #5
    def __init__(self, layer):
        super(UEncoder, self).__init__()
        self.layer = layer
        self.norm = LayerNorm(layer.size)
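
LayerNorm here (and throughout these examples) is a project-level class rather than torch.nn.LayerNorm, since it is constructed as LayerNorm(layer.size). Its definition is not part of the snippets; the version from the Annotated Transformer, which these modules appear to follow, is reproduced below as an assumption:

    import torch
    import torch.nn as nn

    class LayerNorm(nn.Module):
        "Layer normalization over the last dimension, with a learned gain and bias."

        def __init__(self, features, eps=1e-6):
            super(LayerNorm, self).__init__()
            self.a_2 = nn.Parameter(torch.ones(features))
            self.b_2 = nn.Parameter(torch.zeros(features))
            self.eps = eps

        def forward(self, x):
            mean = x.mean(-1, keepdim=True)
            std = x.std(-1, keepdim=True)
            return self.a_2 * (x - mean) / (std + self.eps) + self.b_2
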
Example #6
    def __init__(self, dim_model):
        super(HaltingUnit, self).__init__()
        self.linear = nn.Linear(dim_model, 1)
        self.norm = LayerNorm(dim_model)
        self.reset_parameters()
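
A halting unit in the ACT / Universal Transformer sense maps each position's normalized state to a halting probability in (0, 1); the forward pass and reset_parameters are not shown in this example. A self-contained sketch of the computation this constructor suggests (the function name and usage below are illustrative):

    import torch
    import torch.nn as nn

    def halting_probability(x, linear, norm):
        """Hypothetical halting step: normalized state -> probability per position."""
        return torch.sigmoid(linear(norm(x)))

    dim_model = 32
    p = halting_probability(torch.randn(4, 10, dim_model),
                            nn.Linear(dim_model, 1), nn.LayerNorm(dim_model))
    # p has shape (4, 10, 1), with values in (0, 1)
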
Example #7
    def __init__(self, layer, N):
        super(Decoder, self).__init__()
        self.layers = clones(layer, N)
        self.norm = LayerNorm(layer.size)
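
clones is the usual Annotated Transformer helper that deep-copies a layer N times into an nn.ModuleList, giving N independently parameterized copies. It is not defined in the snippet; its standard definition is:

    import copy
    import torch.nn as nn

    def clones(module, N):
        "Produce N identical (independently parameterized) copies of a module."
        return nn.ModuleList([copy.deepcopy(module) for _ in range(N)])
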
Example #8
    def __init__(self, size, dropout):
        super(SublayerConnection, self).__init__()
        self.norm = LayerNorm(size)
        self.dropout = nn.Dropout(dropout)
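
SublayerConnection pairs a LayerNorm with dropout for a pre-norm residual block: normalize the input, run the sublayer, apply dropout, and add the input back. The matching forward is not shown above; a self-contained sketch of that computation (the function name is illustrative):

    import torch
    import torch.nn as nn

    def prenorm_residual(x, sublayer, norm, dropout):
        """x + dropout(sublayer(norm(x))) -- the SublayerConnection pattern."""
        return x + dropout(sublayer(norm(x)))

    size = 16
    y = prenorm_residual(torch.randn(2, 5, size), nn.Linear(size, size),
                         nn.LayerNorm(size), nn.Dropout(0.1))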