Example #1
import torch
import torch.nn as nn


class Encoder(nn.Module):
    def __init__(self, hyperParams):
        super(Encoder, self).__init__()
        self.hyperParams = hyperParams
        # Character embeddings: random initialization unless a pretrained
        # embedding file is configured.
        if hyperParams.charEmbFile == "":
            self.charEmb = nn.Embedding(hyperParams.charNUM,
                                        hyperParams.charEmbSize)
            self.charDim = hyperParams.charEmbSize
        else:
            reader = Reader()  # project-specific loader for pretrained vectors
            self.charEmb, self.charDim = reader.load_pretrain(
                hyperParams.charEmbFile, hyperParams.charAlpha,
                hyperParams.unk)
        self.charEmb.weight.requires_grad = hyperParams.charFineTune

        # Character-bigram embeddings, set up the same way.
        if hyperParams.bicharEmbFile == "":
            self.bicharEmb = nn.Embedding(hyperParams.bicharNUM,
                                          hyperParams.bicharEmbSize)
            self.bicharDim = hyperParams.bicharEmbSize
        else:
            reader = Reader()
            self.bicharEmb, self.bicharDim = reader.load_pretrain(
                hyperParams.bicharEmbFile, hyperParams.bicharAlpha,
                hyperParams.unk)
        self.bicharEmb.weight.requires_grad = hyperParams.bicharFineTune

        self.dropOut = nn.Dropout(hyperParams.dropProb)
        # Two-layer bidirectional LSTM over the concatenated char and bichar
        # embeddings; `dropout` here is applied between the two LSTM layers.
        self.bilstm = nn.LSTM(input_size=self.charDim + self.bicharDim,
                              hidden_size=hyperParams.rnnHiddenSize,
                              batch_first=True,
                              bidirectional=True,
                              num_layers=2,
                              dropout=hyperParams.dropProb)
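The excerpt stops at the constructor; the forward pass is not shown. Below is a minimal sketch of what a matching forward method might look like, assuming char and bichar index tensors of shape (batch, seq_len). The method signature and tensor shapes are assumptions, not part of the original.

    def forward(self, charIndexes, bicharIndexes):
        # Assumed inputs: (batch, seq_len) index tensors. Embed both views
        # and concatenate to (batch, seq_len, charDim + bicharDim).
        char = self.charEmb(charIndexes)
        bichar = self.bicharEmb(bicharIndexes)
        concat = self.dropOut(torch.cat([char, bichar], dim=2))
        # Bidirectional LSTM output: (batch, seq_len, 2 * rnnHiddenSize).
        output, _ = self.bilstm(concat)
        return output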
Example #2
import torch
import torch.nn as nn


class Decoder(nn.Module):
    def __init__(self, hyperParams):
        super(Decoder, self).__init__()

        # Word embeddings are always loaded from a pretrained file here.
        reader = Reader()  # project-specific loader for pretrained vectors
        self.wordEmb, self.wordDim = reader.load_pretrain(
            hyperParams.wordEmbFile, hyperParams.wordAlpha, hyperParams.unk)

        self.wordEmb.weight.requires_grad = hyperParams.wordFineTune
        self.dropOut = nn.Dropout(hyperParams.dropProb)
        self.lastWords = []
        self.hyperParams = hyperParams

        # The encoder output (rnnHiddenSize * 2, from the bidirectional LSTM)
        # is concatenated with the previous word's embedding before being
        # projected to the label space.
        self.linearLayer = nn.Linear(
            hyperParams.rnnHiddenSize * 2 + self.wordDim,
            hyperParams.labelSize)

        # Specify the dimension explicitly: calling nn.LogSoftmax() with no
        # `dim` is deprecated and triggers a warning in current PyTorch.
        self.softmax = nn.LogSoftmax(dim=1)
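Again, only the constructor is in the excerpt. The `lastWords` list and the linear layer's input size suggest the decoder feeds the previous word's embedding back in at each step. A sketch of a single decoding step under that assumption, where `encoderHidden` is a (batch, rnnHiddenSize * 2) slice of the encoder output and `lastWordIndexes` holds indices of the previously predicted words (the method name and both arguments are assumptions):

    def step(self, encoderHidden, lastWordIndexes):
        # Embed the previous words: (batch, wordDim).
        lastWord = self.dropOut(self.wordEmb(lastWordIndexes))
        # Concatenate with the encoder state to match linearLayer's input size.
        concat = torch.cat([encoderHidden, lastWord], dim=1)
        # Log-probabilities over the label set: (batch, labelSize).
        return self.softmax(self.linearLayer(concat))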
Example #3
import torch.nn as nn


class Encoder(nn.Module):
    def __init__(self, hyperParams):
        super(Encoder, self).__init__()
        self.hyperParams = hyperParams
        # Word embeddings: random initialization unless a pretrained
        # embedding file is configured.
        if hyperParams.wordEmbFile == "":
            self.wordEmb = nn.Embedding(hyperParams.postWordNum,
                                        hyperParams.wordEmbSize)
            self.wordDim = hyperParams.wordEmbSize
        else:
            reader = Reader()  # project-specific loader for pretrained vectors
            self.wordEmb, self.wordDim = reader.load_pretrain(
                hyperParams.wordEmbFile, hyperParams.postWordAlpha,
                hyperParams.unk)
        self.wordEmb.weight.requires_grad = hyperParams.wordFineTune
        self.dropOut = nn.Dropout(hyperParams.dropProb)
        # Note: nn.GRU applies `dropout` only between stacked layers, so with
        # the default num_layers=1 the argument has no effect and PyTorch
        # emits a UserWarning.
        self.gru = nn.GRU(input_size=self.wordDim,
                          hidden_size=hyperParams.rnnHiddenSize,
                          batch_first=True,
                          dropout=hyperParams.dropProb)
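As above, the forward method is missing from the excerpt. A minimal sketch, assuming `wordIndexes` is a (batch, seq_len) tensor of word ids (the name and shapes are assumptions):

    def forward(self, wordIndexes):
        # Embed and apply dropout: (batch, seq_len, wordDim).
        word = self.dropOut(self.wordEmb(wordIndexes))
        # output: (batch, seq_len, rnnHiddenSize);
        # hidden: (1, batch, rnnHiddenSize) for a single unidirectional layer.
        output, hidden = self.gru(word)
        return output, hidden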