def __init__(self, args, emb_layer, nclasses=2):
    """Build a text classifier: embedding -> encoder -> linear head.

    The encoder is chosen from mutually exclusive flags on ``args``:
    CNN (``args.cnn``), LSTM (``args.lstm``), or SRU (default).

    :param args: hyperparameter namespace with ``cnn``/``lstm`` flags and
        ``d`` (hidden size), ``depth`` (layers), ``dropout`` fields.
    :param emb_layer: pre-built embedding layer exposing ``n_d``
        (embedding dimension).
    :param nclasses: number of output classes for the final linear layer.
    """
    super(Model, self).__init__()
    self.args = args
    self.drop = nn.Dropout(args.dropout)
    self.emb_layer = emb_layer

    if args.cnn:
        self.encoder = modules.CNN_Text(
            emb_layer.n_d,
            widths=[3, 4, 5],
        )
        # NOTE(review): 300 presumably equals the CNN's total filter
        # count (e.g. 100 filters x 3 widths) — confirm against CNN_Text.
        out_dim = 300
    elif args.lstm:
        self.encoder = nn.LSTM(
            emb_layer.n_d,
            args.d,
            args.depth,
            dropout=args.dropout,
        )
        out_dim = args.d
    else:
        self.encoder = MF.SRU(
            emb_layer.n_d,
            args.d,
            args.depth,
            dropout=args.dropout,
            use_tanh=1,
        )
        out_dim = args.d

    self.out = nn.Linear(out_dim, nclasses)
def __init__(self, embedding, hidden_size=150, depth=1, dropout=0.3,
             cnn=False, nclasses=2):
    """Build a text classifier: embedding -> {CNN | BiLSTM} -> linear head.

    :param embedding: path/spec consumed by ``dataloader.load_embedding``
        to build the embedding layer.
    :param hidden_size: encoder output width. For the CNN branch this is
        the per-width filter count (output is ``3 * hidden_size``); for the
        BiLSTM branch each direction gets ``hidden_size // 2`` units.
    :param depth: number of LSTM layers (unused by the CNN branch).
    :param dropout: dropout probability for the input dropout layer and
        between LSTM layers.
    :param cnn: if True use the CNN encoder, otherwise the BiLSTM.
    :param nclasses: number of output classes for the final linear layer.
    """
    super(Model, self).__init__()
    self.cnn = cnn
    self.drop = nn.Dropout(dropout)
    self.emb_layer = modules.EmbeddingLayer(
        embs=dataloader.load_embedding(embedding))
    self.word2id = self.emb_layer.word2id

    if cnn:
        self.encoder = modules.CNN_Text(
            self.emb_layer.n_d, widths=[3, 4, 5], filters=hidden_size)
        d_out = 3 * hidden_size
    else:
        self.encoder = nn.LSTM(
            self.emb_layer.n_d,
            hidden_size // 2,
            depth,
            dropout=dropout,
            bidirectional=True)
        # Bug fix: the bidirectional output width is 2 * (hidden_size // 2),
        # which differs from hidden_size when hidden_size is odd; using
        # hidden_size directly made nn.Linear mismatch the encoder output.
        d_out = 2 * (hidden_size // 2)

    self.out = nn.Linear(d_out, nclasses)