def __init__(self):
    """Build a BERT text encoder from pretrained ``bert-base-uncased``.

    Loads the model with ``output_hidden_states=True`` so callers can read
    every intermediate layer's hidden states, and keeps the matching
    tokenizer alongside it.
    """
    super().__init__()
    # Request all intermediate hidden states, not just the last layer.
    config = BertConfig()
    config.output_hidden_states = True
    self.bert = BertModel.from_pretrained('bert-base-uncased', config=config)
    self.bertTokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
def __init__(self):
    """Build a BERT encoder followed by a bidirectional LSTM head.

    Loads pretrained ``bert-base-uncased`` (emitting all hidden states)
    and stacks a single-layer bi-LSTM on top. The per-direction hidden
    size is 256 // num_directions, so the concatenated bi-LSTM output
    is 256-dimensional.
    """
    super().__init__()
    # Request all intermediate hidden states, not just the last layer.
    config = BertConfig()
    config.output_hidden_states = True
    self.bert = BertModel.from_pretrained('bert-base-uncased', config=config)
    self.bertTokenizer = BertTokenizer.from_pretrained('bert-base-uncased')

    self.ninput = 768  # BERT base hidden size fed into the LSTM
    self.nlayers = 1
    self.bidirectional = True
    self.num_directions = 2 if self.bidirectional else 1
    # Split the 256-dim output budget across the two directions.
    self.nhidden = 256 // self.num_directions
    self.drop_prob = 0.5
    # nn.LSTM applies dropout only *between* stacked layers; passing a
    # non-zero dropout with num_layers == 1 has no effect and raises a
    # UserWarning, so only forward it when it can actually take effect.
    self.rnn = nn.LSTM(
        self.ninput,
        self.nhidden,
        self.nlayers,
        batch_first=True,
        dropout=self.drop_prob if self.nlayers > 1 else 0.0,
        bidirectional=self.bidirectional,
    )