Example #1
    def __init__(self, model_name_or_path, dropout=0.1):
        super(MyModel, self).__init__()
        # Load the pretrained BERT encoder
        self.bert = BertModel.from_pretrained(model_name_or_path)
        # Keep every BERT parameter trainable (full fine-tuning)
        for param in self.bert.parameters():
            param.requires_grad = True
        # Two-layer scoring head on top of the 768-dimensional BERT output
        self.linear1 = nn.Linear(768, 768)
        self.linear2 = nn.Linear(768, 1)
        # Dropout probability is kept as a plain float here
        self.dropout = dropout
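
For orientation, a forward pass that could pair with the constructor above is sketched here as a continuation of the same class; the attention_mask argument, the tanh activation, and the use of the pooled [CLS] output are assumptions, since the original example only shows __init__ (torch.nn.functional is assumed to be imported as F).

    def forward(self, input_ids, attention_mask=None):
        # Pooled [CLS] representation from BERT, shape (batch, 768)
        pooled = self.bert(input_ids, attention_mask=attention_mask).pooler_output
        h = torch.tanh(self.linear1(pooled))
        # self.dropout is a float, so apply functional dropout with that probability
        h = F.dropout(h, p=self.dropout, training=self.training)
        # One score per example, shape (batch, 1)
        return self.linear2(h)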
Example #2
    def __init__(self, model_name_or_path, dropout=0.1):
        super().__init__()
        # self.bert = AutoModel.from_pretrained(model_name_or_path)
        self.bert = BertModel.from_pretrained(model_name_or_path)

        # self.bert.requires_grad_(True)
        # Keep every BERT parameter trainable (full fine-tuning)
        for param in self.bert.parameters():
            param.requires_grad = True
        self.dropout = nn.Dropout(dropout)
        # Single-logit output head on top of the 768-dimensional BERT output
        self.linear = nn.Linear(768, 1)
        self.n_class = 4
Example #3
    def __init__(self,
                 model_name_or_path,
                 hidden_size=768,
                 dropout=0.1,
                 num_choices=4):
        super(BertForMultipleChoiceWithMatch, self).__init__()
        self.num_choices = num_choices
        self.bert = BertModel.from_pretrained(model_name_or_path)
        self.dropout = nn.Dropout(dropout)
        # Alternative classifier heads for differently sized feature concatenations
        # self.classifier = nn.Linear(hidden_size, 1)
        # self.classifier2 = nn.Linear(2 * hidden_size, 1)
        self.classifier3 = nn.Linear(3 * hidden_size, 1)
        # self.classifier4 = nn.Linear(4 * hidden_size, 1)
        # self.classifier6 = nn.Linear(6 * hidden_size, 1)
        # Matching and fusion modules defined elsewhere in the project
        self.ssmatch = SSingleMatchNet(hidden_size, dropout)
        self.fuse = FuseNet(hidden_size)
Example #4
import torch
from transformers.models.bert import BertModel, BertTokenizer

model_name = '/data/project/learn_code/data/chinese-bert-wwm-ext/'
# Load the tokenizer that matches the model
tokenizer = BertTokenizer.from_pretrained(model_name)
# Load the pretrained model
model = BertModel.from_pretrained(model_name)
# Input text
# input_text = "Here is some text to encode"
input_text = "今天天气很好啊,你好吗"
# Convert the text into token ids with the tokenizer
input_ids = tokenizer.encode(input_text, add_special_tokens=True)
print(len(input_ids))
# Example ids for the commented-out English sentence: [101, 2182, 2003, 2070, 3793, 2000, 4372, 16044, 102]
input_ids = torch.tensor([input_ids])
# Get the last hidden layer output of the BERT model
print(input_ids.shape)
with torch.no_grad():
    last_hidden_states = model(input_ids)[0]
    print(last_hidden_states)
    print(last_hidden_states.shape)
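
As a side note (not part of the original snippet), the same result can be obtained with the tokenizer's call interface, which also returns an attention mask; the sketch below reuses the tokenizer, model, and input_text defined above and assumes a transformers 4.x-style API.

# Encode text and build tensors in one step (includes attention_mask)
encoded = tokenizer(input_text, return_tensors='pt', add_special_tokens=True)
with torch.no_grad():
    outputs = model(**encoded)
    # Same tensor as last_hidden_states above: (batch, seq_len, 768)
    print(outputs.last_hidden_state.shape)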
Example #5
    @classmethod
    def from_tinybert(cls):
        # Build a dual encoder (separate question and context BERT encoders) from a TinyBERT config
        config = TinyBertConfig()
        tokenizer = BertTokenizerFast.from_pretrained(config.name)
        q_encoder = BertModel.from_pretrained(config.name)
        c_encoder = BertModel.from_pretrained(config.name)
        return cls(config, tokenizer, q_encoder, c_encoder)
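
For orientation only (not shown in the original snippet), a dual encoder built this way is typically used by encoding the question and a candidate context separately and comparing the pooled vectors. The dot-product scoring and the example strings below are assumptions; the sketch reuses the tokenizer and encoders created in the factory above.

import torch

# Hypothetical question/context pair for illustration
q_inputs = tokenizer("what is tinybert?", return_tensors="pt")
c_inputs = tokenizer("TinyBERT is a distilled version of BERT.", return_tensors="pt")
with torch.no_grad():
    q_vec = q_encoder(**q_inputs).pooler_output  # (1, hidden_size)
    c_vec = c_encoder(**c_inputs).pooler_output  # (1, hidden_size)
# Dot-product relevance score between question and context
score = (q_vec * c_vec).sum(dim=-1)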