def __init__(self, config):
    """Build the bare BERT backbone: token embeddings plus the transformer encoder.

    Args:
        config: BERT model configuration passed through to every submodule.
    """
    super().__init__(config)
    self.embeddings = BertEmbeddings(config)
    self.encoder = BertEncoder(config)
    # Older pytorch_transformers releases required `self.apply(self.init_weights)`;
    # newer ones expose init_weights() directly, which is what we call here.
    self.init_weights()
def __init__(self, config, depth=None):
    """BERT model with pre-training heads attached.

    Args:
        config: BERT model configuration passed to every submodule.
        depth: optional value stored on the instance as-is; only assigned
            here, its use is elsewhere — presumably limits the encoder
            depth, verify against the consumers of ``self.depth``.
    """
    super(CustomBertModel, self).__init__(config)
    self.depth = depth
    self.embeddings = BertEmbeddings(config)
    self.encoder = BertEncoder(config)
    self.cls = BertPreTrainingHeads(config)
    # Legacy pytorch_transformers-style recursive weight initialisation.
    self.apply(self.init_weights)
def __init__(self, config, args):
    """Multimodal BERT with MAG (Multimodal Adaptation Gate) fusion modules.

    Owns the encoder layers directly (instead of a BertEncoder) so a MAG
    module can be interleaved with each transformer layer, and bundles the
    embedding, pooler, and classification head in one model.

    Args:
        config: BERT model configuration (hidden size, layer count, etc.).
        args: task configuration. NOTE(review): accessed both by attribute
            (``args.output_mode``) and by item (``args["hidden_dropout_prob"]``),
            so the object must support both — confirm against the caller.
    """
    super(BERT_MAG_model, self).__init__(config)
    self.newly_added_config = args

    # BUG FIX: the original assigned num_labels only in the regression
    # branch, so any non-regression run crashed with AttributeError at the
    # nn.Linear construction below. Regression keeps its original value (1);
    # other modes fall back to the config's label count (default 2).
    if args.output_mode == 'regression':
        self.num_labels = 1
    else:
        self.num_labels = getattr(config, 'num_labels', 2)

    # BertEncoder internals, flattened into this class so MAG can be applied
    # per-layer.
    self.output_attentions = self.config.output_attentions
    self.output_hidden_states = self.config.output_hidden_states
    self.layer = nn.ModuleList(
        [BertLayer(self.config) for _ in range(self.config.num_hidden_layers)]
    )
    self.MAG = MAG(self.config, args)
    self.MAG_all = nn.ModuleList(
        [MAG(self.config, args) for _ in range(self.config.num_hidden_layers)]
    )

    # MultimodalBertModel
    self.embeddings = BertEmbeddings(self.config)
    self.pooler = BertPooler(self.config)

    # MultimodalBertForSequenceClassification
    self.classifier = nn.Linear(self.config.hidden_size, self.num_labels)
    self.dropout = nn.Dropout(args["hidden_dropout_prob"])
    self.apply(self.init_weights)
def __init__(self, config):
    """Standard BERT model: embeddings, encoder stack, and pooler.

    Args:
        config: BERT model configuration shared by all submodules.
    """
    super(BertModel, self).__init__(config)
    # Submodule assignment order is preserved deliberately — nn.Module
    # registers parameters in assignment order, which fixes state_dict layout.
    self.embeddings = BertEmbeddings(config)
    self.encoder = BertEncoder(config)
    self.pooler = BertPooler(config)
    # Legacy pytorch_transformers-style recursive weight initialisation.
    self.apply(self.init_weights)