Example #1
    def __init__(self, embedding_matrix, opt):
        super(LCF_GLOVE, self).__init__()
        self.config = BertConfig.from_json_file("config.json")
        self.opt = opt
        self.embed = nn.Embedding.from_pretrained(
            torch.tensor(embedding_matrix, dtype=torch.float))
        self.mha_global = SelfAttention(self.config, opt)
        self.mha_local = SelfAttention(self.config, opt)
        self.ffn_global = PositionwiseFeedForward(self.opt.hidden_dim,
                                                  dropout=self.opt.dropout)
        self.ffn_local = PositionwiseFeedForward(self.opt.hidden_dim,
                                                 dropout=self.opt.dropout)
        self.mha_local_SA = SelfAttention(self.config, opt)
        self.mha_global_SA = SelfAttention(self.config, opt)
        self.mha_SA_single = SelfAttention(self.config, opt)
        self.bert_pooler = BertPooler(self.config)

        self.bert_pooler1 = BertPooler(self.config)
        self.bert_pooler2 = BertPooler(self.config)
        self.dense1 = nn.Linear(opt.hidden_dim, opt.polarities_dim)
        self.dense2 = nn.Linear(opt.hidden_dim, opt.polarities_dim)
        self.sentiment_pool = nn.Linear(6, 3)

        self.dropout = nn.Dropout(opt.dropout)
        self.mean_pooling_double = nn.Linear(opt.embed_dim * 2, opt.hidden_dim)
        self.mean_pooling_single = nn.Linear(opt.embed_dim, opt.hidden_dim)
        self.dense = nn.Linear(opt.hidden_dim, opt.polarities_dim)
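The 6-to-3 sentiment_pool only makes sense after the two branch heads are concatenated. A hedged sketch of that step (the forward pass is not part of the snippet; the feature names are illustrative and polarities_dim is assumed to be 3):

    # each branch is pooled and classified separately; the two 3-way logit
    # vectors are then concatenated and reduced back to 3 classes
    out_local = self.dense1(self.bert_pooler1(local_features))    # (batch, 3)
    out_global = self.dense2(self.bert_pooler2(global_features))  # (batch, 3)
    logits = self.sentiment_pool(torch.cat((out_local, out_global), dim=-1))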
Example #2
 def __init__(self, bert, opt):
     super(BERT_BASE, self).__init__()
     self.bert = bert
     self.opt = opt
     self.dropout = nn.Dropout(opt.dropout)
     self.pooler = BertPooler(bert.config)
     self.dense = nn.Linear(opt.embed_dim, opt.polarities_dim)
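This is the simplest head in the collection: pool the [CLS] position, apply dropout, classify. A minimal usage sketch (not from the original source), assuming a transformers-style BertModel whose hidden size equals opt.embed_dim:

    def forward(self, inputs):
        text_ids = inputs[0]
        hidden_states = self.bert(text_ids)[0]             # (batch, seq_len, embed_dim)
        pooled = self.dropout(self.pooler(hidden_states))  # BertPooler reads the [CLS] position
        return self.dense(pooled)                          # (batch, polarities_dim)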
Example #3
 def __init__(self, config):
     super(PatientLevelBert, self).__init__()
     self.config = config
     self.embeddings = PatientLevelEmbedding(config)
     self.encoder = BertEncoder(config)
     self.pooler = BertPooler(config)
     self.apply(self.init_weights)
Example #4
    def __init__(self, model, opt):
        super(LCFS_BERT, self).__init__()
        if 'bert' in opt.pretrained_bert_name:
            hidden = model.config.hidden_size
        elif 'xlnet' in opt.pretrained_bert_name:
            hidden = model.config.d_model
        else:
            # without this guard, `hidden` would be undefined below
            raise ValueError(
                "pretrained_bert_name must contain 'bert' or 'xlnet'")
        self.hidden = hidden
        sa_config = BertConfig(hidden_size=self.hidden, output_attentions=True)

        self.bert_spc = model
        self.bert_g_sa = SelfAttention(sa_config, opt)
        self.bert_g_pct = PointwiseFeedForward(self.hidden)

        self.opt = opt
        self.bert_local = copy.deepcopy(model)
        self.bert_local_sa = SelfAttention(sa_config, opt)
        self.bert_local_pct = PointwiseFeedForward(self.hidden)

        self.dropout = nn.Dropout(opt.dropout)
        self.bert_sa = SelfAttention(sa_config, opt)

        # self.mean_pooling_double = nn.Linear(hidden * 2, hidden)
        self.mean_pooling_double = PointwiseFeedForward(
            hidden * 2, hidden, hidden)
        self.bert_pooler = BertPooler(sa_config)
        self.dense = nn.Linear(hidden, opt.polarities_dim)
Example #5
    def __init__(self, config, args):
        super(BERT_MAG_model, self).__init__(config)
        self.newly_added_config = args
        if args.output_mode == 'regression':
            self.num_labels = 1
        else:
            # classification: fall back to the label count carried by the config,
            # so self.num_labels is defined before the classifier below
            self.num_labels = self.config.num_labels
        # BertEncoder
        self.output_attentions = self.config.output_attentions
        self.output_hidden_states = self.config.output_hidden_states
        self.layer = nn.ModuleList([
            BertLayer(self.config)
            for _ in range(self.config.num_hidden_layers)
        ])
        self.MAG = MAG(self.config, args)
        self.MAG_all = nn.ModuleList([
            MAG(self.config, args)
            for _ in range(self.config.num_hidden_layers)
        ])

        # MultimodalBertModel
        self.embeddings = BertEmbeddings(self.config)
        self.pooler = BertPooler(self.config)

        # MultimodalBertForSequenceClassification
        self.classifier = nn.Linear(self.config.hidden_size, self.num_labels)
        self.dropout = nn.Dropout(args.hidden_dropout_prob)
        self.apply(self.init_weights)
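How the per-layer MAG modules are applied is not shown here. A hedged sketch of the usual interleaving (an assumption following the MAG-BERT design; visual and acoustic stand for the aligned nonverbal feature tensors):

    hidden = self.embeddings(input_ids)
    for bert_layer, mag in zip(self.layer, self.MAG_all):
        hidden = bert_layer(hidden, extended_attention_mask)[0]
        hidden = mag(hidden, visual, acoustic)  # inject nonverbal features per layer
    logits = self.classifier(self.dropout(self.pooler(hidden)))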
Example #6
    def __init__(self, bert, opt):
        super(LCF_BERT_HAT, self).__init__()

        self.bert_spc = bert
        self.opt = opt
        # self.bert_local = copy.deepcopy(bert)  # Uncomment the line to use dual Bert
        self.bert_local = bert  # Default to use single Bert and reduce memory requirements
        self.dropout = nn.Dropout(opt.dropout)

        self.bert_SA = SelfAttention(bert.config, opt)
        self.linear_double = nn.Linear(opt.bert_dim * 2, opt.bert_dim)
        #self.linear_single = nn.Linear(opt.bert_dim, opt.bert_dim)
        self.bert_pooler = BertPooler(bert.config)
        #self.dense =

        self.last = torch.nn.ModuleList()
        for t in range(self.opt.taskcla):
            self.last.append(nn.Linear(opt.bert_dim, opt.polarities_dim))

        self.gate = torch.nn.Sigmoid()

        self.ec1 = torch.nn.Embedding(self.opt.taskcla, opt.bert_dim)
        self.ec2 = torch.nn.Embedding(self.opt.taskcla,
                                      bert.config.hidden_size)
        self.ec3 = torch.nn.Embedding(self.opt.taskcla,
                                      bert.config.hidden_size)

        self.hat = True
Example #7
    def __init__(self, config):
        super(BertModelDialog, self).__init__(config)

        self.embeddings = BertEmbeddingsDialog(config)
        self.encoder = BertEncoder(config)
        self.pooler = BertPooler(config)
        self.init_weights()
Example #8
    def __init__(self, bert, opt, is_global_configuration=False):
        super(LCF_BERT, self).__init__()

        self.bert_spc = bert
        self.opt = opt
        self.is_global_configuration = is_global_configuration

        # self.bert_local = copy.deepcopy(bert)  # Uncomment the line to use dual Bert
        self.bert_local = (
            bert  # Default to use single Bert and reduce memory requirements
        )
        self.dropout = nn.Dropout(self.opt.dropout)
        self.bert_SA = SelfAttention(bert.config, self.opt)
        self.linear_double = nn.Linear(self.opt.bert_dim * 2,
                                       self.opt.bert_dim)
        self.linear_single = nn.Linear(self.opt.bert_dim, self.opt.bert_dim)
        self.bert_pooler = BertPooler(bert.config)

        if self.opt.use_global_context:
            self.gc_bert = bert
            self.linear_gc_merger = nn.Linear(
                self.opt.bert_dim * self.opt.global_context_seqs_per_doc,
                self.opt.bert_dim,
            )
            self.linear_lcf_and_gc_merger = nn.Linear(self.opt.bert_dim * 2,
                                                      self.opt.bert_dim)

        if not self.is_global_configuration:
            self.dense = nn.Linear(self.opt.bert_dim, self.opt.polarities_dim)
Example #9
    def __init__(self, config):
        super(BertModel, self).__init__(config)

        self.embeddings = BertEmbeddings(config)
        self.encoder = BertEncoder(config)
        self.pooler = BertPooler(config)

        self.apply(self.init_weights)
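These three modules form the canonical BertModel pipeline: token/position/segment embeddings, the transformer encoder, then [CLS] pooling. A simplified sketch of the standard wiring (attention masking omitted):

    def forward(self, input_ids):
        embedding_output = self.embeddings(input_ids)
        sequence_output = self.encoder(embedding_output)[0]   # (batch, seq_len, hidden)
        pooled_output = self.pooler(sequence_output)          # (batch, hidden)
        return sequence_output, pooled_output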
Example #10
 def __init__(self, bert, opt):
     super(LCF_BERT, self).__init__()
     self.bert4global = bert
     self.bert4local = copy.deepcopy(bert) if opt.use_dual_bert else self.bert4global
     self.opt = opt
     self.dropout = nn.Dropout(opt.dropout)
     self.bert_SA = SelfAttention(bert.config, opt)
     self.linear2 = nn.Linear(opt.embed_dim * 2, opt.embed_dim)
     self.linear3 = nn.Linear(opt.embed_dim * 3, opt.embed_dim)
     self.bert_pooler = BertPooler(bert.config)
     self.dense = nn.Linear(opt.embed_dim, opt.polarities_dim)
Example #11
 def __init__(self, bert, opt):
     super(LCF_BERT, self).__init__()
     self.bert_global_focus = bert
     self.bert_local_focus = bert if opt.use_single_bert else copy.deepcopy(bert)  # share one BERT unless a dual setup is requested
     self.opt = opt
     self.dropout = nn.Dropout(opt.dropout)
     self.bert_SA = SelfAttention(bert.config, opt)
     self.linear_double_cdm_or_cdw = nn.Linear(opt.bert_dim * 2, opt.bert_dim)
     self.linear_triple_lcf_global = nn.Linear(opt.bert_dim * 3, opt.bert_dim)
     self.bert_pooler = BertPooler(bert.config)
     self.dense = nn.Linear(opt.bert_dim, opt.polarities_dim)
Example #12
    def __init__(self, bert, opt):
        super(LCF_BERT, self).__init__()

        self.bert_spc = bert
        self.opt = opt
        # self.bert_local = copy.deepcopy(bert)  # Uncomment the line to use dual Bert
        self.bert_local = bert  # Default to use single Bert and reduce memory requirements
        self.dropout = nn.Dropout(opt.dropout)
        self.bert_SA = SelfAttention(bert.config, opt)
        self.linear_double = nn.Linear(opt.bert_dim * 2, opt.bert_dim)
        self.linear_single = nn.Linear(opt.bert_dim, opt.bert_dim)
        self.bert_pooler = BertPooler(bert.config)
        self.dense = nn.Linear(opt.bert_dim, opt.polarities_dim)
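The snippet stops before the fusion step, so for orientation here is a hedged sketch of how linear_double and the pooler are typically combined (assuming the local branch has already been CDM/CDW-weighted):

    # local_out, global_out: (batch, seq_len, bert_dim)
    cat_out = torch.cat((local_out, global_out), dim=-1)  # (batch, seq_len, 2*bert_dim)
    cat_out = self.linear_double(cat_out)                 # project back to bert_dim
    cat_out = self.bert_SA(cat_out)                       # let the fused features interact
    logits = self.dense(self.bert_pooler(cat_out))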
Example #13
 def __init__(self, bert_base_model, args):
     super(LCF_ATEPC_Chinese, self).__init__(config=bert_base_model.config)
     config = bert_base_model.config
     self.bert = bert_base_model
     self.args = args
     if self.args.local_context_focus is not None:
         self.local_bert = copy.deepcopy(self.bert)
     self.pooler = BertPooler(config)
     self.dense = torch.nn.Linear(768, 2)
     self.bert_global_focus = self.bert
     self.dropout = nn.Dropout(self.args.dropout)
     self.bert_SA = SelfAttention(config, args)
     self.linear_double = nn.Linear(768 * 2, 768)
     self.linear_triple = nn.Linear(768 * 3, 768)
Example #14
 def __init__(self, bert_base_model):
     super(LCF_ATEPC, self).__init__(config=bert_base_model.config)
     config = bert_base_model.config
     self.device = torch.device(
         "cuda:1" if torch.cuda.is_available() else 'cpu')
     self.bert = bert_base_model
     # do not init lcf layer if BERT-SPC or BERT-BASE specified
     self.local_bert = copy.deepcopy(self.bert)
     self.pooler = BertPooler(config)
     self.dense = torch.nn.Linear(768, 2)
     self.bert_global_focus = self.bert
     self.bert_SA = SelfAttention(config)
     self.dropout = torch.nn.Dropout(0.0)
     self.linear_double = nn.Linear(768 * 2, 768)
     self.linear_triple = nn.Linear(768 * 3, 768)
Example #15
 def __init__(self, embedding_matrix, opt):
     super(LCF_GLOVE, self).__init__()
     self.config = BertConfig.from_json_file("utils/bert_config.json")
     self.opt = opt
     self.embed = nn.Embedding.from_pretrained(torch.tensor(embedding_matrix, dtype=torch.float))
     self.mha_global = SelfAttention(self.config, opt)
     self.mha_local = SelfAttention(self.config, opt)
     self.ffn_global = PositionwiseFeedForward(self.opt.embed_dim, dropout=self.opt.dropout)
     self.ffn_local = PositionwiseFeedForward(self.opt.embed_dim, dropout=self.opt.dropout)
     self.mha_local_SA = SelfAttention(self.config, opt)
     self.mha_global_SA = SelfAttention(self.config, opt)
     self.pool = BertPooler(self.config)
     self.dropout = nn.Dropout(opt.dropout)
     self.linear = nn.Linear(opt.embed_dim * 2, opt.embed_dim)
     self.dense = nn.Linear(opt.embed_dim, opt.polarities_dim)
Example #16
 def __init__(self, bert, opt):
     super(LCA_BERT, self).__init__()
     self.bert4global = bert
     self.bert4local = copy.deepcopy(bert) if opt.use_dual_bert else self.bert4global
     self.lc_embed = nn.Embedding(opt.max_seq_len, opt.embed_dim)
     self.opt = opt
     self.dropout = nn.Dropout(opt.dropout)
     self.bert_SA_L = SelfAttention(bert.config, opt)
     self.bert_SA_G = SelfAttention(bert.config, opt)
     self.linear = nn.Linear(opt.embed_dim * 2, opt.embed_dim)
     self.pool = BertPooler(bert.config)
     if self.opt.dataset in {'camera', 'notebook', 'car', 'phone'}:
         self.dense = nn.Linear(opt.embed_dim, 2)
     else:
         self.dense = nn.Linear(opt.embed_dim, 3)
     self.classifier = nn.Linear(opt.embed_dim, 2)
Example #17
 def __init__(self, embedding_matrix, opt):
     super(LCA_GLOVE, self).__init__()
     # Only a few of the parameters in config.json are needed, such as hidden_size and num_attention_heads
     self.config = BertConfig.from_json_file("utils/bert_config.json")
     self.opt = opt
     self.embed = nn.Embedding.from_pretrained(
         torch.tensor(embedding_matrix, dtype=torch.float))
     self.lc_embed = nn.Embedding(2, opt.embed_dim)
     self.global_encoder1 = SelfAttention(self.config, opt)
     self.local_encoder1 = SelfAttention(self.config, opt)
     self.local_encoder2 = SelfAttention(self.config, opt)
     self.mha = SelfAttention(self.config, opt)
     self.pool = BertPooler(self.config)
     self.dropout = nn.Dropout(opt.dropout)
     self.linear = nn.Linear(opt.embed_dim * 2, opt.embed_dim)
     self.dense = nn.Linear(opt.embed_dim, opt.polarities_dim)
     self.classifier = nn.Linear(opt.embed_dim, 2)
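As the comment above notes, only a handful of config fields are actually consumed by SelfAttention and BertPooler. A hedged equivalent built directly in Python (the field values are illustrative; hidden_size must match the GloVe embedding width and be divisible by num_attention_heads):

    from transformers import BertConfig

    config = BertConfig(
        hidden_size=300,        # opt.embed_dim for 300-d GloVe vectors
        num_attention_heads=6,  # 300 / 6 = 50-d heads
        attention_probs_dropout_prob=0.1,
    )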
Example #18
 def __init__(self, bert_base_model, args):
     super(LCF_ATEPC, self).__init__(config=bert_base_model.config)
     config = bert_base_model.config
     self.bert = bert_base_model
     self.args = args
     # do not init lcf layer if BERT-SPC or BERT-BASE specified
     if self.args.local_context_focus is not None:
         self.local_bert = copy.deepcopy(self.bert)
     self.pooler = BertPooler(config)
     if args.dataset in {'camera', 'car', 'phone', 'notebook'}:
         self.dense = torch.nn.Linear(768, 2)
     else:
         self.dense = torch.nn.Linear(768, 3)
     self.bert_global_focus = self.bert
     self.dropout = nn.Dropout(self.args.dropout)
     self.bert_SA = SelfAttention(config, args)
     self.linear_double = nn.Linear(768 * 2, 768)
     self.linear_triple = nn.Linear(768 * 3, 768)
Example #19
 def __init__(self, bert_or_embedding_matrix, opt):
     super(BERT_ALBERT_GCN, self).__init__()
     self.opt = opt
     bert = bert_or_embedding_matrix
     self.context_bert = bert
     self.squeeze_embedding = SqueezeEmbedding()
     if not opt.no_gnn:
         self.gcn = GNN_RELU_DIFF(opt.bert_dim,
                                  opt.bert_dim,
                                  step=opt.gnn_step,
                                  drop=0.0)
         self.encoder_fn = nn.Linear(opt.bert_dim, opt.bert_dim)
         self.norm = LayerNorm(opt.bert_dim)
     if not opt.no_sa:
         self.bert_self_att = SelfAttention(bert.config, opt)
     if self.opt.pool_tp == 'bert_pool':
         self.bert_pooler = BertPooler(bert.config)
     elif self.opt.pool_tp == 'max_pool':
         self.pool_fc = nn.Linear(opt.bert_dim, opt.bert_dim)
     self.fc = nn.Linear(opt.bert_dim, 3)
Example #20
 def __init__(
         self,
         config='a class with num_attention_heads, hidden_size, attention_probs_dropout_prob, output_attentions',
         bert_dir='/mnt/sda1/bert/uncased_L-12_H-768_A-12',
         drop=0.0,
         L=80,
         bert_dim=768,
         num_class=3,
         SDR=5,
         tp='cdm'):
     super(MY_BERT_LCF, self).__init__()
     self.text_bert = BertModel.from_pretrained(bert_dir)
     self.aspect_bert = copy.deepcopy(self.text_bert)
     self.aspect_self_att = SelfAttention(config, L)
     self.bert_pooler = BertPooler(config)
     if tp == 'cdm':
         self.reduce2_bert_dim = nn.Linear(bert_dim * 2, bert_dim)
     self.reduce2_num_class_linear = nn.Linear(bert_dim, num_class)
     self.drop = drop
     self.L = L
     self.SDR = SDR
     self.tp = tp
Example #21
 def __init__(self, bert_base_model, args):
     super(MAAEAC, self).__init__(config=bert_base_model.config)
     config = bert_base_model.config
     self.bert_for_global_context = bert_base_model
     self.args = args
     # do not init lcf layer if BERT-SPC or BERT-BASE specified
     # if self.args.local_context_focus in {'cdw', 'cdm', 'fusion'}:
     if not self.args.use_unique_bert:
         self.bert_for_local_context = copy.deepcopy(
             self.bert_for_global_context)
     else:
         self.bert_for_local_context = self.bert_for_global_context
     self.pooler = BertPooler(config)
     if args.dataset in {'camera', 'car', 'phone', 'notebook'}:
         self.dense = torch.nn.Linear(768, 2)
     else:
         self.dense = torch.nn.Linear(768, 3)
     self.bert_global_focus = self.bert_for_global_context
     self.dropout = nn.Dropout(self.args.dropout)
     self.SA1 = SelfAttention(config, args)
     self.SA2 = SelfAttention(config, args)
     self.linear_double = nn.Linear(768 * 2, 768)
     self.linear_triple = nn.Linear(768 * 3, 768)
     self.hidden = config.hidden_size
Example #22
 def __init__(self, config, L):
     super(SelfAttention, self).__init__()
     self.SA = BertSelfAttention(config)
     self.L = L
     self.pooler = BertPooler(config)
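The wrapper's forward pass is not included in the snippet. A hedged sketch of the usual pattern around BertSelfAttention, which expects an additive attention mask (all zeros means attend everywhere); returning the pooled output is an assumption based on the pooler attribute:

    def forward(self, inputs):
        zero_mask = torch.zeros(inputs.size(0), 1, 1, self.L,
                                dtype=torch.float32, device=inputs.device)
        sa_out = self.SA(inputs, zero_mask)[0]  # attended hidden states
        return self.pooler(sa_out)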