Example 1
    def __init__(self, word2ix, predicate_num, model_name="roberta"):
        super(BertRelationExtrac, self).__init__()

        self.predicate_num = predicate_num
        config = ""
        if model_name == "roberta":
            from bert_seq2seq.model.roberta_model import BertModel, BertConfig, BertPredictionHeadTransform, BertLayerNorm
            config = BertConfig(len(word2ix))
            self.bert = BertModel(config)
            self.layer_norm = BertLayerNorm(config.hidden_size)
            self.layer_norm_cond = BertLayerNorm(config.hidden_size,
                                                 conditional=True)
        elif model_name == "bert":
            from bert_seq2seq.model.bert_model import BertConfig, BertModel, BertPredictionHeadTransform, BertLayerNorm
            config = BertConfig(len(word2ix))
            self.bert = BertModel(config)
            self.layer_norm = BertLayerNorm(config.hidden_size)
            self.layer_norm_cond = BertLayerNorm(config.hidden_size,
                                                 conditional=True)
        else:
            raise Exception("model_name_err")

        self.subject_pred = nn.Linear(config.hidden_size, 2)
        self.activation = nn.Sigmoid()
        self.object_pred = nn.Linear(config.hidden_size,
                                     2 * self.predicate_num)
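Taken together, subject_pred and object_pred implement a pointer-style tagging scheme: two logits per token for the subject span (start/end) and 2 * predicate_num logits per token for object spans, one start/end pair per predicate. Below is a shape-only sketch of how these heads might be applied to the encoder output; the sizes, the encoder_out stand-in, and the reshape are illustrative assumptions, not the library's actual forward code.

import torch
import torch.nn as nn

# Hypothetical stand-ins for the layers built in the __init__ above; sizes are illustrative.
hidden_size, predicate_num = 768, 49
subject_pred = nn.Linear(hidden_size, 2)
object_pred = nn.Linear(hidden_size, 2 * predicate_num)
activation = nn.Sigmoid()

# encoder_out stands in for the per-token hidden states returned by self.bert(...).
batch, seq_len = 2, 16
encoder_out = torch.randn(batch, seq_len, hidden_size)

subject_probs = activation(subject_pred(encoder_out))               # (2, 16, 2): subject start/end per token
object_probs = activation(object_pred(encoder_out))                 # (2, 16, 2 * predicate_num)
object_probs = object_probs.view(batch, seq_len, predicate_num, 2)  # start/end per token, per predicate
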
Example 2
    def __init__(self, word2ix, model_name="roberta"):
        super().__init__()
        self.config = ""
        self.word2ix = word2ix
        if model_name == "roberta":
            from bert_seq2seq.model.roberta_model import BertModel, BertConfig, BertLayerNorm, BertPredictionHeadTransform, BertLMPredictionHead
            self.config = BertConfig(len(self.word2ix))
            self.bert = BertModel(self.config)
            self.layer_norm = BertLayerNorm(self.config.hidden_size)
            self.layer_norm_cond = BertLayerNorm(self.config.hidden_size,
                                                 conditional=True)
            self.transform = BertPredictionHeadTransform(self.config)
            self.decoder = BertLMPredictionHead(
                self.config, self.bert.embeddings.word_embeddings.weight)
        elif model_name == "bert":
            from bert_seq2seq.model.bert_model import BertConfig, BertModel, BertLayerNorm, BertPredictionHeadTransform, BertLMPredictionHead
            self.config = BertConfig(len(self.word2ix))
            self.bert = BertModel(self.config)
            self.layer_norm = BertLayerNorm(self.config.hidden_size)
            self.layer_norm_cond = BertLayerNorm(self.config.hidden_size,
                                                 conditional=True)
            self.transform = BertPredictionHeadTransform(self.config)
            self.decoder = BertLMPredictionHead(
                self.config, self.bert.embeddings.word_embeddings.weight)
        else:
            raise Exception("model_name_err")

        self.device = torch.device("cpu")
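Note that BertLMPredictionHead is constructed with self.bert.embeddings.word_embeddings.weight, i.e. the output projection is tied to the input embedding matrix. The following is a minimal illustration of that weight-tying idea in plain PyTorch (hypothetical sizes; this is not the library's class).

import torch
import torch.nn as nn

vocab_size, hidden_size = 21128, 768          # illustrative sizes
embeddings = nn.Embedding(vocab_size, hidden_size)

# A tied LM head reuses the embedding matrix as its output projection,
# so hidden states of shape (..., hidden_size) map back to vocabulary logits.
decoder = nn.Linear(hidden_size, vocab_size, bias=False)
decoder.weight = embeddings.weight            # weight tying: one shared parameter

hidden = torch.randn(2, 16, hidden_size)
logits = decoder(hidden)                      # (2, 16, vocab_size)
assert decoder.weight.data_ptr() == embeddings.weight.data_ptr()
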
Example 3
    def __init__(self, word2ix, model_name="roberta", tokenizer=None):
        super(Seq2SeqModel, self).__init__()
        self.word2ix = word2ix
        if tokenizer is None:
            self.tokenizer = Tokenizer(word2ix)
        else:
            self.tokenizer = tokenizer
        config = ""
        if model_name == "roberta":
            from bert_seq2seq.model.roberta_model import BertModel, BertConfig, BertLMPredictionHead
            config = BertConfig(len(word2ix))
            self.bert = BertModel(config)
            self.decoder = BertLMPredictionHead(
                config, self.bert.embeddings.word_embeddings.weight)
        elif model_name == "bert":
            from bert_seq2seq.model.bert_model import BertConfig, BertModel, BertLMPredictionHead
            config = BertConfig(len(word2ix))
            self.bert = BertModel(config)
            self.decoder = BertLMPredictionHead(
                config, self.bert.embeddings.word_embeddings.weight)
        else:
            raise Exception("model_name_err")

        self.hidden_dim = config.hidden_size
        self.vocab_size = len(word2ix)
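A hypothetical instantiation sketch follows. The import location of load_chinese_base_vocab and the vocab file path are assumptions inferred from Examples 5 and 7, not verified against the library.

# Hypothetical usage sketch; import path and vocab file path are assumptions.
from bert_seq2seq.tokenizer import load_chinese_base_vocab

word2ix = load_chinese_base_vocab("./state_dict/vocab.txt")  # illustrative path
model = Seq2SeqModel(word2ix, model_name="roberta")
print(model.hidden_dim, model.vocab_size)                    # hidden size and vocabulary size set in __init__
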
Example 4
    def __init__(self, word2ix, target_size, model_name="roberta"):
        super(BertClsClassifier, self).__init__()
        self.word2ix = word2ix
        self.target_size = target_size
        config = ""
        if model_name == "roberta":
            from bert_seq2seq.model.roberta_model import BertModel, BertConfig
            config = BertConfig(len(self.word2ix))
            self.bert = BertModel(config)
        elif model_name == "bert":
            from bert_seq2seq.model.bert_model import BertConfig, BertModel
            config = BertConfig(len(self.word2ix))
            self.bert = BertModel(config)
        else:
            raise Exception("model_name_err")

        self.final_dense = nn.Linear(config.hidden_size, self.target_size)
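Here final_dense maps the sentence-level representation to target_size class logits. A shape-only sketch with hypothetical dimensions, using a random tensor as a stand-in for the pooled output of self.bert(...):

import torch
import torch.nn as nn

hidden_size, target_size = 768, 3           # illustrative sizes
final_dense = nn.Linear(hidden_size, target_size)

pooled = torch.randn(8, hidden_size)        # stand-in for the [CLS]/pooled output of the encoder
logits = final_dense(pooled)                # (8, target_size) class logits
loss = nn.CrossEntropyLoss()(logits, torch.randint(0, target_size, (8,)))
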
Example 5
    def __init__(self, vocab_path, target_size, model_name="roberta"):
        super(BertEncoder, self).__init__()
        self.word2ix = load_chinese_base_vocab(vocab_path)
        self.tokenizer = Tokenizer(self.word2ix)
        self.target_size = target_size
        config = ""
        if model_name == "roberta":
            from bert_seq2seq.model.roberta_model import BertModel, BertConfig
            config = BertConfig(len(self.word2ix))
            self.bert = BertModel(config)
        elif model_name == "bert":
            from bert_seq2seq.model.bert_model import BertConfig, BertModel
            config = BertConfig(len(self.word2ix))
            self.bert = BertModel(config)
        else:
            raise Exception("model_name_err")

        self.final_dense = nn.Linear(config.hidden_size, self.target_size)
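The only difference from Example 4 is that this constructor takes a vocab file path and builds its own Tokenizer, instead of receiving a prebuilt word2ix. A hypothetical instantiation (the path is illustrative):

encoder = BertEncoder("./state_dict/vocab.txt", target_size=2, model_name="roberta")
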
Example 6
    def __init__(self, word2ix, target_size, model_name="roberta"):
        super(BertSeqLabeling, self).__init__()
        self.target_size = target_size
        config = ""
        if model_name == "roberta":
            from bert_seq2seq.model.roberta_model import BertModel, BertConfig, BertPredictionHeadTransform
            config = BertConfig(len(word2ix))
            self.bert = BertModel(config)
            self.transform = BertPredictionHeadTransform(config)
        elif model_name == "bert":
            from bert_seq2seq.model.bert_model import BertConfig, BertModel, BertPredictionHeadTransform
            config = BertConfig(len(word2ix))
            self.bert = BertModel(config)
            self.transform = BertPredictionHeadTransform(config)
        else:
            raise Exception("model_name_err")

        self.final_dense = nn.Linear(config.hidden_size, self.target_size)
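For sequence labeling, the transform and the final linear layer are applied at every token position, yielding target_size logits per token. A shape-only sketch with hypothetical dimensions, using a plain Linear + GELU + LayerNorm stack as a stand-in for BertPredictionHeadTransform:

import torch
import torch.nn as nn

hidden_size, target_size = 768, 7            # e.g. 7 BIO tags; sizes are illustrative
transform = nn.Sequential(nn.Linear(hidden_size, hidden_size), nn.GELU(), nn.LayerNorm(hidden_size))
final_dense = nn.Linear(hidden_size, target_size)

encoder_out = torch.randn(2, 16, hidden_size)          # stand-in for per-token BERT hidden states
token_logits = final_dense(transform(encoder_out))     # (2, 16, target_size): one label distribution per token
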
Example 7
    def __init__(self, vocab_path, model_name="roberta"):
        super(Seq2SeqModel, self).__init__()
        self.word2ix = load_chinese_base_vocab(vocab_path)
        self.tokenizer = Tokenizer(self.word2ix)
        config = ""
        if model_name == "roberta":
            from bert_seq2seq.model.roberta_model import BertModel, BertConfig, BertLMPredictionHead
            config = BertConfig(len(self.word2ix))
            self.bert = BertModel(config)
            self.decoder = BertLMPredictionHead(config, self.bert.embeddings.word_embeddings.weight)
        elif model_name == "bert":
            from bert_seq2seq.model.bert_model import BertConfig, BertModel, BertLMPredictionHead
            config = BertConfig(len(self.word2ix))
            self.bert = BertModel(config)
            self.decoder = BertLMPredictionHead(config, self.bert.embeddings.word_embeddings.weight)
        else:
            raise Exception("model_name_err")

        self.hidden_dim = config.hidden_size
        self.vocab_size = config.vocab_size
Example 8
    def __init__(self, vocab_path, target_size, model_name="roberta"):
        super(BertSeqLabelingCRF, self).__init__()
        self.word2ix = load_chinese_base_vocab(vocab_path)
        self.target_size = target_size
        config = ""
        if model_name == "roberta":
            from bert_seq2seq.model.roberta_model import BertModel, BertConfig, BertPredictionHeadTransform
            config = BertConfig(len(self.word2ix))
            self.bert = BertModel(config)
            self.transform = BertPredictionHeadTransform(config)
        elif model_name == "bert":
            from bert_seq2seq.model.bert_model import BertConfig, BertModel, BertPredictionHeadTransform
            config = BertConfig(len(self.word2ix))
            self.bert = BertModel(config)
            self.transform = BertPredictionHeadTransform(config)
        else:
            raise Exception("model_name_err")

        self.final_dense = nn.Linear(config.hidden_size, self.target_size)
        self.crf_layer = CRFLayer(self.target_size)
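Relative to Example 6, the addition is the CRF layer on top of the per-token emission scores: rather than predicting each tag independently, the CRF scores whole tag sequences with learned transition weights, so decoding (typically Viterbi) can rule out invalid transitions such as an I- tag that does not follow a matching B- tag.
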
Example 9
    def __init__(self, word2ix, model_name="roberta"):
        super().__init__()
        self.config = ""
        self.word2ix = word2ix
        if model_name == "roberta":
            from bert_seq2seq.model.roberta_model import BertModel, BertConfig, BertLayerNorm
            self.config = BertConfig(len(self.word2ix))
            self.bert = BertModel(self.config)
            self.layer_norm = BertLayerNorm(self.config.hidden_size)
            self.layer_norm_cond = BertLayerNorm(self.config.hidden_size,
                                                 conditional=True)
        elif model_name == "bert":
            from bert_seq2seq.model.bert_model import BertConfig, BertModel, BertLayerNorm
            self.config = BertConfig(len(self.word2ix))
            self.bert = BertModel(self.config)
            self.layer_norm = BertLayerNorm(self.config.hidden_size)
            self.layer_norm_cond = BertLayerNorm(self.config.hidden_size,
                                                 conditional=True)
        else:
            raise Exception("model_name_err")

        self.device = torch.device("cpu")