Code example #1
    def __init__(self, hparams: Hparams, **kwargs):
        super(BertForRoleNer, self).__init__(hparams, **kwargs)
        pretrained_hparams = hparams.pretrained
        model_hparams = hparams.model_attributes
        self.num_labels = hparams.dataset.outputs[0].num
        self.initializer_range = model_hparams.initializer_range

        self.bert = BaseLayer.by_name(
            pretrained_hparams.norm_name)(pretrained_hparams)
        self.dropout = tf.keras.layers.Dropout(
            model_hparams.hidden_dropout_prob)
        # self.bilstm = Bilstm(model_hparams.hidden_size, model_hparams.hidden_dropout_prob, name="bilstm")
        self.project = tf.keras.layers.Dense(
            model_hparams.hidden_size,
            kernel_initializer=get_initializer(
                model_hparams.initializer_range),
            name="project")
        self.ner_output = tf.keras.layers.Dense(
            self.num_labels,
            kernel_initializer=get_initializer(
                model_hparams.initializer_range),
            name='ner_output')
        self.crf = CRFLayer(self.num_labels,
                            self.initializer_range,
                            name="crf_output")
Code example #2
    def __init__(self, hparams: Hparams, **kwargs):
        super(BertForQA, self).__init__(hparams, **kwargs)
        pretrained_hparams = hparams.pretrained
        model_hparams = hparams.model_attributes
        self.start_n_top = model_hparams.start_n_top
        self.seq_len = hparams.dataset.tokenizer.max_len

        assert pretrained_hparams.norm_name not in ["xlnet_chinese"], \
            f"{pretrained_hparams.norm_name} is not supported."
        self.encode_pretrained = BaseLayer.by_name(
            pretrained_hparams.norm_name)(pretrained_hparams)

        self.qa_layer = BaseLayer.by_name(model_hparams.qa_layer_name)(
            model_hparams.hidden_size, self.seq_len, self.start_n_top,
            self.start_n_top, get_initializer(model_hparams.initializer_range),
            model_hparams.hidden_dropout_prob)
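
Only the constructor is shown; note that start_n_top is passed to the QA layer twice, presumably for the start- and end-position top-k sizes. As a hypothetical illustration of what a start_n_top-style hyperparameter usually controls, the sketch below picks the top-k candidate start positions from per-token start logits; the names, mask, and shapes are invented for the example.

import tensorflow as tf

batch, seq_len, start_n_top = 2, 64, 5

# Stand-in for per-token start-position logits produced by a QA head.
start_logits = tf.random.normal([batch, seq_len])

# Mask out padded positions before the softmax (all-ones mask here for brevity).
mask = tf.ones([batch, seq_len])
start_logits = start_logits + (1.0 - mask) * -1e4

start_log_probs = tf.nn.log_softmax(start_logits, axis=-1)
top_log_probs, top_index = tf.math.top_k(start_log_probs, k=start_n_top)
print(top_index.shape)  # (2, 5): start_n_top candidate start positions per example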
Code example #3
    def __init__(self, hparams: Hparams, **kwargs):
        super(BertDgcnnForNer, self).__init__(hparams, **kwargs)
        pretrained_hparams = hparams.pretrained
        model_hparams = hparams.model_attributes
        self.num_labels = hparams.dataset.outputs[0].num
        self.pos_num = hparams.dataset.inputs[-1].num
        self.initializer_range = model_hparams.initializer_range

        self.pos_embeddings = tf.keras.layers.Embedding(
            self.pos_num,
            32,
            embeddings_initializer=get_initializer(model_hparams.initializer_range),
            name="pos_embedding"
        )

        self.bert = BaseLayer.by_name(pretrained_hparams.norm_name)(pretrained_hparams)
        self.dropout = tf.keras.layers.Dropout(
            model_hparams.hidden_dropout_prob
        )
        self.project = tf.keras.layers.Dense(
            model_hparams.hidden_size,
            kernel_initializer=get_initializer(model_hparams.initializer_range),
            name="project"
        )
        self.fusion_project = tf.keras.layers.Dense(
            model_hparams.hidden_size,
            kernel_initializer=get_initializer(model_hparams.initializer_range),
            name="fusion_project"
        )
        self.dgcnn_encoder = DgcnnBlock(model_hparams.hidden_size, [3, 3, 3], [1, 2, 4], name="trigger_dgcnn_encoder")
        self.ner_output = tf.keras.layers.Dense(self.num_labels,
                                                kernel_initializer=get_initializer(model_hparams.initializer_range),
                                                name='ner_output')
        self.crf = CRFLayer(self.num_labels, self.initializer_range, label_mask=hparams.label_mask, name="crf_output")
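
DgcnnBlock is defined elsewhere in the codebase, so its internals are not visible here. A common reading of its signature (kernel sizes [3, 3, 3], dilation rates [1, 2, 4]) is a stack of dilated, gated 1-D convolutions with residual connections; the sketch below shows that pattern with plain tf.keras.layers.Conv1D purely as an assumption about what the block might do, not as its actual implementation.

import tensorflow as tf

hidden_size = 256
kernel_sizes, dilation_rates = [3, 3, 3], [1, 2, 4]

# Stand-in for the fused token features: [batch, seq_len, hidden_size].
x = tf.random.normal([2, 32, hidden_size])

for k, d in zip(kernel_sizes, dilation_rates):
    # Gated dilated convolution: half of the output channels are values,
    # the other half a sigmoid gate; the residual connection preserves the input.
    conv = tf.keras.layers.Conv1D(
        2 * hidden_size, k, dilation_rate=d, padding="same")(x)
    values, gate = tf.split(conv, 2, axis=-1)
    x = x + values * tf.sigmoid(gate)

print(x.shape)  # (2, 32, 256)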
Code example #4
    def __init__(self, hparams: Hparams, **kwargs):
        super(BertForTextGeneration, self).__init__(hparams, **kwargs)
        pretrained_hparams = hparams.pretrained

        assert pretrained_hparams.norm_name in ['gpt2'], \
            f"{pretrained_hparams.norm_name} is not supported."
        self.transformer = BaseLayer.by_name(
            pretrained_hparams.norm_name)(pretrained_hparams)
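
This constructor only instantiates the GPT-2 transformer; the decoding logic lives elsewhere. As a minimal, hypothetical sketch of the greedy generation loop such a model is typically plugged into, with a random logits_fn standing in for the real transformer call:

import tensorflow as tf

vocab_size, max_new_tokens = 100, 5

def logits_fn(token_ids):
    # Placeholder for the GPT-2 transformer: maps [batch, seq_len] token ids
    # to next-token logits [batch, vocab_size]. The real model is called here.
    return tf.random.normal([tf.shape(token_ids)[0], vocab_size])

token_ids = tf.constant([[1, 7, 42]], dtype=tf.int32)  # prompt (made-up ids)
for _ in range(max_new_tokens):
    next_id = tf.argmax(logits_fn(token_ids), axis=-1, output_type=tf.int32)
    token_ids = tf.concat([token_ids, next_id[:, None]], axis=-1)
print(token_ids.numpy())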
Code example #5
    def __init__(self, hparams: Hparams, **kwargs):
        super(BertForRelationExtract, self).__init__(hparams, **kwargs)
        pretrained_hparams = hparams.pretrained
        model_hparams = hparams.model_attributes
        self.hidden_size = model_hparams.hidden_size
        self.num_labels = hparams.dataset.outputs[0].num
        self.initializer_range = model_hparams.initializer_range

        self.bert = BaseLayer.by_name(
            pretrained_hparams.norm_name)(pretrained_hparams)
        self.dropout = tf.keras.layers.Dropout(
            model_hparams.hidden_dropout_prob)
        self.project1 = tf.keras.layers.Dense(
            model_hparams.hidden_size,
            kernel_initializer=get_initializer(
                model_hparams.initializer_range),
            name="project1")
        self.project2 = tf.keras.layers.Dense(
            model_hparams.hidden_size,
            kernel_initializer=get_initializer(
                model_hparams.initializer_range),
            name="project2")
        self.project3 = tf.keras.layers.Dense(
            model_hparams.hidden_size,
            kernel_initializer=get_initializer(
                model_hparams.initializer_range),
            name="project3")
        self.project4 = tf.keras.layers.Dense(
            model_hparams.hidden_size,
            kernel_initializer=get_initializer(
                model_hparams.initializer_range),
            name="project4")
        self.project5 = tf.keras.layers.Dense(
            model_hparams.hidden_size,
            kernel_initializer=get_initializer(
                model_hparams.initializer_range),
            name="project5")
        self.e1_attention = MultiHeadAttention(model_hparams,
                                               name="entity1_attention_fusion")
        self.e2_attention = MultiHeadAttention(model_hparams,
                                               name="entity2_attention_fusion")
        self.attention = MultiHeadAttention(model_hparams,
                                            name="attention_fusion")
        self.classifier = tf.keras.layers.Dense(
            self.num_labels,
            kernel_initializer=get_initializer(
                model_hparams.initializer_range),
            name="classifier")
Code example #6
    def __init__(self, hparams: Hparams, **kwargs):
        super(BertForSeqClassification, self).__init__(hparams, **kwargs)
        self.num_labels = hparams.dataset.outputs[0].num
        pretrained_hparams = hparams.pretrained
        model_hparams = hparams.model_attributes

        # self.bert = Bert(pretrained_hparams, name='bert')
        assert pretrained_hparams.norm_name in ['bert', 'albert', 'albert_brightmart', "ernie", "xlnet", "electra"], \
            f"{pretrained_hparams.norm_name} is not supported."
        self.encoder = BaseLayer.by_name(
            pretrained_hparams.norm_name)(pretrained_hparams)
        self.dropout = tf.keras.layers.Dropout(
            model_hparams.hidden_dropout_prob)
        self.project = tf.keras.layers.Dense(
            model_hparams.hidden_size,
            kernel_initializer=get_initializer(
                model_hparams.initializer_range),
            name="project")
        self.classifier = tf.keras.layers.Dense(
            self.num_labels,
            kernel_initializer=get_initializer(
                model_hparams.initializer_range),
            name="classifier")