Example 1
    def build_model_bilstm_single(self):
        # choose the recurrent cell: LSTM or GRU, CuDNN-accelerated when requested
        if args.use_lstm:
            if args.use_cudnn_cell:
                layer_cell = CuDNNLSTM
            else:
                layer_cell = LSTM
        else:
            if args.use_cudnn_cell:
                layer_cell = CuDNNGRU
            else:
                layer_cell = GRU
        # bert embedding
        bert_inputs, bert_output = KerasBertEmbedding().bert_encode()
        # Bi-LSTM
        x = Bidirectional(
            layer_cell(units=args.units,
                       return_sequences=args.return_sequences,
                       kernel_regularizer=regularizers.l2(args.l2 * 0.1),
                       recurrent_regularizer=regularizers.l2(
                           args.l2)))(bert_output)
        # note: Keras Dropout takes the fraction to *drop*, despite the keep_prob name
        x = Dropout(args.keep_prob)(x)
        x = Flatten()(x)
        # final softmax classifier
        dense_layer = Dense(args.label, activation=args.activation)(x)
        output_layers = [dense_layer]
        self.model = Model(bert_inputs, output_layers)
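These five builders are methods excerpted from a larger graph class, so the imports and the `args` hyperparameter namespace never appear in the snippets. A minimal sketch of what they assume, using standalone Keras 2.x module paths; every value in `args` below is an illustrative guess, not the repository's configuration:

from argparse import Namespace

from keras import regularizers
from keras.models import Model
from keras.layers import (LSTM, GRU, CuDNNLSTM, CuDNNGRU, Bidirectional,
                          Dense, Dropout, SpatialDropout1D, Flatten, Conv1D,
                          GlobalMaxPooling1D, GlobalAveragePooling1D,
                          GlobalMaxPool1D, GlobalAvgPool1D,
                          Concatenate, concatenate)
# KerasBertEmbedding and AttentionWeightedAverage are the repository's own
# helpers (a keras-bert wrapper and a custom attention layer); they are not
# reproduced here.

args = Namespace(use_lstm=True,         # hypothetical settings; the real
                 use_cudnn_cell=False,  # project reads them from its config
                 units=256,
                 return_sequences=True,
                 l2=0.01,
                 keep_prob=0.5,
                 label=2,
                 activation='softmax')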
Example 2
    def build_model_r_cnn(self):
        #########    RCNN    #########
        # bert embedding
        bert_inputs, bert_output = KerasBertEmbedding().bert_encode()
        # rcnn
        bert_output_embed = SpatialDropout1D(rate=self.keep_prob)(bert_output)
        if args.use_lstm:
            if args.use_cudnn_cell:
                layer_cell = CuDNNLSTM
            else:
                layer_cell = LSTM
        else:
            if args.use_cudnn_cell:
                layer_cell = CuDNNGRU
            else:
                layer_cell = GRU

        x = Bidirectional(
            layer_cell(units=args.units,
                       return_sequences=args.return_sequences,
                       kernel_regularizer=regularizers.l2(args.l2 * 0.1),
                       recurrent_regularizer=regularizers.l2(
                           args.l2)))(bert_output_embed)
        x = Dropout(args.keep_prob)(x)
        x = Conv1D(filters=int(self.embedding_dim / 2),
                   kernel_size=2,
                   padding='valid',
                   kernel_initializer='normal',
                   activation='relu')(x)
        x = GlobalMaxPooling1D()(x)
        x = Dropout(args.keep_prob)(x)
        # final softmax classifier
        dense_layer = Dense(self.label, activation=self.activation)(x)
        output_layers = [dense_layer]
        self.model = Model(bert_inputs, output_layers)
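The RCNN head only works when the Bi-RNN returns full sequences (args.return_sequences must be True), and with padding='valid' the kernel_size=2 convolution shortens the sequence by one step before GlobalMaxPooling1D collapses time entirely. A quick shape probe under assumed sizes (batch 8, 128 timesteps, 312-dim embeddings, all hypothetical):

import numpy as np
from keras.models import Model
from keras.layers import Input, Conv1D, GlobalMaxPooling1D

inp = Input(shape=(128, 312))
c = Conv1D(filters=156, kernel_size=2, padding='valid',
           activation='relu')(inp)          # -> (None, 127, 156)
p = GlobalMaxPooling1D()(c)                 # -> (None, 156)
probe = Model(inp, p)
print(probe.predict(np.zeros((8, 128, 312))).shape)  # (8, 156)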
Example 3
    def build_model_bilstm_layers(self):
        if args.use_lstm:
            if args.use_cudnn_cell:
                layer_cell = CuDNNLSTM
            else:
                layer_cell = LSTM
        else:
            if args.use_cudnn_cell:
                layer_cell = CuDNNGRU
            else:
                layer_cell = GRU
        # bert embedding
        bert_inputs, bert_output = KerasBertEmbedding().bert_encode()
        # (optionally keep only the [CLS] vector instead of the full sequence:
        #  bert_output = Lambda(lambda x: x[:, 0:1, :])(bert_output))
        # Bi-LSTM
        x = Bidirectional(
            layer_cell(units=args.units,
                       return_sequences=args.return_sequences,
                       kernel_regularizer=regularizers.l2(args.l2 * 0.1),
                       recurrent_regularizer=regularizers.l2(
                           args.l2)))(bert_output)
        # blstm_layer = TimeDistributed(Dropout(args.keep_prob))(blstm_layer)  # unusable here, apparently the input is wrong: dims < 3
        x = Dropout(args.keep_prob)(x)

        x = Bidirectional(
            layer_cell(units=args.units,
                       return_sequences=args.return_sequences,
                       kernel_regularizer=regularizers.l2(args.l2 * 0.1),
                       recurrent_regularizer=regularizers.l2(args.l2)))(x)
        x = Dropout(args.keep_prob)(x)
        x = Bidirectional(
            layer_cell(units=args.units,
                       return_sequences=args.return_sequences,
                       kernel_regularizer=regularizers.l2(args.l2 * 0.1),
                       recurrent_regularizer=regularizers.l2(args.l2)))(x)
        x = Dropout(args.keep_prob)(x)

        # concatenate average pooling and max pooling
        avg_pool = GlobalAvgPool1D()(x)
        max_pool = GlobalMaxPool1D()(x)
        # avg_pool and max_pool each have shape (batch, 2 * units)
        concat = concatenate([avg_pool, max_pool])
        x = Dense(int(args.units / 4), activation="relu")(concat)
        x = Dropout(args.keep_prob)(x)

        # final softmax classifier
        dense_layer = Dense(args.label, activation=args.activation)(x)
        output_layers = [dense_layer]
        self.model = Model(bert_inputs, output_layers)
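All of the builders store the network in self.model instead of returning it, so the calling code presumably looks something like the sketch below; Graph, the optimizer, and the loss are hypothetical stand-ins for whatever the repository actually uses:

graph = Graph()                      # hypothetical class holding the build_* methods above
graph.build_model_bilstm_layers()    # any builder leaves the network in graph.model
graph.model.compile(optimizer='adam',
                    loss='categorical_crossentropy',
                    metrics=['accuracy'])
graph.model.summary()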
Example 4
    def build_model_text_cnn(self):
        #########    text-cnn    #########
        # bert embedding
        bert_inputs, bert_output = KerasBertEmbedding().bert_encode()
        # text cnn
        bert_output_embed = SpatialDropout1D(rate=self.keep_prob)(bert_output)
        concat_out = []
        for index, filter_size in enumerate(self.filters):
            x = Conv1D(name='TextCNN_Conv1D_{}'.format(index),
                       filters=int(self.embedding_dim / 2),
                       kernel_size=filter_size,
                       padding='valid',
                       kernel_initializer='normal',
                       activation='relu')(bert_output_embed)
            x = GlobalMaxPooling1D(
                name='TextCNN_MaxPool1D_{}'.format(index))(x)
            concat_out.append(x)
        x = Concatenate(axis=1)(concat_out)
        x = Dropout(self.keep_prob)(x)

        # final softmax classifier
        dense_layer = Dense(self.label, activation=self.activation)(x)
        output_layers = [dense_layer]
        self.model = Model(bert_inputs, output_layers)
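Each pooled branch is a (batch, filters) vector, so Concatenate(axis=1) stacks the branches along the feature axis: with len(self.filters) kernel sizes and embedding_dim / 2 filters each, the dense layer sees len(self.filters) * embedding_dim / 2 inputs. A quick check under assumed sizes (three branches of 156 features, both numbers hypothetical):

from keras.models import Model
from keras.layers import Input, Concatenate

branches = [Input(shape=(156,)) for _ in range(3)]  # three pooled TextCNN branches
merged = Concatenate(axis=1)(branches)              # -> (None, 468)
print(Model(branches, merged).output_shape)         # (None, 468)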
Example 5
    def build_model_avt_cnn(self):
        #########    avt-cnn (avg + max + attention pooling)    #########
        # bert embedding
        bert_inputs, bert_output = KerasBertEmbedding().bert_encode()
        # text cnn
        bert_output_embed = SpatialDropout1D(rate=self.keep_prob)(bert_output)
        concat_x = []
        concat_y = []
        concat_z = []
        for index, filter_size in enumerate(self.filters):
            conv = Conv1D(name='TextCNN_Conv1D_{}'.format(index),
                          filters=int(self.embedding_dim / 2),
                          kernel_size=filter_size,
                          padding='valid',
                          kernel_initializer='normal',
                          activation='relu')(bert_output_embed)
            x = GlobalMaxPooling1D(
                name='TextCNN_MaxPooling1D_{}'.format(index))(conv)
            y = GlobalAveragePooling1D(
                name='TextCNN_AveragePooling1D_{}'.format(index))(conv)
            z = AttentionWeightedAverage(
                name='TextCNN_Attention_{}'.format(index))(conv)
            concat_x.append(x)
            concat_y.append(y)
            concat_z.append(z)

        merge_x = Concatenate(axis=1)(concat_x)
        merge_y = Concatenate(axis=1)(concat_y)
        merge_z = Concatenate(axis=1)(concat_z)
        merge_xyz = Concatenate(axis=1)([merge_x, merge_y, merge_z])
        x = Dropout(self.keep_prob)(merge_xyz)

        # final softmax classifier
        dense_layer = Dense(self.label, activation=self.activation)(x)
        output_layers = [dense_layer]
        self.model = Model(bert_inputs, output_layers)
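AttentionWeightedAverage is not a stock Keras layer; it is the repository's own custom layer (the pattern comes from DeepMoji-style attention pooling). A minimal sketch of such a layer, ignoring masking; the repository's actual definition may differ:

from keras import backend as K
from keras.layers import Layer  # on older Keras 2.x: from keras.engine.topology import Layer

class AttentionWeightedAverage(Layer):
    """Weighted average over timesteps, with weights from a softmax
    attention parameterized by a single projection vector."""

    def build(self, input_shape):
        # one attention weight per feature dimension: (features, 1)
        self.W = self.add_weight(name='{}_W'.format(self.name),
                                 shape=(input_shape[2], 1),
                                 initializer='uniform')
        super(AttentionWeightedAverage, self).build(input_shape)

    def call(self, x):
        # x: (batch, time, features) -> logits: (batch, time)
        logits = K.squeeze(K.dot(x, self.W), axis=-1)
        # numerically stable softmax over the time axis
        ai = K.exp(logits - K.max(logits, axis=-1, keepdims=True))
        att = ai / (K.sum(ai, axis=1, keepdims=True) + K.epsilon())
        # weighted sum over time -> (batch, features)
        return K.sum(x * K.expand_dims(att), axis=1)

    def compute_output_shape(self, input_shape):
        return (input_shape[0], input_shape[2])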