Example #1
import tensorflow as tf

# qc_emb is a project-local module; its EmbeddingWeights initializer loads
# the pretrained embedding matrix into the Embedding layers below.
import qc_emb


class ACNN(tf.keras.Model):

    def __init__(self, emb_dim, num_words, sentence_length, atten_sen_len,
                 class_dim, embedding_matrix, dropout_rate):

        input_s = tf.keras.layers.Input(shape=(sentence_length, ),
                                        dtype=tf.int32)
        activations = tf.keras.layers.Embedding(
            num_words,
            embeddings_initializer=qc_emb.EmbeddingWeights(embedding_matrix),
            output_dim=emb_dim)(input_s)

        input_p = tf.keras.layers.Input(shape=(atten_sen_len, ),
                                        dtype=tf.int32)
        attention = tf.keras.layers.Embedding(
            num_words,
            embeddings_initializer=qc_emb.EmbeddingWeights(embedding_matrix),
            output_dim=emb_dim)(input_p)
        # Dot-product attention between the two embedded inputs
        # (query: sentence tokens, value: attention-sentence tokens).
        layer = tf.keras.layers.Attention()([activations, attention])
        layer = tf.keras.layers.Conv1D(emb_dim, 3, activation="relu")(layer)
        layer = tf.keras.layers.MaxPool1D(3, 1)(layer)
        layer = tf.keras.layers.Dropout(dropout_rate)(layer)
        layer = tf.keras.layers.Flatten()(layer)
        output = tf.keras.layers.Dense(
            class_dim,
            activation="softmax",
            kernel_regularizer=tf.keras.regularizers.l2(0.0))(layer)
        super(ACNN, self).__init__(inputs=[input_s, input_p], outputs=[output])
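
A minimal usage sketch for the two-input model above. The vocabulary size,
sequence lengths, class count, and the randomly filled embedding_matrix are
hypothetical stand-ins; in the original project the matrix would come from
pretrained vectors via qc_emb.EmbeddingWeights.

import numpy as np

num_words, emb_dim = 10000, 100
embedding_matrix = np.random.uniform(-0.05, 0.05, (num_words, emb_dim))
model = ACNN(emb_dim=emb_dim, num_words=num_words, sentence_length=50,
             atten_sen_len=50, class_dim=6,
             embedding_matrix=embedding_matrix, dropout_rate=0.5)
model.compile(optimizer="adam", loss="categorical_crossentropy",
              metrics=["accuracy"])
model.summary()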
Example #2
from tensorflow.keras import backend as K


class A_BLSTM(tf.keras.Model):

    def __init__(self, emb_dim, num_words, sentence_length, class_dim,
                 embedding_matrix, dropout_rate):
        units = emb_dim
        input_layer = tf.keras.layers.Input(shape=(sentence_length, ),
                                            dtype=tf.int32)
        embedding = tf.keras.layers.Embedding(
            num_words,
            embeddings_initializer=qc_emb.EmbeddingWeights(embedding_matrix),
            output_dim=emb_dim,
            mask_zero=False)(input_layer)
        activations = tf.keras.layers.Bidirectional(
            tf.keras.layers.LSTM(units,
                                 return_sequences=True,
                                 dropout=dropout_rate))(embedding)

        # Score each timestep with a single tanh unit, normalize the scores
        # over the time axis, then broadcast the weights across features.
        attention = tf.keras.layers.Dense(1, activation="tanh")(activations)
        attention = tf.keras.layers.Flatten()(attention)
        attention = tf.keras.layers.Activation("softmax")(attention)
        attention = tf.keras.layers.RepeatVector(units * 2)(attention)
        attention = tf.keras.layers.Permute([2, 1])(attention)
        # Weighted sum over time collapses the sequence to a context vector.
        qc = tf.keras.layers.Multiply()([activations, attention])
        qc = tf.keras.layers.Lambda(lambda xin: K.sum(xin, axis=-2),
                                    output_shape=(units * 2, ))(qc)

        output = tf.keras.layers.Dense(
            class_dim,
            activation="softmax",
            kernel_regularizer=tf.keras.regularizers.l2(0.0))(qc)
        super(A_BLSTM, self).__init__(inputs=[input_layer], outputs=output)
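
The Dense/Flatten/RepeatVector/Permute/Multiply/Lambda chain above is an
attention-pooling step. A minimal sketch of the same computation with explicit
tensor shapes, assuming hypothetical batch and timestep sizes:

import tensorflow as tf

batch, timesteps, units = 2, 7, 64
activations = tf.random.normal((batch, timesteps, units * 2))
scores = tf.keras.layers.Dense(1, activation="tanh")(activations)  # (2, 7, 1)
weights = tf.nn.softmax(tf.squeeze(scores, -1), axis=-1)           # (2, 7)
# Broadcast the per-timestep weights across features, then sum over time.
context = tf.reduce_sum(activations * weights[:, :, None], axis=1)  # (2, 128)
print(context.shape)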
Example #3

class CNN_BGRU(tf.keras.Model):

    def __init__(self, emb_dim, num_words, sentence_length, class_dim,
                 embedding_matrix, dropout_rate):
        input_layer = tf.keras.layers.Input(shape=(sentence_length, ),
                                            dtype=tf.int32)
        layer = tf.keras.layers.Embedding(
            num_words,
            embeddings_initializer=qc_emb.EmbeddingWeights(embedding_matrix),
            output_dim=emb_dim)(input_layer)
        # Three parallel convolutional branches with kernel sizes 3, 4 and 5
        # capture n-gram features at different granularities.
        conv1D_layer_3 = tf.keras.layers.Conv1D(emb_dim, 3,
                                                activation="relu")(layer)
        conv1D_layer_3 = tf.keras.layers.MaxPool1D(3, 1)(conv1D_layer_3)
        conv1D_layer_4 = tf.keras.layers.Conv1D(emb_dim, 4,
                                                activation="relu")(layer)
        conv1D_layer_4 = tf.keras.layers.MaxPool1D(4, 1)(conv1D_layer_4)
        conv1D_layer_5 = tf.keras.layers.Conv1D(emb_dim, 5,
                                                activation="relu")(layer)
        conv1D_layer_5 = tf.keras.layers.MaxPool1D(5, 1)(conv1D_layer_5)
        # The branches are concatenated along the time axis and summarized
        # by a bidirectional GRU.
        layer = tf.keras.layers.concatenate(
            [conv1D_layer_3, conv1D_layer_4, conv1D_layer_5], axis=1)
        layer = tf.keras.layers.Bidirectional(tf.keras.layers.GRU(emb_dim *
                                                                  2))(layer)
        layer = tf.keras.layers.Dropout(dropout_rate)(layer)
        output = tf.keras.layers.Dense(
            class_dim,
            activation="softmax",
            kernel_regularizer=tf.keras.regularizers.l2(0.0))(layer)
        super(CNN_BGRU, self).__init__(inputs=[input_layer], outputs=output)
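
Because the three branches use different kernel and pool sizes, they produce
sequences of different lengths, which is why the concatenation runs along
axis=1 (time) rather than the feature axis. A short shape walk-through,
assuming a hypothetical sentence_length of 50 and emb_dim of 100:

import tensorflow as tf

x = tf.random.normal((1, 50, 100))  # (batch, sentence_length, emb_dim)
b3 = tf.keras.layers.MaxPool1D(3, 1)(
    tf.keras.layers.Conv1D(100, 3, activation="relu")(x))
b4 = tf.keras.layers.MaxPool1D(4, 1)(
    tf.keras.layers.Conv1D(100, 4, activation="relu")(x))
b5 = tf.keras.layers.MaxPool1D(5, 1)(
    tf.keras.layers.Conv1D(100, 5, activation="relu")(x))
print(b3.shape, b4.shape, b5.shape)  # (1, 46, 100) (1, 44, 100) (1, 42, 100)
merged = tf.keras.layers.concatenate([b3, b4, b5], axis=1)
print(merged.shape)  # (1, 132, 100)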
Example #4

class BGRU(tf.keras.Model):

    def __init__(self, emb_dim, num_words, sentence_length, class_dim,
                 embedding_matrix, dropout_rate):
        input_layer = tf.keras.layers.Input(shape=(sentence_length, ),
                                            dtype=tf.int32)
        layer = tf.keras.layers.Embedding(
            num_words,
            embeddings_initializer=qc_emb.EmbeddingWeights(embedding_matrix),
            output_dim=emb_dim)(input_layer)
        # The bidirectional GRU returns only its final state, so the output
        # is already 2-D and needs no Flatten before the classifier.
        layer = tf.keras.layers.Bidirectional(
            tf.keras.layers.GRU(emb_dim * 2))(layer)
        layer = tf.keras.layers.Dropout(dropout_rate)(layer)
        output = tf.keras.layers.Dense(
            class_dim,
            activation="softmax",
            kernel_regularizer=tf.keras.regularizers.l2(0.0))(layer)
        super(BGRU, self).__init__(inputs=[input_layer], outputs=output)
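
A minimal end-to-end sketch for the simplest of the four models. The sizes,
random inputs, and integer labels are hypothetical stand-ins; the original
project would supply real tokenized sentences and a pretrained
embedding_matrix.

import numpy as np

num_words, emb_dim = 10000, 100
embedding_matrix = np.random.uniform(-0.05, 0.05, (num_words, emb_dim))
model = BGRU(emb_dim=emb_dim, num_words=num_words, sentence_length=50,
             class_dim=6, embedding_matrix=embedding_matrix, dropout_rate=0.5)
model.compile(optimizer="adam", loss="sparse_categorical_crossentropy",
              metrics=["accuracy"])
x = np.random.randint(1, num_words, size=(32, 50))
y = np.random.randint(0, 6, size=(32, ))
model.fit(x, y, epochs=1, batch_size=8)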