# Example #1 (score: 0)
def get_model():
    """Build a two-level hierarchical attention network for 19-way classification.

    A word-level sentence encoder (frozen Embedding -> BiGRU(128) ->
    TimeDistributed Dense(256) -> attentive pooling) is applied to every
    sentence of a review via TimeDistributed; a second BiGRU/attention
    stack then pools the resulting sentence vectors into a review vector
    fed to a small MLP head with softmax output.

    Relies on module-level globals: SENT_LENGTH, MAX_SENTS, nb_words,
    EMBEDDING_DIM, embedding_matrix, and the project layer
    AttentivePoolingLayer.

    Returns:
        A compiled Keras Model mapping (MAX_SENTS, SENT_LENGTH) integer
        word-id tensors to 19-class probability vectors.
    """
    # --- word-level sentence encoder ---
    word_ids = Input(shape=(SENT_LENGTH, ))
    word_vecs = Embedding(nb_words,
                          EMBEDDING_DIM,
                          input_length=SENT_LENGTH,
                          weights=[embedding_matrix],
                          trainable=False)(word_ids)  # frozen pretrained vectors
    h = Bidirectional(CuDNNGRU(128, return_sequences=True))(word_vecs)
    h = TimeDistributed(Dense(256))(h)
    sent_vec = AttentivePoolingLayer()(h)
    sentence_encoder = Model(word_ids, sent_vec)

    # --- sentence-level review encoder: encode each sentence, then pool ---
    review_ids = Input(shape=(MAX_SENTS, SENT_LENGTH))
    sent_seq = TimeDistributed(sentence_encoder)(review_ids)
    g = Bidirectional(CuDNNGRU(128, return_sequences=True))(sent_seq)
    g = TimeDistributed(Dense(256))(g)
    review_vec = AttentivePoolingLayer()(g)

    # --- classification head ---
    x = Dense(256, activation='relu')(review_vec)
    x = Dense(128, activation='relu')(x)
    x = BatchNormalization()(x)
    probs = Dense(19, activation="softmax")(x)

    model = Model(inputs=review_ids, outputs=probs)
    model.compile(optimizer="adam",
                  loss="categorical_crossentropy",
                  metrics=['accuracy'])
    model.summary()
    return model
# Example #2 (score: 0)
def HAN(MAX_SENT_LENGTH, MAX_SENTS, max_features, embedding_dim, vecs):
    """Build a Hierarchical Attention Network for binary classification.

    Words in each sentence are embedded (trainable, initialized from
    `vecs`), encoded by a BiLSTM(150) and attentively pooled into a
    sentence vector; the sentence vectors of a review are encoded by a
    BiLSTM(50) and pooled again before a 2-way softmax.

    Args:
        MAX_SENT_LENGTH: number of word ids per sentence.
        MAX_SENTS: number of sentences per review.
        max_features: vocabulary size for the embedding.
        embedding_dim: embedding vector width.
        vecs: pretrained embedding weight matrix.

    Returns:
        An uncompiled Keras Model mapping (MAX_SENTS, MAX_SENT_LENGTH)
        int32 tensors to 2-class probabilities.
    """
    word_embedding = Embedding(
        max_features,
        embedding_dim,
        weights=[vecs],
        input_length=MAX_SENT_LENGTH,
        trainable=True,
    )

    # Word-level encoder: one sentence -> one fixed-size vector.
    word_ids = Input(shape=(MAX_SENT_LENGTH, ), dtype='int32')
    word_seq = word_embedding(word_ids)
    # embedded_sequences =Masking()(embedded_sequences)
    sent_states = Bidirectional(LSTM(150, return_sequences=True))(word_seq)
    sent_vec = AttentivePoolingLayer()(sent_states)
    sentEncoder = Model(word_ids, sent_vec)

    # Sentence-level encoder: encode every sentence, then pool the review.
    review_ids = Input(shape=(MAX_SENTS, MAX_SENT_LENGTH), dtype='int32')
    sent_seq = TimeDistributed(sentEncoder)(review_ids)
    # review_encoder =Masking()(review_encoder)
    review_states = Bidirectional(LSTM(50, return_sequences=True))(sent_seq)
    review_vec = AttentivePoolingLayer()(review_states)

    preds = Dense(2, activation='softmax')(review_vec)
    return Model(review_ids, preds)
# Example #3 (score: 0)
def get_model():
    """Build a flat (non-hierarchical) BiGRU + attention classifier.

    A frozen pretrained embedding feeds a BiGRU(256) whose timestep
    outputs are attentively pooled, then classified by a small MLP with
    a 19-way softmax.

    Relies on module-level globals: MAX_SEQUENCE_LENGTH, nb_words,
    EMBEDDING_DIM, embedding_matrix, and the project layer
    AttentivePoolingLayer.

    Returns:
        A compiled Keras Model over (MAX_SEQUENCE_LENGTH,) token-id inputs.
    """
    token_ids = Input(shape=(MAX_SEQUENCE_LENGTH,))
    token_vecs = Embedding(nb_words,
                           EMBEDDING_DIM,
                           input_length=MAX_SEQUENCE_LENGTH,
                           weights=[embedding_matrix],
                           trainable=False)(token_ids)
    states = Bidirectional(CuDNNGRU(256, return_sequences=True))(token_vecs)
    pooled = AttentivePoolingLayer()(states)

    # Classification head.
    x = Dense(256, activation='relu')(pooled)
    x = Dense(128, activation='relu')(x)
    x = BatchNormalization()(x)
    probs = Dense(19, activation="softmax")(x)

    model = Model(inputs=token_ids, outputs=probs)
    model.compile(optimizer="adam",
                  loss="categorical_crossentropy",
                  metrics=['accuracy'])
    model.summary()
    return model
# Example #4 (score: 0)
def get_attention_cv_model(MAX_SEQUENCE_LENGTH, nb_words, EMBEDDING_DIM,
                           embedding_matrix):
    """Build an embedding + attentive-pooling binary classifier.

    Trainable embeddings (initialized from `embedding_matrix`) are
    attentively pooled straight into a vector — no recurrent layer —
    then classified through a small MLP with a 2-way softmax.

    Args:
        MAX_SEQUENCE_LENGTH: input sequence length in token ids.
        nb_words: vocabulary size.
        EMBEDDING_DIM: embedding vector width.
        embedding_matrix: pretrained embedding weights.

    Returns:
        A compiled Keras Model (Adam, categorical cross-entropy).
    """
    inp = Input(shape=(MAX_SEQUENCE_LENGTH, ))
    # BUGFIX: the original passed dropout=0.1 to Embedding. That kwarg
    # existed only in Keras 1.x and was removed in Keras 2 (which this
    # file targets — CuDNNGRU elsewhere is Keras 2-only), so the call
    # raised TypeError. If embedding dropout is wanted, apply
    # SpatialDropout1D(0.1) to `emb` instead.
    emb = Embedding(nb_words,
                    EMBEDDING_DIM,
                    input_length=MAX_SEQUENCE_LENGTH,
                    weights=[embedding_matrix],
                    trainable=True)(inp)
    att = AttentivePoolingLayer()(emb)

    fc1 = Dense(256, activation='relu')(att)
    fc2 = Dense(64, activation='relu')(fc1)
    fc2 = BatchNormalization()(fc2)
    output = Dense(2, activation="softmax")(fc2)
    model = Model(inputs=inp, outputs=output)
    # `lr` is the legacy alias (renamed `learning_rate` in TF2 Keras);
    # kept for compatibility with the Keras version this file targets.
    adam = Adam(lr=1e-3)
    model.compile(optimizer=adam,
                  loss="categorical_crossentropy",
                  metrics=['accuracy'])
    model.summary()
    return model