Example #1
import tensorflow as tf

# NOTE: this example was truncated in the source; everything above `return model`
# is an assumed reconstruction of a typical Embedding -> GRU -> softmax classifier.
# load_pickle and get_test_dataset are project helpers (a sketch of load_pickle
# follows this example).
def build_gru_model(max_len, num_words, num_classes):
    model = tf.keras.Sequential([
        tf.keras.layers.Embedding(num_words, 64),
        tf.keras.layers.GRU(64),
        tf.keras.layers.Dense(num_classes, activation='softmax'),
    ])
    return model


if __name__ == "__main__":
    """
    Train/test GRU:
    ---
    Test loss: 0.6381104083000859
    Test accuracy: 0.7306
    """
    num_classes = 3
    epochs = 3

    vocab_set = load_pickle('./preprocess/vocab_set.pkl')
    # vocab_set = {}
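    # get_test_dataset builds the train/test pipelines and also returns a params
    # dict whose max_len/vocab_size/batch_size entries are read back below.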
    train_data, test_data, params, vocab_set = get_test_dataset(
        epochs=epochs, vocab_set=vocab_set, max_len=30, cache_dir='cache/base')
    max_len = params.get('max_len')
    num_words = params.get('vocab_size')
    batch_size = params.get('batch_size')

    model = build_gru_model(max_len, num_words, num_classes)

    model.compile(loss='sparse_categorical_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])
    model.summary()

    model.fit(train_data,
              epochs=epochs,
              validation_data=test_data)
    score = model.evaluate(test_data, verbose=0)
    print('Test loss:', score[0])
    print('Test accuracy:', score[1])
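
Both examples call small pickle helpers (load_pickle above, save_pickle in Example #2) whose definitions are not shown in this section. A minimal sketch of what they presumably look like:

import pickle

def load_pickle(path):
    # Assumed helper: restore a cached object (here, a vocabulary set) from disk.
    with open(path, 'rb') as f:
        return pickle.load(f)

def save_pickle(obj, path):
    # Assumed helper: write an object to disk so later runs can reuse it.
    with open(path, 'wb') as f:
        pickle.dump(obj, f)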
Example #2
if __name__ == "__main__":
    '''
    Train/test char-level convolution:
    ---
    Test loss: 0.6190159400052662
    Test accuracy: 0.6076
    '''
    # constants
    num_classes = 3
    epochs = 3

    vocab_set = load_pickle('./preprocess/vocab_set_char.pkl')
    # vocab_set = {}
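    # char=True presumably switches get_test_dataset to a character-level
    # vocabulary (matching vocab_set_char.pkl loaded above).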
    train_data, test_data, params, vocab_set = get_test_dataset(
        epochs=epochs,
        vocab_set=vocab_set,
        max_len=62,
        char=True,
        cache_dir='cache/char')

    # save_pickle(vocab_set, './preprocess/vocab_set_char.pkl')

    # Assumed meanings: mingram/maxgram presumably bound the convolution kernel
    # widths and filter_size is the number of filters per width.
    hyperparams = {
        'maxlen': params.get('max_len'),
        'num_words': params.get('vocab_size'),
        'num_classes': num_classes,
        'activation': "softmax",
        'mingram': 2,
        'maxgram': 5,
        'emb_dim': 32,
        'filter_size': 32,
        'num_dense': 2,