from time import time

from keras import optimizers
from keras.callbacks import EarlyStopping, ModelCheckpoint, TensorBoard

# CNN is assumed to be the model-builder class from a local module (not shown here).


def train_cnn_model(emb_layer, x_train, y_train, x_val, y_val, opt):
    model = CNN(embedding_layer=emb_layer,
                num_words=opt.n_words,
                embedding_dim=opt.embed_dim,
                filter_sizes=opt.cnn_filter_shapes,  # kernel widths
                feature_maps=opt.filter_sizes,       # feature maps per kernel width
                max_seq_length=opt.sent_len,
                dropout_rate=opt.dropout_ratio,
                hidden_units=200,
                nb_classes=2).build_model()

    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizers.Adam(),
                  metrics=['accuracy'])

    model.summary()

    early_stopping = EarlyStopping(monitor='val_loss', patience=2)
    history = model.fit(x_train,
                        y_train,
                        epochs=opt.cnn_epoch,
                        batch_size=opt.batch_size,
                        verbose=1,
                        validation_data=(x_val, y_val),
                        callbacks=[early_stopping])

    with open("CNN_train_history.txt", "w") as f:
        print(history.history, file=f)
    return model
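

# Usage sketch for train_cnn_model. Everything below is illustrative: the
# hyperparameter values and the argparse-style Namespace standing in for
# `opt` are assumptions, not taken from the original project.
def _example_train_cnn_model(emb_layer, x_train, y_train, x_val, y_val):
    from argparse import Namespace
    opt = Namespace(n_words=20000,                 # vocabulary size
                    embed_dim=300,                 # embedding dimension
                    cnn_filter_shapes=[3, 4, 5],   # kernel widths
                    filter_sizes=[100, 100, 100],  # feature maps per width
                    sent_len=200,                  # padded sequence length
                    dropout_ratio=0.5,
                    cnn_epoch=10,
                    batch_size=64)
    return train_cnn_model(emb_layer, x_train, y_train, x_val, y_val, opt)

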
def train_baseline_cnn(emb_layer, x_train, y_train, x_val, y_val, opt):
    model = CNN(embedding_layer=emb_layer,
                num_words=opt.transfer_n_words,
                embedding_dim=opt.baseline_embed_dim,
                filter_sizes=opt.cnn_filter_shapes,
                feature_maps=opt.filter_sizes,
                max_seq_length=opt.baseline_sent_len,
                dropout_rate=opt.baseline_drop_out_ratio,
                hidden_units=200,
                nb_classes=2).build_model()

    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizers.Adam(),
                  metrics=['accuracy'])

    model.summary()
    tb_call_back = TensorBoard(log_dir=f'{opt.tbpath}/baseline_cnn_{time()}',
                               histogram_freq=1,
                               write_graph=True,
                               write_images=True)

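    # NOTE: 'val_acc' below matches the metric name logged by older Keras
    # releases; newer versions log it as 'val_accuracy'.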
    checkpoint = ModelCheckpoint("baseline_cnn.h5",
                                 monitor='val_acc',
                                 verbose=1,
                                 save_best_only=True,
                                 save_weights_only=False,
                                 mode='auto',
                                 period=1)
    early_stopping = EarlyStopping(monitor='val_loss', patience=2)
    history = model.fit(x_train,
                        y_train,
                        epochs=opt.baseline_epochs,
                        batch_size=opt.baseline_batchsize,
                        verbose=1,
                        validation_data=(x_val, y_val),
                        callbacks=[early_stopping, tb_call_back, checkpoint])

    with open("CNN_train_baseline_history.txt", "w") as f:
        print(history.history, file=f)
    return model
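

# Sketch: reloading the best baseline checkpoint for evaluation. The file
# name matches the ModelCheckpoint above; x_test / y_test are assumed to be
# preprocessed the same way as the training data.
def _load_best_baseline_cnn(x_test, y_test):
    from keras.models import load_model
    model = load_model("baseline_cnn.h5")
    loss, acc = model.evaluate(x_test, y_test, verbose=0)
    return model, loss, acc

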
Example no. 3

    # The head of this snippet is truncated in the source. The constructor
    # call down to `kernel_sizes` is an assumed reconstruction following the
    # pattern of the functions above; the constant names are illustrative.
    # An `import keras` at module level is also assumed.
    model = CNN(
        embedding_layer=emb_layer,
        num_words=NB_WORDS,
        embedding_dim=EMBEDDING_DIM,
        kernel_sizes=KERNEL_SIZES,
        feature_maps=FEATURE_MAPS,
        max_seq_length=MAX_SEQ_LENGTH,
        use_char=USE_CHAR,
        char_max_length=CHAR_MAX_LENGTH,
        alphabet_size=ALPHABET_SIZE,
        char_kernel_sizes=CHAR_KERNEL_SIZES,
        char_feature_maps=CHAR_FEATURE_MAPS,
        dropout_rate=DROPOUT_RATE,
        hidden_units=HIDDEN_UNITS,
        nb_classes=NB_CLASSES
    ).build_model()

    model.compile(
        loss='categorical_crossentropy',
        optimizer=keras.optimizers.Adam(),
        metrics=['accuracy']
    )

    # model.summary()

    history = model.fit(
        X_train, y_train,
        epochs=NB_EPOCHS,
        batch_size=BATCH_SIZE,
        validation_data=(X_val, y_val),
        callbacks=[
            keras.callbacks.ModelCheckpoint(
                'model-%i.h5' % (i + 1),  # `i` is the run/fold index from the enclosing loop
                monitor='val_loss',
                verbose=1,
                save_best_only=True,
                mode='min'
            ),
            # keras.callbacks.TensorBoard(log_dir='./logs/temp', write_graph=True)
        ]
    )
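

# Context sketch: the index `i` in the checkpoint file name suggests the
# snippet above runs inside a cross-validation loop. This wrapper is an
# assumption for illustration; `build_and_fit` stands for the (truncated)
# training function above, and X / y are assumed to be NumPy arrays.
from sklearn.model_selection import KFold

def run_folds(X, y, build_and_fit, n_splits=5):
    histories = []
    kfold = KFold(n_splits=n_splits, shuffle=True, random_state=42)
    for i, (tr, va) in enumerate(kfold.split(X)):
        histories.append(build_and_fit(X[tr], y[tr], X[va], y[va], i))
    return histories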