# Example #1 (score: 0)
    # Right-context branch: an LSTM read in reverse (go_backwards=True) over
    # the right-side embeddings, mirroring a forward LSTM defined above this
    # visible chunk.
    # NOTE(review): "right_embeddig" looks like a typo for "right_embedding";
    # it must match the name defined earlier in the enclosing function —
    # confirm against the full source before renaming.
    backward = LSTM(128,
                    activation='relu',
                    recurrent_dropout=0.2,
                    dropout=0.1,
                    return_sequences=True,
                    go_backwards=True)(right_embeddig)
    # Concatenate [forward left-context, center word, backward right-context]
    # features along the feature axis (axis=2) for every timestep.
    concat = concatenate([forward, center_embedding, backward], axis=2)
    # Project each timestep to a 100-dim "semantic" vector (shared weights
    # across timesteps via TimeDistributed).
    semantic = TimeDistributed(Dense(100, activation='tanh'))(concat)
    # Max-pool over the time axis (axis=1) to collapse the sequence into a
    # single fixed-size 100-dim vector.
    pool_rnn = Lambda(lambda x: backend.max(x, axis=1),
                      output_shape=(100, ))(semantic)
    # Six independent sigmoid units -> multi-label classification head.
    output = Dense(6, activation='sigmoid')(pool_rnn)

    model = Model(inputs=[left_context, center, right_context], outputs=output)

    # Training callbacks: per-epoch ROC-AUC tracking, early stopping on
    # validation loss, best-weights-only checkpointing, TensorBoard logging.
    roc_auc_callback = RocCallback(
        [_left_context_train, _X_train, _right_context_train], _y_train,
        [_left_context_valid, _X_valid, _right_context_valid], _y_valid)
    early_stopping = EarlyStopping(monitor='val_loss', patience=5)
    # NOTE(review): the checkpoint path says "text_cnn_non_static_" although
    # this model is RNN-based; it would also overwrite the text-CNN variant's
    # checkpoint for the same fold — confirm this is intentional.
    model_save_path = './models/text_cnn_non_static_' + str(
        indice_fold) + '.h5'
    model_check_point = ModelCheckpoint(model_save_path,
                                        save_best_only=True,
                                        save_weights_only=True)
    tb_callback = TensorBoard('./logs', write_graph=True, write_images=True)
    # Binary cross-entropy per output unit (multi-label, not categorical).
    model.compile(loss='binary_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])
    hist = model.fit(
        [_left_context_train, _X_train, _right_context_train],
        _y_train,
        batch_size=BATCH_SIZE,
# Example #2 (score: 0)
                  EMBEDDING_SIZE,
                  input_length=MAX_LEN,
                  weights=[embedding_matrix],
                  trainable=True))
    # C-GRU front end: 1-D convolution over the embedded sequence, ReLU, then
    # temporal max-pooling that halves the sequence length.
    model.add(Convolution1D(256, 3, padding='same', strides=1))
    model.add(Activation('relu'))
    model.add(MaxPooling1D(pool_size=2))
    # Two stacked GRUs: the first returns the full sequence so the second can
    # consume it; the second returns only its final state.
    model.add(
        GRU(256, dropout=0.2, recurrent_dropout=0.1, return_sequences=True))
    model.add(GRU(256, dropout=0.2, recurrent_dropout=0.1))
    # model.add(Dense(128,activation='relu'))
    # model.add(Dropout(0.2))
    # model.add(BatchNormalization())
    # Six independent sigmoid units -> multi-label classification head.
    model.add(Dense(6, activation='sigmoid'))

    # Training callbacks: per-epoch ROC-AUC tracking, early stopping on
    # validation loss, best-weights-only checkpointing, TensorBoard logging.
    roc_auc_callback = RocCallback(_X_train, _y_train, _X_valid, _y_valid)
    early_stopping = EarlyStopping(monitor='val_loss', patience=5)
    model_save_path = './models/clstm_non_static_' + str(indice_fold) + '.h5'
    model_check_point = ModelCheckpoint(model_save_path,
                                        save_best_only=True,
                                        save_weights_only=True)
    tb_callback = TensorBoard('./logs', write_graph=True, write_images=True)
    # Binary cross-entropy per output unit (multi-label, not categorical).
    model.compile(loss='binary_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])
    hist = model.fit(_X_train,
                     _y_train,
                     batch_size=BATCH_SIZE,
                     epochs=num_epoch,
                     validation_data=(_X_valid, _y_valid),
                     class_weight=class_weight,
# Example #3 (score: 0)
    # Merge the three CNN branches (presumably parallel convolutions with
    # different kernel sizes, defined above this chunk — confirm), flatten to
    # a vector, and regularize with dropout.
    merge = concatenate([cnn1, cnn2, cnn3])
    merge = Flatten()(merge)
    merge = Dropout(0.5)(merge)

    # Append the hand-crafted static features, then normalize and lightly
    # perturb (GaussianNoise) before the dense classifier head.
    merge = concatenate([merge, _statics_input])
    merge = BatchNormalization()(merge)
    merge = GaussianNoise(0.1)(merge)
    merge = Dense(512, activation='relu')(merge)  # linear layer
    merge = Dropout(0.4)(merge)
    merge = BatchNormalization()(merge)

    # Six independent sigmoid units -> multi-label classification head.
    out = Dense(6, activation='sigmoid')(merge)
    model = Model(inputs=[_input, _statics_input], outputs=out)

    # Training callbacks: per-epoch ROC-AUC tracking, early stopping on
    # validation loss, best-weights-only checkpointing, TensorBoard logging.
    roc_auc_callback = RocCallback([_X_train, _statics_train], _y_train,
                                   [_X_valid, _statics_valid], _y_valid)
    early_stopping = EarlyStopping(monitor='val_loss', patience=5)
    # NOTE(review): this checkpoint path is identical to Example #1's for the
    # same fold, so the two models would overwrite each other's weights —
    # confirm which one "text_cnn_non_static_" belongs to.
    model_save_path = './models/text_cnn_non_static_' + str(
        indice_fold) + '.h5'
    model_check_point = ModelCheckpoint(model_save_path,
                                        save_best_only=True,
                                        save_weights_only=True)
    tb_callback = TensorBoard('./logs', write_graph=True, write_images=True)
    # Binary cross-entropy per output unit; Nadam here (vs. Adam elsewhere).
    model.compile(loss='binary_crossentropy',
                  optimizer='nadam',
                  metrics=['accuracy'])
    hist = model.fit([_X_train, _statics_train],
                     _y_train,
                     batch_size=BATCH_SIZE,
                     epochs=num_epoch,
                     validation_data=([_X_valid, _statics_valid], _y_valid),