Example #1
0
        from keras.callbacks import EarlyStopping

        # Number of input features, inferred from the first training sample.
        num_feats = len(x_train[0])
        model = Sequential()

        # Stop as soon as training loss fails to improve by at least
        # min_delta (patience=0 -> no grace epochs before stopping).
        early_stop = EarlyStopping(monitor='loss',
                                   patience=0,
                                   verbose=1,
                                   min_delta=0.005,
                                   mode='auto')

        # Two hidden ReLU layers with heavy (50%) dropout; 2-unit softmax
        # head to pair with sparse_categorical_crossentropy below.
        model.add(Dense(num_feats, activation='relu', input_dim=num_feats))
        model.add(Dropout(0.50))
        model.add(Dense(500, activation='relu', kernel_initializer='uniform'))
        model.add(Dropout(0.50))
        model.add(Dense(2, kernel_initializer='uniform', activation='softmax'))

        model.compile(loss='sparse_categorical_crossentropy',
                      metrics=['accuracy'],
                      optimizer='adam')

        model.fit(x_train,
                  y_train,
                  epochs=25,
                  verbose=1,
                  callbacks=[early_stop])

        # Sequential.predict_classes() was removed in TF 2.6; take the
        # argmax over the softmax probabilities to get class labels.
        y_pred = model.predict(x_test).argmax(axis=-1)
        accuracy = accuracy_score(y_test, y_pred)
        print("Accuracy: %.2f%%" % (accuracy * 100.0))
Example #2
0
model.add(Dense(units=2, activation='sigmoid'))

# Compile the Network
# More information on optimizer types:
# https://keras.io/optimizers/
# NOTE: `lr` was renamed to `learning_rate` in TF2 Keras optimizers.
model.compile(optimizer=Adam(learning_rate=0.01), loss='binary_crossentropy', metrics=['accuracy'])
# loss='binary_crossentropy' specifies that your model should optimize the log
# loss for binary classification.
# metrics=['accuracy'] specifies that accuracy should be printed out

# Review NN configuration
model.summary()

History = model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=10, verbose=1)

# Sequential.predict_classes() was removed in TF 2.6; argmax over the
# per-class scores yields the predicted class labels.
model.predict(X_test).argmax(axis=-1)

#model.predict(X_test)

# Log Loss over time
plt.plot(History.history['loss'])
plt.plot(History.history['val_loss'])
plt.title('Model Loss')
plt.ylabel('Loss')
plt.xlabel('Epochs')
plt.legend(['train', 'test'])
plt.show()

# Model accuracy over time
# With metrics=['accuracy'], TF2 records the history keys as
# 'accuracy'/'val_accuracy' (not 'acc'/'val_acc').
plt.plot(History.history['accuracy'])
plt.plot(History.history['val_accuracy'])
Example #3
0
#score=0.52721
#%%
import keras
from keras.layers import Dense, Dropout
from keras.models import Sequential
from keras.optimizers import Adam
from keras.utils import to_categorical
#%%
# NOTE(review): the hidden Dense layers use the default linear activation,
# so consecutive Dense layers collapse into one linear map — consider
# activation='relu' if depth is meant to add capacity. Left unchanged to
# preserve the recorded score above.
model = Sequential()
model.add(Dense(200, input_shape=(x_train.shape[1], )))
model.add(Dense(200))
model.add(Dropout(0.2))
model.add(Dense(150))
model.add(Dense(50))
# 6-unit softmax head paired with categorical_crossentropy (one-hot labels).
model.add(Dense(6, activation='softmax'))
#%%

model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['acc'])

#%%

model.fit(x_train, y_train, epochs=1000)
#%%
# Sequential.predict_classes() was removed in TF 2.6; take the argmax of
# the softmax output to obtain integer class predictions.
prediction = model.predict(x_test).argmax(axis=-1)
#%%
# Collapse the one-hot ground truth back to integer labels for f1_score.
y_test = [i.argmax() for i in y_test]
#%%
score = f1_score(y_test, prediction, average='weighted')
print(score)