# NOTE(review): the first lines of this metric helper were cut off in the
# source; the header and `precision` line are reconstructed from the visible
# body (standard Keras F1-score helper) — confirm against the original file.
def f1_m(y_true, y_pred):
    """Return the F1 score computed from the precision_m/recall_m helpers."""
    precision = precision_m(y_true, y_pred)
    recall = recall_m(y_true, y_pred)
    # K.epsilon() guards against division by zero when both metrics are 0.
    return 2 * ((precision * recall) / (precision + recall + K.epsilon()))


# define the keras model
model = Sequential()
model.add(Dense(8, activation='relu'))
model.add(Dense(1, activation='sigmoid'))

# compile the keras model
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])

# fit the keras model on the dataset
history = model.fit(np.asarray(X_train), np.asarray(y_train), epochs=15, batch_size=1)

# evaluate the keras model
_, accuracy = model.evaluate(np.asarray(X_test), np.asarray(y_test))
print('Accuracy: %.2f' % (accuracy * 100))

# make class predictions with the model
expected = np.asarray(y_test)
# FIX: Sequential.predict_classes was removed in TF 2.6; for a single sigmoid
# output unit, threshold the predicted probability at 0.5 to get class labels.
predicted = (model.predict(np.asarray(X_test)) > 0.5).astype("int32")

# summarize the fit of the model
classification_model = metrics.classification_report(expected, predicted)
confusion_model = metrics.confusion_matrix(expected, predicted)
accuracy_model = metrics.accuracy_score(expected, predicted)
print(classification_model)
print(confusion_model)
print("Accuracy NN with best parameters: ", accuracy_model)

# Save metrics
with open(save_report, "a") as text_file:
    text_file.write("\nAccuracy NN with best parameters: ")
    text_file.write(str(accuracy_model))
    text_file.write("\nClassification NN with best parameters:\n")
    text_file.write(classification_model)
# %% [markdown]
# **The rectified linear unit (relu) activation function** is used as a good
# general activation function for the first two layers, while the sigmoid
# activation function is required for the final layer as the output you want
# (of whether a passenger survives or not) needs to be scaled in the range of
# 0-1 (the probability of a passenger surviving).

# %%
model.summary()

# %%
model.compile(optimizer="adam", loss='binary_crossentropy', metrics=['accuracy'])
model.fit(X_train, y_train, batch_size=32, epochs=50)

# %% [markdown]
# ### With the model built and trained it's now time to see how it performs
# against the test data.

# %%
# FIX: Sequential.predict_classes was removed in TF 2.6; for the single
# sigmoid output unit, threshold the predicted probability at 0.5 instead.
y_pred = (model.predict(X_test) > 0.5).astype("int32")
print(metrics.accuracy_score(y_test, y_pred))

# %%

# %%

# %%
# Train with a 10% validation split; `training` holds the History object.
training = model.fit(Xtrain.values, ytrain.values, validation_split=0.1,
                     epochs=200, batch_size=10, verbose=2)
model.summary()

"""## Testing the accuracy"""
# NOTE(review): this evaluates on the *training* data; evaluating on
# Xtest/ytest would give an unbiased accuracy estimate — confirm intent.
accuracy = model.evaluate(Xtrain, ytrain)
print(accuracy)

# Compare predictions with the actual values
# FIX: Sequential.predict_classes was removed in TF 2.6; threshold the sigmoid
# output at 0.5 for binary labels (assumes a single output unit — if the model
# ends in softmax, use argmax instead; TODO confirm against model definition).
print((model.predict(Xtest) > 0.5).astype("int32"))
print("[", end="")
for i in ytest:
    print(i, end=" ")
print("\b]")

"""## Plotting the results"""
from matplotlib import pyplot as plt

history = training
plt.figure(figsize=(15, 10))
plt.subplots_adjust(hspace=0.5)
plt.subplot(221)