Example #1
            test_acc_array[j] = test_results[1]

            # One row per test sample, one column per class (one-hot layout).
            predictions = np.empty((len(test_data), 2))

            predictions_output = model.predict(test_data)

            # Threshold the two softmax columns into a one-hot prediction.
            for k in range(len(predictions_output)):
                if predictions_output[k][0] > predictions_output[k][1]:
                    predictions[k] = np.array((1, 0))
                else:
                    predictions[k] = np.array((0, 1))
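            # NOTE: the loop above can be written as a vectorized one-hot
            # conversion (a sketch, assuming a 2-column softmax output):
            #   predictions = np.eye(2)[predictions_output.argmax(axis=1)]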

            # argmax(axis=1) maps one-hot rows back to integer class indices
            # for scikit-learn's confusion_matrix.
            print(
                confusion_matrix(test_labels.argmax(axis=1),
                                 predictions.argmax(axis=1)))

        # Keep the best test accuracy seen in this run.
        max_test_array[i][count - 1] = np.max(test_acc_array)
        count += 1

        # Reload the saved weights (presumably the initial snapshot) so the
        # next fold starts from the same point.
        model.load_weights("weights.h5")


#plt.plot(train_acc_list)
#plt.plot(test_acc_list)
#plt.title('VGG-16-5 Fold Cross validation-model accuracy')
#plt.ylabel('accuracy')
#plt.xlabel('epoch')
#plt.legend(['train', 'test'], loc='upper left')
#plt.gcf().savefig("Results/VGG-16-5FCV.png")
Example #2
        target_size=(image_size, image_size),
        batch_size=1,
        shuffle=False)
    x = model_list[model[count]].output
    x = Dropout(0.25, name='do_akhir')(x)
    predictions = Dense(num_classes, activation='softmax')(x)
    loaded_model = Model(inputs=model_list[model[count]].input,
                         outputs=predictions)
    loaded_model.compile(
        Adam(lr=0.01),
        loss='categorical_crossentropy',
        metrics=[categorical_accuracy, top_2_accuracy, top_3_accuracy])
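    # top_2_accuracy and top_3_accuracy are not defined in this snippet; they
    # are typically thin wrappers around Keras' top_k_categorical_accuracy,
    # e.g. (a sketch, assuming that convention):
    #   from keras.metrics import top_k_categorical_accuracy
    #   def top_2_accuracy(y_true, y_pred):
    #       return top_k_categorical_accuracy(y_true, y_pred, k=2)
    #   def top_3_accuracy(y_true, y_pred):
    #       return top_k_categorical_accuracy(y_true, y_pred, k=3)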
    loaded_model.load_weights(file)
    predictions = loaded_model.predict_generator(test_batches,
                                                 steps=num_val_samples,
                                                 verbose=1)
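    # With batch_size=1 (set above), steps=num_val_samples makes
    # predict_generator pass every test image through the network once.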
    test_labels = test_batches.classes
    cm = confusion_matrix(test_labels, predictions.argmax(axis=1))
    plot_confusion_matrix(cm,
                          cm_plot_labels,
                          base_dir=base_dir,
                          title='Confusion Matrix',
                          name=model[count])
    y_pred = np.argmax(predictions, axis=1)
    y_true = test_batches.classes
    report = classification_report(y_true, y_pred, target_names=cm_plot_labels)
    print(report)
    f.write(report)
    f.close()
    count += 1
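
# plot_confusion_matrix is not defined in the snippets on this page; a minimal
# matplotlib sketch compatible with the calls in Examples 2 and 3 (the
# base_dir/title/name keywords are assumptions read off Example 2) might be:
import os
import itertools
import numpy as np
import matplotlib.pyplot as plt

def plot_confusion_matrix(cm, classes, base_dir='.',
                          title='Confusion Matrix', name='model'):
    """Draw cm as an annotated heatmap and save it under base_dir."""
    plt.figure()
    plt.imshow(cm, interpolation='nearest', cmap=plt.cm.Blues)
    plt.title(title)
    plt.colorbar()
    ticks = np.arange(len(classes))
    plt.xticks(ticks, classes, rotation=45)
    plt.yticks(ticks, classes)
    # Write each cell count, switching text colour on dark cells.
    thresh = cm.max() / 2.0
    for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
        plt.text(j, i, format(cm[i, j], 'd'),
                 horizontalalignment='center',
                 color='white' if cm[i, j] > thresh else 'black')
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
    plt.tight_layout()
    plt.savefig(os.path.join(base_dir, '{}_cm.png'.format(name)))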
Example #3
plt.xlabel("Epochs")
plt.ylabel("Loss")
plt.legend()
plt.savefig("loss.png")
plt.figure()

plt.plot(epochs, acc, 'g', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.xlabel("Epochs")
plt.ylabel("Accuracy %")
plt.title('Training and validation cat accuracy')
plt.legend()
# Save before plt.show(); a stray plt.figure() here would write a blank image.
plt.savefig("accuracy.png")
plt.show()

# Confusion Matrix
predictions = models.predict_generator(test_gen, steps=len(df_val), verbose=1)
cm = confusion_matrix(test_gen.classes, predictions.argmax(axis=1))
plot_confusion_matrix(cm, ['0', '1'])

# Classification Report
y_pred = np.argmax(predictions, axis=1)
y_true = test_gen.classes

report = classification_report(y_true, y_pred, target_names=['0', '1'])
print(report)

# Save the model architecture as JSON (use a context manager so the
# file handle is closed).
model_json = models.to_json()
with open('CNN_Model.json', 'w') as json_file:
    json_file.write(model_json)
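
# Note: to_json() stores only the architecture. To restore the trained model
# later you would also persist the weights, e.g. (assuming a Keras model):
#   models.save_weights('CNN_Model_weights.h5')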