Example 1
# cf (terminal colours), common (dataset paths) and get_summary_matrix()
# come from the surrounding script; a sketch of get_summary_matrix follows
# the example.
def show_summary():
    # Print the per-class summary matrix for each dataset split.
    print("\n" + cf.skyBlue("Training set"))
    print(get_summary_matrix(common.TRAINING_DIR))

    print("\n" + cf.salmon("Validation set"))
    print(get_summary_matrix(common.VALIDATION_DIR))

    print("\n" + cf.lightGreen("Testing set"))
    print(get_summary_matrix(common.TESTING_DIR))
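get_summary_matrix is not shown in this excerpt. A minimal sketch of one plausible implementation, reusing common.get_files and common.arrow_labels from Example 3 and the class/type labels used there (the body below is an assumption, not the project's actual helper):

# Hypothetical sketch: count the images in one dataset split,
# broken down by arrow class (rows) and arrow type (columns).
import pandas as pd

def get_summary_matrix(directory):
    counts = pd.DataFrame(0, index=('down', 'left', 'right', 'up'),
                          columns=('round', 'wide', 'narrow'))
    for _path, filename in common.get_files(directory):
        arrow_class, arrow_type = common.arrow_labels(filename)
        counts.loc[arrow_class, arrow_type] += 1
    return counts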
Example 2
import numpy as np

# cf (terminal colours), MAX_EPOCHS and setup_callbacks() come from the
# surrounding training script.
def fit(model, training, validation, batch_size):
    print("\nFitting model...")

    # Train from the data generators.  fit_generator is deprecated in
    # TensorFlow 2.x in favour of model.fit, which accepts generators
    # directly (see the sketch after this example).
    history = model.fit_generator(
        training,
        epochs=MAX_EPOCHS,
        validation_data=validation,
        steps_per_epoch=training.n // batch_size,
        validation_steps=validation.n // batch_size,
        callbacks=setup_callbacks(),
        workers=2,
        verbose=2)

    # argmin is 0-based, while Keras reports epochs starting from 1.
    best_epoch = np.argmin(history.history['val_loss']) + 1
    print("\n" + cf.lightGreen("Best epoch: {}".format(best_epoch)))
Example 3
import cv2
import numpy as np
import pandas as pd
import tensorflow

# common (paths, constants and file helpers), cf (terminal colours) and the
# precision/recall/f1/type_accuracy metric helpers come from the surrounding
# evaluation script.
def main(src_subdir, verbose, model_name):
    common.create_directories()

    src_dir = common.DATA_DIR + src_subdir + "/"

    # Load the trained Keras model to evaluate.
    model = tensorflow.keras.models.load_model(common.MODEL_DIR + model_name)

    # real (index) x predicted (column)
    confusion_matrix = pd.DataFrame(np.zeros((4, 4), dtype=np.int32),
                                    index=('down', 'left', 'right', 'up'),
                                    columns=('down', 'left', 'right', 'up'))

    classification_matrix = pd.DataFrame(np.zeros((4, 3)),
                                         index=('down', 'left', 'right', 'up'),
                                         columns=('precision', 'recall', 'f1'))

    type_matrix = pd.DataFrame(np.zeros((4, 2), dtype=np.int32),
                               index=('round', 'wide', 'narrow', 'total'),
                               columns=('correct', 'incorrect'))

    images = common.get_files(src_dir)

    print("Processing {} file(s) in {}/...\n".format(len(images), src_subdir))

    for path, filename in images:
        img = cv2.imread(path, cv2.IMREAD_GRAYSCALE)

        # Add a batch dimension so the single image matches the model input.
        data = np.reshape(img, (1, ) + common.INPUT_SHAPE)
        prediction = model.predict(data)

        class_index = np.argmax(prediction)
        predicted_class = common.CLASSES[class_index]

        real_class, arrow_type = common.arrow_labels(filename)

        if verbose and real_class != predicted_class:
            print(path)
            print("Expected {} but got {}: {}\n".format(
                cf.lightGreen(real_class), cf.lightCoral(predicted_class),
                str(prediction[0])))

        # .loc avoids chained indexing, which newer pandas releases may not
        # write back; rows are the real class, columns the predicted class.
        confusion_matrix.loc[real_class, predicted_class] += 1

        if real_class == predicted_class:
            type_matrix.loc[arrow_type, 'correct'] += 1
            type_matrix.loc['total', 'correct'] += 1
        else:
            type_matrix.loc[arrow_type, 'incorrect'] += 1
            type_matrix.loc['total', 'incorrect'] += 1

    print("\n" + cf.sandyBrown("Confusion matrix"))
    print(confusion_matrix)

    # Precision is computed per predicted class (confusion-matrix columns),
    # recall per real class (rows, axis=1); F1 then combines both per class.
    classification_matrix['precision'] = confusion_matrix.apply(precision)
    classification_matrix['recall'] = confusion_matrix.apply(recall, axis=1)

    classification_matrix['f1'] = classification_matrix.apply(f1, axis=1)

    print("\n" + cf.skyBlue("Classification summary"))
    print(classification_matrix)

    type_matrix['accuracy'] = type_matrix.apply(type_accuracy, axis=1)

    print("\n" + cf.plum("Accuracy by type"))
    print(type_matrix)

    print("\nFinished!")