Example #1
def svm_skin():
    print("\nSVM Classification for Skin Cancer data:\n")
    x_train, x_test, y_train, y_test = get_data_skin()
    # Labels are remapped from {0, 1} to {-1, +1} for the hinge-loss SVM.
    svm = SVM(x_train, np.where(y_train == 0, -1, y_train), 100, 0.01)
    y_pred = svm.predict(x_train)
    print("\nTraining Classification accuracy: ")
    print(100 - 100 * np.sum(np.abs(y_pred - y_train)) / y_pred.shape[0])
    confusionMatrix(y_train, y_pred)
    y_pred = svm.predict(x_test)
    print("\nTesting Classification accuracy: ")
    print(100 - 100 * np.sum(np.abs(y_pred - y_test)) / y_pred.shape[0])
    confusionMatrix(y_test, y_pred)
    print("ROC Curve: ")
    plot_roc_curve(y_test, y_pred)
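The confusionMatrix and plot_roc_curve helpers called here come from the surrounding project and are not part of the snippet. A minimal stand-in for confusionMatrix, assuming it only has to accept (y_true, y_pred) and display the matrix as in this example, could be built on scikit-learn:

import numpy as np
from sklearn.metrics import confusion_matrix

def confusionMatrix(y_true, y_pred):
    # Hypothetical replacement: flatten to integer label arrays so that
    # {0, 1} and {-1, +1} encodings both work, then print the matrix.
    y_true = np.asarray(y_true).ravel().astype(int)
    y_pred = np.asarray(y_pred).ravel().astype(int)
    cm = confusion_matrix(y_true, y_pred)
    print("Confusion matrix:\n", cm)
    return cm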
Example #2
def logistic_skin():
    print("\nLogistic Regression for Skin Cancer data:\n")
    x_train, x_test, y_train, y_test = get_data_skin()
    logistic = Logistic(x_train, y_train)
    y_pred = logistic.predict(x_train)
    print("\nTraining Classification accuracy: ")
    print(100 - 100 * np.sum(np.abs(y_pred - y_train)) / y_pred.shape[0])
    confusionMatrix(y_train, y_pred)
    y_pred = logistic.predict(x_test)
    print("\nTesting Classification accuracy: ")
    print(100 - 100 * np.sum(np.abs(y_pred - y_test)) / y_pred.shape[0])
    confusionMatrix(y_test, y_pred)
    print("ROC Curve: ")
    plot_roc_curve(y_test, y_pred)
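plot_roc_curve is likewise project-specific. A sketch of such a helper, assuming binary labels (note that with hard 0/1 predictions the curve degenerates to a single point, so probabilities or decision values give a more informative plot):

import matplotlib.pyplot as plt
from sklearn.metrics import auc, roc_curve

def plot_roc_curve(y_true, y_score):
    # Works with {0, 1} or {-1, +1} ground-truth labels.
    fpr, tpr, _ = roc_curve(y_true, y_score)
    plt.plot(fpr, tpr, label="ROC (AUC = {:.3f})".format(auc(fpr, tpr)))
    plt.plot([0, 1], [0, 1], linestyle="--", color="grey")  # chance level
    plt.xlabel("False positive rate")
    plt.ylabel("True positive rate")
    plt.legend(loc="lower right")
    plt.show()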
Example #3
    def predict(self, X_test, y_test):
        X_input = autograd.Variable(X_test)
        y_target = autograd.Variable(y_test.long())

        with torch.no_grad():
            y_output = self(X_input)

        # Predicted class = index of the highest output score for each sample.
        prediction = y_output.max(1)[1]
        # Counts every misclassified sample (any nonzero difference), not only false positives.
        FP = (prediction - y_target).nonzero().shape[0]

        print(
            "\nFeed-Forward NN Test Classification Accuracy: {:.2f}%\n".format(
                (1 - FP / (y_target.shape[0])) * 100))
        confusionMatrix(y_target, prediction)
        print("ROC Curve: ")
        plot_roc_curve(y_target, prediction)
Example #4
def cnn():

    x_train, x_test, y_train, y_test = get_data_skin(model='cnn')

    x_train = x_train / np.float32(255)
    y_train = y_train.astype(np.int32)

    x_test = x_test / np.float32(255)
    y_test = y_test.astype(np.int32)

    # Create the Estimator
    skin_cancer_classifier = tf.estimator.Estimator(
        model_fn=cnn_model_fn, model_dir="skin_convnet_model")

    # Set up logging for predictions
    tensors_to_log = {"probabilities": "softmax_tensor"}

    logging_hook = tf.train.LoggingTensorHook(tensors=tensors_to_log,
                                              every_n_iter=50)

    # Train the model
    train_input_fn = tf.estimator.inputs.numpy_input_fn(x={"x": x_train},
                                                        y=y_train,
                                                        batch_size=100,
                                                        num_epochs=None,
                                                        shuffle=True)
    # Train one step and display the probabilities
    skin_cancer_classifier.train(input_fn=train_input_fn,
                                 steps=1,
                                 hooks=[logging_hook])

    skin_cancer_classifier.train(input_fn=train_input_fn, steps=100)

    eval_input_fn = tf.estimator.inputs.numpy_input_fn(x={"x": x_test},
                                                       y=y_test,
                                                       num_epochs=1,
                                                       shuffle=False)
    predictions = list(skin_cancer_classifier.predict(input_fn=eval_input_fn))

    predict = [i['classes'] for i in predictions]

    confusionMatrix(y_test, np.asarray(predict))
    print("ROC Curve: ")
    plot_roc_curve(y_test, np.asarray(predict))

    eval_results = skin_cancer_classifier.evaluate(input_fn=eval_input_fn)
    print(eval_results)
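cnn_model_fn is defined elsewhere in the project. Under the TensorFlow 1.x Estimator API it would follow the pattern sketched below; the 28x28 grayscale input shape, the two-class output layer, and the layer sizes are assumptions, while the tensor named "softmax_tensor" is what the logging hook above expects to find:

# Assumes TensorFlow 1.x (import tensorflow as tf), matching the Estimator code above.
def cnn_model_fn(features, labels, mode):
    # Assumed input shape; adjust to the actual image size and channel count.
    input_layer = tf.reshape(features["x"], [-1, 28, 28, 1])

    conv1 = tf.layers.conv2d(input_layer, filters=32, kernel_size=5,
                             padding="same", activation=tf.nn.relu)
    pool1 = tf.layers.max_pooling2d(conv1, pool_size=2, strides=2)
    conv2 = tf.layers.conv2d(pool1, filters=64, kernel_size=5,
                             padding="same", activation=tf.nn.relu)
    pool2 = tf.layers.max_pooling2d(conv2, pool_size=2, strides=2)

    flat = tf.reshape(pool2, [-1, 7 * 7 * 64])
    dense = tf.layers.dense(flat, units=1024, activation=tf.nn.relu)
    dropout = tf.layers.dropout(dense, rate=0.4,
                                training=(mode == tf.estimator.ModeKeys.TRAIN))
    logits = tf.layers.dense(dropout, units=2)  # assumed two classes

    predictions = {
        "classes": tf.argmax(logits, axis=1),
        # The tensor name must match the logging hook's "softmax_tensor".
        "probabilities": tf.nn.softmax(logits, name="softmax_tensor"),
    }
    if mode == tf.estimator.ModeKeys.PREDICT:
        return tf.estimator.EstimatorSpec(mode=mode, predictions=predictions)

    loss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits)
    if mode == tf.estimator.ModeKeys.TRAIN:
        optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.001)
        train_op = optimizer.minimize(loss,
                                      global_step=tf.train.get_global_step())
        return tf.estimator.EstimatorSpec(mode=mode, loss=loss, train_op=train_op)

    eval_metric_ops = {
        "accuracy": tf.metrics.accuracy(labels=labels,
                                        predictions=predictions["classes"])
    }
    return tf.estimator.EstimatorSpec(mode=mode, loss=loss,
                                      eval_metric_ops=eval_metric_ops)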
Example #5
    def train(self, X_train, y_train):
        X_input = autograd.Variable(X_train)
        y_target = autograd.Variable(y_train.long())
        opt = optim.SGD(params=self.parameters(), lr=0.01)

        # max_epochs is assumed to be defined at module level.
        for epoch in range(max_epochs):
            self.zero_grad()
            output = self(X_input)
            # Predicted class = index of the highest output score for each sample.
            predict = output.max(1)[1]
            loss = F.cross_entropy(output, y_target)
            loss.backward()
            opt.step()
        # Counts every misclassified sample from the final epoch, not only false positives.
        FP = (predict - y_target).nonzero().shape[0]

        print("\nFeed-Forward NN Training Classification Accuracy: {:.2f}%".
              format((1 - FP / y_target.shape[0]) * 100))
        confusionMatrix(y_target, predict)
        print("ROC Curve: ")
        plot_roc_curve(y_target, predict)
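The train method above and the predict method in Example #3 belong to a feed-forward network class that is not shown. A hypothetical skeleton and call site, assuming the class subclasses nn.Module, that inputs arrive as torch tensors, and that max_epochs is a module-level constant:

import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import autograd, optim

max_epochs = 100  # assumed module-level constant used by train()

class FeedForwardNN(nn.Module):  # hypothetical class name
    def __init__(self, n_features, n_hidden, n_classes):
        super(FeedForwardNN, self).__init__()
        self.fc1 = nn.Linear(n_features, n_hidden)
        self.fc2 = nn.Linear(n_hidden, n_classes)

    def forward(self, x):
        return self.fc2(F.relu(self.fc1(x)))

    # The train() and predict() methods from Examples #3 and #5 would go here.

# model = FeedForwardNN(x_train.shape[1], 64, 2)
# model.train(torch.from_numpy(x_train).float(), torch.from_numpy(y_train))
# model.predict(torch.from_numpy(x_test).float(), torch.from_numpy(y_test))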
Example #6
def main():
    X_train, Y_train = prepareTrainingset()
    X_test, Y_test = prepareTestset()

    seggregated_data = seggregate_class(X_train, Y_train)
    # seggregated_data = seggregate_class(X_train[:1000], Y_train[:1000])

    fisher_parameters = fisher_ldf_classifier(seggregated_data)

    # training accuracy
    print("=================Training Summary=======================")
    predictions = fisher_predict(X_train, fisher_parameters)
    accuracy = getAccuracy(Y_train, predictions)
    print("Training accuracy: ", accuracy)
    cm, classes = confusionMatrix(Y_train, predictions)
    plotConfusionMatrix(cm, classes)

    cls_accuracy, classes = class_accuracy(cm, classes)
    cls_error_rate = 1 - cls_accuracy

    print("Classes: ", classes)
    print("Class Accuracy: \n", np.round(cls_accuracy, 3))
    print("Class error rate: \n", np.round(cls_error_rate, 3))

    # test accuracy
    print("=================Testing Summary=======================")
    predictions = fisher_predict(X_test, fisher_parameters)
    accuracy = getAccuracy(Y_test, predictions)
    print("Testing accuracy: ", accuracy)
    cm, classes = confusionMatrix(Y_test, predictions)
    plotConfusionMatrix(cm, classes)

    cls_accuracy, classes = class_accuracy(cm, classes)
    cls_error_rate = 1 - cls_accuracy

    print("Classes: ", classes)
    print("Class Accuracy: \n", np.round(cls_accuracy, 3))
    print("Class error rate: \n", np.round(cls_error_rate, 3))
Example #7
plt.show()

# Plot training & validation loss values
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('Model loss')
plt.ylabel('Loss')
plt.xlabel('Epoch')
plt.legend(['Train', 'Test'], loc='upper left')
plt.show()

# Score of the trained model
scores = model.evaluate(x_test, y_test_one_hot, verbose=1)
print('Test loss:', scores[0])
print('Test accuracy:', scores[1])

# compute the confusion matrix
prediction_prob = model.predict(x_test)
predictions = np.argmax(prediction_prob, axis=1)
cm, classes = confusionMatrix(y_test, predictions)

# compute class accuracies
cls_accuracy, classes = class_accuracy(cm, classes)
cls_error_rate = 1-cls_accuracy
print("========================================")
print("Class labels: ", classes)
print("Class Accuracies: ", np.round(cls_accuracy, 3))
print("Class error rates: ", np.round(cls_error_rate, 3))

# finally plot confusion matrix
plotConfusionMatrix(cm, classes)
Example #8
        for i in range(tested_num, tested_num + N // division_group):
            source1 = os.path.join(source_pdf_Dir1, file_list_pdf[i])
            shutil.move(source1, destination_pdf1)
            source1 = os.path.join(source_txt_Dir1, file_list_txt[i])
            shutil.move(source1, destination_txt1)
            source1 = os.path.join(source_pdf_cropped_Dir1,
                                   file_list_pdf_cropped[i])
            shutil.move(source1, destination_pdf_cropped1)
            source1 = os.path.join(source_txt_cropped_Dir1,
                                   file_list_txt_cropped[i])
            shutil.move(source1, destination_txt_cropped1)
            inner_counter += 1
        gussed_title = title_decision_confusion_matrix()
        #print(gussed_title)
        correct_title = [test_case] * inner_counter
        conf_matrix = confusionMatrix(correct_title, gussed_title)
        #print(conf_matrix)
        #print('****************')
        Total_conf_matrix += conf_matrix

        file_list_pdf2 = os.listdir(source_pdf_Dir2)
        file_list_txt2 = os.listdir(source_txt_Dir2)
        file_list_pdf_cropped2 = os.listdir(source_pdf_cropped_Dir2)
        file_list_txt_cropped2 = os.listdir(source_txt_cropped_Dir2)
        for f in file_list_pdf2:
            source2 = os.path.join(source_pdf_Dir2, f)
            shutil.move(source2, destination_pdf2)
        for f in file_list_txt2:
            source2 = os.path.join(source_txt_Dir2, f)
            shutil.move(source2, destination_txt2)
        for f in file_list_pdf_cropped2:
Example #9
                continue

            # construct a blob for the face ROI, then pass the blob
            # through our face embedding model to obtain the 128-d
            # quantification of the face
            faceBlob = cv2.dnn.blobFromImage(face,
                                             1.0 / 255, (96, 96), (0, 0, 0),
                                             swapRB=True,
                                             crop=False)
            embedder.setInput(faceBlob)
            vec = embedder.forward()

            # perform classification to recognize the face
            preds = svm_model.predict_proba(vec)[0]
            j = np.argmax(preds)
            proba = preds[j]
            name_detected = le.classes_[j]

            gt_names.append(name_gt)
            detected_names.append(name_detected)

            print(image_path, name_gt, name_detected)

cm, names = confusionMatrix(gt_names, detected_names)
plotConfusionMatrix(cm, names)
class_acc, _ = classAccuracy(cm, names)

overall_acc = overallAccuracy(gt_names, detected_names)
print("People: {}", names)
print("Person Accuracy: {}", class_acc)
print("Overall Accuracy {}", overall_acc)