def easy_hard_data_classifier_test():
    #-------------------------------------------------------------------#
    # generate and classify (using nn_classifier) 2 pairs of datasets (easy and hard)
    # calculate classification error in each case
    trainX, trainY, testX, testY = generate_train_test(2, task='easy')
    util.scatter_data(trainX, trainY)

    D = scipy.spatial.distance.cdist(testX, trainX, metric='euclidean')
    min_index = np.argmin(D, axis=1)
    predicted_labels = trainY[min_index]
    err = util.classification_error(testY, predicted_labels)

    print('True labels:\n{}'.format(testY))
    print('Predicted labels:\n{}'.format(predicted_labels))
    print('Error:\n{}'.format(err))

    trainX, trainY, testX, testY = generate_train_test(2, task='hard')
    util.scatter_data(trainX, trainY)

    D = scipy.spatial.distance.cdist(testX, trainX, metric='euclidean')
    min_index = np.argmin(D, axis=1)
    predicted_labels = trainY[min_index]
    err = util.classification_error(testY, predicted_labels)

    print('True labels:\n{}'.format(testY))
    print('Predicted labels:\n{}'.format(predicted_labels))
    print('Error:\n{}'.format(err))
    #-------------------------------------------------------------------#
    pass
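Several of the examples below call seg.nn_classifier directly instead of spelling out the distance computation. A minimal sketch of what that helper is assumed to do, based on the cdist/argmin logic used inline above (an assumption, not the library's actual implementation):

import numpy as np
import scipy.spatial

def nn_classifier_sketch(train_data, train_labels, test_data):
    # Assign each test sample the label of its nearest training sample
    # (Euclidean distance), mirroring the inline nearest-neighbour code above.
    D = scipy.spatial.distance.cdist(test_data, train_data, metric='euclidean')
    min_index = np.argmin(D, axis=1)
    return train_labels[min_index]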
Example #2
def segmentation_combined_atlas_minmax_test():
    task = 'brain'
    n = 5
    all_subjects = np.arange(n)
    train_slice = 1
    tmp_data, tmp_labels, tmp_feature_labels = util.create_dataset(1,train_slice,task)
    all_data_matrix = np.empty((tmp_data.shape[0], tmp_data.shape[1], n))
    all_labels_matrix = np.empty((tmp_labels.shape[0], n))

    #Load datasets once
    for i in all_subjects:
        train_data, train_labels, train_feature_labels = util.create_dataset(i+1,train_slice,task)
        all_data_matrix[:,:,i] = train_data
        all_labels_matrix[:,i] = train_labels.ravel()

    predicted_labels_min = seg.segmentation_combined_atlas(all_labels_matrix, combining='min')
    predicted_labels_max = seg.segmentation_combined_atlas(all_labels_matrix, combining='max')

    test_labels = all_labels_matrix[:,4].astype(bool)

    print('Combining method = min:')
    err = util.classification_error(test_labels, predicted_labels_min)
    print('Error:\n{}'.format(err))
    dice = util.dice_overlap(test_labels, predicted_labels_min)
    print('Dice coefficient:\n{}'.format(dice))

    print('Combining method = max:')
    err = util.classification_error(test_labels, predicted_labels_max)
    print('Error:\n{}'.format(err))
    dice = util.dice_overlap(test_labels, predicted_labels_max)
    print('Dice coefficient:\n{}'.format(dice))
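seg.segmentation_combined_atlas is used above with combining='min' and combining='max', and elsewhere with combining='mode'. A minimal sketch of the assumed label-fusion behaviour (an assumption, not the library's actual implementation):

import numpy as np
from scipy import stats

def combine_atlas_labels_sketch(labels_matrix, combining='mode'):
    # labels_matrix: num_pixels x num_subjects matrix of atlas labels.
    # 'min'  -> intersection of binary masks (all atlases must agree),
    # 'max'  -> union of binary masks (any single atlas is enough),
    # 'mode' -> per-pixel majority vote across the atlases.
    if combining == 'min':
        return np.min(labels_matrix, axis=1)
    if combining == 'max':
        return np.max(labels_matrix, axis=1)
    return stats.mode(labels_matrix, axis=1)[0].ravel()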
Example #3
def feature_curve(use_random=False):
    # Load training and test data
    train_data, train_labels, train_feature_labels = util.create_dataset(1, 1, 'brain')
    test_data, test_labels, test_feature_labels = util.create_dataset(2, 1, 'brain')

    if use_random:
        train_data = np.random.randn(train_data.shape[0], train_data.shape[1])

    # Normalize data
    train_data, test_data = seg.normalize_data(train_data, test_data)

    # Define parameters
    feature_sizes = np.arange(train_data.shape[1]) + 1
    train_size = 10
    k = 3
    num_iter = 5

    # Store errors
    test_error = np.empty([len(feature_sizes), num_iter])
    test_error[:] = np.nan
    train_error = np.empty([len(feature_sizes), num_iter])
    train_error[:] = np.nan

    # Train and test with different sizes
    for i in np.arange(len(feature_sizes)):
        for j in np.arange(num_iter):
            print('feature size = {}, iter = {}'.format(feature_sizes[i], j))
            start_time = timeit.default_timer()
            # Subsample training set
            ix = np.random.randint(len(train_data), size=train_size)
            subset_train_data = train_data[ix, :]
            subset_train_labels = train_labels[ix, :]

            # Train classifier
            neigh = KNeighborsClassifier(n_neighbors=k)
            neigh.fit(subset_train_data[:, :feature_sizes[i]], subset_train_labels.ravel())
            # Evaluate
            predicted_test_labels = neigh.predict(test_data[:, :feature_sizes[i]])
            predicted_train_labels = neigh.predict(subset_train_data[:, :feature_sizes[i]])

            test_error[i, j] = util.classification_error(test_labels, predicted_test_labels)
            train_error[i, j] = util.classification_error(subset_train_labels, predicted_train_labels)

            # Timer log
            elapsed = timeit.default_timer() - start_time
            # print('elapsed time = {}'.format(elapsed))

    ## Display results
    fig = plt.figure(figsize=(8, 8))
    ax1 = fig.add_subplot(111)
    x = feature_sizes
    y_test = np.mean(test_error, 1)
    yerr_test = np.std(test_error, 1)
    p1 = ax1.errorbar(x, y_test, yerr=yerr_test, label='Test error')

    ax1.set_xlabel('Number of features')
    ax1.set_ylabel('Error')
    ax1.grid()
    ax1.legend()
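feature_curve, like most examples below, relies on seg.normalize_data to put train and test data on a common scale. A minimal sketch, assuming z-scoring with the training-set statistics:

import numpy as np

def normalize_data_sketch(train_data, test_data=None):
    # Z-score every feature using the training-set mean and standard deviation;
    # the same statistics are applied to the test set so both share one scale.
    mu = train_data.mean(axis=0)
    sigma = train_data.std(axis=0)
    sigma[sigma == 0] = 1.0  # guard against constant features
    train_norm = (train_data - mu) / sigma
    if test_data is None:
        return train_norm, None
    return train_norm, (test_data - mu) / sigma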
Example #4
def easy_hard_data_classifier_test():
    train_data_easy, train_label_easy, test_data_easy, test_label_easy = generate_train_test(50, 'easy')
    train_data_hard, train_label_hard, test_data_hard, test_label_hard = generate_train_test(50, 'hard')

    predicted_labels_easy = seg.nn_classifier(train_data_easy, train_label_easy, test_data_easy)
    predicted_labels_hard = seg.nn_classifier(train_data_hard, train_label_hard, test_data_hard)

    err_easy = util.classification_error(test_label_easy, predicted_labels_easy)
    err_hard = util.classification_error(test_label_hard, predicted_labels_hard)

    print("easy: {0}, hard: {1}".format(err_easy, err_hard))
Example #5
def TestCombinedKmeans(train_data_matrix, train_labels_matrix, test_data,
                       test_labels):
    im_size = [240, 240]

    #Combined K-means
    pred_labels_kmeans = np.empty(
        [train_data_matrix.shape[0], train_data_matrix.shape[2]])
    for i in range(train_data_matrix.shape[2]):
        #normalize data
        train_data, test_data = seg.normalize_data(train_data_matrix[:, :, i],
                                                   test_data)
        #get optimized clusters (using 100 iterations and a learning rate of 0.1)
        _, _, w_final = prj.kmeans_no_plot(train_data,
                                           train_labels_matrix[:, i], 100, 0.1)
        #predict the labels
        temp_pred = prj.predicted_kmeans_test(w_final, test_data)

        #store labels for each training subject in one matrix
        pred_labels_kmeans[:, i] = temp_pred

    #decision fusion based on majority voting
    predicted_labels_kmeans_final = scipy.stats.mode(pred_labels_kmeans,
                                                     axis=1)[0].flatten()

    #calculate the error and dice
    err = util.classification_error(test_labels, predicted_labels_kmeans_final)
    dice = util.dice_multiclass(test_labels, predicted_labels_kmeans_final)
    predicted_mask = predicted_labels_kmeans_final.reshape(
        im_size[0], im_size[1])

    return predicted_mask, err, dice
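TestCombinedKmeans evaluates its fused prediction with util.dice_multiclass. A minimal sketch, assuming it averages the binary Dice overlap over the classes present in the ground truth:

import numpy as np

def dice_multiclass_sketch(true_labels, predicted_labels):
    # Average the per-class binary Dice overlap over all ground-truth classes.
    t = np.asarray(true_labels).ravel()
    p = np.asarray(predicted_labels).ravel()
    dices = []
    for c in np.unique(t):
        a = t == c
        b = p == c
        denom = np.sum(a) + np.sum(b)
        dices.append(2.0 * np.sum(a & b) / denom if denom > 0 else 1.0)
    return np.mean(dices)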
Example #6
def segmentation_combined_atlas_test():

    task = 'brain'
    n = 5
    all_subjects = np.arange(n)
    train_slice = 1
    tmp_data, tmp_labels, tmp_feature_labels = util.create_dataset(1,train_slice,task)
    all_data_matrix = np.empty((tmp_data.shape[0], tmp_data.shape[1], n))
    all_labels_matrix = np.empty((tmp_labels.shape[0], n))

    #Load datasets once
    for i in all_subjects:
        train_data, train_labels, train_feature_labels = util.create_dataset(i+1,train_slice,task)
        all_data_matrix[:,:,i] = train_data
        all_labels_matrix[:,i] = train_labels.ravel()

    #------------------------------------------------------------------#
    # Combine labels of the training images, convert the combined and true
    # labels into mask images, view both masks on the same axis, and
    # calculate the dice coefficient and classification error.
    pred_labels = seg.segmentation_combined_atlas(all_labels_matrix)
    predicted_mask = pred_labels.reshape(240, 240)
    GT = plt.imread('../data/dataset_brains/1_1_gt.tif')
    gt_mask = GT > 0
    gt_vec = gt_mask.flatten()  # ground-truth labels

    err = util.classification_error(gt_vec, pred_labels)
    dice = util.dice_overlap(gt_vec, pred_labels)
    print('Error', err, 'Dice', dice)
    plt.figure()
    plt.imshow(predicted_mask, cmap='gray', alpha=0.5)
    plt.imshow(gt_mask, cmap='jet', alpha=0.5)
    plt.show()
Example #7
def segmentation_combined_atlas_test():
    task = 'brain'
    n = 5
    all_subjects = np.arange(n)
    train_slice = 1
    tmp_data, tmp_labels, tmp_feature_labels = util.create_dataset(1, train_slice, task)
    all_data_matrix = np.empty((tmp_data.shape[0], tmp_data.shape[1], n))
    all_labels_matrix = np.empty((tmp_labels.shape[0], n))

    # Load datasets once
    for i in all_subjects:
        train_data, train_labels, train_feature_labels = util.create_dataset(i + 1, train_slice, task)
        all_data_matrix[:, :, i] = train_data
        all_labels_matrix[:, i] = train_labels.ravel()

    # Combine labels of training images:
    predicted_labels = seg.segmentation_combined_atlas(all_labels_matrix, combining='mode')

    # Convert combined label into mask image:
    predicted_mask = predicted_labels.reshape(240, 240)

    # Convert true label into mask image:
    true_mask = all_labels_matrix[:, 4].reshape(240, 240)

    plt.imshow(predicted_mask + true_mask)

    err = util.classification_error(true_mask, predicted_mask)
    dice = util.dice_overlap(true_mask, predicted_mask)

    print("error: {0}, dice: {1}".format(err, dice))
Example #8
def nn_classifier_test_brains(testDice=False):

    # Subject 1, slice 1 is the train data
    X, Y, feature_labels_train = util.create_dataset(1,1,'brain')
    N = 1000
    ix = np.random.randint(len(X), size=N)
    train_data = X[ix,:]
    train_labels = Y[ix,:]
    # Subject 3, slice 1 is the test data
    test_data, test_labels, feature_labels_test  = util.create_dataset(3,1,'brain')

    predicted_labels = seg.nn_classifier(train_data, train_labels, test_data)
    predicted_labels = predicted_labels.astype(bool)
    test_labels = test_labels.astype(bool)
    err = util.classification_error(test_labels, predicted_labels)
    print('Error:\n{}'.format(err))

    if testDice:
        dice = util.dice_overlap(test_labels, predicted_labels)
        print('Dice coefficient:\n{}'.format(dice))
    else:
        I = plt.imread('../data/dataset_brains/3_1_t1.tif')
        GT = plt.imread('../data/dataset_brains/3_1_gt.tif')
        gt_mask = GT>0
        gt_labels = gt_mask.flatten() # labels
        predicted_mask = predicted_labels.reshape(I.shape)
        fig = plt.figure(figsize=(15,5))
        ax1 = fig.add_subplot(131)
        ax1.imshow(I)
        ax2 = fig.add_subplot(132)
        ax2.imshow(predicted_mask)
        ax3  = fig.add_subplot(133)
        ax3.imshow(gt_mask)
Example #9
def easy_hard_data_classifier_test():
    #-------------------------------------------------------------------#
    # Generate and classify (using nn_classifier) 2 pairs of datasets (easy and hard)
    # and calculate the classification error in each case
    EasytrainX, EasytrainY, EasytestX, EasytestY = generate_train_test(100, 'easy')
    HardtrainX, HardtrainY, HardtestX, HardtestY = generate_train_test(100, 'hard')

    predictedLabels_easy = seg.nn_classifier(EasytrainX, EasytrainY, EasytestX)
    class_error_easy = util.classification_error(EasytestY, predictedLabels_easy)

    predictedLabels_hard = seg.nn_classifier(HardtrainX, HardtrainY, HardtestX)
    class_error_hard = util.classification_error(HardtestY, predictedLabels_hard)

    print("Easy task's error: {}".format(class_error_easy))
    print("Hard task's error: {}".format(class_error_hard))
    #-------------------------------------------------------------------#
    pass
Example #10
def easy_hard_data_classifier_test():
    #-------------------------------------------------------------------#
    # Generate and classify (using nn_classifier) 2 pairs of datasets (easy and hard)
    # and calculate the classification error in each case
    trainXeasy, trainYeasy, testXeasy, testYeasy = generate_train_test(
        2, 'easy')
    trainXhard, trainYhard, testXhard, testYhard = generate_train_test(
        2, 'hard')

    predicted_labels_easy = seg.nn_classifier(trainXeasy, trainYeasy,
                                              testXeasy)
    predicted_labels_hard = seg.nn_classifier(trainXhard, trainYhard,
                                              testXhard)

    err_easy = util.classification_error(testYeasy, predicted_labels_easy)
    print(err_easy)
    err_hard = util.classification_error(testYhard, predicted_labels_hard)
    print(err_hard)
def knn_curve():

    # Load training and test data
    train_data, train_labels, train_feature_labels = util.create_dataset(
        1, 1, 'brain')
    test_data, test_labels, test_feature_labels = util.create_dataset(
        2, 1, 'brain')
    # Normalize data
    train_data, test_data = seg.normalize_data(train_data, test_data)

    #Define parameters
    num_iter = 3
    train_size = 100
    k = np.array([1, 3, 5, 9, 15, 25, 100])
    # k = np.array([1, 5, 9])

    #Store errors
    test_error = np.empty([len(k), num_iter])
    test_error[:] = np.nan
    dice = np.empty([len(k), num_iter])
    dice[:] = np.nan

    ## Train and test with different values

    for i in np.arange(len(k)):
        for j in np.arange(num_iter):
            print('k = {}, iter = {}'.format(k[i], j))
            #Subsample training set
            ix = np.random.randint(len(train_data), size=train_size)
            subset_train_data = train_data[ix, :]
            subset_train_labels = train_labels[ix, :]

            predicted_test_labels = seg.knn_classifier(subset_train_data,
                                                       subset_train_labels,
                                                       test_data, k[i])

            # #Train classifier
            # neigh = KNeighborsClassifier(n_neighbors=k[i])
            # neigh.fit(subset_train_data, subset_train_labels)
            # #Evaluate
            # predicted_test_labels = neigh.predict(test_data)

            test_error[i, j] = util.classification_error(test_labels,
                                                         predicted_test_labels)
            dice[i, j] = util.dice_overlap(test_labels, predicted_test_labels)

    ## Display results
    fig = plt.figure(figsize=(8, 8))
    ax1 = fig.add_subplot(111)
    p1 = ax1.plot(k, np.mean(test_error, 1), 'r', label='error')
    p2 = ax1.plot(k, np.mean(dice, 1), 'k', label='dice')
    ax1.set_xlabel('k')
    ax1.set_ylabel('error')
    ax1.grid()
    ax1.legend()
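knn_curve calls seg.knn_classifier. A minimal sketch using scikit-learn's KNeighborsClassifier, as the commented-out alternative above does (an assumption about seg.knn_classifier's behaviour, not its actual implementation):

import numpy as np
from sklearn.neighbors import KNeighborsClassifier

def knn_classifier_sketch(train_data, train_labels, test_data, k):
    # Fit a k-NN classifier on the (subsampled) training set and predict
    # a label for every test sample.
    neigh = KNeighborsClassifier(n_neighbors=int(k))
    neigh.fit(train_data, np.asarray(train_labels).ravel())
    return neigh.predict(test_data)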
Example #12
def TestcombinedAtlases(train_labels_matrix, test_labels):
    im_size = [240, 240]
    #predict the test data labels
    predicted_labels, predicted_labels2_atlas = seg.segmentation_combined_atlas(
        train_labels_matrix)
    #calculate error and dice
    dice_atlas = util.dice_multiclass(test_labels, predicted_labels2_atlas)
    err_atlas = util.classification_error(test_labels, predicted_labels2_atlas)
    predicted_mask_atlas = predicted_labels2_atlas.reshape(
        im_size[0], im_size[1])
    return predicted_mask_atlas, err_atlas, dice_atlas
Example #13
def nn_classifier_test_samples():
    train_data, train_labels = seg.generate_gaussian_data(20)
    test_data, test_labels = seg.generate_gaussian_data(10)
    predicted_labels = seg.nn_classifier(train_data, train_labels, test_data)

    # predicted_labels = predicted_labels.astype(bool)
    # test_labels = test_labels.astype(bool)
    err = util.classification_error(test_labels, predicted_labels)

    print('True labels:\n{}'.format(test_labels))
    print('Predicted labels:\n{}'.format(predicted_labels))
    print('Error:\n{}'.format(err))
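nn_classifier_test_samples draws its toy data from seg.generate_gaussian_data. A hypothetical stand-in (the 2-D feature count, class means and spread are assumptions made for illustration only):

import numpy as np

def generate_gaussian_data_sketch(n_per_class=20, mu1=(0.0, 0.0), mu2=(2.0, 2.0), sigma=1.0):
    # Two 2-D Gaussian clouds with n_per_class samples each and binary labels.
    x1 = np.random.randn(n_per_class, 2) * sigma + np.asarray(mu1)
    x2 = np.random.randn(n_per_class, 2) * sigma + np.asarray(mu2)
    data = np.vstack((x1, x2))
    labels = np.concatenate((np.zeros(n_per_class), np.ones(n_per_class))).reshape(-1, 1)
    return data, labels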
Example #14
def TestcombinedkNN(train_data_matrix, train_labels_matrix, test_data,
                    test_labels):
    im_size = [240, 240]

    #predict test data labels
    predicted_labels, predicted_labels2_knn = seg.segmentation_combined_knn(
        train_data_matrix, train_labels_matrix, test_data)
    #calculate error and dice
    dice_knn = util.dice_multiclass(test_labels, predicted_labels2_knn)
    err_knn = util.classification_error(test_labels, predicted_labels2_knn)
    predicted_mask_atlas = predicted_labels2_knn.reshape(
        im_size[0], im_size[1])
    return predicted_mask_atlas, err_knn, dice_knn
Example #15
def segmentation_combined_atlas_test():

    task = 'brain'
    n = 5
    all_subjects = np.arange(n)
    train_slice = 1
    tmp_data, tmp_labels, tmp_feature_labels = util.create_dataset(1,train_slice,task)
    all_data_matrix = np.empty((tmp_data.shape[0], tmp_data.shape[1], n))
    all_labels_matrix = np.empty((tmp_labels.shape[0], n))

    #Load datasets once
    for i in all_subjects:
        train_data, train_labels, train_feature_labels = util.create_dataset(i+1,train_slice,task)
        all_data_matrix[:,:,i] = train_data
        all_labels_matrix[:,i] = train_labels.ravel()

    #------------------------------------------------------------------#
    # Combine labels of the training images, convert the combined and true
    # labels into mask images, view both masks on the same axis, and
    # calculate the dice coefficient and classification error.

    # Combine labels of the first four training images by majority vote:
    predicted_labels = stats.mode(all_labels_matrix[:, :4], axis=1)[0].ravel()

    # Convert combined label into mask image:
    predicted_mask = predicted_labels.reshape(240, 240)

    # Convert true label (subject 5) into mask image:
    true_mask = all_labels_matrix[:, 4].reshape(240, 240)

    class_error = util.classification_error(all_labels_matrix[:, 4], predicted_labels)
    dice_overlap = util.dice_overlap(all_labels_matrix[:, 4], predicted_labels)

    print("The error: {:.2f}".format(class_error))
    print("Dice coefficient: {:.2f}".format(dice_overlap))

    fig = plt.figure(figsize=(10, 13))
    ax1 = fig.add_subplot(121)
    ax1.imshow(predicted_mask, cmap='Oranges_r')

    ax2 = fig.add_subplot(122)
    ax2.imshow(true_mask)
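The comment above asks to view both masks on the same axis, while the snippet plots them side by side. A minimal sketch of the alpha-blended overlay used in the other examples:

import matplotlib.pyplot as plt

def show_masks_overlay(true_mask, predicted_mask):
    # Overlay the predicted mask on the ground-truth mask on a single axis,
    # as done in the alpha-blended plots elsewhere in these examples.
    fig, ax = plt.subplots(figsize=(8, 8))
    ax.imshow(true_mask, cmap='gray')
    ax.imshow(predicted_mask, cmap='viridis', alpha=0.5)
    return fig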
def segmentation_mymethod(train_data,
                          train_labels,
                          test_data,
                          test_labels,
                          task='brain',
                          method='nearest neighbour',
                          testDice=True):
    # segments the image based on your own method!
    # Input:
    # train_data_matrix   num_pixels x num_features x num_subjects matrix of
    # features
    # train_labels_matrix num_pixels x num_subjects matrix of labels
    # test_data           num_pixels x num_features test data
    # task           String corresponding to the segmentation task: either 'brain' or 'tissue'
    # Output:
    # predicted_labels    Predicted labels for the test slice

    #------------------------------------------------------------------#
    #TODO: Implement your method here
    if method == 'kmeans':
        predicted_labels = seg.kmeans_clustering(test_data, K=4)
    elif method == 'nearest neighbour':
        predicted_labels = seg.nn_classifier(train_data, train_labels,
                                             test_data)
    elif method == 'knn':
        predicted_labels = seg.knn_classifier(train_data,
                                              train_labels,
                                              test_data,
                                              k=4)
    elif method == 'atlas':
        predicted_labels = seg.segmentation_atlas(train_data, train_labels,
                                                  test_data)

    predicted_labels = predicted_labels.astype(bool)
    test_labels = test_labels.astype(bool)

    err = util.classification_error(test_labels, predicted_labels)
    print('Error:\n{}'.format(err))

    if testDice:
        dice = util.dice_multiclass(test_labels, predicted_labels)
        print('Dice coefficient:\n{}'.format(dice))
    #------------------------------------------------------------------#
    return predicted_labels
Example #17
def segmentation_demo():
    #only SECTION 2 is needed for what we want to do
    train_subject = 1
    test_subject = 2
    train_slice = 1
    test_slice = 1
    task = 'tissue'
    #SECTION 1 (this section has nothing to do with SECTION 2)

    #Load data from a train and test subject
    train_data, train_labels, train_feature_labels = util.create_dataset(
        train_subject, train_slice, task)
    test_data, test_labels, test_feature_labels = util.create_dataset(
        test_subject, test_slice, task)

    util.scatter_data(train_data, train_labels, 0, 6)
    util.scatter_data(test_data, test_labels, 0, 6)

    predicted_labels = seg.segmentation_atlas(None, train_labels, None)

    err = util.classification_error(test_labels, predicted_labels)
    dice = util.dice_overlap(test_labels, predicted_labels)

    #Display results
    true_mask = test_labels.reshape(240, 240)
    predicted_mask = predicted_labels.reshape(240, 240)

    fig = plt.figure(figsize=(8, 8))
    ax1 = fig.add_subplot(111)
    ax1.imshow(true_mask, 'gray')
    ax1.imshow(predicted_mask, 'viridis', alpha=0.5)
    print('Subject {}, slice {}.\nErr {}, dice {}'.format(
        test_subject, test_slice, err, dice))

    ## SECTION 2:Compare methods
    num_images = 5
    num_methods = 3
    im_size = [240, 240]

    all_errors = np.empty([num_images, num_methods])
    all_errors[:] = np.nan
    all_dice = np.empty([num_images, num_methods])
    all_dice[:] = np.nan

    all_subjects = np.arange(5)  #list of all subjects [0, 1, 2, 3, 4]
    train_slice = 2
    task = 'tissue'
    all_data_matrix = np.empty(
        [train_data.shape[0], train_data.shape[1], num_images])
    all_labels_matrix = np.empty([train_labels.size, num_images])

    #Load datasets once
    print('Loading data for ' + str(num_images) + ' subjects...')

    for i in all_subjects:
        sub = i + 1
        train_data, train_labels, train_feature_labels = util.create_dataset(
            sub, train_slice, task)
        all_data_matrix[:, :, i] = train_data
        all_labels_matrix[:, i] = train_labels.flatten()

    print('Finished loading data.\nStarting segmentation...')

    #Go through each subject, taking i-th subject as the test
    for i in all_subjects:
        sub = i + 1
        #Define training subjects as all, except the test subject
        train_subjects = all_subjects.copy()
        train_subjects = np.delete(train_subjects, i)

        train_data_matrix = all_data_matrix[:, :, train_subjects]
        train_labels_matrix = all_labels_matrix[:, train_subjects]
        test_data = all_data_matrix[:, :, i]
        test_labels = all_labels_matrix[:, i]
        test_shape_1 = test_labels.reshape(im_size[0], im_size[1])

        fig = plt.figure(figsize=(15, 5))

        predicted_labels, predicted_labels2 = seg.segmentation_combined_atlas(
            train_labels_matrix)
        all_errors[i, 0] = util.classification_error(test_labels,
                                                     predicted_labels2)
        all_dice[i, 0] = util.dice_multiclass(test_labels, predicted_labels2)
        predicted_mask_1 = predicted_labels2.reshape(im_size[0], im_size[1])
        ax1 = fig.add_subplot(131)
        ax1.imshow(test_shape_1, 'gray')
        ax1.imshow(predicted_mask_1, 'viridis', alpha=0.5)
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 0],
                                                    all_dice[i, 0])
        ax1.set_xlabel(text_str)
        ax1.set_title('Subject {}: Combined atlas'.format(sub))

        predicted_labels, predicted_labels2 = seg.segmentation_combined_knn(
            train_data_matrix, train_labels_matrix, test_data)
        all_errors[i, 1] = util.classification_error(test_labels,
                                                     predicted_labels2)
        all_dice[i, 1] = util.dice_multiclass(test_labels, predicted_labels2)
        predicted_mask_2 = predicted_labels2.reshape(im_size[0], im_size[1])
        ax2 = fig.add_subplot(132)
        ax2.imshow(test_shape_1, 'gray')
        ax2.imshow(predicted_mask_2, 'viridis', alpha=0.5)
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 1],
                                                    all_dice[i, 1])
        ax2.set_xlabel(text_str)
        ax2.set_title('Subject {}: Combined k-NN'.format(sub))

        #OUR METHOD
        #predict the labels using our method
        predicted_labels_mymethod = segmentation_mymethod(train_data_matrix,
                                                          train_labels_matrix,
                                                          test_data,
                                                          num_iter=100,
                                                          mu=0.1)

        #determine error and dice (multiclass, since there are more classes)
        all_errors[i, 2] = util.classification_error(test_labels,
                                                     predicted_labels_mymethod)
        all_dice[i, 2] = util.dice_multiclass(test_labels,
                                              predicted_labels_mymethod)

        #reshape the predicted labels in order to plot the results
        predicted_mask_3 = predicted_labels_mymethod.reshape(
            im_size[0], im_size[1])

        #plot the predicted image over the real image
        plt.imshow(predicted_mask_3, 'viridis')
        ax3 = fig.add_subplot(133)
        ax3.imshow(test_shape_1, 'gray')
        ax3.imshow(predicted_mask_3, 'viridis', alpha=0.5)
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 2],
                                                    all_dice[i, 2])
        ax3.set_xlabel(text_str)
        ax3.set_title('Subject {}: My method'.format(sub))

        #save the figure after every loop (3 subimages/plots)
        fig.savefig("Results for test subject {}".format(sub))
Example #18
def learning_curve():

    # Load training and test data
    train_data, train_labels = seg.generate_gaussian_data(1000)
    test_data, test_labels = seg.generate_gaussian_data(1000)
    [train_data, test_data] = seg.normalize_data(train_data, test_data)

    #Define parameters
    train_sizes = np.array([1, 3, 10, 30, 100, 300])
    k = 1
    num_iter = 3  #How often to repeat the experiment

    #Store errors
    test_error = np.empty([len(train_sizes),num_iter])
    test_error[:] = np.nan
    test_dice = np.empty([len(train_sizes),num_iter])
    test_dice[:] = np.nan

    #------------------------------------------------------------------#
    #TODO: Store errors for training data
    #------------------------------------------------------------------#

    ## Train and test with different values
    for i in np.arange(len(train_sizes)):
        for j in np.arange(num_iter):
            print('train_size = {}, iter = {}'.format(train_sizes[i], j))
            #Subsample training set
            ix = np.random.randint(len(train_data), size=train_sizes[i])
            subset_train_data = train_data[ix,:]
            subset_train_labels = train_labels[ix,:]

            #Train classifier
            neigh = KNeighborsClassifier(n_neighbors=k)
            neigh.fit(subset_train_data, subset_train_labels.ravel())
            #Evaluate
            predicted_test_labels = neigh.predict(test_data)

            test_labels = test_labels.astype(bool)
            predicted_test_labels = predicted_test_labels.astype(bool)

            test_error[i,j] = util.classification_error(test_labels, predicted_test_labels)
            test_dice[i,j] = util.dice_overlap(test_labels, predicted_test_labels)

            #------------------------------------------------------------------#
            #TODO: Predict training labels and evaluate
            #------------------------------------------------------------------#

    ## Display results
    fig = plt.figure(figsize=(8,8))
    ax1 = fig.add_subplot(111)
    x = np.log(train_sizes)
    y_test = np.mean(test_error,1)
    yerr_test = np.std(test_error,1)
    p1 = ax1.errorbar(x, y_test, yerr=yerr_test, label='Test error')

    #------------------------------------------------------------------#
    #TODO: Plot training size
    #------------------------------------------------------------------#

    ax1.set_xlabel('Number of training samples (k)')
    ax1.set_ylabel('error')
    ticks = list(x)
    ax1.set_xticks(ticks)
    tick_lbls = [str(i) for i in train_sizes]
    ax1.set_xticklabels(tick_lbls)
    ax1.grid()
    ax1.legend()
Example #19
def learning_curve():
    # Load training and test data
    # train_data, train_labels = seg.generate_gaussian_data(1000)
    train_data, train_labels, _ = util.create_dataset(1, 1, 'brain')
    # test_data, test_labels = seg.generate_gaussian_data(1000)
    test_data, test_labels, _ = util.create_dataset(2, 1, 'brain')
    train_data, test_data = seg.normalize_data(train_data, test_data)

    # Define parameters
    train_sizes = np.logspace(0.1, 3.0, num=15).astype(int)
    k = 1
    num_iter = 3  # How often to repeat the experiment

    # Store errors
    test_error = np.empty([len(train_sizes), num_iter])
    test_error[:] = np.nan
    test_dice = np.empty([len(train_sizes), num_iter])
    test_dice[:] = np.nan

    train_error = np.empty([len(train_sizes), num_iter])
    train_error[:] = np.nan
    train_dice = np.empty([len(train_sizes), num_iter])
    train_dice[:] = np.nan

    ## Train and test with different values
    for i in np.arange(len(train_sizes)):
        for j in np.arange(num_iter):
            print('train_size = {}, iter = {}'.format(train_sizes[i], j))
            # Subsample training set
            ix = np.random.randint(len(train_data), size=train_sizes[i])
            subset_train_data = train_data[ix, :]
            subset_train_labels = train_labels[ix, :]

            # Train classifier
            neigh = KNeighborsClassifier(n_neighbors=k)
            neigh.fit(subset_train_data, subset_train_labels.ravel())
            # Evaluate
            predicted_test_labels = neigh.predict(test_data)

            test_labels = test_labels.astype(bool)
            predicted_test_labels = predicted_test_labels.astype(bool)

            test_error[i, j] = util.classification_error(test_labels, predicted_test_labels)
            test_dice[i, j] = util.dice_overlap(test_labels, predicted_test_labels)

            predicted_train_labels = neigh.predict(train_data).astype(bool)
            train_labels_bool = train_labels.astype(bool)

            train_error[i, j] = util.classification_error(train_labels_bool, predicted_train_labels)
            train_dice[i, j] = util.dice_overlap(train_labels_bool, predicted_train_labels)

    ## Display results
    fig = plt.figure(figsize=(8, 8))
    gs = fig.add_gridspec(2, 2)
    ax1 = fig.add_subplot(gs[0, :])
    ax2 = fig.add_subplot(gs[1, :])
    x = np.log(train_sizes)
    ticks = list(x)
    tick_lbls = [str(i) for i in train_sizes]

    y_test = np.mean(test_error, 1)
    y_train = np.mean(train_error, 1)

    yerr_test = np.std(test_error, 1)
    yerr_train = np.std(train_error, 1)

    p1 = ax1.errorbar(x, y_test, yerr=yerr_test, label='Test error')
    p2 = ax2.errorbar(x, y_train, yerr=yerr_train, label='Train error')

    ax1.set_xlabel('Number of training samples (k)')
    ax1.set_ylabel('error')
    ax1.set_xticks(ticks)
    ax1.set_xticklabels(tick_lbls)
    ax1.grid()
    ax1.legend()

    ax2.set_xlabel('Number of training samples (k)')
    ax2.set_ylabel('error')
    ax2.set_xticks(ticks)
    ax2.set_xticklabels(tick_lbls)
    ax2.grid()
    ax2.legend()
Example #20
def segmentation_demo():

    # Data name specification
    train_subject = 1
    test_subject = 2
    train_slice = 1
    test_slice = 1
    task = 'tissue'

    # Load data
    train_data, train_labels, train_feature_labels = util.create_dataset(
        train_subject, train_slice, task)
    test_data, test_labels, test_feature_labels = util.create_dataset(
        test_subject, test_slice, task)

    # find the predicted labels (here: the train_labels)
    predicted_labels = seg.segmentation_atlas(None, train_labels, None)

    # Calculate the error and dice score of these predicted labels in comparison to test labels
    err = util.classification_error(test_labels, predicted_labels)
    dice = util.dice_overlap(test_labels, predicted_labels)

    # Display results
    true_mask = test_labels.reshape(240, 240)
    predicted_mask = predicted_labels.reshape(240, 240)

    fig = plt.figure(figsize=(8, 8))
    ax1 = fig.add_subplot(111)
    ax1.imshow(true_mask, 'gray')
    ax1.imshow(predicted_mask, 'viridis', alpha=0.5)
    print('Subject {}, slice {}.\nErr {}, dice {}'.format(
        test_subject, test_slice, err, dice))

    # COMPARE METHODS
    num_images = 5
    num_methods = 3
    im_size = [240, 240]

    # make space for error and dice data
    all_errors = np.empty([num_images, num_methods])
    all_errors[:] = np.nan
    all_dice = np.empty([num_images, num_methods])
    all_dice[:] = np.nan

    # data name specification
    all_subjects = np.arange(num_images)
    train_slice = 1
    task = 'tissue'

    # make space for data
    all_data_matrix = np.empty(
        [train_data.shape[0], train_data.shape[1], num_images])
    # all_labels_matrix = np.empty([train_labels.size, num_images], dtype=bool)
    all_labels_matrix = np.empty([train_labels.size, num_images])

    # Load datasets once
    print('Loading data for ' + str(num_images) + ' subjects...')

    for i in all_subjects:
        sub = i + 1
        train_data, train_labels, train_feature_labels = util.create_dataset(
            sub, train_slice, task)
        all_data_matrix[:, :, i] = train_data
        all_labels_matrix[:, i] = train_labels.flatten()

    print('Finished loading data.\nStarting segmentation...')

    # Go through each subject, taking i-th subject as the test
    for i in np.arange(num_images):
        sub = i + 1

        # Define training subjects as all, except the test subject
        train_subjects = all_subjects.copy()
        train_subjects = np.delete(train_subjects, i)

        # Obtain data about the chosen amount of subjects
        train_data_matrix = all_data_matrix[:, :, train_subjects]
        train_labels_matrix = all_labels_matrix[:, train_subjects]
        test_data = all_data_matrix[:, :, i]
        test_labels = all_labels_matrix[:, i]
        test_shape_1 = test_labels.reshape(im_size[0], im_size[1])

        fig = plt.figure(figsize=(15, 5))

        # Get predicted labels from atlas method
        predicted_labels = seg.segmentation_combined_atlas(train_labels_matrix)
        all_errors[i, 0] = util.classification_error(test_labels,
                                                     predicted_labels)
        all_dice[i, 0] = util.dice_overlap(test_labels, predicted_labels)

        # Plot atlas method
        predicted_mask_1 = predicted_labels.reshape(im_size[0], im_size[1])
        ax1 = fig.add_subplot(131)
        ax1.imshow(test_shape_1, 'gray')
        ax1.imshow(predicted_mask_1, 'viridis', alpha=0.5)
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 0],
                                                    all_dice[i, 0])
        ax1.set_xlabel(text_str)
        ax1.set_title('Subject {}: Combined atlas'.format(sub))

        # Get predicted labels from kNN method
        predicted_labels = seg.segmentation_combined_knn(
            train_data_matrix, train_labels_matrix, test_data)
        all_errors[i, 1] = util.classification_error(test_labels,
                                                     predicted_labels)
        all_dice[i, 1] = util.dice_overlap(test_labels, predicted_labels)

        # Plot kNN method
        predicted_mask_2 = predicted_labels.reshape(im_size[0], im_size[1])
        ax2 = fig.add_subplot(132)
        ax2.imshow(test_shape_1, 'gray')
        ax2.imshow(predicted_mask_2, 'viridis', alpha=0.5)
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 1],
                                                    all_dice[i, 1])
        ax2.set_xlabel(text_str)
        ax2.set_title('Subject {}: Combined k-NN'.format(sub))

        # Get predicted labels from my own method
        predicted_labels = segmentation_mymethod(train_data_matrix,
                                                 train_labels_matrix,
                                                 test_data, task)
        print(predicted_labels.shape)
        print(np.unique(predicted_labels))
        all_errors[i, 2] = util.classification_error(test_labels,
                                                     predicted_labels)
        all_dice[i, 2] = util.dice_overlap(test_labels, predicted_labels)

        # Plot my own method
        predicted_mask_3 = predicted_labels.reshape(im_size[0], im_size[1])
        ax3 = fig.add_subplot(133)
        ax3.imshow(test_shape_1, 'gray')
        ax3.imshow(predicted_mask_3, 'viridis', alpha=0.5)
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 2],
                                                    all_dice[i, 2])
        ax3.set_xlabel(text_str)
        ax3.set_title('Subject {}: My method'.format(sub))
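Several demos call seg.segmentation_atlas(None, train_labels, None); as the comment in the function above puts it, the predicted labels here are simply the train_labels. A minimal sketch under that assumption:

def segmentation_atlas_sketch(train_data, train_labels, test_data):
    # With no image data passed in, the (already registered) training labels
    # are propagated unchanged as the prediction for the test slice.
    return train_labels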
Example #21
#Combined K-means
pred_labels_kmeans = np.empty(
    [train_data_matrix.shape[0], train_data_matrix.shape[2]])
print(train_data.shape)
for i in range(train_data_matrix.shape[2]):
    train_data, test_data = seg.normalize_data(train_data_matrix[:, :, i],
                                               test_data)

    _, _, w_final = prj.kmeans(train_data, train_labels_matrix[:, i], num_iter,
                               mu)

    temp_pred = prj.predicted_kmeans_test(w_final, test_data)

    print("Possible classes are: {}".format(np.unique(temp_pred)))
    tempdice = util.dice_multiclass(test_labels, temp_pred)
    temperr = util.classification_error(test_labels, temp_pred)

    print('Err {:.4f}, dice {:.4f}'.format(temperr, tempdice))
    pred_labels_kmeans[:, i] = temp_pred

#decision fusion
predicted_labels_kmeans_final = scipy.stats.mode(pred_labels_kmeans,
                                                 axis=1)[0].flatten()

#do a check which labels exist
print("Possible classes are: {}".format(
    np.unique(predicted_labels_kmeans_final)))

#calculate the error and dice
err = util.classification_error(test_labels, predicted_labels_kmeans_final)
dice = util.dice_multiclass(test_labels, predicted_labels_kmeans_final)
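This fragment relies on prj.kmeans and prj.predicted_kmeans_test. A hypothetical stand-in for the prediction step, assuming w_final stores one cluster centre per row:

import numpy as np
import scipy.spatial

def predicted_kmeans_test_sketch(w_final, test_data):
    # Assign every test sample to its nearest cluster centre and return the
    # centre index as the predicted label.
    D = scipy.spatial.distance.cdist(test_data, w_final, metric='euclidean')
    return np.argmin(D, axis=1)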
Example #22
def segmentation_demo():

    # Data name specification
    train_subject = 1
    test_subject = 2
    train_slice = 1
    test_slice = 1
    task = 'tissue'

    # Load data
    train_data, train_labels, train_feature_labels = util.create_dataset(
        train_subject, train_slice, task)
    test_data, test_labels, test_feature_labels = util.create_dataset(
        test_subject, test_slice, task)

    # Normalize the data and keep the top PCA-ranked features (enough to explain 95% of the variance)
    train_norm, _ = seg.normalize_data(train_data)
    Xpca, v, w, fraction_variance, ix = seg.mypca(train_norm)
    relevant_feature = int(np.sum(fraction_variance < 0.95)) + 1
    train_norm_ord = train_norm[:, ix]
    train_norm = train_norm_ord[:, :relevant_feature]

    # find the predicted labels (here: the train_labels)
    predicted_labels = seg.segmentation_atlas(None, train_labels, None)

    # Calculate the error and dice score of these predicted labels in comparison to test labels
    err = util.classification_error(test_labels, predicted_labels)
    dice = util.dice_multiclass(test_labels, predicted_labels)

    # Display results
    true_mask = test_labels.reshape(240, 240)
    predicted_mask = predicted_labels.reshape(240, 240)
    fig = plt.figure(figsize=(8, 8))
    ax1 = fig.add_subplot(111)
    ax1.imshow(true_mask, 'gray')
    ax1.imshow(predicted_mask, 'viridis', alpha=0.5)
    print('Subject {}, slice {}.\nErr {}, dice {}'.format(
        test_subject, test_slice, err, dice))

    # COMPARE METHODS
    num_images = 5
    num_methods = 3
    im_size = [240, 240]

    # make space for error and dice data
    all_errors = np.empty([num_images, num_methods])
    all_errors[:] = np.nan
    all_dice = np.empty([num_images, num_methods])
    all_dice[:] = np.nan

    # data name specification
    all_subjects = np.arange(num_images)
    train_slice = 1
    task = 'tissue'

    # make space for data
    all_data_matrix = np.empty(
        [train_norm.shape[0], train_norm.shape[1], num_images])
    all_labels_matrix = np.empty([train_labels.size, num_images])
    all_data_matrix_kmeans = np.empty(
        [train_norm.shape[0], train_norm.shape[1], num_images])
    all_labels_matrix_kmeans = np.empty([train_labels.size, num_images])

    # Load datasets once
    print('Loading data for ' + str(num_images) + ' subjects...')
    for i in all_subjects:
        sub = i + 1
        train_data, train_labels, train_feature_labels = util.create_dataset(
            sub, train_slice, task)
        train_norm, _ = seg.normalize_data(train_data)
        Xpca, v, w, fraction_variance, ix = seg.mypca(train_norm)
        relevant_labels = int(np.sum(fraction_variance < 0.95)) + 1
        train_norm_ord = train_norm[:, ix]
        train_norm = train_norm_ord[:, :relevant_labels]
        all_data_matrix[:, :, i] = train_norm
        all_labels_matrix[:, i] = train_labels.flatten()

    # Load datasets for kmeans
    print('Loading data for ' + str(num_images) + ' subjects...')
    for i in all_subjects:
        sub = i + 1
        train_data_kmeans, train_labels_kmeans, train_feature_labels_kmeans = util.create_dataset(
            sub, train_slice, task)
        train_norm_kmeans, _ = seg.normalize_data(train_data_kmeans)
        all_data_matrix_kmeans[:, :, i] = train_norm_kmeans
        all_labels_matrix_kmeans[:, i] = train_labels_kmeans.flatten()

    print('Finished loading data.\nStarting segmentation...')

    # Go through each subject, taking i-th subject as the test
    for i in np.arange(num_images):
        sub = i + 1

        # Define training subjects as all, except the test subject
        train_subjects = all_subjects.copy()
        train_subjects = np.delete(train_subjects, i)

        # Obtain data about the chosen amount of subjects
        train_data_matrix = all_data_matrix[:, :, train_subjects]
        train_labels_matrix = all_labels_matrix[:, train_subjects]
        test_data = all_data_matrix[:, :, i]
        test_labels = all_labels_matrix[:, i]
        test_shape_1 = test_labels.reshape(im_size[0], im_size[1])

        fig = plt.figure(figsize=(15, 5))

        # Get predicted labels from atlas method
        predicted_labels = seg.segmentation_combined_atlas(train_labels_matrix)
        all_errors[i, 0] = util.classification_error(test_labels,
                                                     predicted_labels)
        all_dice[i, 0] = util.dice_multiclass(test_labels, predicted_labels)

        # Plot atlas method
        predicted_mask_1 = predicted_labels.reshape(im_size[0], im_size[1])
        ax1 = fig.add_subplot(151)
        ax1.imshow(test_shape_1, 'gray')
        ax1.imshow(predicted_mask_1, 'viridis', alpha=0.5)
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 0],
                                                    all_dice[i, 0])
        ax1.set_xlabel(text_str)
        ax1.set_title('Subject {}: Combined atlas'.format(sub))

        # Get predicted labels from kNN method
        predicted_labels = seg.segmentation_combined_knn(train_data_matrix,
                                                         train_labels_matrix,
                                                         test_data,
                                                         k=10)
        all_errors[i, 1] = util.classification_error(test_labels,
                                                     predicted_labels)
        all_dice[i, 1] = util.dice_multiclass(test_labels, predicted_labels)

        # Plot kNN method
        predicted_mask_2 = predicted_labels.reshape(im_size[0], im_size[1])
        ax2 = fig.add_subplot(152)
        ax2.imshow(test_shape_1, 'gray')
        ax2.imshow(predicted_mask_2, 'viridis', alpha=0.5)
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 1],
                                                    all_dice[i, 1])
        ax2.set_xlabel(text_str)
        ax2.set_title('Subject {}: Combined k-NN'.format(sub))

        # Get predicted labels from my own method
        # all_data_matrix_bnb = np.empty([train_norm.shape[0], train_norm.shape[1], num_images])
        # all_labels_matrix_bnb = np.empty([train_labels.size, num_images])

        # for ii in all_subjects:
        #     sub = i + 1
        #     task = 'brain'
        #     train_data_bnb, train_labels_bnb, train_feature_labels_bnb = util.create_dataset(sub, train_slice, task)
        #     train_norm_bnb, _ = seg.normalize_data(train_data_bnb)
        #     Xpca, v, w, fraction_variance, ix = seg.mypca(train_norm_bnb)
        #     relevant_labels_bnb = int(np.sum(fraction_variance < 0.95)) + 1
        #     train_norm_ord_bnb = train_norm_bnb[:, ix]
        #     train_norm_bnb = train_norm_ord_bnb[:, :relevant_labels_bnb]
        #     all_data_matrix_bnb[:, :, ii] = train_norm_bnb
        #     all_labels_matrix_bnb[:, ii] = train_labels_bnb.flatten()
        #
        # qw, we, er = all_data_matrix.shape
        # for iii in np.arange(qw):
        #     for j in np.arange(er):
        #         if all_labels_matrix_bnb[iii, j] == 0:
        #             for k in np.arange(we):
        #                 all_data_matrix[iii, k, j] = 0

        # train_data_matrix = all_data_matrix[:, :, train_subjects]
        # test_data = all_data_matrix[:, :, i]

        train_data_matrix_kmeans = all_data_matrix_kmeans[:, :, train_subjects]
        train_labels_matrix_kmeans = all_labels_matrix[:, train_subjects]
        test_data_kmeans = all_data_matrix_kmeans[:, :, i]

        predicted_labels = segmentation_mymethod(train_data_matrix_kmeans,
                                                 train_labels_matrix_kmeans,
                                                 test_data_kmeans, task)
        all_errors[i, 2] = util.classification_error(test_labels,
                                                     predicted_labels)
        all_dice[i, 2] = util.dice_multiclass(test_labels, predicted_labels)

        # Plot my own method
        predicted_mask_3 = predicted_labels.reshape(im_size[0], im_size[1])
        ax3 = fig.add_subplot(153)
        ax3.imshow(test_shape_1, 'gray')
        ax3.imshow(predicted_mask_3, 'viridis', alpha=0.5)
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 2],
                                                    all_dice[i, 2])
        ax3.set_xlabel(text_str)
        ax3.set_title('Subject {}: My method'.format(sub))

        ax4 = fig.add_subplot(154)
        ax4.imshow(predicted_mask_3, 'viridis')
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 2],
                                                    all_dice[i, 2])
        ax4.set_xlabel(text_str)
        ax4.set_title('Subject {}: My method'.format(sub))

        ax5 = fig.add_subplot(155)
        ax5.imshow(test_shape_1, 'gray')
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 2],
                                                    all_dice[i, 2])
        ax5.set_xlabel(text_str)
        ax5.set_title('Subject {}: My method'.format(sub))
Example #23
#select certain data:
train_data_matrix = all_data_matrix[:, :, train_subjects]
train_data_matrix = train_data_matrix[:, features, :]
test_data = test_data[:, features]
train_labels_matrix = train_labels_matrix[:, train_subjects]

#predict test data labels
predicted_labels, predicted_labels2_atlas = seg.segmentation_combined_atlas(
    train_labels_matrix)
predicted_labels, predicted_labels2_knn = seg.segmentation_combined_knn(
    train_data_matrix, train_labels_matrix, test_data)

#calculate error and dice
dice_atlas = util.dice_multiclass(test_labels, predicted_labels2_atlas)
err_atlas = util.classification_error(test_labels, predicted_labels2_atlas)

dice_knn = util.dice_multiclass(test_labels, predicted_labels2_knn)
err_knn = util.classification_error(test_labels, predicted_labels2_knn)

#needed for plotting the 'real' data and the predicted
test_shape = test_labels.reshape(im_size[0], im_size[1])

#Plot for combined atlas
predicted_mask_atlas = predicted_labels2_atlas.reshape(im_size[0], im_size[1])
fig, ax = plt.subplots()
ax.imshow(test_shape, 'gray')

ax.imshow(predicted_mask_atlas, 'viridis', alpha=0.5)
text_str = 'Err {:.4f}, dice {:.4f}'.format(err_atlas, dice_atlas)
ax.set_xlabel(text_str)
Example #24
features = [1,4]
train_data, _ = seg.normalize_data(train_data[:, features])
test_data, _ = seg.normalize_data(test_data[:, features])
all_data_matrix, _ = seg.normalize_data(all_data_matrix[:, features, :])


#predicted_train = seg.kmeans_clustering(train_data, K=4)
if (task == 'tissue'):
    k = 4
else:
    k = 2
    
kmeans_cost, train_predicted, w_final = prj.kmeans(train_data, train_labels, k, mu=0.1, num_iter=5)

dice = util.dice_multiclass(train_labels, train_predicted)
error = util.classification_error(train_labels, train_predicted)

print("Dice score is {:.2f}".format(dice))
print("Error is {:.2f}".format(error))

#Use my method
#predicted_labels = prj.segmentation_mymethod(train_data, train_labels, test_data, task='tissue')
pred_labels_kmeans = prj.predicted_kmeans_test(w_final, test_data).T

_, pred_labels_cat = seg.segmentation_combined_atlas(train_labels, combining='mode')
_, pred_labels_cnn = seg.segmentation_combined_knn(all_data_matrix, all_labels_matrix, test_data, k=1)

pred_labels_cat = pred_labels_cat.T
pred_labels_cnn = pred_labels_cnn.T

concat_labels = np.vstack((pred_labels_kmeans, pred_labels_cat, pred_labels_cnn)).T
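The fragment stops after stacking the k-means, combined-atlas and combined-kNN predictions into concat_labels (one column per method). A plausible, assumed final step, following the decision-fusion pattern used in the k-means fragment above, is a per-pixel majority vote:

from scipy import stats

# Assumed continuation: fuse the three per-pixel predictions by majority vote.
final_labels = stats.mode(concat_labels, axis=1)[0].ravel()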
def segmentation_demo():

    train_subject = 1
    test_subject = 2
    train_slice = 1
    test_slice = 1
    task = 'brain'

    #Load data
    train_data, train_labels, train_feature_labels = util.create_dataset(
        train_subject, train_slice, task)
    test_data, test_labels, test_feature_labels = util.create_dataset(
        test_subject, test_slice, task)

    predicted_labels = seg.segmentation_atlas(None, train_labels, None)

    err = util.classification_error(test_labels, predicted_labels)
    dice = util.dice_overlap(test_labels, predicted_labels)

    #Display results
    true_mask = test_labels.reshape(240, 240)
    predicted_mask = predicted_labels.reshape(240, 240)

    # fig = plt.figure(figsize=(8,8))
    # ax1 = fig.add_subplot(111)
    # ax1.imshow(true_mask, 'gray')
    # ax1.imshow(predicted_mask, 'viridis', alpha=0.5)
    # print('Subject {}, slice {}.\nErr {}, dice {}'.format(test_subject, test_slice, err, dice))

    ## Compare methods
    num_images = 5
    num_methods = 3
    im_size = [240, 240]

    all_errors = np.empty([num_images, num_methods])
    all_errors[:] = np.nan
    all_dice = np.empty([num_images, num_methods])
    all_dice[:] = np.nan

    all_subjects = np.arange(num_images)
    train_slice = 1
    task = 'brain'
    all_data_matrix = np.empty(
        [train_data.shape[0], train_data.shape[1], num_images])
    all_labels_matrix = np.empty([train_labels.size, num_images], dtype=bool)

    #Load datasets once
    print('Loading data for ' + str(num_images) + ' subjects...')

    for i in all_subjects:
        sub = i + 1
        train_data, train_labels, train_feature_labels = util.create_dataset(
            sub, train_slice, task)
        all_data_matrix[:, :, i] = train_data
        all_labels_matrix[:, i] = train_labels.flatten()

    print('Finished loading data.\nStarting segmentation...')

    #Go through each subject, taking i-th subject as the test
    for i in np.arange(num_images):
        sub = i + 1
        #Define training subjects as all, except the test subject
        train_subjects = all_subjects.copy()
        train_subjects = np.delete(train_subjects, i)

        train_data_matrix = all_data_matrix[:, :, train_subjects]
        train_labels_matrix = all_labels_matrix[:, train_subjects]
        test_data = all_data_matrix[:, :, i]
        test_labels = all_labels_matrix[:, i]
        test_shape_1 = test_labels.reshape(im_size[0], im_size[1])

        fig = plt.figure(figsize=(15, 5))

        predicted_labels = seg.segmentation_combined_atlas(train_labels_matrix)
        all_errors[i, 0] = util.classification_error(test_labels,
                                                     predicted_labels)
        all_dice[i, 0] = util.dice_overlap(test_labels, predicted_labels)
        predicted_mask_1 = predicted_labels.reshape(im_size[0], im_size[1])
        ax1 = fig.add_subplot(131)
        ax1.imshow(test_shape_1, 'gray')
        ax1.imshow(predicted_mask_1, 'viridis', alpha=0.5)
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 0],
                                                    all_dice[i, 0])
        ax1.set_xlabel(text_str)
        ax1.set_title('Subject {}: Combined atlas'.format(sub))

        predicted_labels = seg.segmentation_combined_knn(
            train_data_matrix, train_labels_matrix, test_data)
        all_errors[i, 1] = util.classification_error(test_labels,
                                                     predicted_labels)
        all_dice[i, 1] = util.dice_overlap(test_labels, predicted_labels)
        predicted_mask_2 = predicted_labels.reshape(im_size[0], im_size[1])
        ax2 = fig.add_subplot(132)
        ax2.imshow(test_shape_1, 'gray')
        ax2.imshow(predicted_mask_2, 'viridis', alpha=0.5)
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 1],
                                                    all_dice[i, 1])
        ax2.set_xlabel(text_str)
        ax2.set_title('Subject {}: Combined k-NN'.format(sub))

        predicted_labels = segmentation_mymethod(train_data_matrix,
                                                 train_labels_matrix,
                                                 test_data, task)
        all_errors[i, 2] = util.classification_error(test_labels,
                                                     predicted_labels)
        all_dice[i, 2] = util.dice_overlap(test_labels, predicted_labels)
        predicted_mask_3 = predicted_labels.reshape(im_size[0], im_size[1])
        ax3 = fig.add_subplot(133)
        ax3.imshow(test_shape_1, 'gray')
        ax3.imshow(predicted_mask_3, 'viridis', alpha=0.5)
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 2],
                                                    all_dice[i, 2])
        ax3.set_xlabel(text_str)
        ax3.set_title('Subject {}: My method'.format(sub))