Example #1
def segmentation_combined_atlas_minmax_test():
    task = 'brain'
    n = 5
    all_subjects = np.arange(n)
    train_slice = 1
    tmp_data, tmp_labels, tmp_feature_labels = util.create_dataset(1,train_slice,task)
    all_data_matrix = np.empty((tmp_data.shape[0], tmp_data.shape[1], n))
    all_labels_matrix = np.empty((tmp_labels.shape[0], n))

    #Load datasets once
    for i in all_subjects:
        train_data, train_labels, train_feature_labels = util.create_dataset(i+1,train_slice,task)
        all_data_matrix[:,:,i] = train_data
        all_labels_matrix[:,i] = train_labels.ravel()

    predicted_labels_min = seg.segmentation_combined_atlas(all_labels_matrix, combining='min')
    predicted_labels_max = seg.segmentation_combined_atlas(all_labels_matrix, combining='max')

    # The last subject's labels are used as the reference for evaluation
    test_labels = all_labels_matrix[:,4].astype(bool)

    print('Combining method = min:')
    err = util.classification_error(test_labels, predicted_labels_min)
    print('Error:\n{}'.format(err))
    dice = util.dice_overlap(test_labels, predicted_labels_min)
    print('Dice coefficient:\n{}'.format(dice))

    print('Combining method = max:')
    err = util.classification_error(test_labels, predicted_labels_max)
    print('Error:\n{}'.format(err))
    dice = util.dice_overlap(test_labels, predicted_labels_max)
    print('Dice coefficient:\n{}'.format(dice))
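# A minimal sketch of how the 'min' and 'max' combining rules over a stack of
# binary atlas labels could be implemented with NumPy. This only illustrates
# the idea; the actual seg.segmentation_combined_atlas may differ in details
# such as thresholding or tie handling.
import numpy as np

def combine_atlas_labels_sketch(labels_matrix, combining='min'):
    # labels_matrix: (num_pixels, num_subjects) array of binary labels
    if combining == 'min':
        combined = np.min(labels_matrix, axis=1)  # foreground only where all atlases agree
    elif combining == 'max':
        combined = np.max(labels_matrix, axis=1)  # foreground where any atlas says so
    else:
        raise ValueError('Unknown combining method: {}'.format(combining))
    return combined.astype(bool)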
Example #2
def nn_classifier_test_brains(testDice=False):

    # Subject 1, slice 1 is the train data
    X, Y, feature_labels_train = util.create_dataset(1,1,'brain')
    N = 1000
    ix = np.random.randint(len(X), size=N)
    train_data = X[ix,:]
    train_labels = Y[ix,:]
    # Subject 3, slice 1 is the test data
    test_data, test_labels, feature_labels_test  = util.create_dataset(3,1,'brain')

    predicted_labels = seg.nn_classifier(train_data, train_labels, test_data)
    predicted_labels = predicted_labels.astype(bool)
    test_labels = test_labels.astype(bool)
    err = util.classification_error(test_labels, predicted_labels)
    print('Error:\n{}'.format(err))

    if testDice:
        dice = util.dice_overlap(test_labels, predicted_labels)
        print('Dice coefficient:\n{}'.format(dice))
    else:
        I = plt.imread('../data/dataset_brains/3_1_t1.tif')
        GT = plt.imread('../data/dataset_brains/3_1_gt.tif')
        gt_mask = GT>0
        gt_labels = gt_mask.flatten() # labels
        predicted_mask = predicted_labels.reshape(I.shape)
        fig = plt.figure(figsize=(15,5))
        ax1 = fig.add_subplot(131)
        ax1.imshow(I)
        ax2 = fig.add_subplot(132)
        ax2.imshow(predicted_mask)
        ax3  = fig.add_subplot(133)
        ax3.imshow(gt_mask)
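# seg.nn_classifier itself is not shown in these examples. A minimal
# 1-nearest-neighbour sketch built on scipy.spatial.distance.cdist, assuming
# train_labels contains one label per training row; the real implementation
# may differ (e.g. it may process the test data in chunks to save memory).
import numpy as np
from scipy.spatial.distance import cdist

def nn_classifier_sketch(train_data, train_labels, test_data):
    # Pairwise Euclidean distances between every test and training sample
    D = cdist(test_data, train_data, metric='euclidean')
    # Index of the closest training sample for each test sample
    nearest = np.argmin(D, axis=1)
    return train_labels[nearest]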
def segmentation_combined_atlas_test():

    task = 'brain'
    n = 5
    all_subjects = np.arange(n)
    train_slice = 1
    tmp_data, tmp_labels, tmp_feature_labels = util.create_dataset(1,train_slice,task)
    all_data_matrix = np.empty((tmp_data.shape[0], tmp_data.shape[1], n))
    all_labels_matrix = np.empty((tmp_labels.shape[0], n))

    #Load datasets once
    for i in all_subjects:
        train_data, train_labels, train_feature_labels = util.create_dataset(i+1,train_slice,task)
        all_data_matrix[:,:,i] = train_data
        all_labels_matrix[:,i] = train_labels.ravel()

    #------------------------------------------------------------------#
    # Combine the labels of the training images:
    predicted_labels = seg.segmentation_combined_atlas(all_labels_matrix)

    # Convert the combined label into a mask image:
    predicted_mask = predicted_labels.reshape(240, 240)

    # Convert the true label (ground truth of subject 1) into a mask image:
    GT = plt.imread('../data/dataset_brains/1_1_gt.tif')
    gt_mask = GT > 0
    gt_vec = gt_mask.flatten()

    # Calculate classification error and Dice coefficient:
    err = util.classification_error(gt_vec, predicted_labels)
    dice = util.dice_overlap(gt_vec, predicted_labels)
    print('Error', err, 'Dice', dice)

    # View both masks on the same axis:
    plt.figure()
    plt.imshow(predicted_mask, cmap='gray', alpha=0.5)
    plt.imshow(gt_mask, cmap='jet', alpha=0.5)
    plt.show()
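# For reference, a minimal sketch of the Dice overlap for binary label vectors,
# as used via util.dice_overlap above; the actual utility may differ in detail.
import numpy as np

def dice_overlap_sketch(true_labels, predicted_labels):
    t = np.asarray(true_labels).astype(bool).ravel()
    p = np.asarray(predicted_labels).astype(bool).ravel()
    # Dice = 2 * |T intersect P| / (|T| + |P|)
    return 2.0 * np.sum(t & p) / (np.sum(t) + np.sum(p))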
def segmentation_combined_atlas_test():
    task = 'brain'
    n = 5
    all_subjects = np.arange(n)
    train_slice = 1
    tmp_data, tmp_labels, tmp_feature_labels = util.create_dataset(1, train_slice, task)
    all_data_matrix = np.empty((tmp_data.shape[0], tmp_data.shape[1], n))
    all_labels_matrix = np.empty((tmp_labels.shape[0], n))

    # Load datasets once
    for i in all_subjects:
        train_data, train_labels, train_feature_labels = util.create_dataset(i + 1, train_slice, task)
        all_data_matrix[:, :, i] = train_data
        all_labels_matrix[:, i] = train_labels.ravel()

    # Combine labels of training images:
    predicted_labels = seg.segmentation_combined_atlas(all_labels_matrix, combining='mode')

    # Convert combined label into mask image:
    predicted_mask = predicted_labels.reshape(240, 240)

    # Convert true label into mask image:
    true_mask = all_labels_matrix[:, 4].reshape(240, 240)

    plt.imshow(predicted_mask + true_mask)

    err = util.classification_error(true_mask, predicted_mask)
    dice = util.dice_overlap(true_mask, predicted_mask)

    print("error: {0}, dice: {1}".format(err, dice))
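# A sketch of what combining='mode' (per-pixel majority voting) could look like
# using scipy.stats.mode; the actual seg implementation may differ.
import numpy as np
from scipy import stats

def combine_labels_mode_sketch(labels_matrix):
    # labels_matrix: (num_pixels, num_subjects); take the majority label per pixel
    combined, _ = stats.mode(labels_matrix, axis=1)
    return np.asarray(combined).ravel()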
def feature_curve(use_random=False):
    # Load training and test data
    train_data, train_labels, train_feature_labels = util.create_dataset(1, 1, 'brain')
    test_data, test_labels, test_feature_labels = util.create_dataset(2, 1, 'brain')

    if use_random:
        train_data = np.random.randn(train_data.shape[0], train_data.shape[1])

    # Normalize data
    train_data, test_data = seg.normalize_data(train_data, test_data)

    # Define parameters
    feature_sizes = np.arange(train_data.shape[1]) + 1
    train_size = 10
    k = 3
    num_iter = 5

    # Store errors
    test_error = np.empty([len(feature_sizes), num_iter])
    test_error[:] = np.nan
    train_error = np.empty([len(feature_sizes), num_iter])
    train_error[:] = np.nan

    # Train and test with different sizes
    for i in np.arange(len(feature_sizes)):
        for j in np.arange(num_iter):
            print('feature size = {}, iter = {}'.format(feature_sizes[i], j))
            start_time = timeit.default_timer()
            # Subsample training set
            ix = np.random.randint(len(train_data), size=train_size)
            subset_train_data = train_data[ix, :]
            subset_train_labels = train_labels[ix, :]

            # Train classifier
            neigh = KNeighborsClassifier(n_neighbors=k)
            neigh.fit(subset_train_data[:, :feature_sizes[i]], subset_train_labels.ravel())
            # Evaluate
            predicted_test_labels = neigh.predict(test_data[:, :feature_sizes[i]])
            predicted_train_labels = neigh.predict(subset_train_data[:, :feature_sizes[i]])

            test_error[i, j] = util.classification_error(test_labels, predicted_test_labels)
            train_error[i, j] = util.classification_error(subset_train_labels, predicted_train_labels)

            # Timer log
            elapsed = timeit.default_timer() - start_time
            # print('elapsed time = {}'.format(elapsed))

    ## Display results
    fig = plt.figure(figsize=(8, 8))
    ax1 = fig.add_subplot(111)
    x = feature_sizes
    y_test = np.mean(test_error, 1)
    yerr_test = np.std(test_error, 1)
    p1 = ax1.errorbar(x, y_test, yerr=yerr_test, label='Test error')
    y_train = np.mean(train_error, 1)
    yerr_train = np.std(train_error, 1)
    p2 = ax1.errorbar(x, y_train, yerr=yerr_train, label='Train error')

    ax1.set_xlabel('Number of features')
    ax1.set_ylabel('Error')
    ax1.grid()
    ax1.legend()
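# seg.normalize_data is used above but not shown. A minimal z-scoring sketch,
# assuming the test set is scaled with the training-set statistics; the real
# implementation may handle the single-argument case differently.
import numpy as np

def normalize_data_sketch(train_data, test_data=None):
    mean = train_data.mean(axis=0)
    std = train_data.std(axis=0)
    std[std == 0] = 1  # avoid division by zero for constant features
    train_norm = (train_data - mean) / std
    test_norm = (test_data - mean) / std if test_data is not None else None
    return train_norm, test_norm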
def knn_curve():

    # Load training and test data
    train_data, train_labels, train_feature_labels = util.create_dataset(
        1, 1, 'brain')
    test_data, test_labels, test_feature_labels = util.create_dataset(
        2, 1, 'brain')
    # Normalize data
    train_data, test_data = seg.normalize_data(train_data, test_data)

    #Define parameters
    num_iter = 3
    train_size = 100
    k = np.array([1, 3, 5, 9, 15, 25, 100])
    # k = np.array([1, 5, 9])

    #Store errors
    test_error = np.empty([len(k), num_iter])
    test_error[:] = np.nan
    dice = np.empty([len(k), num_iter])
    dice[:] = np.nan

    ## Train and test with different values

    for i in np.arange(len(k)):
        for j in np.arange(num_iter):
            print('k = {}, iter = {}'.format(k[i], j))
            #Subsample training set
            ix = np.random.randint(len(train_data), size=train_size)
            subset_train_data = train_data[ix, :]
            subset_train_labels = train_labels[ix, :]

            predicted_test_labels = seg.knn_classifier(subset_train_data,
                                                       subset_train_labels,
                                                       test_data, k[i])

            # #Train classifier
            # neigh = KNeighborsClassifier(n_neighbors=k[i])
            # neigh.fit(subset_train_data, subset_train_labels)
            # #Evaluate
            # predicted_test_labels = neigh.predict(test_data)

            test_error[i, j] = util.classification_error(test_labels, predicted_test_labels)
            dice[i, j] = util.dice_overlap(test_labels, predicted_test_labels)

    ## Display results
    fig = plt.figure(figsize=(8, 8))
    ax1 = fig.add_subplot(111)
    p1 = ax1.plot(k, np.mean(test_error, 1), 'r', label='error')
    p2 = ax1.plot(k, np.mean(dice, 1), 'k', label='dice')
    ax1.set_xlabel('k')
    ax1.set_ylabel('error / Dice overlap')
    ax1.grid()
    ax1.legend()
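# The commented-out lines above already hint at an sklearn equivalent. A sketch
# of what seg.knn_classifier could look like on top of KNeighborsClassifier;
# the actual implementation in the project may differ.
from sklearn.neighbors import KNeighborsClassifier

def knn_classifier_sketch(train_data, train_labels, test_data, k):
    neigh = KNeighborsClassifier(n_neighbors=int(k))
    neigh.fit(train_data, train_labels.ravel())
    return neigh.predict(test_data)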
Example #7
def segmentation_combined_atlas_test():

    task = 'brain'
    n = 5
    all_subjects = np.arange(n)
    train_slice = 1
    tmp_data, tmp_labels, tmp_feature_labels = util.create_dataset(1,train_slice,task)
    all_data_matrix = np.empty((tmp_data.shape[0], tmp_data.shape[1], n))
    all_labels_matrix = np.empty((tmp_labels.shape[0], n))

    #Load datasets once
    for i in all_subjects:
        train_data, train_labels, train_feature_labels = util.create_dataset(i+1,train_slice,task)
        all_data_matrix[:,:,i] = train_data
        all_labels_matrix[:,i] = train_labels.ravel()

    #------------------------------------------------------------------#
    # Combine labels of the training images by majority vote over the
    # first four subjects; the fifth subject is used as the test reference:
    predicted_labels = stats.mode(all_labels_matrix[:, :4], axis=1)[0].ravel()

    # Convert combined label into a mask image:
    predicted_mask = predicted_labels.reshape(240, 240)

    # Convert true label into a mask image:
    true_mask = all_labels_matrix[:, 4].reshape(240, 240)

    # Calculate classification error and Dice coefficient:
    class_error = util.classification_error(all_labels_matrix[:, 4], predicted_labels)
    dice = util.dice_overlap(all_labels_matrix[:, 4], predicted_labels)

    print("Error: {:.2f}".format(class_error))
    print("Dice coefficient: {:.2f}".format(dice))

    # View both masks side by side:
    fig = plt.figure(figsize=(10, 13))
    ax1 = fig.add_subplot(121)
    ax1.imshow(predicted_mask, cmap='Oranges_r')
    ax2 = fig.add_subplot(122)
    ax2.imshow(true_mask)
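# For reference, a minimal sketch of the classification error used throughout
# these examples (fraction of mismatching labels); util.classification_error
# may differ in detail.
import numpy as np

def classification_error_sketch(true_labels, predicted_labels):
    t = np.asarray(true_labels).ravel()
    p = np.asarray(predicted_labels).ravel()
    return np.mean(t != p)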
Example #8
def segmentation_combined_atlas_test():

    task = 'brain'
    n = 5
    all_subjects = np.arange(n)
    train_slice = 1
    tmp_data, tmp_labels, tmp_feature_labels = util.create_dataset(1,train_slice,task)
    all_data_matrix = np.empty((tmp_data.shape[0], tmp_data.shape[1], n))
    all_labels_matrix = np.empty((tmp_labels.shape[0], n))

    #Load datasets once
    for i in all_subjects:
        train_data, train_labels, train_feature_labels = util.create_dataset(i+1,train_slice,task)
        all_data_matrix[:,:,i] = train_data
        all_labels_matrix[:,i] = train_labels.ravel()
def PCA_features_demo():
    task = 'tissue'
    n = 5
    all_subjects = np.arange(n)
    train_slice = 1
    tmp_data, tmp_labels, tmp_feature_labels = util.create_dataset(
        1, train_slice, task)
    all_data_matrix = np.empty((tmp_data.shape[0], tmp_data.shape[1], n))
    all_labels_matrix = np.empty((tmp_labels.shape[0], n))

    # Load all datasets once and compile in all_data_matrix
    for i in all_subjects:
        train_data, train_labels, train_feature_labels = util.create_dataset(
            i + 1, train_slice, task)
        all_data_matrix[:, :, i] = train_data
        all_labels_matrix[:, i] = train_labels.ravel()

    # Perform PCA and plot the first patient's features against each other.
    # Also calculate the number of features needed to account for at least 95% of the variance.

    X_pca, v, w, fraction_variance = seg.mypca(all_data_matrix[:, :, 0])

    fig = plt.figure(figsize=(40, 40))
    plot = 1
    for i in range(8):
        for j in range(8):
            ax = plt.subplot(8, 8, plot)
            ax = util.scatter_data(X_pca,
                                   all_labels_matrix[:, 0],
                                   feature0=i,
                                   feature1=j,
                                   ax=ax)
            plot += 1

    needed_features = np.sum((fraction_variance < 0.95).astype(int)) + 1
    print("needed features to account for 95% of variance: {0}".format(
        needed_features))
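# seg.mypca is not shown here, and its return signature varies between the
# examples in this collection. A minimal PCA sketch via eigendecomposition of
# the covariance matrix, returning the rotated data and the cumulative
# fraction of explained variance (illustration only).
import numpy as np

def pca_sketch(X):
    Xc = X - X.mean(axis=0)
    cov = np.cov(Xc, rowvar=False)
    w, v = np.linalg.eigh(cov)      # eigenvalues in ascending order
    order = np.argsort(w)[::-1]     # sort by explained variance, descending
    w, v = w[order], v[:, order]
    X_pca = Xc.dot(v)               # project onto the principal components
    fraction_variance = np.cumsum(w) / np.sum(w)
    return X_pca, v, w, fraction_variance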
def kmeans_clustering_test():
    # Cluster the features of subject 1, slice 1 into two groups and
    # compare the result with the ground-truth brain mask
    X_data, Y, feature_labels_train = util.create_dataset(1, 1, 'brain')
    predicted_labels = seg.kmeans_clustering(X_data, K=2)

    I = plt.imread('../data/dataset_brains/1_1_t1.tif')
    GT = plt.imread('../data/dataset_brains/1_1_gt.tif')
    gt_mask = GT > 0
    gt_labels = gt_mask.flatten()  # labels
    predicted_mask = predicted_labels.reshape(I.shape)

    fig = plt.figure(figsize=(15, 5))
    ax1 = fig.add_subplot(131)
    ax1.imshow(I)
    ax2 = fig.add_subplot(132)
    ax2.imshow(predicted_mask)
    ax3 = fig.add_subplot(133)
    ax3.imshow(gt_mask)
    util.scatter_data(X_data, predicted_labels, 0, 1)
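# seg.kmeans_clustering is not shown in these examples. A short sketch using
# sklearn's KMeans on the raw feature matrix (the project implementation may
# use its own clustering loop and a different initialization).
from sklearn.cluster import KMeans

def kmeans_clustering_sketch(X, K=2):
    km = KMeans(n_clusters=K, n_init=10, random_state=0)
    return km.fit_predict(X)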
Example #11
def segmentation_demo():

    # Data name specification
    train_subject = 1
    test_subject = 2
    train_slice = 1
    test_slice = 1
    task = 'tissue'

    # Load data
    train_data, train_labels, train_feature_labels = util.create_dataset(
        train_subject, train_slice, task)
    test_data, test_labels, test_feature_labels = util.create_dataset(
        test_subject, test_slice, task)

    # find the predicted labels (here: the train_labels)
    predicted_labels = seg.segmentation_atlas(None, train_labels, None)

    # Calculate the error and dice score of these predicted labels in comparison to test labels
    err = util.classification_error(test_labels, predicted_labels)
    dice = util.dice_overlap(test_labels, predicted_labels)

    # Display results
    true_mask = test_labels.reshape(240, 240)
    predicted_mask = predicted_labels.reshape(240, 240)

    fig = plt.figure(figsize=(8, 8))
    ax1 = fig.add_subplot(111)
    ax1.imshow(true_mask, 'gray')
    ax1.imshow(predicted_mask, 'viridis', alpha=0.5)
    print('Subject {}, slice {}.\nErr {}, dice {}'.format(
        test_subject, test_slice, err, dice))

    # COMPARE METHODS
    num_images = 5
    num_methods = 3
    im_size = [240, 240]

    # make space for error and dice data
    all_errors = np.empty([num_images, num_methods])
    all_errors[:] = np.nan
    all_dice = np.empty([num_images, num_methods])
    all_dice[:] = np.nan

    # data name specification
    all_subjects = np.arange(num_images)
    train_slice = 1
    task = 'tissue'

    # make space for data
    all_data_matrix = np.empty(
        [train_data.shape[0], train_data.shape[1], num_images])
    # all_labels_matrix = np.empty([train_labels.size, num_images], dtype=bool)
    all_labels_matrix = np.empty([train_labels.size, num_images])

    # Load datasets once
    print('Loading data for ' + str(num_images) + ' subjects...')

    for i in all_subjects:
        sub = i + 1
        train_data, train_labels, train_feature_labels = util.create_dataset(
            sub, train_slice, task)
        all_data_matrix[:, :, i] = train_data
        all_labels_matrix[:, i] = train_labels.flatten()

    print('Finished loading data.\nStarting segmentation...')

    # Go through each subject, taking i-th subject as the test
    for i in np.arange(num_images):
        sub = i + 1

        # Define training subjects as all, except the test subject
        train_subjects = all_subjects.copy()
        train_subjects = np.delete(train_subjects, i)

        # Obtain data about the chosen amount of subjects
        train_data_matrix = all_data_matrix[:, :, train_subjects]
        train_labels_matrix = all_labels_matrix[:, train_subjects]
        test_data = all_data_matrix[:, :, i]
        test_labels = all_labels_matrix[:, i]
        test_shape_1 = test_labels.reshape(im_size[0], im_size[1])

        fig = plt.figure(figsize=(15, 5))

        # Get predicted labels from atlas method
        predicted_labels = seg.segmentation_combined_atlas(train_labels_matrix)
        all_errors[i, 0] = util.classification_error(test_labels,
                                                     predicted_labels)
        all_dice[i, 0] = util.dice_overlap(test_labels, predicted_labels)

        # Plot atlas method
        predicted_mask_1 = predicted_labels.reshape(im_size[0], im_size[1])
        ax1 = fig.add_subplot(131)
        ax1.imshow(test_shape_1, 'gray')
        ax1.imshow(predicted_mask_1, 'viridis', alpha=0.5)
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 0],
                                                    all_dice[i, 0])
        ax1.set_xlabel(text_str)
        ax1.set_title('Subject {}: Combined atlas'.format(sub))

        # Get predicted labels from kNN method
        predicted_labels = seg.segmentation_combined_knn(
            train_data_matrix, train_labels_matrix, test_data)
        all_errors[i, 1] = util.classification_error(test_labels,
                                                     predicted_labels)
        all_dice[i, 1] = util.dice_overlap(test_labels, predicted_labels)

        # Plot kNN method
        predicted_mask_2 = predicted_labels.reshape(im_size[0], im_size[1])
        ax2 = fig.add_subplot(132)
        ax2.imshow(test_shape_1, 'gray')
        ax2.imshow(predicted_mask_2, 'viridis', alpha=0.5)
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 1],
                                                    all_dice[i, 1])
        ax2.set_xlabel(text_str)
        ax2.set_title('Subject {}: Combined k-NN'.format(sub))

        # Get predicted labels from my own method
        predicted_labels = segmentation_mymethod(train_data_matrix,
                                                 train_labels_matrix,
                                                 test_data, task)
        print(predicted_labels.shape)
        print(np.unique(predicted_labels))
        all_errors[i, 2] = util.classification_error(test_labels,
                                                     predicted_labels)
        all_dice[i, 2] = util.dice_overlap(test_labels, predicted_labels)

        # Plot my own method
        predicted_mask_3 = predicted_labels.reshape(im_size[0], im_size[1])
        ax3 = fig.add_subplot(133)
        ax3.imshow(test_shape_1, 'gray')
        ax3.imshow(predicted_mask_3, 'viridis', alpha=0.5)
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 2],
                                                    all_dice[i, 2])
        ax3.set_xlabel(text_str)
        ax3.set_title('Subject {}: My method'.format(sub))
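# segmentation_mymethod is the project-specific method and is not shown here.
# As a placeholder illustration only: a hypothetical variant that stacks all
# training atlases into one training set and runs a single kNN. The actual
# method used in the project will differ.
import numpy as np
from sklearn.neighbors import KNeighborsClassifier

def segmentation_mymethod_sketch(train_data_matrix, train_labels_matrix, test_data, task='tissue', k=5):
    # Stack (num_pixels, num_features, num_subjects) into one big training set
    num_pixels, num_features, num_subjects = train_data_matrix.shape
    train_data = train_data_matrix.transpose(2, 0, 1).reshape(-1, num_features)
    train_labels = train_labels_matrix.T.reshape(-1)
    neigh = KNeighborsClassifier(n_neighbors=k)
    neigh.fit(train_data, train_labels)
    return neigh.predict(test_data)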
Example #12
from IPython.display import display, clear_output
from scipy import ndimage, stats
import scipy
import segmentation_project as prj

train_subjects = [0, 1, 3, 4]
test_subject = 2
train_slice = 1
test_slice = 1
task = 'tissue'
im_size = [240, 240]
num_images = 5
features = [8, 9]  # feature pairs tried: [1, 4], [0, 4], [8, 9]
num_iter = 100
mu = 0.1
test_data, test_labels, test_feature_labels = util.create_dataset(
    test_subject + 1, train_slice, task)

all_data_matrix = np.empty(
    [test_data.shape[0], test_data.shape[1], num_images])
train_labels_matrix = np.empty([test_data.shape[0], num_images])  # only used for plotting

for i in np.arange(num_images):

    sub = i + 1
    train_data, train_labels, train_feature_labels = util.create_dataset(
        sub, train_slice, task)
    all_data_matrix[:, :, i] = train_data
    train_labels_matrix[:, i] = train_labels.flatten()

#select certain data:
def segmentation_demo():

    train_subject = 1
    test_subject = 2
    train_slice = 1
    test_slice = 1
    task = 'brain'

    #Load data
    train_data, train_labels, train_feature_labels = util.create_dataset(
        train_subject, train_slice, task)
    test_data, test_labels, test_feature_labels = util.create_dataset(
        test_subject, test_slice, task)

    predicted_labels = seg.segmentation_atlas(None, train_labels, None)

    err = util.classification_error(test_labels, predicted_labels)
    dice = util.dice_overlap(test_labels, predicted_labels)

    #Display results
    true_mask = test_labels.reshape(240, 240)
    predicted_mask = predicted_labels.reshape(240, 240)

    # fig = plt.figure(figsize=(8,8))
    # ax1 = fig.add_subplot(111)
    # ax1.imshow(true_mask, 'gray')
    # ax1.imshow(predicted_mask, 'viridis', alpha=0.5)
    # print('Subject {}, slice {}.\nErr {}, dice {}'.format(test_subject, test_slice, err, dice))

    ## Compare methods
    num_images = 5
    num_methods = 3
    im_size = [240, 240]

    all_errors = np.empty([num_images, num_methods])
    all_errors[:] = np.nan
    all_dice = np.empty([num_images, num_methods])
    all_dice[:] = np.nan

    all_subjects = np.arange(num_images)
    train_slice = 1
    task = 'brain'
    all_data_matrix = np.empty(
        [train_data.shape[0], train_data.shape[1], num_images])
    all_labels_matrix = np.empty([train_labels.size, num_images], dtype=bool)

    #Load datasets once
    print('Loading data for ' + str(num_images) + ' subjects...')

    for i in all_subjects:
        sub = i + 1
        train_data, train_labels, train_feature_labels = util.create_dataset(
            sub, train_slice, task)
        all_data_matrix[:, :, i] = train_data
        all_labels_matrix[:, i] = train_labels.flatten()

    print('Finished loading data.\nStarting segmentation...')

    #Go through each subject, taking i-th subject as the test
    for i in np.arange(num_images):
        sub = i + 1
        #Define training subjects as all, except the test subject
        train_subjects = all_subjects.copy()
        train_subjects = np.delete(train_subjects, i)

        train_data_matrix = all_data_matrix[:, :, train_subjects]
        train_labels_matrix = all_labels_matrix[:, train_subjects]
        test_data = all_data_matrix[:, :, i]
        test_labels = all_labels_matrix[:, i]
        test_shape_1 = test_labels.reshape(im_size[0], im_size[1])

        fig = plt.figure(figsize=(15, 5))

        predicted_labels = seg.segmentation_combined_atlas(train_labels_matrix)
        all_errors[i, 0] = util.classification_error(test_labels,
                                                     predicted_labels)
        all_dice[i, 0] = util.dice_overlap(test_labels, predicted_labels)
        predicted_mask_1 = predicted_labels.reshape(im_size[0], im_size[1])
        ax1 = fig.add_subplot(131)
        ax1.imshow(test_shape_1, 'gray')
        ax1.imshow(predicted_mask_1, 'viridis', alpha=0.5)
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 0],
                                                    all_dice[i, 0])
        ax1.set_xlabel(text_str)
        ax1.set_title('Subject {}: Combined atlas'.format(sub))

        predicted_labels = seg.segmentation_combined_knn(
            train_data_matrix, train_labels_matrix, test_data)
        all_errors[i, 1] = util.classification_error(test_labels,
                                                     predicted_labels)
        all_dice[i, 1] = util.dice_overlap(test_labels, predicted_labels)
        predicted_mask_2 = predicted_labels.reshape(im_size[0], im_size[1])
        ax2 = fig.add_subplot(132)
        ax2.imshow(test_shape_1, 'gray')
        ax2.imshow(predicted_mask_2, 'viridis', alpha=0.5)
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 1],
                                                    all_dice[i, 1])
        ax2.set_xlabel(text_str)
        ax2.set_title('Subject {}: Combined k-NN'.format(sub))

        predicted_labels = segmentation_mymethod(train_data_matrix,
                                                 train_labels_matrix,
                                                 test_data, task)
        all_errors[i, 2] = util.classification_error(test_labels,
                                                     predicted_labels)
        all_dice[i, 2] = util.dice_overlap(test_labels, predicted_labels)
        predicted_mask_3 = predicted_labels.reshape(im_size[0], im_size[1])
        ax3 = fig.add_subplot(133)
        ax3.imshow(test_shape_1, 'gray')
        ax3.imshow(predicted_mask_3, 'viridis', alpha=0.5)
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 2],
                                                    all_dice[i, 2])
        ax3.set_xlabel(text_str)
        ax3.set_title('Subject {}: My method'.format(sub))
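# seg.segmentation_combined_knn is used above but not shown. A sketch of one
# possible interpretation: train a kNN per atlas and majority-vote the
# per-atlas predictions. The actual implementation may combine differently.
import numpy as np
from scipy import stats
from sklearn.neighbors import KNeighborsClassifier

def segmentation_combined_knn_sketch(train_data_matrix, train_labels_matrix, test_data, k=5):
    num_subjects = train_data_matrix.shape[2]
    per_atlas = np.empty((test_data.shape[0], num_subjects))
    for s in range(num_subjects):
        neigh = KNeighborsClassifier(n_neighbors=k)
        neigh.fit(train_data_matrix[:, :, s], train_labels_matrix[:, s])
        per_atlas[:, s] = neigh.predict(test_data)
    # Majority vote over the per-atlas predictions
    combined, _ = stats.mode(per_atlas, axis=1)
    return np.asarray(combined).ravel()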
Example #14
def segmentation_demo():
    #only SECTION 2 is needed for what we want to do
    train_subject = 1
    test_subject = 2
    train_slice = 1
    test_slice = 1
    task = 'tissue'
    #SECTION 1 (this section has nothing to do with SECTION 2)

    #Load data from a train and testsubject
    train_data, train_labels, train_feature_labels = util.create_dataset(
        train_subject, train_slice, task)
    test_data, test_labels, test_feature_labels = util.create_dataset(
        test_subject, test_slice, task)

    util.scatter_data(train_data, train_labels, 0, 6)
    util.scatter_data(test_data, test_labels, 0, 6)

    predicted_labels = seg.segmentation_atlas(None, train_labels, None)

    err = util.classification_error(test_labels, predicted_labels)
    dice = util.dice_overlap(test_labels, predicted_labels)

    #Display results
    true_mask = test_labels.reshape(240, 240)
    predicted_mask = predicted_labels.reshape(240, 240)

    fig = plt.figure(figsize=(8, 8))
    ax1 = fig.add_subplot(111)
    ax1.imshow(true_mask, 'gray')
    ax1.imshow(predicted_mask, 'viridis', alpha=0.5)
    print('Subject {}, slice {}.\nErr {}, dice {}'.format(
        test_subject, test_slice, err, dice))

    ## SECTION 2:Compare methods
    num_images = 5
    num_methods = 3
    im_size = [240, 240]

    all_errors = np.empty([num_images, num_methods])
    all_errors[:] = np.nan
    all_dice = np.empty([num_images, num_methods])
    all_dice[:] = np.nan

    all_subjects = np.arange(5)  #list of all subjects [0, 1, 2, 3, 4]
    train_slice = 2
    task = 'tissue'
    all_data_matrix = np.empty(
        [train_data.shape[0], train_data.shape[1], num_images])
    all_labels_matrix = np.empty([train_labels.size, num_images])

    #Load datasets once
    print('Loading data for ' + str(num_images) + ' subjects...')

    for i in all_subjects:
        sub = i + 1
        train_data, train_labels, train_feature_labels = util.create_dataset(
            sub, train_slice, task)
        all_data_matrix[:, :, i] = train_data
        all_labels_matrix[:, i] = train_labels.flatten()

    print('Finished loading data.\nStarting segmentation...')

    #Go through each subject, taking i-th subject as the test
    for i in all_subjects:
        sub = i + 1
        #Define training subjects as all, except the test subject
        train_subjects = all_subjects.copy()
        train_subjects = np.delete(train_subjects, i)

        train_data_matrix = all_data_matrix[:, :, train_subjects]
        train_labels_matrix = all_labels_matrix[:, train_subjects]
        test_data = all_data_matrix[:, :, i]
        test_labels = all_labels_matrix[:, i]
        test_shape_1 = test_labels.reshape(im_size[0], im_size[1])

        fig = plt.figure(figsize=(15, 5))

        predicted_labels, predicted_labels2 = seg.segmentation_combined_atlas(
            train_labels_matrix)
        all_errors[i, 0] = util.classification_error(test_labels,
                                                     predicted_labels2)
        all_dice[i, 0] = util.dice_multiclass(test_labels, predicted_labels2)
        predicted_mask_1 = predicted_labels2.reshape(im_size[0], im_size[1])
        ax1 = fig.add_subplot(131)
        ax1.imshow(test_shape_1, 'gray')
        ax1.imshow(predicted_mask_1, 'viridis', alpha=0.5)
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 0],
                                                    all_dice[i, 0])
        ax1.set_xlabel(text_str)
        ax1.set_title('Subject {}: Combined atlas'.format(sub))

        predicted_labels, predicted_labels2 = seg.segmentation_combined_knn(
            train_data_matrix, train_labels_matrix, test_data)
        all_errors[i, 1] = util.classification_error(test_labels,
                                                     predicted_labels2)
        all_dice[i, 1] = util.dice_multiclass(test_labels, predicted_labels2)
        predicted_mask_2 = predicted_labels2.reshape(im_size[0], im_size[1])
        ax2 = fig.add_subplot(132)
        ax2.imshow(test_shape_1, 'gray')
        ax2.imshow(predicted_mask_2, 'viridis', alpha=0.5)
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 1],
                                                    all_dice[i, 1])
        ax2.set_xlabel(text_str)
        ax2.set_title('Subject {}: Combined k-NN'.format(sub))

        #OUR METHOD
        #predict the labels using our method
        predicted_labels_mymethod = segmentation_mymethod(train_data_matrix,
                                                          train_labels_matrix,
                                                          test_data,
                                                          num_iter=100,
                                                          mu=0.1)

        #determine error and dice (multiclass, since there are more classes)
        all_errors[i,
                   2] = util.classification_error(test_labels,
                                                  predicted_labels_mymethod)
        all_dice[i, 2] = util.dice_multiclass(test_labels,
                                              predicted_labels_mymethod)

        #reshape the predicted labels in order to plot the results
        predicted_mask_3 = predicted_labels_mymethod.reshape(
            im_size[0], im_size[1])

        #plot the predicted image over the real image
        plt.imshow(predicted_mask_3, 'viridis')
        ax3 = fig.add_subplot(133)
        ax3.imshow(test_shape_1, 'gray')
        ax3.imshow(predicted_mask_3, 'viridis', alpha=0.5)
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 2],
                                                    all_dice[i, 2])
        ax3.set_xlabel(text_str)
        ax3.set_title('Subject {}: My method'.format(sub))

        #save the figure after every loop (3 subimages/plots)
        fig.savefig("Results for test subject {}".format(sub))
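# util.dice_multiclass is used above for the multi-class 'tissue' task. A
# minimal sketch that averages the per-class Dice over all labels present in
# the ground truth; the actual utility may weight or select classes differently.
import numpy as np

def dice_multiclass_sketch(true_labels, predicted_labels):
    t = np.asarray(true_labels).ravel()
    p = np.asarray(predicted_labels).ravel()
    dices = []
    for c in np.unique(t):
        t_c, p_c = (t == c), (p == c)
        denom = np.sum(t_c) + np.sum(p_c)
        if denom > 0:
            dices.append(2.0 * np.sum(t_c & p_c) / denom)
    return float(np.mean(dices))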
def segmentation_demo():

    # Data name specification
    train_subject = 1
    test_subject = 2
    train_slice = 1
    test_slice = 1
    task = 'tissue'

    # Load data
    train_data, train_labels, train_feature_labels = util.create_dataset(
        train_subject, train_slice, task)
    test_data, test_labels, test_feature_labels = util.create_dataset(
        test_subject, test_slice, task)

    # Normalize and feed data through X_pca
    train_norm, _ = seg.normalize_data(train_data)
    Xpca, v, w, fraction_variance, ix = seg.mypca(train_norm)
    relevant_feature = int(np.sum(fraction_variance < 0.95)) + 1
    train_norm_ord = train_norm[:, ix]
    train_norm = train_norm_ord[:, :relevant_feature]

    # find the predicted labels (here: the train_labels)
    predicted_labels = seg.segmentation_atlas(None, train_labels, None)

    # Calculate the error and dice score of these predicted labels in comparison to test labels
    err = util.classification_error(test_labels, predicted_labels)
    dice = util.dice_multiclass(test_labels, predicted_labels)

    # Display results
    true_mask = test_labels.reshape(240, 240)
    predicted_mask = predicted_labels.reshape(240, 240)
    fig = plt.figure(figsize=(8, 8))
    ax1 = fig.add_subplot(111)
    ax1.imshow(true_mask, 'gray')
    ax1.imshow(predicted_mask, 'viridis', alpha=0.5)
    print('Subject {}, slice {}.\nErr {}, dice {}'.format(
        test_subject, test_slice, err, dice))

    # COMPARE METHODS
    num_images = 5
    num_methods = 3
    im_size = [240, 240]

    # make space for error and dice data
    all_errors = np.empty([num_images, num_methods])
    all_errors[:] = np.nan
    all_dice = np.empty([num_images, num_methods])
    all_dice[:] = np.nan

    # data name specification
    all_subjects = np.arange(num_images)
    train_slice = 1
    task = 'tissue'

    # make space for data
    all_data_matrix = np.empty(
        [train_norm.shape[0], train_norm.shape[1], num_images])
    all_labels_matrix = np.empty([train_labels.size, num_images])
    all_data_matrix_kmeans = np.empty(
        [train_norm.shape[0], train_norm.shape[1], num_images])
    all_labels_matrix_kmeans = np.empty([train_labels.size, num_images])

    # Load datasets once
    print('Loading data for ' + str(num_images) + ' subjects...')
    for i in all_subjects:
        sub = i + 1
        train_data, train_labels, train_feature_labels = util.create_dataset(
            sub, train_slice, task)
        train_norm, _ = seg.normalize_data(train_data)
        Xpca, v, w, fraction_variance, ix = seg.mypca(train_norm)
        relevant_labels = int(np.sum(fraction_variance < 0.95)) + 1
        train_norm_ord = train_norm[:, ix]
        train_norm = train_norm_ord[:, :relevant_labels]
        all_data_matrix[:, :, i] = train_norm
        all_labels_matrix[:, i] = train_labels.flatten()

    # Load datasets for kmeans
    print('Loading data for ' + str(num_images) + ' subjects...')
    for i in all_subjects:
        sub = i + 1
        train_data_kmeans, train_labels_kmeans, train_feature_labels_kmeans = util.create_dataset(
            sub, train_slice, task)
        train_norm_kmeans, _ = seg.normalize_data(train_data_kmeans)
        all_data_matrix_kmeans[:, :, i] = train_norm_kmeans
        all_labels_matrix_kmeans[:, i] = train_labels_kmeans.flatten()

    print('Finished loading data.\nStarting segmentation...')

    # Go through each subject, taking i-th subject as the test
    for i in np.arange(num_images):
        sub = i + 1

        # Define training subjects as all, except the test subject
        train_subjects = all_subjects.copy()
        train_subjects = np.delete(train_subjects, i)

        # Obtain data about the chosen amount of subjects
        train_data_matrix = all_data_matrix[:, :, train_subjects]
        train_labels_matrix = all_labels_matrix[:, train_subjects]
        test_data = all_data_matrix[:, :, i]
        test_labels = all_labels_matrix[:, i]
        test_shape_1 = test_labels.reshape(im_size[0], im_size[1])

        fig = plt.figure(figsize=(15, 5))

        # Get predicted labels from atlas method
        predicted_labels = seg.segmentation_combined_atlas(train_labels_matrix)
        all_errors[i, 0] = util.classification_error(test_labels,
                                                     predicted_labels)
        all_dice[i, 0] = util.dice_multiclass(test_labels, predicted_labels)

        # Plot atlas method
        predicted_mask_1 = predicted_labels.reshape(im_size[0], im_size[1])
        ax1 = fig.add_subplot(151)
        ax1.imshow(test_shape_1, 'gray')
        ax1.imshow(predicted_mask_1, 'viridis', alpha=0.5)
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 0],
                                                    all_dice[i, 0])
        ax1.set_xlabel(text_str)
        ax1.set_title('Subject {}: Combined atlas'.format(sub))

        # Get predicted labels from kNN method
        predicted_labels = seg.segmentation_combined_knn(train_data_matrix,
                                                         train_labels_matrix,
                                                         test_data,
                                                         k=10)
        all_errors[i, 1] = util.classification_error(test_labels,
                                                     predicted_labels)
        all_dice[i, 1] = util.dice_multiclass(test_labels, predicted_labels)

        # Plot kNN method
        predicted_mask_2 = predicted_labels.reshape(im_size[0], im_size[1])
        ax2 = fig.add_subplot(152)
        ax2.imshow(test_shape_1, 'gray')
        ax2.imshow(predicted_mask_2, 'viridis', alpha=0.5)
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 1],
                                                    all_dice[i, 1])
        ax2.set_xlabel(text_str)
        ax2.set_title('Subject {}: Combined k-NN'.format(sub))

        # Get predicted labels from my own method
        # all_data_matrix_bnb = np.empty([train_norm.shape[0], train_norm.shape[1], num_images])
        # all_labels_matrix_bnb = np.empty([train_labels.size, num_images])

        # for ii in all_subjects:
        #     sub = i + 1
        #     task = 'brain'
        #     train_data_bnb, train_labels_bnb, train_feature_labels_bnb = util.create_dataset(sub, train_slice, task)
        #     train_norm_bnb, _ = seg.normalize_data(train_data_bnb)
        #     Xpca, v, w, fraction_variance, ix = seg.mypca(train_norm_bnb)
        #     relevant_labels_bnb = int(np.sum(fraction_variance < 0.95)) + 1
        #     train_norm_ord_bnb = train_norm_bnb[:, ix]
        #     train_norm_bnb = train_norm_ord_bnb[:, :relevant_labels_bnb]
        #     all_data_matrix_bnb[:, :, ii] = train_norm_bnb
        #     all_labels_matrix_bnb[:, ii] = train_labels_bnb.flatten()
        #
        # qw, we, er = all_data_matrix.shape
        # for iii in np.arange(qw):
        #     for j in np.arange(er):
        #         if all_labels_matrix_bnb[iii, j] == 0:
        #             for k in np.arange(we):
        #                 all_data_matrix[iii, k, j] = 0

        # train_data_matrix = all_data_matrix[:, :, train_subjects]
        # test_data = all_data_matrix[:, :, i]

        train_data_matrix_kmeans = all_data_matrix_kmeans[:, :, train_subjects]
        train_labels_matrix_kmeans = all_labels_matrix[:, train_subjects]
        test_data_kmeans = all_data_matrix_kmeans[:, :, i]

        predicted_labels = segmentation_mymethod(train_data_matrix_kmeans,
                                                 train_labels_matrix_kmeans,
                                                 test_data_kmeans, task)
        all_errors[i, 2] = util.classification_error(test_labels,
                                                     predicted_labels)
        all_dice[i, 2] = util.dice_multiclass(test_labels, predicted_labels)

        # Plot my own method
        predicted_mask_3 = predicted_labels.reshape(im_size[0], im_size[1])
        ax3 = fig.add_subplot(153)
        ax3.imshow(test_shape_1, 'gray')
        ax3.imshow(predicted_mask_3, 'viridis', alpha=0.5)
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 2],
                                                    all_dice[i, 2])
        ax3.set_xlabel(text_str)
        ax3.set_title('Subject {}: My method'.format(sub))

        ax4 = fig.add_subplot(154)
        ax4.imshow(predicted_mask_3, 'viridis')
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 2],
                                                    all_dice[i, 2])
        ax4.set_xlabel(text_str)
        ax4.set_title('Subject {}: My method'.format(sub))

        ax5 = fig.add_subplot(155)
        ax5.imshow(test_shape_1, 'gray')
        text_str = 'Err {:.4f}, dice {:.4f}'.format(all_errors[i, 2],
                                                    all_dice[i, 2])
        ax5.set_xlabel(text_str)
        ax5.set_title('Subject {}: My method'.format(sub))
def learning_curve():
    # Load training and test data
    # train_data, train_labels = seg.generate_gaussian_data(1000)
    train_data, train_labels, _ = util.create_dataset(1, 1, 'brain')
    # test_data, test_labels = seg.generate_gaussian_data(1000)
    test_data, test_labels, _ = util.create_dataset(2, 1, 'brain')
    train_data, test_data = seg.normalize_data(train_data, test_data)

    # Define parameters
    train_sizes = np.logspace(0.1, 3.0, num=15).astype(int)
    k = 1
    num_iter = 3  # How often to repeat the experiment

    # Store errors
    test_error = np.empty([len(train_sizes), num_iter])
    test_error[:] = np.nan
    test_dice = np.empty([len(train_sizes), num_iter])
    test_dice[:] = np.nan

    train_error = np.empty([len(train_sizes), num_iter])
    train_error[:] = np.nan
    train_dice = np.empty([len(train_sizes), num_iter])
    train_dice[:] = np.nan

    ## Train and test with different values
    for i in np.arange(len(train_sizes)):
        for j in np.arange(num_iter):
            print('train_size = {}, iter = {}'.format(train_sizes[i], j))
            # Subsample training set
            ix = np.random.randint(len(train_data), size=train_sizes[i])
            subset_train_data = train_data[ix, :]
            subset_train_labels = train_labels[ix, :]

            # Train classifier
            neigh = KNeighborsClassifier(n_neighbors=k)
            neigh.fit(subset_train_data, subset_train_labels.ravel())
            # Evaluate
            predicted_test_labels = neigh.predict(test_data)

            test_labels = test_labels.astype(bool)
            predicted_test_labels = predicted_test_labels.astype(bool)

            test_error[i, j] = util.classification_error(test_labels, predicted_test_labels)
            test_dice[i, j] = util.dice_overlap(test_labels, predicted_test_labels)

            # Note: the training error is evaluated on the full training image,
            # not only on the subsampled training set
            predicted_train_labels = neigh.predict(train_data).astype(bool)
            train_labels_bool = train_labels.astype(bool)

            train_error[i, j] = util.classification_error(train_labels_bool, predicted_train_labels)
            train_dice[i, j] = util.dice_overlap(train_labels_bool, predicted_train_labels)

    ## Display results
    fig = plt.figure(figsize=(8, 8))
    gs = fig.add_gridspec(2, 2)
    ax1 = fig.add_subplot(gs[0, :])
    ax2 = fig.add_subplot(gs[1, :])
    x = np.log(train_sizes)
    ticks = list(x)
    tick_lbls = [str(i) for i in train_sizes]

    y_test = np.mean(test_error, 1)
    y_train = np.mean(train_error, 1)

    yerr_test = np.std(test_error, 1)
    yerr_train = np.std(train_error, 1)

    p1 = ax1.errorbar(x, y_test, yerr=yerr_test, label='Test error')
    p2 = ax2.errorbar(x, y_train, yerr=yerr_train, label='Train error')

    ax1.set_xlabel('Number of training samples (log scale)')
    ax1.set_ylabel('error')
    ax1.set_xticks(ticks)
    ax1.set_xticklabels(tick_lbls)
    ax1.grid()
    ax1.legend()

    ax2.set_xlabel('Number of training samples (log scale)')
    ax2.set_ylabel('error')
    ax2.set_xticks(ticks)
    ax2.set_xticklabels(tick_lbls)
    ax2.grid()
    ax2.legend()
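# A possible way to run these demos from a script, assuming the functions above
# (and matplotlib) are available in the same module; the selection and order of
# calls here are only an example.
import matplotlib.pyplot as plt

if __name__ == '__main__':
    learning_curve()
    feature_curve()
    knn_curve()
    kmeans_clustering_test()
    plt.show()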