# Imports inferred from usage below. Project-specific helpers (get_filenames,
# generate_kernels, create_volume_info, create_sliceinfo_w, classify_patch,
# classify_patch_w, classify_patch_group, check_classify, extract_roi_patches,
# train_rf_classifier, generate_training_feats, get_nifti_slice, get_nrrd_data,
# compute_feats, compute_powers, get_eigenpatches, get_randoms, normalise,
# SliceInfo, ...) and module-level settings (path, im_name, lb_name, ro_name,
# pickle, recons, psize, no_trees, PATCH_SIZE, MAX_EIGEN, SHOW_IMG) are
# defined elsewhere in the project.
import os
import sys
from time import time

import dill
import joblib
import numpy as np
import matplotlib.pyplot as plt
from joblib import Parallel, delayed
from sklearn.feature_extraction.image import (extract_patches_2d,
                                              reconstruct_from_patches_2d)


def generate_training_data_vol(jobs, case):
    print("Generating Training Data (Volume)...")

    images_fn, labels_fn, regint_fn = get_filenames(path, im_name, lb_name,
                                                    ro_name)
    kernels = generate_kernels()

    for i in range(len(images_fn)):
        if i == case:
            create_volume_info(images_fn, labels_fn, regint_fn, kernels, i,
                               jobs)
            break
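# A hypothetical command-line entry point (a sketch, not part of the original
# module): the worker count is taken from argv throughout this file, so the
# same convention is used here.
def main_volume(case=0):
    n_jobs = int(sys.argv[1]) if len(sys.argv) >= 2 else 1
    generate_training_data_vol(n_jobs, case)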
def generate_training_data(jobs):
    print("Generating Training Data...")

    images_fn, labels_fn, regint_fn = get_filenames(path, im_name, lb_name,
                                                    ro_name)
    kernels = generate_kernels()

    slice_infos = Parallel(n_jobs=jobs)(
        delayed(create_sliceinfo_w)(images_fn, labels_fn, regint_fn, kernels,
                                    i)
        for i in range(len(images_fn)))

    # 'pickle' here is a module-level output path, not the stdlib module
    with open(pickle, 'wb') as f:
        dill.dump(slice_infos, f)
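# dill.load is the mirror of the dump above; a minimal reload helper (a sketch,
# assuming the same module-level 'pickle' path) would be:
def load_training_data():
    with open(pickle, 'rb') as f:
        return dill.load(f)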
def create_sliceinfos_w(images_fn, labels_fn):
    print("Creating weighted Slice Infos...")
    kernels = generate_kernels()  # create gabor kernels
    slice_infos = []

    # sys.argv[1], when present, is the joblib worker count
    if len(sys.argv) < 2:
        for i in range(len(images_fn)):
            slice_infos.append(
                create_sliceinfo_w(images_fn, labels_fn, kernels, i))

    else:
        slice_infos = Parallel(n_jobs=int(sys.argv[1]))(
            delayed(create_sliceinfo_w)(images_fn, labels_fn, kernels, i)
            for i in range(len(images_fn)))

    return slice_infos
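# The serial-vs-parallel split above recurs in most of the functions below; a
# helper like this one (hypothetical) would centralise the argv-driven
# dispatch:
def maybe_parallel(fn, args_iter):
    args = list(args_iter)
    if len(sys.argv) >= 2:
        return Parallel(n_jobs=int(sys.argv[1]))(
            delayed(fn)(*a) for a in args)
    return [fn(*a) for a in args]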
def rf_reconstruct(slice_infos, i):
    feats, labels = generate_training_feats(slice_infos, i)
    labels = np.array(labels)
    RF = train_rf_classifier(feats, labels, no_trees)

    test_sl = slice_infos[i]
    image = test_sl.slice_im

    kernels = generate_kernels()

    # break the image into patches; all of these will be classified
    patch_size = (psize, psize)
    # _a stands for "all"
    patches_a = extract_patches_2d(image, patch_size)
    # _p stands for "predict"

    # dump the RF
    fn_rf = 'rf.joblib'
    joblib.dump(RF, fn_rf)

    # check each patch; sys.argv[1] gives the worker count. Workers receive
    # the RF filename and reload the forest themselves (classify_patch_w),
    # which avoids pickling the whole model for every dispatched task.
    if len(sys.argv) >= 2:
        patches_p = Parallel(n_jobs=int(sys.argv[1]))(
            delayed(classify_patch_w)(fn_rf, kernels, patches_a, i)
            for i in range(len(patches_a)))

    else:
        patches_p = []
        for i in range(len(patches_a)):
            patches_p.append(classify_patch(RF, kernels, patches_a, i))

    # reconstruct based on the patch
    recons_im = reconstruct_from_patches_2d(np.asarray(patches_p), image.shape)
    print(recons_im.shape)

    # save the reconstruction to disk ('recons' is a module-level output path)
    with open(recons, 'wb') as f:
        dill.dump(recons_im, f)
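# classify_patch_w receives the RF filename rather than the fitted forest, so
# each worker reloads the model from disk instead of having it pickled per
# task. A sketch of such a wrapper (the real one is defined elsewhere in the
# project):
def classify_patch_w_sketch(fn_rf, kernels, patches, i):
    rf = joblib.load(fn_rf)
    return classify_patch(rf, kernels, patches, i)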
def test_rf_feats_fullspec(slice_infos, i):
    feats, labels = generate_training_feats(slice_infos, i)
    RF = train_rf_classifier(feats, labels, no_trees)

    test_sl = slice_infos[i]
    image = test_sl.slice_im
    label = test_sl.slice_lb
    patches_m, patches_n = extract_roi_patches(image, label, psize)
    plabels_m = ['M' for m in patches_m]
    plabels_n = ['N' for n in patches_n]
    kernels = generate_kernels()

    tot = len(patches_m) + len(patches_n)

    res1 = []
    res2 = []

    t0 = time()

    if len(sys.argv) >= 2:
        res1 = Parallel(n_jobs=int(sys.argv[1]))(
            delayed(check_classify)(RF, kernels, p, plabels_m[i], i, tot)
            for i, p in enumerate(patches_m))
        res2 = Parallel(n_jobs=int(sys.argv[1]))(
            delayed(check_classify)(RF, kernels, p, plabels_n[i], i, tot)
            for i, p in enumerate(patches_n))
    else:
        # go through each patch group and classify it
        for i, p in enumerate(patches_m):
            res1.append(check_classify(RF, kernels, p, plabels_m[i], i, tot))
        for i, p in enumerate(patches_n):
            res2.append(check_classify(RF, kernels, p, plabels_n[i], i, tot))

    dt = time() - t0

    print("{}/{} patches classified correctly".format(
        res1.count(True) + res2.count(True), tot))
    print("Finished in {:.2f} seconds.".format(dt))
def rf_reconstruct2(jobs, slice_infos, i):
    t0 = time()
    feats, labels = generate_training_feats(slice_infos, i)
    labels = np.array(labels)
    print(feats.shape, labels.shape)
    RF = train_rf_classifier(feats, labels, no_trees)

    dt1 = time() - t0
    t0 = time()

    test_sl = slice_infos[i]
    image = test_sl.slice_im
    rimage = test_sl.slice_ro

    kernels = generate_kernels()

    dt2 = time() - t0
    t0 = time()

    # break the image into patches; all of these will be classified
    patch_size = (psize, psize)
    # _a stands for "all"
    patches_a = extract_patches_2d(image, patch_size)
    # _p stands for "predict"

    patches_r = extract_patches_2d(rimage, patch_size)
    # _r stands for "registered"

    dt3 = time() - t0
    t0 = time()

    # make sure the scratch directory exists
    os.makedirs("tmp", exist_ok=True)

    # save the Random Forest classifier and the Gabor kernels to disk so the
    # workers can reload them by filename instead of receiving pickled copies
    fn_rf = "tmp/rf.pkl"
    with open(fn_rf, 'wb') as fd_rf:
        dill.dump(RF, fd_rf)

    fn_kern = "tmp/kern.pkl"
    with open(fn_kern, 'wb') as fd_kern:
        dill.dump(kernels, fd_kern)

    dt4 = time() - t0
    t0 = time()

    # aim for several chunks per worker so joblib can balance the load;
    # guard against a zero chunk size when there are few patches
    chunk_size = max(1, int(len(patches_a) / float(jobs) / 8))

    # list which will contain the filenames of the patch chunks
    fn_chunks = []

    # break both patch groups into chunk-sized sets and save each of them
    # to disk
    for j in range(0, len(patches_a), int(chunk_size)):
        # determine the bounds of this chunk; slicing is exclusive at b, so
        # clamp to len(patches_a): clamping to len(patches_a) - 1 would drop
        # the final patch and break the reconstruction
        a = j
        b = min(j + chunk_size, len(patches_a))

        # create a chunk
        patches_a_chunk = patches_a[a:b]
        patches_r_chunk = patches_r[a:b]

        # put it together
        chunk = [(a, b, len(patches_a)), patches_a_chunk, patches_r_chunk]

        # generate a filename for this chunk
        fn_chunk = "tmp/patches_{}_{}.pkl".format(a, b)
        fn_chunks.append(fn_chunk)

        # serialise it to disk
        with open(fn_chunk, 'wb') as fd_chunk:
            dill.dump(chunk, fd_chunk)

    # check each patch; each worker reloads the RF, the kernels and one patch
    # chunk from disk, so only three short filenames cross the process boundary
    if len(sys.argv) >= 2:
        patches_x = Parallel(n_jobs=jobs)(
            delayed(classify_patch_group)(fn_rf, fn_kern, fn_chunk)
            for fn_chunk in fn_chunks)
        patches_p = []
        for group in patches_x:
            patches_p.extend(group)
    else:
        patches_p = []
        for k in range(len(patches_a)):
            # classify_patch_w expects the RF filename; a separate loop index
            # keeps the slice index i intact for the print below
            patches_p.append(classify_patch_w(fn_rf, kernels, patches_a, k))

    dt5 = time() - t0
    t0 = time()

    # reconstruct based on the patch
    recons_im = reconstruct_from_patches_2d(np.asarray(patches_p), image.shape)

    dt6 = time() - t0
    t0 = time()

    print(
        "Completed Reconstruction {}/{}: {} DT: {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f}"
        .format(i, len(slice_infos), recons, dt1, dt2, dt3, dt4, dt5, dt6))

    # save reconstruction!
    with open(recons, 'wb') as f:
        dill.dump(recons_im, f)
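# The chunk files written above pair each raw patch with its registered
# counterpart. A worker along these lines (a sketch; the real
# classify_patch_group is defined elsewhere and presumably also uses the
# registered patches, which this sketch ignores) reloads everything from disk
# and classifies its slice of the patches:
def classify_patch_group_sketch(fn_rf, fn_kern, fn_chunk):
    with open(fn_rf, 'rb') as f:
        rf = dill.load(f)
    with open(fn_kern, 'rb') as f:
        kernels = dill.load(f)
    with open(fn_chunk, 'rb') as f:
        (a, b, total), patches_a_chunk, patches_r_chunk = dill.load(f)
    return [classify_patch(rf, kernels, patches_a_chunk, k)
            for k in range(len(patches_a_chunk))]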
def process(filename, filename_label, slice_no):
    
    # Grab the image
    image_slice, orientation_slice = get_nifti_slice(filename, slice_no)
    if SHOW_IMG:
        plt.imshow(image_slice, cmap=plt.cm.gray)
        plt.show()
    
    # Grab the labels
    label_slice, orientation_label = get_nrrd_data(filename_label, slice_no)
    if SHOW_IMG:
        plt.imshow(label_slice, cmap=plt.cm.gray)
        plt.show()

    # Show the mask
    if SHOW_IMG:
        print("Masked version: ")
        mask = np.where(label_slice == 0, label_slice, image_slice)
        plt.imshow(mask, cmap=plt.cm.gray)
        plt.show()   
    
    # Extract patches in ROI
    patches_mask, patches_nonmask = extract_roi_patches(image_slice, 
                                                        label_slice, 
                                                        PATCH_SIZE)
    
    # Get the decomposed patches
    eigens_mask = get_eigenpatches(patches_mask, PATCH_SIZE, MAX_EIGEN)
    eigens_nonmask = get_eigenpatches(patches_nonmask, PATCH_SIZE, MAX_EIGEN)
    
    # Show the eigens, if you want
    if SHOW_IMG:
        show_eigenpatches(eigens_mask)
        
    # Generate Gabor Kernels
    kernels = generate_kernels()
    
    # Show the Gabors
    if SHOW_IMG:
        plot_gabor(eigens_mask)

    # Store all the features and Gabor responses
    all_features_mask = []
    all_powers_mask = []

    all_features_nonmask = []
    all_powers_nonmask = []
    
    for eigen in eigens_mask:
        all_features_mask.append(compute_feats(eigen, kernels))
        all_powers_mask.append(compute_powers(eigen, kernels))
    
    for eigen in eigens_nonmask:
        all_features_nonmask.append(compute_feats(eigen, kernels))
        all_powers_nonmask.append(compute_powers(eigen, kernels))
        
    return SliceInfo(filename, slice_no, image_slice, orientation_slice, 
                     label_slice, orientation_label, kernels,
                     patches_mask, eigens_mask,
                     all_features_mask, all_powers_mask,
                     patches_nonmask, eigens_nonmask,
                     all_features_nonmask, all_powers_nonmask)
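# A second variant of process() follows. It differs from the one above in a
# few ways: the image slice is intensity-normalised first, random patch
# samples (get_randoms) stand in for the eigen-decomposition, and the
# intermediate label-slice display is skipped.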
def process(filename, filename_label, slice_no):

    # Grab the image
    image_slice, orientation_slice = get_nifti_slice(filename, slice_no)
    image_slice = normalise(image_slice)
    if SHOW_IMG:
        plt.imshow(image_slice, cmap=plt.cm.gray)
        plt.show()

    # Grab the labels
    label_slice, orientation_label = get_nrrd_data(filename_label, slice_no)

    # Show the mask
    if SHOW_IMG:
        print("Masked version: ")
        mask = np.where(label_slice == 0, label_slice, image_slice)
        plt.imshow(mask, cmap=plt.cm.gray)
        plt.show()

    # Extract patches in ROI
    patches_mask, patches_nonmask = extract_roi_patches(
        image_slice, label_slice, PATCH_SIZE)

    # Sample random patches (this variant swaps the eigen-decomposition for a
    # random subset; the eigens_* names are kept so the rest of the code reads
    # the same as the variant above)
    eigens_mask = get_randoms(patches_mask, MAX_EIGEN)
    eigens_nonmask = get_randoms(patches_nonmask, MAX_EIGEN)

    # Show the eigens, if you want
    if SHOW_IMG:
        show_eigenpatches(eigens_mask)

    # Generate Gabor Kernels
    kernels = generate_kernels()

    # Show the Gabors
    if SHOW_IMG:
        plot_gabor(eigens_mask)

    # Store all the features and Gabor responses
    all_features_mask = []
    all_powers_mask = []

    all_features_nonmask = []
    all_powers_nonmask = []

    for eigen in eigens_mask:
        all_features_mask.append(compute_feats(eigen, kernels))
        all_powers_mask.append(compute_powers(eigen, kernels))

    for eigen in eigens_nonmask:
        all_features_nonmask.append(compute_feats(eigen, kernels))
        all_powers_nonmask.append(compute_powers(eigen, kernels))

    return SliceInfo(filename, slice_no, image_slice, orientation_slice,
                     label_slice, orientation_label, kernels, patches_mask,
                     eigens_mask, all_features_mask, all_powers_mask,
                     patches_nonmask, eigens_nonmask, all_features_nonmask,
                     all_powers_nonmask)
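# normalise() is defined elsewhere in the project; a typical min-max
# normalisation (an assumption, not the project's actual code) would be:
def normalise_sketch(img):
    img = np.asarray(img, dtype=np.float64)
    rng = img.max() - img.min()
    return (img - img.min()) / rng if rng > 0 else np.zeros_like(img)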