Example #1
0
def test_match():
    """Pairing two region collections maps each source to its counterpart.

    With no threshold both regions of ``first`` find a partner; with a
    tight distance threshold the far-away second region stays unmatched
    and its slot is nan.
    """
    first = many([[[0, 0], [0, 1], [1, 0], [1, 1]],
                  [[10, 10], [10, 11], [11, 10], [11, 11]]])
    second = many([[[0, 0], [0, 1], [1, 0], [1, 1]],
                   [[30, 30], [31, 30], [31, 31]]])
    assert match(first, second) == [0, 1]
    assert match(first, second, threshold=5) == [0, nan]
Example #2
0
def test_accuracy():
    """End-to-end quality check of ROI extraction through the cidan GUI.

    Loads a small dataset, runs extraction twice (single box, then four
    spatial boxes), and asserts neurofinder precision/recall/shape scores
    plus structural similarity of the embedding image against references.
    """
    app = QApplication([])
    app.setApplicationName("cidan")
    window = MainWindow(dev=True, preload=False)

    table_widget = window.table_widget
    load_new_dataset(table_widget,
                     file_path="test_files/small_dataset1.tif",
                     save_dir_path="test_files/save_dir",
                     load_into_mem=True)
    table_widget.open_dataset_thread.wait()
    # Extra settle time after the loader thread reports done.
    time.sleep(10)
    handler = table_widget.data_handler

    # First pass: median filter on, circularity threshold 20.
    handler.change_filter_param("median_filter", True)
    handler.change_roi_extraction_param("roi_circ_threshold", 20)
    table_widget.thread_list[1].run()
    assert table_widget.tabs[1].image_view.magic_wand(70, 127)

    handler.export()
    reference = neurofinder.load("test_files/roi_list.json")
    produced = neurofinder.load("test_files/save_dir/roi_list.json")
    matched = neurofinder.match(reference, produced)
    precision, recall = neurofinder.centers(reference, produced)
    inclusion, exclusion = neurofinder.shapes(reference, produced)
    assert precision > .8
    assert recall > .8
    assert exclusion > .6
    assert inclusion > .6
    good_image = io.imread("test_files/embedding_norm_image.png")
    generated_image = io.imread(
        "test_files/save_dir/embedding_norm_images/embedding_norm_image.png")
    assert structural_similarity(good_image, generated_image) > .90

    # Second pass: four spatial boxes — quality may degrade, but only
    # within the looser tolerances below.
    handler.change_box_param("total_num_spatial_boxes", 4)
    table_widget.thread_list[1].run()
    handler.export()
    generated_image = io.imread(
        "test_files/save_dir/embedding_norm_images/embedding_norm_image.png")
    assert structural_similarity(good_image, generated_image) > .60
    reference = neurofinder.load("test_files/roi_list.json")
    produced = neurofinder.load("test_files/save_dir/roi_list.json")
    precision, recall = neurofinder.centers(reference, produced)
    inclusion, exclusion = neurofinder.shapes(reference, produced)
    assert precision > .4
    assert recall > .4
    assert exclusion > .4
    assert inclusion > .3
    app.quit()
    shutil.rmtree("test_files/save_dir")
#        idcomps=np.intersect1d(pos_examples,idcomps)
#        regions_CNMF=cse.utilities.nf_masks_to_json( masks_ws[idcomps],os.path.join('/tmp/regions_CNMF_2.json'))
#        b=load(os.path.join('/tmp/regions_CNMF_2.json'))
#        pl.imshow(np.sum(masks_ws[idcomps],0),alpha=.3,cmap='hot')
        pl.imshow(np.sum(masks_2,0),alpha=.3,cmap='hot')

        pl.title('M_2')
        pl.subplot(2,2,4)
#        pl.imshow(Cn,cmap='gray')
#        pl.imshow(np.sum(masks_nf,0)+2*np.sum(masks_ws[idcomps],0))
        pl.imshow(np.sum(masks_nf,0)+2*np.sum(masks_2,0))
#        pl.imshow(np.sum(masks_2,0),alpha=.2,cmap='hot')
        pl.title('M_overlap')
        #print
        mtc=match(a,b,threshold=5)
        re,pr=centers(a,b,threshold=5)
        incl,excl=shapes(a,b,threshold=5)
        fscore=2*(pr*re)/(pr+re)
        print(('Exclusion %.3f\nRecall %.3f\nCombined %.3f\nPrecision %.3f\nInclusion %.3f' % (excl,re,fscore,pr,incl)))

    else:
        print((ref_file + ' DO NOT EXIST!'))
#%%
from neurofinder import load, centers, shapes
results=[]
for folder_in_check in folders_in:

    a=load(os.path.join(folder_in_check,'regions_CNMF.json'))  
    dset='.'.join(folder_in_check[:-1].split('.')[1:])
    print (dset)
        #        idcomps=np.intersect1d(pos_examples,idcomps)
        #        regions_CNMF=cse.utilities.nf_masks_to_json( masks_ws[idcomps],os.path.join('/tmp/regions_CNMF_2.json'))
        #        b=load(os.path.join('/tmp/regions_CNMF_2.json'))
        #        pl.imshow(np.sum(masks_ws[idcomps],0),alpha=.3,cmap='hot')
        pl.imshow(np.sum(masks_2, 0), alpha=.3, cmap='hot')

        pl.title('M_2')
        pl.subplot(2, 2, 4)
        #        pl.imshow(Cn,cmap='gray')
        #        pl.imshow(np.sum(masks_nf,0)+2*np.sum(masks_ws[idcomps],0))
        pl.imshow(np.sum(masks_nf, 0) + 2 * np.sum(masks_2, 0))
        #        pl.imshow(np.sum(masks_2,0),alpha=.2,cmap='hot')
        pl.title('M_overlap')
        # print
        mtc = match(a, b, threshold=5)
        re, pr = centers(a, b, threshold=5)
        incl, excl = shapes(a, b, threshold=5)
        fscore = 2 * (pr * re) / (pr + re)
        print((
            'Exclusion %.3f\nRecall %.3f\nCombined %.3f\nPrecision %.3f\nInclusion %.3f'
            % (excl, re, fscore, pr, incl)))

    else:
        print((ref_file + ' DO NOT EXIST!'))
#%%
from neurofinder import load, centers, shapes
results = []
for folder_in_check in folders_in:

    a = load(os.path.join(folder_in_check, 'regions_CNMF.json'))
Example #5
0
def overlay(model, image=None, compare=None, threshold=inf, correct=False):
    """
    Overlay regions onto reference image, with optional comparison regions.

    Parameters
    ----------
    model : ExtractionModel
        Provides the regions to draw.

    image : array-like, optional, default = None
         Base image, can provide a 2d array,
         if unspecified will be black.

    compare : ExtractionModel, optional, default = None
        Regions to be compared to if provided.

    threshold : float, default = inf
        Distance threshold for matching sources.

    correct : bool, default = False
        If True and a comparison is given, will only show correct regions.

    Returns
    -------
    array
        RGB image (height, width, 3): the base image with colored region
        outlines (hits green, false alarms red, true positives blue,
        misses yellow).
    """

    if image is not None:
        # Normalize only when values exceed 1, so an already-normalized
        # image passes through untouched.
        if image.max() > 1:
            im = norm(image)
        else:
            im = image
        size = im.shape
    else:
        # No base image: build a black canvas large enough to hold every
        # region's bounding box from both models.
        size = (max([r.bbox[2] for r in model.regions]) + 1,
                max([r.bbox[3] for r in model.regions]) + 1)
        if compare is not None:
            sizeCompare = (max([r.bbox[2] for r in compare.regions]) + 1,
                           max([r.bbox[3] for r in compare.regions]) + 1)
            size = (maximum(size[0], sizeCompare[0]),
                    maximum(size[1], sizeCompare[1]))
        im = full(size, 0.0)

    def _stroke_mask(regions, indices, color):
        # Outline the selected regions in the given RGB color, or return
        # an all-black RGB canvas when no region was selected.
        if len(indices) == 0:
            return full((size[0], size[1], 3), 0.0)
        selected = many([regions[i] for i in indices])
        return selected.mask(size, background='black', fill=None,
                             stroke=color)

    if compare is not None:
        matches = match(model.regions, compare.regions, threshold)

        # Invert the mapping so each compare-region knows which model
        # region (if any) was paired with it; nan marks a miss.
        matchesCompare = full(compare.regions.count, nan)
        for ii in where(~isnan(matches))[0]:
            # match() yields floats (nan for misses) — cast before indexing.
            matchesCompare[int(matches[ii])] = ii

        h = _stroke_mask(model.regions, where(~isnan(matches))[0],
                         [0, 0.7, 0])       # hits: green
        fA = _stroke_mask(model.regions, where(isnan(matches))[0],
                          [0.7, 0, 0])      # false alarms: red
        tP = _stroke_mask(compare.regions, where(~isnan(matchesCompare))[0],
                          [0, 0, 0.7])      # true positives: blue
        m = _stroke_mask(compare.regions, where(isnan(matchesCompare))[0],
                         [0.7, 0.7, 0])     # misses: yellow
        if correct:
            mask = maximum(tP, h)
        else:
            mask = maximum(maximum(maximum(tP, fA), h), m)
    else:
        mask = model.regions.mask(size,
                                  background='black',
                                  fill=None,
                                  stroke=[0, 0.7, 0])

    # Replicate the grayscale base into 3 channels, then stack the colored
    # outlines on top via elementwise maximum.
    base = tile(im, (3, 1, 1)).transpose(1, 2, 0)
    return maximum(base, mask)
Example #6
0
def overlay(model, image=None, compare=None, threshold=inf, correct=False):
    """
    Render region outlines on top of a base image, optionally color-coding
    agreement with a second set of regions.

    Parameters
    ----------
    model : ExtractionModel
        Source of the regions to draw.

    image : array-like, optional, default = None
        2d base image; a black canvas is used when omitted.

    compare : ExtractionModel, optional, default = None
        Reference regions to compare against, if provided.

    threshold : float, default = inf
        Distance threshold for matching sources.

    correct : bool, default = False
        When True and a comparison is given, draw only correct regions.
    """

    if image is None:
        # Derive a canvas just big enough for every bounding box.
        size = (max(r.bbox[2] for r in model.regions) + 1,
                max(r.bbox[3] for r in model.regions) + 1)
        if compare is not None:
            alt = (max(r.bbox[2] for r in compare.regions) + 1,
                   max(r.bbox[3] for r in compare.regions) + 1)
            size = (maximum(size[0], alt[0]), maximum(size[1], alt[1]))
        im = full(size, 0.0)
    else:
        # Pass already-normalized images through untouched.
        im = norm(image) if image.max() > 1 else image
        size = im.shape

    if compare is None:
        mask = model.regions.mask(size, background='black', fill=None,
                                  stroke=[0, 0.7, 0])
    else:
        matches = match(model.regions, compare.regions, threshold)

        # Reverse mapping: for each compare-region, the index of the model
        # region it was paired with (nan when unmatched).
        reverse = full(compare.regions.count, nan)
        hit_idx = where(~isnan(matches))[0]
        for k in hit_idx:
            reverse[matches[k]] = k

        def blank():
            # All-black RGB canvas used when a category is empty.
            return full((size[0], size[1], 3), 0.0)

        if any(~isnan(matches)):
            h = many([model.regions[i] for i in hit_idx]).mask(
                size, background='black', fill=None, stroke=[0, 0.7, 0])
        else:
            h = blank()
        if any(isnan(matches)):
            fA = many([model.regions[i] for i in where(isnan(matches))[0]]).mask(
                size, background='black', fill=None, stroke=[0.7, 0, 0])
        else:
            fA = blank()
        if any(~isnan(reverse)):
            tP = many([compare.regions[i] for i in where(~isnan(reverse))[0]]).mask(
                size, background='black', fill=None, stroke=[0, 0, 0.7])
        else:
            tP = blank()
        if any(isnan(reverse)):
            m = many([compare.regions[i] for i in where(isnan(reverse))[0]]).mask(
                size, background='black', fill=None, stroke=[0.7, 0.7, 0])
        else:
            m = blank()

        if correct:
            mask = maximum(tP, h)
        else:
            mask = maximum(maximum(maximum(tP, fA), h), m)

    # Promote the grayscale base to RGB and take the elementwise maximum
    # with the colored outlines.
    base = tile(im, (3, 1, 1)).transpose(1, 2, 0)
    return maximum(base, mask)