Example 1
def test_match():
    a = many([[[0, 0], [0, 1], [1, 0], [1, 1]],
              [[10, 10], [10, 11], [11, 10], [11, 11]]])
    b = many([[[0, 0], [0, 1], [1, 0], [1, 1]], [[30, 30], [31, 30], [31,
                                                                      31]]])
    assert match(a, b) == [0, 1]
    assert match(a, b, threshold=5) == [0, nan]
Example 2
def test_construction():
	coords = [[0, 0], [0, 1], [1, 0], [1, 1]]
	r = many([one(coords), one(coords)])
	assert r.count == 2
	assert allclose(r.coordinates, [coords, coords])
	r = many([coords, coords])
	assert r.count == 2
	assert allclose(r.coordinates, [coords, coords])
	r = many([coords, coords, coords])
	assert r.count == 3
	assert allclose(r.coordinates, [coords, coords, coords])
Example 3
def bifurcated_regions(n):
    """
    generate an intensity table with diagonal coordinates ((1, 1), (2, 2), ... (n, n)) over 3
    channels and four rounds, where intensities are randomly generated.

    Split the region into two block-diagonal cells which should encompass 1/2 of the total area but
    all of the points in the domain, since intensities is a diagonal table.
    """

    np.random.seed(777)
    data = np.random.random_sample((n, 3, 4))
    diagonal_intensities = intensity_table_factory(data)

    x = diagonal_intensities[Indices.X.value].max() + 1
    y = diagonal_intensities[Indices.Y.value].max() + 1
    box_one_coords = [[0, 0], [0, np.floor(x / 2)], [np.ceil(y / 2), 0],
                      [np.floor(y / 2), np.floor(x / 2)]]
    box_two_coords = [[np.floor(y / 2), np.floor(x / 2)], [np.floor(y / 2), x],
                      [y, np.floor(x / 2)], [y, x]]
    regions = regional.many(
        [regional.one(box_one_coords),
         regional.one(box_two_coords)])

    # assign intensity_table some target values that are just sequential numbers
    diagonal_intensities[Features.TARGET] = (Features.AXIS,
                                             np.arange(n).astype(str))

    return diagonal_intensities, regions
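For reference, a minimal standalone sketch of the two block-diagonal boxes built above, using only the public regional API (the 10 x 10 domain and corner coordinates are illustrative):

from regional import one, many

# two diagonal corner boxes, analogous to box_one_coords / box_two_coords above
box_one = one([[0, 0], [0, 5], [5, 0], [5, 5]])
box_two = one([[5, 5], [5, 10], [10, 5], [10, 10]])
regions = many([box_one, box_two])

assert regions.count == 2
print(regions.bbox)    # per-region bounding boxes
print(regions.center)  # per-region centroids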
Example 4
 def __init__(self, regions):
     if isinstance(regions, list):
         self.regions = many(regions)
     elif isinstance(regions, many):
         self.regions = regions
     else:
         raise Exception("Input type not recognized, must be many regions")
Example 5
 def __init__(self, regions):
     if isinstance(regions, list):
         self.regions = many(regions)
     elif isinstance(regions, many):
         self.regions = regions
     else:
         raise Exception("Input type not recognized, must be many regions")
Example 6
 def fit(self, images, block_size=None):
     images = check_images(images)
     block_size = block_size if block_size is not None else images.shape[1:]
     blocks = images.toblocks(size=block_size)
     shape = blocks.blockshape
     sources = blocks.tordd().map(lambda kv: self._get(kv[0], kv[1], shape))
     collected = sources.collect()
     return ExtractionModel(many(list(itertools.chain.from_iterable(collected))))
Example 7
def _mask_to_regional(m):
    """Convert a 2D numpy mask to a regional many object so it can be measured
    using the neurofinder library."""
    mlbl = measure.label(m)
    coords = []
    for lbl in range(1, np.max(mlbl) + 1):
        yy, xx = np.where(mlbl == lbl)
        coords.append([[y, x] for y, x in zip(yy, xx)])
    return many(coords)
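A short usage sketch for the converter above, assuming numpy as np and the function's own imports are in scope (the 4 x 4 mask is illustrative):

import numpy as np

# two disconnected blobs in a small boolean mask
m = np.zeros((4, 4), dtype=bool)
m[0, 0:2] = True   # first connected component
m[3, 2:4] = True   # second connected component
regions = _mask_to_regional(m)
assert regions.count == 2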
Example 8
def test_model_transform_single(eng):
    regions = many([[[0, 0], [0, 1]]])
    model = ExtractionModel(regions=regions)
    im0 = [[0, 1], [1, 2]]
    im1 = [[3, 4], [5, 6]]
    im2 = [[7, 8], [9, 10]]
    data = fromarray([im0, im1, im2], engine=eng)
    transformed = model.transform(data)
    assert allclose(transformed.toarray(), [[0.5, 3.5, 7.5]])
Example 9
def test_mask_colors():
	r = many([one([0, 0]), one([1, 1])])
	im = r.mask(fill=['red','blue'], background='black')
	assert allclose(im[:,:,0], [[1, 0], [0, 0]])
	assert allclose(im[:,:,1], [[0, 0], [0, 0]])
	assert allclose(im[:,:,2], [[0, 0], [0, 1]])
	im = r.mask(fill=[[1, 0, 0], [0, 0, 1]], background='black')
	assert allclose(im[:,:,0], [[1, 0], [0, 0]])
	assert allclose(im[:,:,1], [[0, 0], [0, 0]])
	assert allclose(im[:,:,2], [[0, 0], [0, 1]])
Example 10
def test_mask_background():
	r = many([one([0, 0]), one([1, 1])])
	im = r.mask(fill='red', stroke=None, background='black')
	assert allclose(im[:,:,0], [[1, 0], [0, 1]])
	assert allclose(im[:,:,1], [[0, 0], [0, 0]])
	assert allclose(im[:,:,2], [[0, 0], [0, 0]])
	im = r.mask(fill=[1, 0, 0], stroke=None, background='black')
	assert allclose(im[:,:,0], [[1, 0], [0, 1]])
	assert allclose(im[:,:,1], [[0, 0], [0, 0]])
	assert allclose(im[:,:,2], [[0, 0], [0, 0]])
Example 11
def test_mask():
	r = many([one([0, 0]), one([1, 1])])
	im = r.mask(fill='red')
	assert allclose(im[:,:,0], [[1, 1], [1, 1]])
	assert allclose(im[:,:,1], [[0, 1], [1, 0]])
	assert allclose(im[:,:,2], [[0, 1], [1, 0]])
	im = r.mask(fill=[1, 0, 0])
	assert allclose(im[:,:,0], [[1, 1], [1, 1]])
	assert allclose(im[:,:,1], [[0, 1], [1, 0]])
	assert allclose(im[:,:,2], [[0, 1], [1, 0]])
Example 12
def load(file):
    """
    Load neuronal regions from a file or string.
    """
    if os.path.isfile(file):
        with open(file, 'r') as f:
            values = json.load(f)
    else:
        values = json.loads(file)

    return many([v['coordinates'] for v in values])
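A brief usage sketch of the string branch above (the JSON payload is illustrative):

# load() falls back to json.loads when the argument is not an existing file path
s = '[{"coordinates": [[0, 0], [0, 1]]}, {"coordinates": [[5, 5]]}]'
regions = load(s)
assert regions.count == 2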
Example 13
def load(file):
    """
    Load neuronal regions from a file or string.
    """
    if os.path.isfile(file):
        with open(file, 'r') as f:
            values = json.load(f)
    else:
        values = json.loads(file)

    return many([v['coordinates'] for v in values])
Example 14
def geojson_to_region(geojson: Dict[Any, Any]) -> regional.many:
    """
    Convert geojson data to region geometrical data.
    """
    def make_region(geometry):
        assert geometry['geometry']['type'] == "Polygon"
        region = [(coordinates[0], coordinates[1])
                  for coordinates in geometry['geometry']['coordinates']]

        return regional.one(region)

    return regional.many([make_region(geometry) for geometry in geojson])
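A hedged usage sketch; note that the feature payload below mirrors what the converter above actually reads (a flat list of (x, y) vertices under geometry['coordinates']), which is one nesting level shallower than strict GeoJSON polygons:

features = [
    {"geometry": {"type": "Polygon",
                  "coordinates": [[0, 0], [0, 2], [2, 2], [2, 0]]}},
]
regions = geojson_to_region(features)
assert regions.count == 1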
Example 15
def load_regions(dataset_path):
  """
  Load in the ROIs for a dataset.
  Returns a regional.many object
  """
  with open(os.path.join(dataset_path, 'regions/regions.json')) as f:
    data = json.load(f)
  
  regions = []
  for i in range(len(data)):
    regions.append(regional.one(data[i]['coordinates']))
  
  return regional.many(regions)
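The expected on-disk layout can be inferred from the loader above; a small self-contained sketch (the temporary dataset and single square region are illustrative):

import json
import os
import tempfile

dataset_path = tempfile.mkdtemp()
os.makedirs(os.path.join(dataset_path, 'regions'))
with open(os.path.join(dataset_path, 'regions', 'regions.json'), 'w') as f:
    json.dump([{'coordinates': [[0, 0], [0, 1], [1, 0], [1, 1]]}], f)

regions = load_regions(dataset_path)
assert regions.count == 1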
Example 16
def load_regions(dataset_path):
    """
  Load in the ROIs for a dataset.
  Returns a regional.many object
  """
    with open(os.path.join(dataset_path, 'regions/regions.json')) as f:
        data = json.load(f)

    regions = []
    for i in range(len(data)):
        regions.append(regional.one(data[i]['coordinates']))

    return regional.many(regions)
Example 17
def label_to_regions(labels) -> regional.many:
    label_mat_coo = coo_matrix(labels)

    def region_for(label_mat_coo, label):
        ind = label_mat_coo.data == label
        # TODO does this work in 3D?
        x = label_mat_coo.row[ind]
        y = label_mat_coo.col[ind]

        re = regional.one(list(zip(x, y)))
        return re

    unique_labels = sorted(set(label_mat_coo.data))
    regions = [region_for(label_mat_coo, label) for label in unique_labels]

    return regional.many(regions)
Example 18
  def fit(self, images, chunk_size=None, padding=None):
      images = check_images(images)
      chunk_size = chunk_size if chunk_size is not None else images.shape[1:]
      blocks = images.toblocks(chunk_size=chunk_size, padding=padding)
      sources = asarray(blocks.map_generic(self._get))

      # add offsets based on block coordinates
      for inds in itertools.product(*[range(d) for d in sources.shape]):
          offset = (asarray(inds) * asarray(blocks.blockshape)[1:])
          for source in sources[inds]:
              source.coordinates += offset
              if padding:
                leftpad = [blocks.padding[i + 1] if inds[i] != 0 else 0 for i in range(len(inds))]
                source.coordinates -= asarray(leftpad)
      
      # flatten list and create model
      flattened = list(itertools.chain.from_iterable(sources.flatten().tolist()))
      return ExtractionModel(many(flattened))
Example 19
def predict_conv_net(network, data, truth=None):
    '''
    Applies the ConvNet to data to generate predictions
    '''
    from regional import many
    predictions = []
    cropped = []
    for i in range(len(data)):
        predictions.append(network.predict_proba(data[i][newaxis, ...])[0,...,0])
        if truth is not None:
            mask = many(truth[i]).mask(data[i].shape[:2], fill='black')[...,0]
            clip = data[i].shape[0] - predictions[i].shape[0]
            left = clip//2
            right = left + clip%2
            cropped.append(mask[left:-right, left:-right])
    if truth is not None:
        return predictions, cropped
    else:
        return predictions
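The centre-crop arithmetic above can be checked in isolation with plain numpy; a minimal sketch (shapes are illustrative):

import numpy as np

mask = np.arange(8 * 8).reshape(8, 8)  # stands in for the rendered truth mask
pred = np.zeros((5, 5))                # network output is smaller than its input
clip = mask.shape[0] - pred.shape[0]   # total rows/columns lost at the borders
left = clip // 2
right = left + clip % 2
cropped = mask[left:-right, left:-right]
assert cropped.shape == pred.shape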
Example 20
def predict_conv_net(network, data, truth=None):
    '''
    Applies the ConvNet to data to generate predictions
    '''
    from regional import many
    predictions = []
    cropped = []
    for i in range(len(data)):
        predictions.append(
            network.predict_proba(data[i][newaxis, ...])[0, ..., 0])
        if truth is not None:
            mask = many(truth[i]).mask(data[i].shape[:2], fill='black')[..., 0]
            clip = data[i].shape[0] - predictions[i].shape[0]
            left = clip // 2
            right = left + clip % 2
            cropped.append(mask[left:-right, left:-right])
    if truth is not None:
        return predictions, cropped
    else:
        return predictions
Example 21
        def merge_once(initial):
            centers = asarray(initial.center)
            nearest = [top_k(centers, source.center, k_nearest) for source in initial]

            regions = []
            skip = []
            keep = []

            for ia, source in enumerate(initial):
                for ib in nearest[ia]:
                    other = initial[ib]
                    if not ia == ib and source.overlap(other) > overlap:
                        source = source.merge(other)
                        if ib not in keep:
                            skip.append(ib)

                regions.append(source)
                keep.append(ia)

            return many([region for ir, region in enumerate(regions) if ir not in skip])
Example 22
    def region_to_mask(self, regions_json):
        """
		Converts region JSON file into mask

		Arguments
		---------
		regions_json : json file
			JSON file which needs to be converted into corresponding mask

		Returns
		-------
		output : 2D numpy array
			Mask image
		"""
        regions = many([region['coordinates'] for region in regions_json])
        _mask = regions.mask(dims=(512, 512),
                             stroke='white',
                             fill='white',
                             background='black')

        return color.rgb2gray(_mask)
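The same conversion outside the class, as a minimal sketch (the single 2 x 2 square region is illustrative):

from regional import many
from skimage import color

regions_json = [{'coordinates': [[10, 10], [10, 11], [11, 10], [11, 11]]}]
regions = many([r['coordinates'] for r in regions_json])
rgb = regions.mask(dims=(512, 512), stroke='white', fill='white', background='black')
gray = color.rgb2gray(rgb)
assert gray.shape == (512, 512)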
Example 23
    def fit(self, images, chunk_size=None, padding=None):
        images = check_images(images)
        chunk_size = chunk_size if chunk_size is not None else images.shape[1:]
        blocks = images.toblocks(chunk_size=chunk_size, padding=padding)
        sources = asarray(blocks.map_generic(self._get))

        # add offsets based on block coordinates
        for inds in itertools.product(*[range(d) for d in sources.shape]):
            offset = (asarray(inds) * asarray(blocks.blockshape)[1:])
            for source in sources[inds]:
                source.coordinates += offset
                if padding:
                    leftpad = [
                        blocks.padding[i + 1] if inds[i] != 0 else 0
                        for i in range(len(inds))
                    ]
                    source.coordinates -= asarray(leftpad)

        # flatten list and create model
        flattened = list(
            itertools.chain.from_iterable(sources.flatten().tolist()))
        return ExtractionModel(many(flattened))
Example 24
        def merge_once(initial):
            centers = asarray(initial.center)
            nearest = [
                top_k(centers, source.center, k_nearest) for source in initial
            ]

            regions = []
            skip = []
            keep = []

            for ia, source in enumerate(initial):
                for ib in nearest[ia]:
                    other = initial[ib]
                    if not ia == ib and source.overlap(other) > overlap:
                        source = source.merge(other)
                        if ib not in keep:
                            skip.append(ib)

                regions.append(source)
                keep.append(ia)

            return many([
                region for ir, region in enumerate(regions) if ir not in skip
            ])
Example 25
def test_dilate():
	v = many([one([1, 1]), one([1, 1])]).dilate(1)
	truth = [[0, 0], [0, 1], [0, 2], [1, 0], [1, 1], [1, 2], [2, 0], [2, 1], [2, 2]]
	assert allclose(v.coordinates, [truth, truth])
Example 26
def test_overlap():
	coords = [[1, 1], [1, 2], [2, 1], [2, 2]]
	v = many([coords, coords]).overlap(one([1, 1]))
	assert v == [0.25, 0.25]
Example 27
def test_model_construction():
    regions = many([[[0, 1], [0, 2]], [[0, 2], [0, 3]]])
    model = ExtractionModel(regions=regions)
    assert isinstance(model.regions, many)
    assert model.regions.count == 2
Example 28
def test_similarity_perfect_flipped():
    a = many([[[0, 0], [0, 1], [1, 0], [1, 1]],
              [[10, 10], [10, 11], [11, 10], [11, 11]]])
    b = many([[[10, 10], [10, 11], [11, 10], [11, 11]],
              [[0, 0], [0, 1], [1, 0], [1, 1]]])
    assert centers(a, b) == (1.0, 1.0)
Example 29
def overlay(model, image=None, compare=None, threshold=inf, correct=False):
    """
    Overlay regions onto reference image, with optional comparison regions.

    Parameters
    ----------
    model : ExtractionModel
        Model containing the regions to overlay.

    image : array-like, optional, default = None
        Base image; a 2d array can be provided.
        If unspecified, the background will be black.

    compare : ExtractionModel, optional, default = None
        Model whose regions are compared against, if provided.

    threshold : float, optional, default = inf
        Distance threshold for matching sources.

    correct : bool, optional, default = False
        If True and a comparison model is given, only correctly matched regions are shown.
    """

    if image is not None:
        if image.max() > 1:
            im = norm(image)
        else:
            im = image
        size = im.shape
    else:
        size = (max([r.bbox[2] for r in model.regions]) + 1,
                max([r.bbox[3] for r in model.regions]) + 1)
        if compare is not None:
            sizeCompare = (max([r.bbox[2] for r in compare.regions]) + 1,
                           max([r.bbox[3] for r in compare.regions]) + 1)
            size = (maximum(size[0],
                            sizeCompare[0]), maximum(size[1], sizeCompare[1]))
        im = full(size, 0.0)

    if compare is not None:
        matches = match(model.regions, compare.regions, threshold)
        matchesCompare = full(compare.regions.count, nan)

        for ii in where(~isnan(matches))[0]:
            matchesCompare[matches[ii]] = ii

        if any(~isnan(matches)):
            hits = many([model.regions[i] for i in where(~isnan(matches))[0]])
            h = hits.mask(size,
                          background='black',
                          fill=None,
                          stroke=[0, 0.7, 0])
        else:
            h = full((size[0], size[1], 3), 0.0)
        if any(isnan(matches)):
            falseAlarms = many(
                [model.regions[i] for i in where(isnan(matches))[0]])
            fA = falseAlarms.mask(size,
                                  background='black',
                                  fill=None,
                                  stroke=[0.7, 0, 0])
        else:
            fA = full((size[0], size[1], 3), 0.0)
        if any(~isnan(matchesCompare)):
            truePositives = many(
                [compare.regions[i] for i in where(~isnan(matchesCompare))[0]])
            tP = truePositives.mask(size,
                                    background='black',
                                    fill=None,
                                    stroke=[0, 0, 0.7])
        else:
            tP = full((size[0], size[1], 3), 0.0)
        if any(isnan(matchesCompare)):
            misses = many(
                [compare.regions[i] for i in where(isnan(matchesCompare))[0]])
            m = misses.mask(size,
                            background='black',
                            fill=None,
                            stroke=[0.7, 0.7, 0])
        else:
            m = full((size[0], size[1], 3), 0.0)
        if correct:
            mask = maximum(tP, h)
        else:
            mask = maximum(maximum(maximum(tP, fA), h), m)
    else:
        mask = model.regions.mask(size,
                                  background='black',
                                  fill=None,
                                  stroke=[0, 0.7, 0])

    base = tile(im, (3, 1, 1)).transpose(1, 2, 0)
    return maximum(base, mask)
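A usage sketch, assuming ExtractionModel, match, and the numpy helpers imported by this module are already in scope (coordinates and threshold are illustrative):

from regional import many

a = ExtractionModel(many([[[0, 0], [0, 1]], [[5, 5], [5, 6]]]))
b = ExtractionModel(many([[[0, 0], [0, 1]], [[9, 9], [9, 10]]]))
im = overlay(a, compare=b, threshold=5)
# im is an RGB array: green strokes mark matched model regions, red marks false alarms,
# blue marks matched comparison regions, yellow marks missed comparison regions
assert im.ndim == 3 and im.shape[2] == 3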
Example 30
# produce fake calcium imaging data

from showit import image
import matplotlib.pyplot as plot
from regional import many

from fakearray import calcium_imaging

data, series, truth = calcium_imaging(shape=(100,300), n=5, t=50, seed=42, noise=0.5, withparams=True)
base = data.mean().toarray()
image(base);
plot.show()
image(many(truth).mask(dims=data.shape[1:], cmap='rainbow', stroke='black', base=base));
plot.show()
Example 31
def test_bbox():
	coords = [[0, 0], [0, 2], [2, 0], [1, 1], [2, 2]]
	truth = [0, 0, 2, 2]
	r = many([coords, coords])
	assert allclose(r.bbox, [truth, truth])
Example 32
def test_extent():
	coords = [[0, 0], [0, 2], [2, 0], [1, 1], [2, 2]]
	r = many([coords, coords])
	assert allclose(r.extent, [[3, 3], [3, 3]])
Example 33
def test_hull():
	coords = [[0, 0], [0, 2], [2, 0], [1, 1], [2, 2]]
	truth = [[0, 0], [2, 0], [2, 2], [0, 2]]
	r = many([coords, coords])
	assert allclose(r.hull, [truth, truth])
Example 34
def test_overlap_too_many():
    a = many([[[0, 0], [0, 1]], [[10, 10], [10, 11]]])
    b = many([[[0, 0], [0, 1]], [[10, 10], [10, 11], [11, 10], [11, 12]]])
    assert shapes(a, b) == (1.0, 0.75)
Example 35
def test_center():
	coords = [[0, 0], [0, 1], [1, 0], [1, 1]]
	r = many([coords, coords])
	assert allclose(r.center, [[0.5, 0.5], [0.5, 0.5]])
Example 36
def test_index():
	coords = [[0, 0], [0, 1], [1, 0], [1, 1]]
	r = many([one(coords), one(coords)])
	assert allclose(r[0].coordinates, coords)
	assert allclose(r[int64(0)].coordinates, coords)
Example 37
def test_similarity_no_threshold():
    a = many([[[0, 0], [0, 1], [1, 0], [1, 1]],
              [[10, 10], [10, 11], [11, 10], [11, 11]]])
    b = many([[[0, 0], [0, 1], [1, 0], [1, 1]], [[30, 30], [31, 30], [31,
                                                                      31]]])
    assert centers(a, b) == (1.0, 1.0)
Example 38
def test_exclude():
	coords = [[0, 0], [0, 1], [1, 0], [1, 1]]
	truth = [[1, 0], [1, 1]]
	r = many([coords, coords]).exclude(one([[0, 0], [0, 1]]))
	assert allclose(r.coordinates, [truth, truth])
Example 39
def test_area():
	coords = [[0, 0], [0, 2], [2, 0], [2, 2]]
	r = many([coords, coords])
	assert r.area == [4, 4]
Example 40
def test_overlap_perfect_flipped():
    a = many([[[0, 0], [0, 1]], [[10, 10], [10, 11]]])
    b = many([[[10, 10], [10, 11]], [[0, 0], [0, 1]]])
    assert shapes(a, b) == (1.0, 1.0)
Example 41
def test_distance():
	coords = [[0, 0], [0, 2], [2, 0], [2, 2]]
	r = many([coords, coords])
	assert r.distance([1, 1]) == [0, 0]
Example 42
import matplotlib.pyplot as plot
import numpy as np

from showit import image
from regional import one, many
from fakearray import calcium_imaging

from cnmf import CNMF

data, series, truth = calcium_imaging(n=5,
                                      t=10,
                                      seed=42,
                                      noise=0.5,
                                      withparams=True)
base = data.mean(0)
image(base, size=10)
plot.show()

algorithm = CNMF(k=5, gSig=[4, 4], merge_thresh=0.8)

model, temporaldata = algorithm.fit(data)


def convert(array):
    r, c = np.where(array > 0.0)
    return one(zip(r, c))


regions = many([convert(model[:, :, i]) for i in range(model.shape[2])])

#show true solution
image(
    many(truth).mask(dims=data.shape[1:],
                     cmap='rainbow',
                     stroke='black',
                     base=base))
plot.show()

#show algorithm solution
masks = regions.mask(cmap_stroke='rainbow',
                     fill=None,
                     base=base.clip(0, 4000) / 4000)
image(masks, size=14)
plot.show()
Example 43
def test_merge():
	coords = [[0, 0], [0, 2], [2, 0], [2, 2]]
	truth = coords + [[1,1]]
	r = many([coords, coords]).merge([1, 1])
	assert allclose(r.coordinates, [truth, truth])
Example 44
import matplotlib.pyplot as plot
import numpy as np

from showit import image
from regional import one, many
from fakearray import calcium_imaging

from cnmf import CNMF



data, series, truth = calcium_imaging(n=5, t=10, seed=42, noise=0.5, withparams=True)
base = data.mean(0)
image(base, size=10);
plot.show()

algorithm = CNMF( k=5, gSig=[4,4], merge_thresh=0.8)

model,temporaldata = algorithm.fit(data)

def convert(array):
    r,c = np.where(array > 0.0)
    return one(zip(r,c))

regions = many([convert(model[:,:,i]) for i in range(model.shape[2])])

#show true solution
image(many(truth).mask(dims=data.shape[1:], cmap='rainbow', stroke='black', base=base));
plot.show()

#show algorithm solution
masks = regions.mask(cmap_stroke='rainbow', fill=None, base=base.clip(0,4000) / 4000)
image(masks, size=14);
plot.show()
Example 45
def test_crop():
	coords = [[0, 0], [0, 2], [2, 0], [2, 2]]
	truth = [[0, 0]]
	r = many([coords, coords]).crop([0, 0], [1, 1])
	assert allclose(r.coordinates, [truth, truth])
Example 46
 def load(path):
     with open(path, 'r') as f:
         raw = json.load(f)
     regions = many([x['coordinates'] for x in raw['regions']])
     return ExtractionModel(regions)
Example 47
def test_inbounds():
	coords = [[1, 1], [1, 2], [2, 1], [2, 2]]
	v = many([coords, coords]).inbounds([0, 0], [3, 3])
	assert v == [True, True]
Example 48
def test_overlap_too_few():
    a = many([[[0, 0], [0, 1], [1, 0], [1, 1]],
              [[10, 10], [10, 11], [11, 10], [11, 11]]])
    b = many([[[0, 0], [0, 1], [1, 0], [1, 1]], [[10, 10], [11, 11]]])
    assert shapes(a, b) == (0.75, 1.0)
Example 49
def test_outline():
	coords = [[1, 1]]
	truth = [[0, 0], [0, 1], [0, 2], [1, 0], [1, 2], [2, 0], [2, 1], [2, 2]]
	r = many([coords, coords]).outline(0, 1)
	assert allclose(r.coordinates, [truth, truth])
Example 50
def test_mask_colormap():
	r = many([one([0, 0]), one([1, 1])])
	im = r.mask(cmap='gray', value=[0, 1], background='red')
	assert allclose(im[:,:,0], [[0, 1], [1, 1]])
	assert allclose(im[:,:,1], [[0, 0], [0, 1]])
	assert allclose(im[:,:,2], [[0, 0], [0, 1]])
Example 51
def test_similarity():
    a = many([[[0, 0], [0, 1], [1, 0], [1, 1]],
              [[10, 10], [10, 11], [11, 10], [11, 11]]])
    b = many([[[0, 0], [0, 1], [1, 0], [1, 1]], [[30, 30], [31, 30], [31,
                                                                      31]]])
    assert centers(a, b, threshold=5) == (0.5, 0.5)