def _computeGaussianSmoothing(self, vol, sigma, roi, in2d):
        if WITH_FAST_FILTERS:
            # Use fast filters (if available)
            result = numpy.zeros(vol.shape).astype(vol.dtype)
            assert vol.channelIndex == 0

            for channel in range(vol.shape[0]):
                c_slice = slice(channel, channel + 1)
                if in2d:
                    for z in range(vol.shape[1]):
                        result[c_slice, z : z + 1] = fastfilters.gaussianSmoothing(
                            vol[c_slice, z : z + 1], sigma, window_size=self.WINDOW_SIZE
                        )
                else:
                    result[c_slice] = fastfilters.gaussianSmoothing(vol[c_slice], sigma, window_size=self.WINDOW_SIZE)

            roi = roiToSlice(*roi)
            return result[roi]
        else:
            # Use Vigra's filters
            if in2d:
                sigma = (0, sigma, sigma)

            # vigra's filter functions need roi without channels axis
            vigra_roi = (roi[0][1:], roi[1][1:])
            return vigra.filters.gaussianSmoothing(vol, sigma, roi=vigra_roi, window_size=self.WINDOW_SIZE)
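In the fast-filters branch above, roiToSlice comes from the surrounding project rather than from this snippet; its job is presumably to turn a (start, stop) ROI pair into a tuple of slices so that result[roi] crops the filtered volume back to the requested region. A minimal stand-in under that assumption:

import numpy

def roiToSlice(start, stop):
    # Hypothetical stand-in: convert a (start, stop) ROI pair into a tuple of slices.
    return tuple(slice(b, e) for b, e in zip(start, stop))

# e.g. crop a filtered (c, z, y, x) result back to the requested region
result = numpy.zeros((1, 10, 64, 64), dtype=numpy.float32)
roi = ((0, 0, 0, 0), (1, 10, 32, 32))
cropped = result[roiToSlice(*roi)]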
Example #3
        def pmapToHeightMap(pmap):

            r = int(min(3.0 * scale, 1.0) + 0.5)
            footprint, origin = makeBall(r=r)

            blurredSmall = fastfilters.gaussianSmoothing(pmap, 1.0 * scale)
            blurredLarge = fastfilters.gaussianSmoothing(pmap, 6.0 * scale)
            blurredSuperLarge = fastfilters.gaussianSmoothing(pmap, 10.0 * scale)

            combined = pmap + blurredSuperLarge * 0.3 + 0.15 * blurredLarge + 0.1 * blurredSmall

            r = int(min(5.0 * scale, 1.0) + 0.5)
            footprint, origin = makeBall(r=r)

            combined = scipy.ndimage.percentile_filter(
                input=combined,
                #size=(20,20,20),
                footprint=footprint,
                #origin=origin,
                mode='reflect',
                percentile=50.0)

            if False:
                nifty.viewer.view3D(pmap, show=False, title='pm', cmap='gray')
                nifty.viewer.view3D(medianImg, show=False, title='medianImg', cmap='gray')
                nifty.viewer.view3D(combined, show=False, title='combined', cmap='gray')
                pylab.show()

            return combined
Example #4
def distance_transform_watershed(input_,
                                 threshold,
                                 sigma_seeds,
                                 sigma_weights=2.,
                                 min_size=100,
                                 alpha=.9,
                                 pixel_pitch=None,
                                 apply_nonmax_suppression=False):
    """ Compute watershed segmentation based on distance transform seeds.

    Following the procedure outlined in "Multicut brings automated neurite segmentation closer to human performance":
    https://hci.iwr.uni-heidelberg.de/sites/default/files/publications/files/217205318/beier_17_multicut.pdf

    Arguments:
        input_ [np.ndarray] - input height map.
        threshold [float] - value for the threshold applied before the distance transform.
        sigma_seeds [float] - smoothing factor for the watershed seed map.
        sigma_weights [float] - smoothing factor for the watershed weight map (default: 2).
        min_size [int] - minimal size of watershed segments (default: 100).
        alpha [float] - alpha used to blend input_ and distance_transform in order to obtain the
            watershed weight map (default: .9).
        pixel_pitch [listlike[int]] - anisotropy factor used to compute the distance transform (default: None).
        apply_nonmax_suppression [bool] - whether to apply non-maximum suppression to filter out seeds.
            Needs nifty. (default: False)
    """
    if apply_nonmax_suppression and nonMaximumDistanceSuppression is None:
        raise ValueError(
            "Non-maximum suppression is only available with nifty.")

    # threshold the input and compute distance transform
    thresholded = (input_ > threshold).astype('uint32')
    dt = vigra.filters.distanceTransform(thresholded, pixel_pitch=pixel_pitch)

    # compute seeds from maxima of the (smoothed) distance transform
    if sigma_seeds > 0:
        dt = ff.gaussianSmoothing(dt, sigma_seeds)
    compute_maxima = vigra.analysis.localMaxima if dt.ndim == 2 else vigra.analysis.localMaxima3D
    seeds = compute_maxima(dt,
                           marker=np.nan,
                           allowAtBorder=True,
                           allowPlateaus=True)
    seeds = np.isnan(seeds)
    if apply_nonmax_suppression:
        seeds = non_maximum_suppression(dt, seeds)
    seeds = vigra.analysis.labelMultiArrayWithBackground(seeds.view('uint8'))

    # normalize and invert distance transform
    dt = 1. - (dt - dt.min()) / dt.max()

    # compute weights from input and distance transform
    if sigma_weights > 0.:
        hmap = alpha * ff.gaussianSmoothing(input_,
                                            sigma_weights) + (1. - alpha) * dt
    else:
        hmap = alpha * input_ + (1. - alpha) * dt

    # compute watershed
    ws, max_id = watershed(hmap, seeds, size_filter=min_size)
    return ws, max_id
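A minimal usage sketch for distance_transform_watershed, assuming the function and the helpers it relies on (watershed, ff, vigra) are importable from the surrounding module; the input array and all parameter values below are illustrative only, not tuned:

import numpy as np

# stand-in for a real boundary-probability / height map
boundary_pmap = np.random.rand(64, 64, 64).astype('float32')

ws, max_id = distance_transform_watershed(boundary_pmap,
                                          threshold=0.25,
                                          sigma_seeds=2.0,
                                          sigma_weights=2.0,
                                          min_size=50)
print(ws.shape, max_id)  # label volume and the highest segment id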
Example #5
    def pmapToHeightMap(pmap):

        footprint, origin = makeBall(r=3)

        medianImg = scipy.ndimage.percentile_filter(
            input=pmap,
            #size=(20,20,20),
            footprint=footprint,
            #origin=origin,
            mode='reflect',
            percentile=50.0)
        if False:
            blurredSmall = vigra.gaussianSmoothing(
                pmap.T,
                1.0,
            ).T
            blurredLarge = vigra.gaussianSmoothing(
                pmap.T,
                6.0,
            ).T
            blurredSuperLarge = vigra.gaussianSmoothing(
                pmap.T,
                10.0,
            ).T

        else:
            blurredSmall = fastfilters.gaussianSmoothing(
                pmap,
                1.0,
            )
            blurredLarge = fastfilters.gaussianSmoothing(
                pmap,
                6.0,
            )
            blurredSuperLarge = fastfilters.gaussianSmoothing(
                pmap,
                10.0,
            )

        combined = medianImg + blurredSuperLarge * 0.3 + 0.15 * blurredLarge + 0.1 * blurredSmall

        footprint, origin = makeBall(r=3)
        combined = scipy.ndimage.percentile_filter(
            input=combined,
            #size=(20,20,20),
            footprint=footprint,
            #origin=origin,
            mode='reflect',
            percentile=50.0)

        combined = fastfilters.gaussianSmoothing(combined, 1.3)

        #if False:
        #    nifty.viewer.view3D(pmap, show=False, title='pm',cmap='gray')
        #    nifty.viewer.view3D(medianImg, show=False, title='medianImg',cmap='gray')
        #    nifty.viewer.view3D(combined, show=False, title='combined',cmap='gray')
        #    pylab.show()

        return combined
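The makeBall helper used by pmapToHeightMap is not part of these snippets. Since its result is passed to scipy.ndimage.percentile_filter as footprint (with the origin argument commented out), a plausible minimal stand-in is a spherical boolean footprint; the exact origin convention is an assumption:

import numpy as np

def makeBall(r):
    # Hypothetical stand-in: boolean ball of radius r for use as a filter footprint.
    zz, yy, xx = np.mgrid[-r:r + 1, -r:r + 1, -r:r + 1]
    footprint = (zz ** 2 + yy ** 2 + xx ** 2) <= r ** 2
    origin = (0, 0, 0)  # 0 means "centered" for scipy.ndimage filters
    return footprint, origin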
Example #6
 def filter_fn(self, source_raw: numpy.ndarray) -> numpy.ndarray:
     a = fastfilters.gaussianSmoothing(source_raw,
                                       sigma=self.sigma0,
                                       window_size=self.window_size)
     b = fastfilters.gaussianSmoothing(source_raw,
                                       sigma=self.sigma1,
                                       window_size=self.window_size)
     return a - b
Example #7
 def filter_fn(
     self, source_raw: "ndarray[Any, dtype[float32]]"
 ) -> "ndarray[Any, dtype[float32]]":
     a = fastfilters.gaussianSmoothing(source_raw,
                                       sigma=self.sigma0,
                                       window_size=self.window_size)
     b = fastfilters.gaussianSmoothing(source_raw,
                                       sigma=self.sigma1,
                                       window_size=self.window_size)
     return a - b
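Both filter_fn variants above compute a difference of Gaussians (DoG): the narrow smoothing minus the wide smoothing acts as a band-pass filter. A short, self-contained sketch with illustrative sigma values (the input is cast to float32, as in the other examples):

import numpy as np
import fastfilters

img = np.random.rand(128, 128).astype(np.float32)
dog = fastfilters.gaussianSmoothing(img, 1.0) - fastfilters.gaussianSmoothing(img, 1.6)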
Example #8
    def __call__(self, raw):
        f = fastfilters.gaussianSmoothing(raw, self.sigma)

        f -= self.mi
        f /= (self.ma - self.mi)

        return f[:, :, :, None]
def test_border_bug():
    a = np.ones((150,150), dtype=np.float32)
    a[:] = np.nan
    a[25:125, 25:125] = 0

    for _ in range(10):
        res = ff.gaussianSmoothing(a[25:125, 25:125], np.sqrt(99), window_size=10)
        ok_(np.all(res == 0))
Example #10
 def __init__(self, raw, sigma):
     self.sigma = sigma
     f = fastfilters.gaussianSmoothing(raw, self.sigma)
     self.mi = numpy.min(f)
     self.ma = numpy.max(f)
     self.range = (self.ma - self.mi) / tolFactor
     self.mi -= self.range
     self.ma += self.range
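The __call__ in Example #8 and the __init__ in Example #10 look like two methods of the same normalization feature; a self-contained sketch combining them (the class name and the tolFactor value are assumptions, since tolFactor is referenced but never defined in these snippets):

import numpy
import fastfilters

tolFactor = 10.0  # assumed value; only its role as a tolerance divisor is visible above

class NormalizedGaussianFeature:
    def __init__(self, raw, sigma):
        self.sigma = sigma
        f = fastfilters.gaussianSmoothing(raw, self.sigma)
        self.mi = numpy.min(f)
        self.ma = numpy.max(f)
        self.range = (self.ma - self.mi) / tolFactor
        self.mi -= self.range
        self.ma += self.range

    def __call__(self, raw):
        # normalize the smoothed response into roughly [0, 1] and add a channel axis
        f = fastfilters.gaussianSmoothing(raw, self.sigma)
        f -= self.mi
        f /= (self.ma - self.mi)
        return f[:, :, :, None]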
Example #11
 def _computeGaussianSmoothing(self, vol, sigma, roi):
     if WITH_FAST_FILTERS:
         # Use fast filters (if available)
         if vol.channels > 1:
             result = numpy.zeros(vol.shape).astype(vol.dtype)
             chInd = vol.channelIndex
             chSlice = [slice(None) for dim in range(len(vol.shape))]

             for channel in range(vol.channels):
                 chSlice[chInd] = slice(channel, channel + 1)
                 # numpy requires a tuple (not a list) of slices for multi-axis indexing
                 result[tuple(chSlice)] = fastfilters.gaussianSmoothing(vol[tuple(chSlice)], sigma, window_size=self.WINDOW_SIZE)
         else:
             result = fastfilters.gaussianSmoothing(vol, sigma, window_size=self.WINDOW_SIZE)

         roi = roiToSlice(*roi)
         return result[roi]
     else:
         # Use Vigra's filters
         return vigra.filters.gaussianSmoothing(vol, sigma, roi=roi, window_size=self.WINDOW_SIZE)
Example #12
    def __call__(self, dataIn, slicing, featureArray):
        fIndex = 0
        dataIn = numpy.require(dataIn,'float32').squeeze()

        dataWithChannel = extractChannels(dataIn, self.usedChannels)

        slicingEv = slicing + [slice(0,2)]

        for c in range(dataWithChannel.shape[2]):

            data = dataWithChannel[:,:,c]

            # pre-smoothed
            sigmaPre = self.sigmas[0]/2.0
            preS = fastfilters.gaussianSmoothing(data, sigmaPre)

            for sigma in self.sigmas:

                neededScale = getScale(target=sigma, presmoothed=sigmaPre)
                preS = fastfilters.gaussianSmoothing(preS, neededScale)
                sigmaPre = sigma

                featureArray[:,:,fIndex] = preS[slicing]
                fIndex += 1

                featureArray[:,:,fIndex] = fastfilters.laplacianOfGaussian(preS, neededScale)[slicing]
                fIndex += 1

                featureArray[:,:,fIndex] = fastfilters.gaussianGradientMagnitude(preS, neededScale)[slicing]
                fIndex += 1


                featureArray[:,:,fIndex:fIndex+2] = fastfilters.hessianOfGaussianEigenvalues(preS, neededScale)[slicingEv]
                fIndex += 2

                
                #print("array shape",featureArray[:,:,:,fIndex:fIndex+3].shape)
                feat = fastfilters.structureTensorEigenvalues(preS, float(sigma)*0.3, float(sigma)*0.7)[slicingEv]
                #print("feat  shape",feat.shape)
                featureArray[:,:,fIndex:fIndex+2] = feat
                fIndex += 2

        assert fIndex == self.n_features
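getScale is not defined in this snippet, but from how it is used (its result is applied to the already pre-smoothed image preS to reach the target sigma) it presumably exploits the fact that Gaussian scales add in quadrature: smoothing with sigma_a and then sigma_b equals one smoothing with sqrt(sigma_a**2 + sigma_b**2). A minimal stand-in under that assumption:

import math

def getScale(target, presmoothed):
    # Hypothetical stand-in: incremental sigma needed so that an image already smoothed
    # with `presmoothed` ends up smoothed with `target` in total.
    return math.sqrt(target ** 2 - presmoothed ** 2)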
Example #14
    def pmapToHeightMap(pmap):

        footprint, origin = makeBall(r=3)

        medianImg = scipy.ndimage.percentile_filter(
            input=pmap,
            #size=(20,20,20),
            footprint=footprint,
            #origin=origin,
            mode='reflect',
            percentile=50.0)

        if False:
            blurredSmall = vigra.gaussianSmoothing(pmap.T, 1.0).T
            blurredLarge = vigra.gaussianSmoothing(pmap.T, 6.0).T
            blurredSuperLarge = vigra.gaussianSmoothing(pmap.T, 10.0).T
        else:
            blurredSmall = fastfilters.gaussianSmoothing(pmap, 1.0)
            blurredLarge = fastfilters.gaussianSmoothing(pmap, 6.0)
            blurredSuperLarge = fastfilters.gaussianSmoothing(pmap, 10.0)

        combined = medianImg + blurredSuperLarge * 0.3 + 0.15 * blurredLarge + 0.1 * blurredSmall

        footprint, origin = makeBall(r=3)
        combined = scipy.ndimage.percentile_filter(
            input=combined,
            #size=(20,20,20),
            footprint=footprint,
            #origin=origin,
            mode='reflect',
            percentile=50.0)

        if False:
            nifty.viewer.view3D(pmap, show=False, title='pm', cmap='gray')
            nifty.viewer.view3D(medianImg, show=False, title='medianImg', cmap='gray')
            nifty.viewer.view3D(combined, show=False, title='combined', cmap='gray')
            pylab.show()

        return combined
Example #15
    def _computeGaussianSmoothing(self, vol, sigma, roi):
        invalid_z = vol.shape[1] == 1 or sigma < 0.3 or sigma * self.WINDOW_SIZE > vol.shape[1]
        if invalid_z and vol.shape[1] > 1 and not self.ComputeIn2d.value:
            logger.warning(
                f'PixelFeaturesPresmoothed: Pre-smoothing in 2d for sigma {sigma:.2f} (z dimension too small)'
            )

        if WITH_FAST_FILTERS:
            # Use fast filters (if available)
            result = numpy.zeros(vol.shape).astype(vol.dtype)
            assert vol.channelIndex == 0

            for channel in range(vol.shape[0]):
                c_slice = slice(channel, channel + 1)
                if self.ComputeIn2d.value or invalid_z:

                    for z in range(vol.shape[1]):
                        result[c_slice,
                               z:z + 1] = fastfilters.gaussianSmoothing(
                                   vol[c_slice, z:z + 1],
                                   sigma,
                                   window_size=self.WINDOW_SIZE)
                else:
                    result[c_slice] = fastfilters.gaussianSmoothing(
                        vol[c_slice], sigma, window_size=self.WINDOW_SIZE)

            roi = roiToSlice(*roi)
            return result[roi]
        else:
            # Use Vigra's filters
            if invalid_z or self.ComputeIn2d.value:
                sigma = (0, sigma, sigma)

            # vigra's filter functions need roi without channels axis
            vigra_roi = (roi[0][1:], roi[1][1:])
            return vigra.filters.gaussianSmoothing(
                vol, sigma, roi=vigra_roi, window_size=self.WINDOW_SIZE)
Example #16
        def pmapToHeightMap(pmap):

            r = int(min(3.0 * scale, 1.0) + 0.5)
            footprint, origin = makeBall(r=r)

            blurredSmall = fastfilters.gaussianSmoothing(pmap, 1.0 * scale)
            blurredLarge = fastfilters.gaussianSmoothing(pmap, 6.0 * scale)
            blurredSuperLarge = fastfilters.gaussianSmoothing(
                pmap, 10.0 * scale)

            combined = pmap + blurredSuperLarge * 0.3 + 0.15 * blurredLarge + 0.1 * blurredSmall

            r = int(min(5.0 * scale, 1.0) + 0.5)
            footprint, origin = makeBall(r=r)

            combined = scipy.ndimage.percentile_filter(
                input=combined,
                #size=(20,20,20),
                footprint=footprint,
                #origin=origin,
                mode='reflect',
                percentile=50.0)

            if False:
                nifty.viewer.view3D(pmap, show=False, title='pm', cmap='gray')
                nifty.viewer.view3D(medianImg,
                                    show=False,
                                    title='medianImg',
                                    cmap='gray')
                nifty.viewer.view3D(combined,
                                    show=False,
                                    title='combined',
                                    cmap='gray')
                pylab.show()

            return combined
Example #17
    def _apply_coord(old_coord):
        coord = transform_coordinate(old_coord)

        # range check
        if any(co < 0 or co >= maxr for co, maxr in zip(coord, max_range)):
            return

        # get the coordinates to iterate over and the interpolation weights
        coords, weights = interpolate_coordinates(coord)

        # iterate over coordinates and compute the output value
        val = 0.
        for coord, weight in zip(coords, weights):
            chunk_id = blocking.coordinatesToBlockId(list(coord))
            chunk, offset = chunk_cache.get(chunk_id, (None, None))
            if chunk is None:

                chunk_pos = blocking.blockGridPosition(chunk_id)
                if sigma is None:
                    block = blocking.getBlock(chunk_id)
                    chunk_bb = tuple(
                        slice(beg, end)
                        for beg, end in zip(block.begin, block.end))
                else:
                    block = blocking.getBlockWithHalo(chunk_id, list(halo))
                    chunk_bb = tuple(
                        slice(beg, end) for beg, end in zip(
                            block.outerBlock.begin, block.outerBlock.end))

                chunk = data[chunk_bb]
                if sigma is not None:
                    chunk = ff.gaussianSmoothing(chunk, sigma)
                    inner_bb = tuple(
                        slice(beg, end)
                        for beg, end in zip(block.innerBlockLocal.begin,
                                            block.innerBlockLocal.end))
                    chunk = chunk[inner_bb]

                offset = [cp * ch for cp, ch in zip(chunk_pos, chunks)]
                chunk_cache[chunk_id] = (chunk, offset)

            chunk_coord = tuple(oc - of for oc, of in zip(coord, offset))
            val += weight * chunk[chunk_coord]

        out_coord = tuple(co - of for co, of in zip(old_coord, start))
        out[out_coord] = val
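interpolate_coordinates is provided by the surrounding module; judging from its use above, it returns the integer neighbours of a fractional coordinate together with multilinear interpolation weights. A hypothetical stand-in (without the boundary clamping a production version would need):

import itertools

def interpolate_coordinates(coord):
    # Hypothetical stand-in: for a fractional nd coordinate, return its 2**ndim integer
    # neighbours and the corresponding multilinear (bi-/trilinear) weights.
    lower = [int(c) for c in coord]  # floor, assuming non-negative coordinates
    frac = [c - l for c, l in zip(coord, lower)]
    coords, weights = [], []
    for offsets in itertools.product((0, 1), repeat=len(coord)):
        coords.append(tuple(l + o for l, o in zip(lower, offsets)))
        w = 1.0
        for f, o in zip(frac, offsets):
            w *= f if o else (1.0 - f)
        weights.append(w)
    return coords, weights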
Example #18
 def differenceOfGausssiansFF(image, sigma0, sigma1, window_size):
     return fastfilters.gaussianSmoothing(image, sigma0, window_size) - fastfilters.gaussianSmoothing(
         image, sigma1, window_size
     )
Example #19
def gaussian(a, *, scale):
    return fastfilters.gaussianSmoothing(a, scale)
Example #20
import fastfilters as ff
import numpy as np

a = np.ones((150, 150), dtype=np.float32)
a[:] = np.nan
a[25:125, 25:125] = 1

for _ in range(10):
    res = ff.gaussianSmoothing(a[25:125, 25:125], np.sqrt(99), window_size=5)
    assert (np.any(np.isnan(res)) == False)
    assert (np.any(np.where(res < -1000)) == False)