Example #1
 def apply_learning(self, smap, vector, bmu, radius, rate, func, params, batchlearn=False):
     toric, shape = params['toric'], params['shape']
     if numpy.ma.isMaskedArray(vector):
         vector = vector.filled(0)
     if toric:
         bigshape = tuple(map(lambda x: 3*x, shape))
         midselect = tuple([ slice(s, 2*s) for s in shape ])
         features = numpy.ones(bigshape)
         copy_coord = lambda p, s: tuple([p+i*s for i in range(3)])
         all_coords = [ copy_coord(coord, s) for coord, s in zip(bmu, shape) ]
         for p in itertools.product(*all_coords):
             features[p] = 0
         distance = distance_transform_edt(features)[midselect]
     else:
         features = numpy.ones(shape)
         features[bmu] = 0
         distance = distance_transform_edt(features)
     #radmap = numpy.exp( -sqdistance / (2.*radius)**2 )
     radmap = func(distance, rate, radius)
     if not batchlearn:
         adjmap = (smap - vector) * radmap[..., None]
         smap -= adjmap
     else:
         adjmap = radmap[..., None]
         return adjmap
Example #2
def calc_bwdist(data):
    """
    Given data (binary), return distance function
    :param data:
    :return:
    """
    return 0.5 * (distance_transform_edt(data == 0) + distance_transform_edt(data == 1)) - 0.5
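For reference, a minimal self-contained check of what this returns on a tiny hand-made array (the array below is illustrative, not from the original project): averaging the two one-sided transforms and subtracting 0.5 gives a map that measures the distance to the 0/1 interface symmetrically from both sides.

import numpy as np
from scipy.ndimage import distance_transform_edt

data = np.array([[0, 0, 1, 1],
                 [0, 0, 1, 1]])
# 0.5 * (EDT of the background + EDT of the foreground) - 0.5
d = 0.5 * (distance_transform_edt(data == 0) + distance_transform_edt(data == 1)) - 0.5
print(d)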
Example #3
def distance_trend(times, frames, threshold=25, min_size=12):
	nf = len(frames)
	zt = filters.gaussian_filter(frames,1.5) > threshold
	d_outer = np.zeros((nf,) + frames[0].shape)
	d_inner = np.zeros((nf,) + frames[0].shape)

	for i in range(nf):
		skmorph.remove_small_objects(zt, min_size=min_size, in_place=True)
		d_outer[i] = morphology.distance_transform_edt(np.invert(zt[i]))
		d_inner[i] = morphology.distance_transform_edt(zt[i])

	return d_outer, d_inner
Example #4
def spread_labels(labels,maxdist=9999999):
    """Spread the given labels to the background"""
    distances,features = morphology.distance_transform_edt(labels==0,return_distances=1,return_indices=1)
    indexes = features[0]*labels.shape[1]+features[1]
    spread = labels.ravel()[indexes.ravel()].reshape(*labels.shape)
    spread *= (distances<maxdist)
    return spread
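A self-contained sketch of the same nearest-label propagation, with an invented label image and plain coordinate indexing instead of the flattened indexing above; distances is the array the snippet compares against maxdist.

import numpy as np
from scipy.ndimage import distance_transform_edt

labels = np.array([[1, 0, 0, 2],
                   [0, 0, 0, 0]])
distances, (iy, ix) = distance_transform_edt(labels == 0,
                                             return_distances=True,
                                             return_indices=True)
spread = labels[iy, ix]     # every background pixel takes its nearest label
print(spread)
# [[1 1 2 2]
#  [1 1 2 2]]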
Example #5
def InitialGuess(y, I):
    #-- nearest neighbor interpolation (in case of missing values)
    if np.any(~I):
        try:
            from scipy.ndimage.morphology import distance_transform_edt
            #if license('test','image_toolbox')
            #[z,L] = bwdist(I);
            # ask the distance transform for the indices of the nearest
            # non-missing neighbour and use them to fill the gaps
            _, L = distance_transform_edt(1 - I, return_indices=True)
            z = y
            z[~I] = y[tuple(ind[~I] for ind in L)]
        except ImportError:
            # If the distance transform is unavailable, NaN values are all
            # replaced with the same scalar. The initial guess is not optimal
            # and a warning message thus appears.
            z = y
            z[~I] = np.mean(y[I])
    else:
        z = y
    # coarse fast smoothing: keep only the lowest ~1/10 of the DCT coefficients
    z = dctND(z, f=dct)
    k = np.array(z.shape)
    m = np.ceil(k / 10) + 1
    for i in range(len(k)):
        index = [slice(None)] * len(k)
        index[i] = slice(int(m[i]), None)
        z[tuple(index)] = 0.
    z = dctND(z, f=idct)
    return z
Example #6
 def _set_distance_transform(self):
     """ Compute the Euclidean distance transform of the valid to the
         invalid area.
         Stores the result in self._px_to_edge
     """
     if self._px_to_edge is None:
         self._px_to_edge = morphology.distance_transform_edt(self._mask > 0)
Example #7
def distMapUpdate(distMap, binaryImg, placedCircle):
    "Update a part of the distance map"

    radiusCirc = (placedCircle.radius).astype(np.int16)
    circCenter = placedCircle.center
    lowerLeftCoordX = circCenter[0] - 2* radiusCirc
    lowerLeftCoordY = circCenter[1] - 2* radiusCirc
    mapToUpdate = np.ones((radiusCirc*4,radiusCirc*4))
    #print lowerLeftCoordX, lowerLeftCoordY
    #print radiusCirc
    #print mapToUpdate.shape
    
    lowerLeftCoordX,lowerLeftCoordY,limitX,limitY = ConsiderLimitsFrame(lowerLeftCoordX,lowerLeftCoordY,radiusCirc,binaryImg)
    
    mapToUpdate = np.ones((limitX,limitY))

    for i in range(0,limitX):
        for j in range(0,limitY):
            mapToUpdate[i,j] = binaryImg [lowerLeftCoordX+i,lowerLeftCoordY+j]
    
    distMapUpdate = distance_transform_edt(mapToUpdate)
    
    for i in range(limitX):
        for j in range(limitY):
            distMap[lowerLeftCoordX+i,lowerLeftCoordY+j] = distMapUpdate [i,j] 
    
    return distMap
Example #8
def __surface_distances(input1, input2, voxelspacing=None, connectivity=1):
    """
    The distances between the surface voxel of binary objects in input1 and their
    nearest partner surface voxel of a binary object in input2.
    """
    input1 = numpy.atleast_1d(input1.astype(bool))
    input2 = numpy.atleast_1d(input2.astype(bool))
    if voxelspacing is not None:
        voxelspacing = _ni_support._normalize_sequence(voxelspacing, input1.ndim)
        voxelspacing = numpy.asarray(voxelspacing, dtype=numpy.float64)
        if not voxelspacing.flags.contiguous:
            voxelspacing = voxelspacing.copy()
            
    # binary structure
    footprint = generate_binary_structure(input1.ndim, connectivity)
            
    # extract only 1-pixel border line of objects
    input1_border = input1 ^ binary_erosion(input1, structure=footprint, iterations=1)
    input2_border = input2 ^ binary_erosion(input2, structure=footprint, iterations=1)
    
    # compute average surface distance        
    # Note: scipys distance transform is calculated only inside the borders of the
    #       foreground objects, therefore the input has to be reversed
    dt = distance_transform_edt(~input2_border, sampling=voxelspacing)
    sds = dt[input1_border]
    
    return sds    
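A runnable sketch of the same surface-distance recipe on two invented toy masks: XOR each mask with its erosion to get a one-pixel border, then sample the distance transform of the inverted reference border at the other border.

import numpy as np
from scipy.ndimage import binary_erosion, distance_transform_edt, generate_binary_structure

a = np.zeros((16, 16), dtype=bool)
a[4:10, 4:10] = True
b = np.zeros((16, 16), dtype=bool)
b[5:12, 5:12] = True

footprint = generate_binary_structure(a.ndim, 1)
a_border = a ^ binary_erosion(a, structure=footprint)
b_border = b ^ binary_erosion(b, structure=footprint)

dt = distance_transform_edt(~b_border)
sds = dt[a_border]                # distances from a's surface to b's surface
print(sds.mean(), sds.max())      # mean surface distance, directed Hausdorff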
Example #9
def __surface_distances(result, reference, voxelspacing=None, connectivity=1):
    """
    The distances between the surface voxel of binary objects in result and their
    nearest partner surface voxel of a binary object in reference.
    """
    result = numpy.atleast_1d(result.astype(bool))
    reference = numpy.atleast_1d(reference.astype(bool))
    if voxelspacing is not None:
        voxelspacing = _ni_support._normalize_sequence(voxelspacing, result.ndim)
        voxelspacing = numpy.asarray(voxelspacing, dtype=numpy.float64)
        if not voxelspacing.flags.contiguous:
            voxelspacing = voxelspacing.copy()
            
    # binary structure
    footprint = generate_binary_structure(result.ndim, connectivity)
    
    # test for emptiness
    if 0 == numpy.count_nonzero(result): 
        raise RuntimeError('The first supplied array does not contain any binary object.')
    if 0 == numpy.count_nonzero(reference): 
        raise RuntimeError('The second supplied array does not contain any binary object.')    
            
    # extract only 1-pixel border line of objects
    result_border = result ^ binary_erosion(result, structure=footprint, iterations=1)
    reference_border = reference ^ binary_erosion(reference, structure=footprint, iterations=1)
    
    # compute average surface distance        
    # Note: scipys distance transform is calculated only inside the borders of the
    #       foreground objects, therefore the input has to be reversed
    dt = distance_transform_edt(~reference_border, sampling=voxelspacing)
    sds = dt[result_border]
    
    return sds
Example #10
    def split_object(self, labeled_image):
        """ split object when it's necessary
        """
        
        labeled_image = labeled_image.astype(np.uint16)

        labeled_mask = np.zeros_like(labeled_image, dtype=np.uint16)
        labeled_mask[labeled_image != 0] = 1

        # shift structuring element about center point. This only affects
        # eccentric structuring elements (i.e. selem with even-numbered sides)

        labeled_image = skr.median(labeled_image, skm.disk(4))
        labeled_mask = np.zeros_like(labeled_image, dtype=np.uint16)
        labeled_mask[labeled_image != 0] = 1
        distance = scipym.distance_transform_edt(labeled_image).astype(np.uint16)
        #=======================================================================
        # binary = np.zeros(np.shape(labeled_image))
        # binary[labeled_image > 0] = 1
        #=======================================================================
        distance = skr.mean(distance, skm.disk(15))
         
        l_max = skr.maximum(distance, skm.disk(5))
        #l_max = skf.peak_local_max(distance, indices=False,labels=labeled_image, footprint=np.ones((3,3)))
        l_max = l_max - distance <= 0
        
        l_max = skr.maximum(l_max.astype(np.uint8), skm.disk(6))
        
       
        
        marker = ndimage.label(l_max)[0]
        split_image = skm.watershed(-distance, marker)
        
        split_image[split_image[0,0] == split_image] = 0
        
        return split_image
Example #11
 def run(self, workspace):
     labeled_nuclei = workspace.object_set.get_objects(self.primary_objects.value).get_segmented()
     cell_image = workspace.image_set.get_image(self.image_name.value).pixel_data[:,:]
     image_collection = []
     cell_treshold = otsu(cell_image, min_threshold=0, max_threshold=1)
     
     cell_binary = (cell_image >= cell_treshold)
     cell_distance = scipym.distance_transform_edt(cell_binary).astype(np.uint16)
     cell_labeled = skm.watershed(-cell_distance, labeled_nuclei, mask=cell_binary)
     
      
     #
     # fill holes and filter on size the objects in cell_labeled
     #
     cell_labeled = self.filter_on_border(cell_labeled)
     cell_labeled = fill_labeled_holes(cell_labeled)
 
     objects = cellprofiler.objects.Objects()
     objects.segmented = cell_labeled
     objects.parent_image = cell_image
     
     workspace.object_set.add_objects(objects, self.object_name.value)        
     image_collection.append((cell_image, "Original"))
     image_collection.append((cell_labeled, "Labelized image"))
     workspace.display_data.image_collection = image_collection
Example #12
def _make_costgrid(mask, ext, z):
    """Computes a costgrid following Kienholz et al. (2014) Eq. (2)

    Parameters
    ----------
    mask : numpy.array
        The glacier mask.
    ext : numpy.array
        The glacier boundaries' mask.
    z : numpy.array
        The terrain height.

    Returns
    -------
    numpy.array of the costgrid
    """

    dis = np.where(mask, distance_transform_edt(mask), np.NaN)
    z = np.where(mask, z, np.NaN)

    dmax = np.nanmax(dis)
    zmax = np.nanmax(z)
    zmin = np.nanmin(z)
    cost = ((dmax - dis) / dmax * cfg.PARAMS['f1']) ** cfg.PARAMS['a'] + \
           ((z - zmin) / (zmax - zmin) * cfg.PARAMS['f2']) ** cfg.PARAMS['b']

    # This is new: we make the cost to go over boundaries
    # arbitrary high to avoid the lines to jump over adjacent boundaries
    cost[np.where(ext)] = np.nanmax(cost[np.where(ext)]) * 50

    return np.where(mask, cost, np.Inf)
Example #13
def medial(image):
    """Creates a medial axis transform image

    Args
    ----
    image: ndarray
        The image of which the medial axis is to be calculated.
        This should be a binary image.

    Returns
    -------
    dist: ndarray
        The Euclidean distance transform of the thresholded image.
    lap: ndarray
        The Laplacian of the distance transform.
    out: ndarray
        A boolean image with the medial axis pixels.
    """
    # Remove noise and make sure it's thresholded
    im = thresholds(image)

    # Calculate distance to all border pixels for each non-border pixel
    dist = distance_transform_edt(im)

    # Calculate laplacian
    lap = laplace(dist, mode="constant")

    # Select items that are maximums and therefore less than 0
    out = lap < 0

    # Mask all items that are outside the boundaries 
    # using the original image
    resultImage = np.logical_and(np.logical_not(out), im )

    return dist, lap, resultImage
Example #14
	def predict(self, times, frames, output_times):
		nout = len(output_times)
		nf = len(frames)

		last_idx = np.argmax(times)
		zt = frames[last_idx] < self.thresh
		skmorph.remove_small_objects(zt, min_size=self.min_size, in_place=True)
		d = morphology.distance_transform_edt(np.invert(zt))
		return np.tile(d, (nout,1,1))
Example #15
 def fun(batch):
     # Invert batch
     batch = 1. - batch
     # Merge batch and channel dimensions
     bshape = batch.shape
     batch = batch.reshape((bshape[0] * bshape[1], bshape[2], bshape[3]))
     # Distance transform by channel
     transbatch = np.array([np.exp(-gain * distance_transform_edt(img)) for img in batch])
     # Reshape batch and return
     return transbatch.reshape(bshape)
Example #16
def _extract_mask_distance(image, mask = slice(None), voxelspacing = None):
    """
    Internal, single-image version of `mask_distance`.
    """
    if isinstance(mask, slice):
        mask = numpy.ones(image.shape, bool)
    
    distance_map = distance_transform_edt(mask, sampling=voxelspacing)
    
    return _extract_intensities(distance_map, mask)
Example #17
def conspicuity_int_hist(im,
                         mask=None,
                         use_sigmoid=False,
                         morph_proc=True,
                         type='hypo',
                         a=3):
    if mask is None:
        mask = np.ones_like(im)

    class1, rv = tools.dominant_class(im, roi=mask)
    field = skimor.binary_closing(class1, selem=skimor.disk(3))
    field = skimor.binary_opening(field, selem=skimor.disk(3))
    field = (1 - field) * mask
    dist = scindimor.distance_transform_edt(field, return_distances=True)
    diff = abs(im - rv.mean())
    im_int = dist * diff

    # plt.figure()
    # i = cv2.resize(255*(class1 > 0).astype(np.uint8), (115, 215))
    # plt.imshow(i, 'gray'), plt.axis('off')
    # plt.show()

    # plt.figure()
    # plt.subplot(141), plt.imshow(cv2.resize(im, (115, 215)), 'gray'), plt.axis('off')
    # plt.subplot(142), plt.imshow(cv2.resize(dist, (115, 215)), 'jet'), plt.axis('off')
    # plt.subplot(143), plt.imshow(cv2.resize(diff, (115, 215)), 'jet'), plt.axis('off')
    # plt.subplot(144), plt.imshow(cv2.resize(im_int, (115, 215)), 'jet'), plt.axis('off')
    # plt.show()

    # im_int = skiexp.rescale_intensity(1 - rv.pdf(im), out_range=(0, 1))
    # plt.figure()
    # plt.subplot(121), plt.imshow(im_int, 'gray')
    # plt.subplot(122), plt.imshow(im_int2, 'gray'), plt.colorbar()
    # plt.show()
    mean_v = rv.mean()

    # plt.figure()
    # plt.subplot(141), plt.imshow(im, 'gray'), plt.title('input')
    # plt.subplot(142), plt.imshow(class1, 'gray'), plt.title('class1')
    # plt.subplot(143), plt.imshow(field, 'gray'), plt.title('field')
    # plt.subplot(144), plt.imshow(im_int, 'gray'), plt.title('dist')
    # plt.show()

    im_res = conspicuity_processing(im_int,
                                    mask,
                                    use_sigmoid=use_sigmoid,
                                    a=a,
                                    c=mean_v,
                                    sigm_t=0.2,
                                    use_morph=morph_proc,
                                    radius=3)

    return im_res
Example #18
def surface_dist(input1, input2, sampling=1, connectivity=1):
    input1 = np.squeeze(input1)
    input2 = np.squeeze(input2)

    input_1 = np.atleast_1d(input1.astype(bool))
    input_2 = np.atleast_1d(input2.astype(bool))

    conn = morphology.generate_binary_structure(input_1.ndim, connectivity)

    ##    S = input_1 - morphology.binary_erosion(input_1, conn)
    ##    Sprime = input_2 - morphology.binary_erosion(input_2, conn)

    S = np.bitwise_xor(input_1, morphology.binary_erosion(input_1, conn))
    Sprime = np.bitwise_xor(input_2, morphology.binary_erosion(input_2, conn))

    dta = morphology.distance_transform_edt(~S, sampling)
    dtb = morphology.distance_transform_edt(~Sprime, sampling)

    sds = np.concatenate([np.ravel(dta[Sprime != 0]), np.ravel(dtb[S != 0])])

    return sds
Example #19
def compute_surface_distance_per_label(y_1, y_2, sampling=1, connectivity=1):

    y1 = np.atleast_1d(y_1.astype(bool))
    y2 = np.atleast_1d(y_2.astype(bool))

    conn = morphology.generate_binary_structure(y1.ndim, connectivity)

    S1 = y1.astype(np.float32) - morphology.binary_erosion(y1, conn).astype(
        np.float32)
    S2 = y2.astype(np.float32) - morphology.binary_erosion(y2, conn).astype(
        np.float32)

    S1 = S1.astype(bool)
    S2 = S2.astype(bool)

    dta = morphology.distance_transform_edt(~S1, sampling)
    dtb = morphology.distance_transform_edt(~S2, sampling)

    sds = np.concatenate([np.ravel(dta[S2 != 0]), np.ravel(dtb[S1 != 0])])

    return sds
Example #20
def get_mask_and_dist(mask_paths):
    mask_paths = list(mask_paths)

    sample = np.array(Image.open(mask_paths[0]))
    mask = np.zeros_like(sample)
    dist = np.full(sample.shape, np.inf)

    for mask_path in mask_paths:
        mask_ = np.array(Image.open(mask_path))
        mask_ = np.where(mask_, 1, 0)

        dist_ = distance_transform_edt(mask_) + distance_transform_edt(1 - mask_)
        dist = np.dstack((dist, dist_))
        dist = np.amin(dist, axis=2)
        
        mask_d = binary_dilation(mask_, structure=generate_binary_structure(2, 2), iterations=2).astype(mask_.dtype)
        mask_ = mask_d - mask_
        mask = np.logical_or(mask_, mask)
        mask = np.where(mask, 1, 0)

    return mask, dist
Example #21
def spread_labels(labels, maxdist=9999999):
    """Spread the given labels to the background"""
    print("** spread_labels: labels=%s" % desc(labels))
    distances, features = morphology.distance_transform_edt(labels==0,
                                return_distances=1, return_indices=1)
    indexes = features[0]*labels.shape[1] + features[1]
    print("indexes=%s" % desc(indexes))
    spread = labels.ravel()[indexes.ravel()].reshape(*labels.shape)
    print("spread=%s" % desc(spread))
    spread *= (distances<maxdist)
    print("spread=%s" % desc(spread))
    return spread
Example #22
def prox_job(pa):
    pa_thres = 0.5    
    mask = pa > pa_thres
    t = time.time()
    dt= distance_transform_edt(1-mask)
    m = np.mean(dt)
    m = 50
    temp_mask = (dt > m)
    dt[np.logical_not(temp_mask)] /= float(m)
    dt[temp_mask] = 1
        
    return dt.flatten() / np.max(dt)
Example #23
def GenerateObstacleCostF(OBST, epsilon):
    obs_cost = np.zeros((N, N))
    for i in range(OBST.shape[0]):
        t = np.ones((N, N))
        t[OBST[i, 0], OBST[i, 1]] = 0
        t_cost = distance_transform_edt(t)
        t_cost[t_cost > epsilon[i]] = epsilon[i]
        t_cost = 1 / (2 * epsilon[i]) * (t_cost - epsilon[i])**2
        obs_cost = obs_cost + t_cost

    gx, gy = np.gradient(obs_cost)
    return obs_cost, gx, gy
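The function above reads N from an enclosing scope; the sketch below supplies made-up values for N, OBST and epsilon to show how each obstacle adds a quadratic cost that vanishes beyond its influence radius.

import numpy as np
from scipy.ndimage import distance_transform_edt

N = 20
OBST = np.array([[5, 5], [14, 12]])      # obstacle cells (row, col)
epsilon = np.array([4.0, 6.0])           # influence radius per obstacle

obs_cost = np.zeros((N, N))
for i in range(OBST.shape[0]):
    t = np.ones((N, N))
    t[OBST[i, 0], OBST[i, 1]] = 0
    t_cost = distance_transform_edt(t)          # distance to this obstacle
    t_cost = np.minimum(t_cost, epsilon[i])     # saturate outside the radius
    obs_cost += (t_cost - epsilon[i]) ** 2 / (2 * epsilon[i])
gx, gy = np.gradient(obs_cost)                  # cost gradient for planning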
Example #24
def calc_center_distance(laminD, aspect, asPercentile=False):
    '''Calculate center distance. Center is by default defined as the voxel(s)
    with the highest lamin distance, otherwise as the top percentile.

    Args:
        laminD (np.ndarray): lamin distance transform.
        aspect (tuple[float]): pixel/voxel dimension proportion.
        asPercentile (bool): define center as percentile.
    '''

    # Center as top percentile
    if asPercentile:
        centrD = distance_transform_edt(laminD < np.percentile(laminD, 99.),
                                        aspect[3 - len(laminD.shape):])

    # Center as top value
    else:
        centrD = distance_transform_edt(laminD != laminD.max(),
                                        aspect[3 - len(laminD.shape):])

    return (centrD)
Example #25
def get_distance_transform(img, center):
    img = img.astype(float)
    H, W = img.shape[-2:]
    edges = 1.0 - canny(img.squeeze())
    dt = distance_transform_edt(edges, 0.8)
    dt_original = np.expand_dims(dt.copy(), 0)
    # r, c = int(center[0]), int(center[1])
    # dt[dt > dt[r, c]] = dt[r, c]
    # dt = (dt - np.min(dt)) / (np.max(dt) - np.min(dt))
    dt = np.expand_dims(dt, 0)
    dt_original /= np.sqrt(H**2 + W**2)
    return dt, dt_original
Example #26
    def initiation_probabilities(self):
        """Return probability matrix based on distances to nearest fracture"""
        # Compute distance matrix
        d = distance_transform_edt(1 - self.grid)

        # Raise to the power of gamma
        d[d != 0] = np.power(d[d != 0], self.gamma)

        # Normalize by Z = \sum_ij d_ij
        d /= np.sum(d)

        return d
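On a made-up 5x5 grid with a single fracture cell, the same steps produce a probability matrix that favours sites far from existing fractures:

import numpy as np
from scipy.ndimage import distance_transform_edt

grid = np.zeros((5, 5))
grid[2, 2] = 1                         # one existing fracture cell
gamma = 2.0

d = distance_transform_edt(1 - grid)   # distance to the nearest fracture
d[d != 0] = np.power(d[d != 0], gamma)
d /= np.sum(d)                         # normalise into probabilities
print(d.round(3))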
Example #27
def Pc(Yor, Y_hator, tetha=5, c=1.0):
    Y = np.copy(Yor)
    Y_hat = np.copy(Y_hator)
    Y[Y != c] = 0
    Y[Y == c] = 1.0
    Y_hat[Y_hat != c] = 0
    Y_hat[Y_hat == c] = 1.0
    Bgt = Y - binary_erosion(Y, structure=np.ones((3, 3)))
    Bps = Y_hat - binary_erosion(Y_hat, structure=np.ones((3, 3)))
    D = distance_transform_edt(1 - Bgt)
    D_Bpd = D[Bps == 1.0]
    return np.sum(D_Bpd < tetha) / (float(np.sum(Bps == 1.0)) + 1)
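Pc is a boundary-precision score: the fraction of predicted boundary pixels lying within tetha pixels of the ground-truth boundary. A toy evaluation with invented square masks:

import numpy as np
from scipy.ndimage import binary_erosion, distance_transform_edt

Y = np.zeros((12, 12))
Y[3:9, 3:9] = 1.0                  # ground truth for class c = 1
Y_hat = np.zeros((12, 12))
Y_hat[4:10, 4:10] = 1.0            # shifted prediction

Bgt = Y - binary_erosion(Y, structure=np.ones((3, 3)))
Bps = Y_hat - binary_erosion(Y_hat, structure=np.ones((3, 3)))
D = distance_transform_edt(1 - Bgt)    # distance to the ground-truth boundary
print(np.sum(D[Bps == 1.0] < 5) / (float(np.sum(Bps == 1.0)) + 1))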
Example #28
def make_distance_map(image_shape, centres):

    # Distance transform
    f = np.ones(image_shape, bool)
    for idx in range(centres.shape[1]):
        x = int(min(max(0, centres[0, idx]), image_shape[0] - 1))
        y = int(min(max(0, centres[1, idx]), image_shape[1] - 1))

        f[x, y] = False

    distance_map = distance_transform_edt(f)
    return distance_map
Example #29
def build_binary_structure(connectivity, n_dims, shape=None):
    """Return a dilation/erosion element with provided connectivity"""
    if shape is None:
        shape = [connectivity * 2 + 1] * n_dims
    else:
        shape = reformat_to_list(shape, length=n_dims)
    dist = np.ones(shape)
    center = tuple([tuple([int(s / 2)]) for s in shape])
    dist[center] = 0
    dist = distance_transform_edt(dist)
    struct = (dist <= connectivity) * 1
    return struct
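For connectivity=1 and n_dims=2 the thresholded distance transform reproduces the familiar cross-shaped element (the same thing scipy's generate_binary_structure(2, 1) returns); a minimal sketch without the reformat_to_list helper:

import numpy as np
from scipy.ndimage import distance_transform_edt

connectivity, n_dims = 1, 2
shape = [connectivity * 2 + 1] * n_dims
dist = np.ones(shape)
dist[tuple(s // 2 for s in shape)] = 0        # zero at the centre voxel
struct = (distance_transform_edt(dist) <= connectivity).astype(int)
print(struct)
# [[0 1 0]
#  [1 1 1]
#  [0 1 0]]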
Example #30
def signedDistanceField2D_(ground_truth_map, cell_size):
    # regularize unknow area to open area
    map = (ground_truth_map > 0.75)
    # inverse map
    inv_map = 1 - map

    # get signed distance from map and inverse map
    map_dist = distance_transform_edt(inv_map)  # bwdist(map);
    inv_map_dist = distance_transform_edt(map)  # bwdist(inv_map);

    field = map_dist - inv_map_dist

    # metric
    field = field * cell_size
    # field = float(field);

    # limit inf
    if math.isinf(field[0, 0]):
        field = np.ones(field.shape) * 1000

    return field
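The signed field is simply the difference of two distance transforms, positive in free space and negative inside obstacles; a compact demo on an invented occupancy grid:

import numpy as np
from scipy.ndimage import distance_transform_edt

ground_truth_map = np.zeros((8, 8))
ground_truth_map[3:6, 3:6] = 1.0        # obstacle block
cell_size = 0.1

occupied = ground_truth_map > 0.75
field = (distance_transform_edt(~occupied) - distance_transform_edt(occupied)) * cell_size
print(field)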
Example #31
 def fun(batch):
     # Invert batch
     batch = 1. - batch
     # Merge batch and channel dimensions
     bshape = batch.shape
     batch = batch.reshape(
         (bshape[0] * bshape[1], bshape[2], bshape[3]))
     # Distance transform by channel
     transbatch = np.array(
         [np.exp(-gain * distance_transform_edt(img)) for img in batch])
     # Reshape batch and return
     return transbatch.reshape(bshape)
Example #32
 def _ws_slice(self, z, input_, output):
     thresholded = input_[z] < self.threshold_dt
     dt = distance_transform_edt(thresholded).astype('float32')
     if self.sigma_seeds > 0.:
         dt = vigra.filters.gaussianSmoothing(dt, self.sigma_seeds)
     seeds = vigra.analysis.localMaxima(dt, allowPlateaus=True, allowAtBorder=True, marker=np.nan)
     seeds = vigra.analysis.labelImageWithBackground(np.isnan(seeds).view('uint8'))
     ws, max_id = vigra.analysis.watershedsNew(input_[z], seeds=seeds)
     if self.size_filter > 0:
         ws, max_id = filter_by_size(input_[z], ws, self.size_filter)
     output[z] = ws.astype('uint64')
     return max_id
Example #33
    def flat_label_to_edge_label(label, ):
        """
        Converts a segmentation label (H,W) to a binary edgemap (H,W)
        """
        radius = 2

        one_hot_basis = np.eye(cityscapes.N_CLASSES)
        one_hot = one_hot_basis[label]

        one_hot_pad = np.pad(one_hot, ((1, 1), (1, 1), (0, 0)), mode='constant', constant_values=0)
        edgemap = np.zeros(one_hot.shape[:-1])

        for i in range(cityscapes.N_CLASSES):
            dist = distance_transform_edt(one_hot_pad[..., i]) + \
                   distance_transform_edt(1.0 - one_hot_pad[..., i])
            dist = dist[1:-1, 1:-1]
            dist[dist > radius] = 0
            edgemap += dist
        edgemap = np.expand_dims(edgemap, axis=-1)
        edgemap = (edgemap > 0).astype(np.uint8)
        return edgemap
Example #34
    def provide(self, request):

        voxel_size = self.spec[self.raw].voxel_size
        shape = gp.Coordinate((1, ) + request[self.raw].roi.get_shape())

        noise = np.abs(np.random.randn(*shape))
        smoothed_noise = gaussian_filter(noise, sigma=self.smoothness)

        seeds = np.zeros(shape, dtype=int)
        for i in range(self.n_objects):
            if i == 0:
                num_points = 100
            else:
                num_points = self.points_per_skeleton
            points = np.stack([
                np.random.randint(0, shape[dim], num_points)
                for dim in range(3)
            ],
                              axis=1)
            tree = skelerator.Tree(points)
            skeleton = skelerator.Skeleton(tree, [1, 1, 1],
                                           "linear",
                                           generate_graph=False)
            seeds = skeleton.draw(seeds, np.array([0, 0, 0]), i + 1)

        seeds[maximum_filter(seeds, size=4) != seeds] = 0
        seeds_dt = distance_transform_edt(seeds == 0) + 5. * smoothed_noise
        gt_data = cwatershed(seeds_dt, seeds).astype(np.uint64)[0] - 1

        labels = np.unique(gt_data)

        raw_data = np.zeros_like(gt_data, dtype=np.uint8)
        value = 0
        for label in labels:
            raw_data[gt_data == label] = value
            value += 255.0 / self.n_objects

        spec = request[self.raw].copy()
        spec.voxel_size = (1, 1)
        raw = gp.Array(raw_data, spec)

        spec = request[self.gt].copy()
        spec.voxel_size = (1, 1)
        gt_crop = (request[self.gt].roi -
                   request[self.raw].roi.get_begin()) / voxel_size
        gt_crop = gt_crop.to_slices()
        gt = gp.Array(gt_data[gt_crop], spec)

        batch = gp.Batch()
        batch[self.raw] = raw
        batch[self.gt] = gt

        return batch
Example #35
def calculate_surface_distances(array_a, array_b, spacing, connectivity=1):
    """
        reference url: https://mlnotebook.github.io/post/surface-distance-function/
    """
    array_a = np.atleast_1d(array_a.astype(bool))
    array_b = np.atleast_1d(array_b.astype(bool))

    conn = morphology.generate_binary_structure(array_a.ndim, connectivity)

    S = array_a ^ morphology.binary_erosion(array_a, conn)
    Sprime = array_b ^ morphology.binary_erosion(array_b, conn)

    distance_atob = morphology.distance_transform_edt(~S, spacing)
    distance_btoa = morphology.distance_transform_edt(~Sprime, spacing)

    total_surface_distances = np.concatenate([
        np.ravel(distance_atob[Sprime != 0]),
        np.ravel(distance_btoa[S != 0])
    ])

    return total_surface_distances
Example #36
def create_prepost_dt(
    clefts,
    labels,
    voxel_size,
    cleft_to_presyn_neuron_id,
    cleft_to_postsyn_neuron_id,
    bg_value=0,
    include_cleft=True,
    normalize_mode="tanh",
    normalize_args=50,
):
    max_distance = min(dim * vs for dim, vs in zip(clefts.shape, voxel_size))
    presyn_distances = -np.ones(clefts.shape, dtype=float) * max_distance
    postsyn_distances = -np.ones(clefts.shape, dtype=float) * max_distance

    contained_cleft_ids = np.unique(clefts)
    for cleft_id in contained_cleft_ids:
        if cleft_id != bg_value:
            d = -distance_transform_edt(clefts != cleft_id,
                                        sampling=voxel_size)
            try:
                pre_neuron_id = np.array(
                    list(cleft_to_presyn_neuron_id[cleft_id]))
                pre_mask = np.any(labels[..., None] == pre_neuron_id[None,
                                                                     ...],
                                  axis=-1)
                if include_cleft:
                    pre_mask = np.any([pre_mask, clefts == cleft_id], axis=0)
                presyn_distances[pre_mask] = np.max((presyn_distances, d),
                                                    axis=0)[pre_mask]
            except KeyError:
                warnings.warn("No Key in Pre Dict %s" % str(cleft_id))
            try:
                post_neuron_id = np.array(
                    list(cleft_to_postsyn_neuron_id[cleft_id]))
                post_mask = np.any(labels[..., None] == post_neuron_id[None,
                                                                       ...],
                                   axis=-1)
                if include_cleft:
                    post_mask = np.any([post_mask, clefts == cleft_id], axis=0)
                postsyn_distances[post_mask] = np.max((postsyn_distances, d),
                                                      axis=0)[post_mask]
            except KeyError:
                warnings.warn("No Key in Post Dict %s" % str(cleft_id))
    if normalize is not None:
        presyn_distances = normalize(presyn_distances,
                                     normalize_mode,
                                     normalize_args=normalize_args)
        postsyn_distances = normalize(postsyn_distances,
                                      normalize_mode,
                                      normalize_args=normalize_args)
        return presyn_distances, postsyn_distances
Example #37
    def make_buffer(self, output_file):
        """
        Create a buffer raster from 0 to 1 over a set distance.

        Args:
            output_file: where to save this raster
        """

        # make a raster of the same extent, but with
        # square resolution which is dependant on distance buffer

        llc = self.extent[1]
        urc = self.extent[3]
        if self.over is None:
            transform = self.raster.ds.GetGeoTransform()
            dx = [transform[1], -transform[5]]
            nrows = self.raster.ds.RasterXSize
            ncols = self.raster.ds.RasterYSize
        else:
            dx = [self.distance / self.over, self.distance / self.over]
            # note this changes our extent if "over" is set
            nrows = int(ceil((urc[0] - llc[0]) / dx[0]))
            ncols = int(ceil((urc[1] - llc[1]) / dx[1]))

        # fill with edge value
        dist = np.full((ncols, nrows), 0.0)
        # then fill in the middle,
        # except where there is no data
        dist[1:-1, 1:-1] = 1
        if self.over is None:
            orig_raster = self.raster.get_array()
            nodata = self.raster.nodata
            # TODO this will only work if dist and orig_raster
            # are the same size
            dist[orig_raster == nodata] = 0
            # they may also be nan (shouldn't be, as we swap it out...)
            dist[np.isnan(orig_raster)] = 0
            # we now extend this mask - we only need to do this, if the
            # no data occurs (i.e. no contiguous data)
            if (nodata in orig_raster):
                mask = np.full((ncols, nrows), False)
                mask[dist == 0] = True
                mask = self.extend_mask(mask, 1)
                dist[mask] = 0
        # calc euclidian distance and convert to 0 -> 1 scale
        dist = distance_transform_edt(dist, sampling=[dx[0], dx[1]])
        dist = dist / self.distance
        dist[dist > 1] = 1.0

        # create a suitable output filename
        self.__write_raster__(output_file, np.flipud(dist), dx, [llc, urc],
                              self.raster.ds.GetProjection())
Example #38
    def validdatagenerator(self):
        while True:
            # Randomize the indices to make an array
            indices_arr = np.random.permutation(len(self.valid_pat))

            for batch in range(0, len(indices_arr), self.batch_size):
                # slice out the current batch according to batch-size
                current_batch = indices_arr[batch:(batch + self.batch_size)]
                # initializing the arrays, x_train and y_train
                n = 0
                x_valid = np.ndarray((self.batch_size, self.img_rows,
                                      self.img_cols, self.img_slcs, 1),
                                     dtype=np.float32)
                y_valid = np.ndarray((self.batch_size, self.img_rows,
                                      self.img_cols, self.img_slcs, 1),
                                     dtype=np.float32)
                for i in current_batch:
                    x1 = self.valid_pat[i]
                    t1 = self.valid_sub_pat[i]
                    xof1 = self.x_centvalid[i] - 80
                    xof2 = self.x_centvalid[i] + 80
                    yof1 = self.y_centvalid[i] - 80
                    yof2 = self.y_centvalid[i] + 80
                    if yof2 > 512:
                        yof1 = 352
                        yof2 = 512
                    valid_img_in = np.load(
                        os.path.join(self.data_dir, "CT_{}_{}.npy".format(
                            x1, t1)))[xof1:xof2, yof1:yof2,
                                      self.z_startvalid[i]:self.z_endvalid[i]]
                    valid_img_roi = np.load(
                        os.path.join(
                            self.data_dir, "ROI_{}_{}_{}.npy".format(
                                x1, t1, self.roi_interest)))[
                                    xof1:xof2, yof1:yof2,
                                    self.z_startvalid[i]:self.z_endvalid[i]]

                    valid_img_in_equalized = np.zeros(valid_img_in.shape)
                    inverse_image = np.ones(valid_img_roi.shape)
                    for r in range(valid_img_in.shape[2]):
                        imagenew = valid_img_in[:, :, r]
                        valid_img_in_equalized[:, :,
                                               r] = image_histogram_equalization(
                                                   imagenew)[0]
                    valid_img_dist = distance_transform_edt(
                        inverse_image - valid_img_roi.astype('float32'))
                    # Appending them to existing batch
                    x_valid[n:(n + 1), :, :, :,
                            0] = valid_img_in_equalized / 255
                    y_valid[n:(n + 1), :, :, :, 0] = valid_img_roi
                    n += 1
                yield (x_valid, y_valid)
Example #39
 def generate_trimap(self, alpha):
     # alpha \in [0, 1] should be taken into account
     # be careful when dealing with regions of alpha=0 and alpha=1
     fg = np.array(np.equal(alpha, 255).astype(np.float32))
     unknown = np.array(np.not_equal(alpha, 0).astype(
         np.float32))  # unknown = alpha > 0
     unknown = unknown - fg
     # image dilation implemented by Euclidean distance transform
     unknown = morphology.distance_transform_edt(
         unknown == 0) <= np.random.randint(1, 20)
     trimap = fg * 255
     trimap[unknown] = 128
     return trimap.astype(np.uint8)
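A self-contained version of the same trimap construction, with a fixed band width in place of the random one and an invented toy alpha matte: the unknown band is dilated by thresholding the distance transform of its complement.

import numpy as np
from scipy.ndimage import distance_transform_edt

alpha = np.zeros((20, 20), np.uint8)
alpha[5:15, 5:15] = 255            # definite foreground
alpha[5:15, 15:18] = 128           # semi-transparent fringe

fg = np.equal(alpha, 255).astype(np.float32)
unknown = np.not_equal(alpha, 0).astype(np.float32) - fg
unknown = distance_transform_edt(unknown == 0) <= 5   # dilate the unknown band
trimap = fg * 255
trimap[unknown] = 128
print(np.unique(trimap))           # [  0. 128. 255.]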
Example #40
def prox_job2(pa):
    pa_thres = 0.5    
    mask = pa > pa_thres
    t = time.time()
    dt,ind= distance_transform_edt(1-mask,  return_indices=True)
    inds = ind.T.reshape(-1, 3)
    m = np.mean(dt)
    m = 50
    temp_mask = (dt > m)
    dt[np.logical_not(temp_mask)] /= float(m)
    dt[temp_mask] = 1
    
    return dt.flatten() / np.max(dt)
Example #41
def make_distance_map(image_shape,
                      centres):

    # Distance transform
    f = np.ones(image_shape, bool)
    for idx in range(centres.shape[1]):
        x = int(min(max(0, centres[0, idx]), image_shape[0] - 1))
        y = int(min(max(0, centres[1, idx]), image_shape[1] - 1))

        f[x, y] = False

    distance_map = distance_transform_edt(f)
    return distance_map
Example #42
def generate_boundary(path, image_name, mode, num_classes, ignore_label, total_num):
    # create the output filename
    dst = f"../boundary/{mode}/{image_name}"
    # do the conversion
    semantic_image = cv2.imread(f"{path}/{image_name}", cv2.IMREAD_GRAYSCALE)
    onehot_image = np.array([semantic_image == i for i in range(num_classes)]).astype(np.uint8)
    # change the ignored label to 0
    onehot_image[onehot_image == ignore_label] = 0
    boundary_image = np.zeros(onehot_image.shape[1:])
    # for boundary conditions
    onehot_image = np.pad(onehot_image, ((0, 0), (1, 1), (1, 1)), mode='constant', constant_values=0)
    for i in range(num_classes):
        dist = distance_transform_edt(onehot_image[i, :]) + distance_transform_edt(1.0 - onehot_image[i, :])
        dist = dist[1:-1, 1:-1]
        dist[dist > 2] = 0
        boundary_image += dist
    boundary_image = (boundary_image > 0).astype(np.uint8)
    cv2.imwrite(dst, boundary_image)
    global boundary_progress
    boundary_progress += 1
    print("\rProgress: {:>3} %".format(boundary_progress * 100 / total_num), end=' ')
    sys.stdout.flush()
Example #43
def onehot_to_binary_edges(mask, radius, num_classes):
    """
    Converts a segmentation mask (K,H,W) to a binary edgemap (H,W)

    """

    if radius < 0:
        return mask

    # We need to pad the borders for boundary conditions
    mask_pad = np.pad(mask, ((0, 0), (1, 1), (1, 1)), mode='constant', constant_values=0)

    edgemap = np.zeros(mask.shape[1:])
    for i in range(num_classes):
        # ti qu lun kuo
        dist = distance_transform_edt(mask_pad[i, :]) + distance_transform_edt(1.0 - mask_pad[i, :])
        dist = dist[1:-1, 1:-1]
        dist[dist > radius] = 0
        edgemap += dist
    # edgemap = np.expand_dims(edgemap, axis=0)
    edgemap = (edgemap > 0).astype(np.uint8)*255
    return edgemap
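The per-class trick: the sum of the distance transforms of a padded one-hot channel and of its complement equals 1 right at the class boundary, so clipping at radius and thresholding leaves a thin edge band. A toy two-class mask (shapes invented for illustration):

import numpy as np
from scipy.ndimage import distance_transform_edt

mask = np.zeros((2, 6, 6))         # one-hot mask (K, H, W)
mask[0, :, :3] = 1
mask[1, :, 3:] = 1
radius = 2

mask_pad = np.pad(mask, ((0, 0), (1, 1), (1, 1)), mode='constant', constant_values=0)
edgemap = np.zeros(mask.shape[1:])
for i in range(mask.shape[0]):
    dist = distance_transform_edt(mask_pad[i]) + distance_transform_edt(1.0 - mask_pad[i])
    dist = dist[1:-1, 1:-1]
    dist[dist > radius] = 0
    edgemap += dist
print((edgemap > 0).astype(np.uint8))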
Example #44
def get_weights(labels_batch):
    weights = np.array([])
    labels_batch_numpy = labels_batch.numpy()
    n = labels_batch_numpy.shape[0]
    labels_batch_numpy = labels_batch_numpy.astype('uint8')
    for i in range(n):
        label = labels_batch_numpy[i][0]
        trnsf = distance_transform_edt(label)
        trnsf = ((np.abs((trnsf.max() - trnsf))/trnsf.max())*(label)+1)
        trnsf = trnsf.flatten()
        weights = np.concatenate((weights, trnsf))
    weights = torch.from_numpy(weights)
    return weights
Example #45
def onehot_to_multiclass_edges(mask, radius, num_classes):
    """
    Converts a segmentation mask (K,H,W) to an edgemap (K,H,W)
    """
    if radius < 0:
        return mask

    # We need to pad the borders for boundary conditions
    mask_pad = np.pad(mask, ((0, 0), (1, 1), (1, 1)),
                      mode='constant',
                      constant_values=0)

    channels = []
    for i in range(num_classes):
        dist = distance_transform_edt(
            mask_pad[i, :]) + distance_transform_edt(1.0 - mask_pad[i, :])
        dist = dist[1:-1, 1:-1]
        dist[dist > radius] = 0
        dist = (dist > 0).astype(np.uint8)
        channels.append(dist)

    return np.array(channels)
Example #46
def generate_sd_field(track_wall_pixels, contained_pixel_position):
    track_pixels = flood_fill(track_wall_pixels, contained_pixel_position, 1, connectivity=1) # fill in the pixels between the track walls

    occupied_pixels = np.where(track_wall_pixels == 1, 0, 1) # prepare pixels for distance transform by making occupied pixels 0 and uoccupied pixels 1
    d_field = np.array(distance_transform_edt(occupied_pixels))

    # negate distances outside of the track walls
    for y, row in enumerate(d_field):
        for x, distance in enumerate(row):
            if track_pixels[y][x] == 0:
                d_field[y][x] = -distance

    return d_field
Example #47
    def process(self, batch, request: BatchRequest):
        labels = batch[self.label_array_key].data
        spec = batch[self.label_array_key].spec.copy()
        spec.dtype = self.dtype

        binarized = labels != 0
        dt = -distance_transform_edt(np.logical_not(binarized),
                                     sampling=spec.voxel_size).astype(
                                         self.dtype)

        expanded = Array(data=dt, spec=spec)

        batch.arrays[self.distance_array_key] = expanded
Example #48
 def regression_label(self):
     if self.__regression_label is None:
         # reg_oriental_label = np.zeros(self.size+[self.divisions], dtype=np.float)
         angle = np.zeros(self.size+[self.divisions]).reshape(-1, self.divisions)
         reg_distance_label = np.zeros(self.size+[2], dtype=float)
         orient = np.zeros(len(self.lines))
         dist_pre_line = np.zeros([len(self.lines)]+self.size)
         mask = self.mask()
         for idx, l in enumerate(self.lines):
             dist_pre_line[idx] = distance_transform_edt(mask != (idx+1))
             orient[idx] = l.angle()
         _, [indicesY, indicesX] = distance_transform_edt(mask==0, return_indices=True)
         dx = indicesX - np.tile(range(self.size[1]), (self.size[0], 1))
         dy = indicesY - np.tile(range(self.size[0]), (self.size[1], 1)).transpose()
         theta = orient[np.argmin(dist_pre_line, 0).reshape(-1)] # [H*W]
         angle[:] = [-np.pi/2 + k*np.pi / self.divisions for k in range(self.divisions)]
         d_theta = theta - angle.transpose()
         reg_oriental_label = d_theta.reshape([-1]+self.size).transpose()
         reg_distance_label[:,:,1] = dx
         reg_distance_label[:,:,0] = dy
         self.__regression_label = [reg_distance_label, reg_oriental_label]
     return self.__regression_label
Example #49
def create_bad_labels():
    from scipy.ndimage.morphology import distance_transform_edt
    thresholds = [100, 150, 175, 180, 190, 200, 220, 250]
    l = []
    for threshold in thresholds:
        x = numpy.ones((480, 854))
        dt = distance_transform_edt(x)
        z = numpy.zeros((480, 854))
        z[:] = 255
        negatives = dt > threshold
        z[negatives] = 0
        l.append(numpy.expand_dims(z, axis=2))
    return l
Example #50
def kernel(mask,smooth=5.,x=None,y=None,sigma=None):
    K = distance_transform_edt(mask, sampling=None, return_distances=True, return_indices=False, distances=None, indices=None)
    K = K > (smooth/5.)
    K = gaussian_filter(numpy.array(K,dtype=numpy.float64),smooth)
    if x is not None and y is not None and sigma is not None: 
        # multiply by gaussian darkfield kernel
        Ny, Nx = mask.shape
        cx = (Nx-1)/2
        cy = (Ny-1)/2
        X,Y = numpy.ogrid[0:Ny, 0:Nx]
        R = numpy.hypot(X - (cx + x), Y - (cx + y))
        G = numpy.exp(-R**2/(2*sigma**2))
        K = K*G
    return K
Example #51
	def write_vol_file(self,):


		next_seg = io.loadmat("%s/segs_all_time_stamps/timestamp_%d_label_map.mat" % (self.ws_data.workspace_location, self.time_point + self.direction))["ws"]
		current_vol = io.loadmat("%s/init_watershed_all_time_stamps/vol_t_%d.mat"  % (self.ws_data.workspace_location, self.time_point))["vol"]

		from scipy.ndimage.morphology import distance_transform_edt

		dt = distance_transform_edt(next_seg>1)
		dt = 1 - dt / float(dt.max())
		dt = dt / 2. + 0.5
		current_vol = current_vol * dt

		io.savemat("%s/init_watershed_all_time_stamps/vol_t_%d_propagate.mat"% (self.ws_data.workspace_location, self.time_point), {"vol":current_vol})
Example #52
def get_surround_volume_v3(volume, distance=5, wall_level=0, prob=False, return_origin_instead_of_bbox=True, padding=5):
    """
    Return the volume with voxels surrounding the "active" voxels in the input volume set to 1 (prob=False) or 1 - vol (prob=True)

    Args:
        volume: (vol, origin)
        wall_level (float): voxels with value above this level are regarded as active.
        distance (int): surrounding voxels are closer than distance (in unit of voxel) from any active voxels.
        prob (bool): if True, surround voxels are assigned 1 - voxel value; if False, surround voxels are assigned 1.
        padding (int): extra zero-padding, in unit of voxels.

    Returns:
        (surround_volume, surround_volume_origin)
    """
    from scipy.ndimage.morphology import distance_transform_edt
    distance = int(np.round(distance))

    # Identify the bounding box for the surrounding area.

    vol, origin = volume
    
    # if bbox is None:
    bbox = volume_origin_to_bbox(vol > wall_level, origin)

    xmin, xmax, ymin, ymax, zmin, zmax = bbox
    roi_xmin = xmin - distance - padding
    roi_ymin = ymin - distance - padding
    roi_zmin = zmin - distance - padding
    roi_xmax = xmax + distance + padding
    roi_ymax = ymax + distance + padding
    roi_zmax = zmax + distance + padding
    roi_bbox = np.array((roi_xmin,roi_xmax,roi_ymin,roi_ymax,roi_zmin,roi_zmax))
    vol_roi = crop_and_pad_volume(vol, in_bbox=bbox, out_bbox=roi_bbox)

    dist_vol = distance_transform_edt(vol_roi < wall_level)
    roi_surround_vol = (dist_vol > 0) & (dist_vol < distance) # surround part is True, otherwise False.

    if prob:
        surround_vol = np.zeros_like(vol_roi)
        surround_vol[roi_surround_vol] = 1. - vol_roi[roi_surround_vol]
        if return_origin_instead_of_bbox:
            return surround_vol, roi_bbox[[0,2,4]]
        else:
            return surround_vol, roi_bbox
    else:
        if return_origin_instead_of_bbox:
            return roi_surround_vol, roi_bbox[[0,2,4]]
        else:
            return roi_surround_vol, roi_bbox
Example #53
 def apply_learning(self, smap, k, bmu, radius, rate):
     i,j = bmu
     if self.metric == 'RMSD':
         vector = self.align(self.inputvectors[k], smap[i,j])[0]
     else:
         vector = self.inputvectors[k]
     shape = (self.X, self.Y)
     if self.toricMap:
         bigshape = tuple(map(lambda x: 3*x, shape))
         midselect = tuple([ slice(s, 2*s) for s in shape ])
         features = numpy.ones(bigshape)
         copy_coord = lambda p, s: tuple([p+i*s for i in range(3)])
         all_coords = [ copy_coord(coord, s) for coord, s in zip(bmu, shape) ]
         for p in itertools.product(*all_coords):
             features[p] = 0
         distance = distance_transform_edt(features)[midselect]
     else:
         features = numpy.ones(shape)
         features[bmu] = 0
         distance = distance_transform_edt(features)
     #radmap = numpy.exp( -sqdistance / (2.*radius)**2 )
     radmap = rate * numpy.exp( - distance**2 / (2.*radius)**2 )
     adjmap = (smap - vector) * radmap[..., None]
     smap -= adjmap
Example #54
    def get_seeds_using_class_1(self, class1):
        dist_data = np.where(class1 == 1, False, True)
        dist_data *= self.segmentation > 0
        dists = distance_transform_edt(dist_data)

        seeds = dists > 0.5 * dists.max()

        all_seeds = np.zeros(self.data3d.shape, dtype=int)
        # allSeeds[np.nonzero(self.segmentation)] = 80
        all_seeds[np.nonzero(class1)] = 1  # zdrava tkan
        all_seeds[np.nonzero(seeds)] = 2  # outliers
        # kvuli segmentaci pomoci random walkera se omezi obraz pouze na
        # segmentovana jatra a cevy
        all_seeds = np.where(self.segmentation == 0, -1, all_seeds)

        return all_seeds
Example #55
def raw_data_to_maps(raw_data, raw_width, raw_height):
    cropped_map = np.flipud(np.reshape(np.matrix(raw_data, dtype=np.uint8), (raw_width, raw_height)))[map_origin_y:map_origin_y + map_height, map_origin_x:map_origin_x + map_width]
    walls = cropped_map.copy()

    cropped_map[cropped_map > 0] = 1
    walls = median_filter(walls, size=(median_filter_size / resolution))
    walls[walls == 0] = 255
    walls[walls == 100] = 0

    walls[walls == 1] = 0
    walls[walls == 157] = 0
    path = distance_transform_edt(1 - median_filter(cropped_map, size=median_filter_size / resolution))
    path[path * resolution > max_wall_distance] = 0
    path[path * resolution < min_wall_distance] = 0
    path[path > 0] = 0x0000FF

    return path, walls
Example #56
def distmap_volume(B,perimeter_image=None):
    """Moberg & Sosik biovolume algorithm
    returns volume and representative transect"""
    if perimeter_image is None:
        perimeter_image = find_perimeter(B)
    D = distance_transform_edt(1-perimeter_image) + 1
    D = D * (B>0)
    Dm = np.ma.array(D,mask=1-B)
    # representative transect
    x = 4 * np.ma.mean(Dm) - 2
    # diamond correction
    c1 = x**2 / (x**2 + 2*x + 0.5)
    # circle correction
    # c2 = np.pi / 2 
    # volume = c1 * c2 * 2 * np.sum(D)
    volume = c1 * np.pi * np.sum(D)
    # return volume and representative transect
    return volume, x
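A rough end-to-end run with an erosion-based stand-in for find_perimeter (the square blob is invented): the distance map over the blob gives a representative transect, and the diamond correction turns it into a volume estimate.

import numpy as np
from scipy.ndimage import binary_erosion, distance_transform_edt

B = np.zeros((20, 20))
B[5:15, 5:15] = 1
perimeter_image = B.astype(bool) ^ binary_erosion(B.astype(bool))  # stand-in for find_perimeter

D = (distance_transform_edt(1 - perimeter_image) + 1) * (B > 0)
Dm = np.ma.array(D, mask=1 - B)
x = 4 * np.ma.mean(Dm) - 2                      # representative transect
c1 = x ** 2 / (x ** 2 + 2 * x + 0.5)            # diamond correction
volume = c1 * np.pi * np.sum(D)
print(volume, x)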
Example #57
def gauss_degrade(image,margin=1.0,change=None,noise=0.02,minmargin=0.5,inner=1.0):
    if image.ndim==3: image = mean(image,axis=2)
    m = mean([amin(image),amax(image)])
    image = 1*(image>m)
    if margin<minmargin: return 1.0*image
    pixels = sum(image)
    if change is not None:
        npixels = int((1.0+change)*pixels)
    else:
        edt = distance_transform_edt(image==0)
        npixels = sum(edt<=(margin+1e-4))
    r = int(max(1,2*margin+0.5))
    ri = int(margin+0.5-inner)
    if ri<=0: mask = binary_dilation(image,iterations=r)-image
    else: mask = binary_dilation(image,iterations=r)^binary_erosion(image,iterations=ri)
    image += mask*randn(*image.shape)*noise*min(1.0,margin**2)
    smoothed = gaussian_filter(1.0*image,margin)
    frac = max(0.0,min(1.0,npixels*1.0/prod(image.shape)))
    threshold = mquantiles(smoothed,prob=[1.0-frac])[0]
    result = (smoothed>threshold)
    return 1.0*result
Example #58
def InitialGuess(y, I):
    # Initial Guess with weighted/missing data
    # nearest neighbor interpolation (in case of missing values)
    z = y
    if (1 - I).any():
        notI = ~I
        # indices of the nearest non-missing neighbour for every entry
        _, L = distance_transform_edt(notI, return_indices=True)
        z = y.copy()
        z[notI] = y[tuple(ind[notI] for ind in L)]

    # coarse fast smoothing using one-tenth of the DCT coefficients
    siz = z.shape
    d = z.ndim
    z = dctn(z)
    for k in range(d):
        z[int((siz[k] + 0.5) / 10) + 1::, ...] = 0
        z = z.reshape(np.roll(siz, -k))
        z = z.transpose(np.roll(range(z.ndim), -1))
        # z = shiftdim(z,1);
    z = idctn(z)

    return z
Example #59
def all_gaps(c):
    """Computes a list of all the pairwise gaps between
    connected components of an image."""
    # c,n = morph.label(c>0)
    n = len(unique(c))-1
    if n<2: return []
    #imshow(where(c>0,c%3+1,0),cmap=cm.jet)
    dts = []
    for i,v in enumerate(unique(c)):
        if v==0: continue
        dt = morphology.distance_transform_edt(c!=v)
        dts.append(dt)
    result = []
    for i in range(len(dts)):
        for j in range(i+1,len(dts)):
            dt1,dt2 = dts[i],dts[j]
            dtm = minimum(dt1,dt2)
            mv = amin(dtm[(dt1<=dtm+1)&(dt2<=dtm+1)])
            result.append((i,j,mv))
            result.append((j,i,mv))
    return result
Example #60
def kernel(mask,smooth=5.,x=None,y=None,sigma=None,mask_expand=1.,N_gaussians=1):
    if mask_expand > 0.:
        K = distance_transform_edt(mask, sampling=None, return_distances=True, return_indices=False, distances=None, indices=None)
        K = K > mask_expand
        K = numpy.array(K, dtype=numpy.float64)
    else:
        K = numpy.array(mask, dtype=numpy.float64)
    K = gaussian_filter(K,smooth)
    if x is not None and y is not None and sigma is not None and N_gaussians > 0:
        signs_x = [1,1,-1,-1]
        signs_y = [1,-1,1,-1]
        G = numpy.zeros_like(K)
        Ny, Nx = mask.shape
        cx = (Nx-1)/2
        cy = (Ny-1)/2
        X,Y = numpy.ogrid[0:Ny, 0:Nx]
        for i,sign_x,sign_y in zip(range(N_gaussians), signs_x, signs_y):
            # multiply by gaussian darkfield kernel
            Rsq = (X - (cx + sign_x*x))**2 + (Y - (cx + sign_y*y))**2
            G += numpy.exp(-Rsq/(2*sigma**2))
        K = K*G
    return K