Example #1
def _local_maxima(image, diameter, separation, percentile=64):
    "Find local maxima whose brightness is above a given percentile."
    # Find the threshold brightness, representing the given
    # percentile among all NON-ZERO pixels in the image.
    flat = np.ravel(image)
    threshold = stats.scoreatpercentile(flat[flat > 0], percentile)
    # The intersection of the image with its dilation gives local maxima.
    assert image.dtype == np.uint8, "Perform dilation on exact (uint8) data." 
    dilation = morphology.grey_dilation(
        image, footprint=circular_mask(diameter, separation))
    maxima = np.where((image == dilation) & (image > threshold))
    if not np.size(maxima) > 0:
        raise ValueError("Bad image! Found zero maxima above the {}"
                         "-percentile threshold at {}.".format(
                             percentile, threshold))
    # Flat peaks, for example, return multiple maxima.
    # Eliminate redundancies within the separation distance.
    maxima_map = np.zeros_like(image)
    maxima_map[maxima] = image[maxima]
    peak_map = filters.generic_filter(
        maxima_map, _Cfilters.nullify_secondary_maxima(), 
        footprint=circular_mask(separation), mode='constant')
    # Also, do not accept peaks near the edges.
    margin = int(separation) // 2
    peak_map[..., :margin] = 0
    peak_map[..., -margin:] = 0
    peak_map[:margin, ...] = 0
    peak_map[-margin:, ...] = 0
    peaks = np.where(peak_map != 0)
    if not np.size(peaks) > 0:
        raise ValueError("Bad image! All maxima were in the margins.")
    return peaks[1], peaks[0] # x, y
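The core trick in this example, keeping only pixels that equal their own grey dilation, can be reproduced in a few self-contained lines. This is only an illustrative sketch: it substitutes a plain 5x5 square for the circular_mask footprint used above, which is not shown here.

import numpy as np
from scipy.ndimage import grey_dilation

image = np.random.randint(0, 255, (64, 64)).astype(np.uint8)
threshold = np.percentile(image[image > 0], 64)
dilation = grey_dilation(image, size=(5, 5))
# A pixel that equals the maximum of its own neighborhood is a local maximum.
peaks_y, peaks_x = np.where((image == dilation) & (image > threshold))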
Example #2
def dilate_data(data, footprint = None, structure = None):
    
    if footprint is not None:
        dilated_data = grey_dilation(data, footprint = footprint, structure = structure)
    else:
        dilated_data = grey_dilation(data, size=(10, 10))
    
    return dilated_data
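A possible way to call dilate_data, assuming grey_dilation has been imported from scipy.ndimage.morphology in the surrounding module (the snippet above does not show its imports):

import numpy as np
from scipy.ndimage.morphology import grey_dilation

data = np.zeros((20, 20))
data[10, 10] = 1.0
out_default = dilate_data(data)  # falls back to a 10x10 sliding maximum
out_cross = dilate_data(data, footprint=np.array([[0, 1, 0],
                                                  [1, 1, 1],
                                                  [0, 1, 0]]))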
Example #3
def determine_search_location(A, d1, d2, method='ellipse', min_size=3, max_size=8, dist=3, expandCore=iterate_structure(generate_binary_structure(2, 1), 2).astype(int)):
    """
    restrict search location to subset of pixels

    TODO
    """
    from scipy.ndimage.morphology import grey_dilation
    from scipy.sparse import coo_matrix, issparse

    d, nr = np.shape(A)

    A = csc_matrix(A)

    IND = False * np.ones((d, nr))
    if method == 'ellipse':
        Coor = dict()
        Coor['x'] = np.kron(np.ones((d2, 1)), np.expand_dims(range(d1), axis=1))
        Coor['y'] = np.kron(np.expand_dims(range(d2), axis=1), np.ones((d1, 1)))
        if not dist == np.inf:             # determine search area for each neuron
            cm = np.zeros((nr, 2))        # vector for center of mass
            Vr = []    # cell(nr,1);
            IND = []       # indicator for distance
            cm[:, 0] = np.dot(Coor['x'].T, A[:, :nr].todense()) / A[:, :nr].sum(axis=0)
            cm[:, 1] = np.dot(Coor['y'].T, A[:, :nr].todense()) / A[:, :nr].sum(axis=0)
            for i in range(nr):            # calculation of variance for each component and construction of ellipses
                dist_cm = coo_matrix(np.hstack((Coor['x'] - cm[i, 0], Coor['y'] - cm[i, 1])))
                Vr.append(dist_cm.T * spdiags(A[:, i].toarray().squeeze(),
                                              0, d, d) * dist_cm / A[:, i].sum(axis=0))
                
                if np.sum(np.isnan(Vr))>0:
                    raise Exception('You cannot pass empty (all zeros) components!')
                
                D, V = eig(Vr[-1])
                
                d11 = np.min((max_size**2, np.max((min_size**2, D[0].real))))
                d22 = np.min((max_size**2, np.max((min_size**2, D[1].real))))
                # search indexes for each component
                IND.append(np.sqrt((dist_cm * V[:, 0])**2 / d11 +
                                   (dist_cm * V[:, 1])**2 / d22) <= dist)

            IND = (np.asarray(IND)).squeeze().T
        else:
            IND = True * np.ones((d, nr))
    elif method == 'dilate':
        for i in range(nr):
            A_temp = np.reshape(A[:, i].todense(), (d2, d1))
            if len(expandCore) > 0:
                A_temp = grey_dilation(A_temp, footprint=expandCore)
            else:
                A_temp = grey_dilation(A_temp, (1, 1))

#            A_temp = grey_dilation(A_temp, footprint = expandCore)
            IND[:, i] = np.squeeze(np.reshape(A_temp, (d, 1))) > 0
    else:
        IND = True * np.ones((d, nr))

    return IND
Example #4
    def _get_brodmann_area(self):
        nii = nb.load(self.inputs.atlas)
        origdata = nii.get_data()
        newdata = np.zeros(origdata.shape)

        if not isinstance(self.inputs.labels, list):
            labels = [self.inputs.labels]
        else:
            labels = self.inputs.labels
        for lab in labels:
            newdata[origdata == lab] = 1
        if self.inputs.hemi == "right":
            newdata[floor(float(origdata.shape[0]) / 2) :, :, :] = 0
        elif self.inputs.hemi == "left":
            newdata[: ceil(float(origdata.shape[0]) / 2), :, :] = 0

        if self.inputs.dilation_size != 0:
            newdata = grey_dilation(
                newdata,
                (
                    2 * self.inputs.dilation_size + 1,
                    2 * self.inputs.dilation_size + 1,
                    2 * self.inputs.dilation_size + 1,
                ),
            )

        return nb.Nifti1Image(newdata, nii.get_affine(), nii.get_header())
Example #5
def jeff_coral_finder(im, sand_intensity_threshold, coral_gradient_threshold,
                      maximum_altseqfilt_radius, shadow_discriminant_threshold,
                      shadow_discriminant_scaling):
  im_grey = N.asarray(im.convert("L"))
  im = N.asarray(im)
  dot = N.array([[0,1,0], [1,1,1], [0,1,0]])
  dilated = morphology.grey_dilation(im_grey, dot.shape, structure=dot)
  eroded = morphology.grey_erosion(im_grey, dot.shape, structure=dot)
  gradient = dilated - eroded
  fisher_discriminant = N.dot(im, shadow_discriminant_scaling)

  # Make initial class determinations.
  is_shadow = fisher_discriminant < shadow_discriminant_threshold
  is_sand   = im_grey > sand_intensity_threshold
  is_smooth = gradient < coral_gradient_threshold
  is_coral  = is_smooth & ~is_sand & ~is_shadow

  # Now perform an alternating sequence filter on coral,
  for radius in range(1, maximum_altseqfilt_radius+1):
    se = disk_strel(radius)
    opened = morphology.binary_opening(is_coral, se)
    is_coral = morphology.binary_closing(opened, se)
  # Now perform an alternating sequence filter on sand.
  for radius in range(1, maximum_altseqfilt_radius+1):
    se = disk_strel(radius)
    opened = morphology.binary_opening(is_sand, se)
    is_sand = morphology.binary_closing(opened, se)
  # Use coral mask to exclude sand.
  is_sand = is_sand & ~is_coral
  return is_sand, is_coral
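The dilation/erosion pair at the top of this example is a morphological gradient. A minimal stand-alone sketch of just that step, using the dot pattern as a flat footprint (a slight simplification of the structure argument above):

import numpy as np
from scipy.ndimage import grey_dilation, grey_erosion

im_grey = np.random.randint(0, 256, (64, 64), dtype=np.uint8)
dot = np.array([[0, 1, 0], [1, 1, 1], [0, 1, 0]])
# Edges show up where the local maximum and local minimum differ strongly.
gradient = grey_dilation(im_grey, footprint=dot) - grey_erosion(im_grey, footprint=dot)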
Example #6
def dilation(parameters):
    """Dilates a greyscale image.

    For the simple case of a full and flat structuring element, it can be
    viewed as a maximum filter over a sliding window.

    It wraps `scipy.ndimage.morphology.grey_dilation`. The `footprint`,
    `structure`, `output`, `mode`, `cval` and `origin` options are not
    supported.

    Keep in mind that `mode` and `cval` influence the results. In this case
    the default mode is used, `reflect`.

    :param parameters['data'][0]: input array
    :type parameters['data'][0]: numpy.array
    :param parameters['size']: which neighbours to take into account, defaults
                               to (3, 3) a.k.a. numpy.ones((3, 3))
    :type parameters['size']: list

    :return: numpy.array

    """
    data = parameters['data'][0]
    size = tuple(parameters['size'])

    return morphology.grey_dilation(data, size=size)
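A hedged usage sketch, assuming parameters is the plain dictionary implied by the docstring and that morphology refers to scipy.ndimage.morphology in the wrapper's module:

import numpy as np
from scipy.ndimage import morphology

parameters = {'data': [np.random.rand(32, 32)], 'size': [3, 3]}
result = dilation(parameters)  # same as morphology.grey_dilation(data, size=(3, 3))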
Example #7
    def _grey_dilation(self):
        vol_name = str(self.out_edit.text())
        num = self.size_combo.currentIndex() + 3
        size = (num,num,num)
        mode = self.mode_combo.currentText()
        cval = self.cval_edit.text()


        if not vol_name:
            self.out_edit.setFocus()
            return

        try:
            cval = int(cval)
        except ValueError:
            self.cval_edit.selectAll()
            return
        if cval>255 or cval<0:
            print("cval must be 0-255!")
            return

        source_row = self.source_combo.currentIndex()
        source_data = self._model.data(self._model.index(source_row),
                                       Qt.UserRole + 5)

        new_vol = morphology.grey_dilation(source_data,size=size,mode=mode,cval=cval)
        self._model.addItem(new_vol,
                            None,
                            vol_name,
                            self._model._data[0].get_header())
        self.done(0)
Example #8
def boxes(orig):
    # SciPy
    img = ImageOps.grayscale(orig)
    # OpenCV
    # img = cv.cvtColor(orig,cv.COLOR_RGB2GRAY)

    im = numpy.array(img)

    # Inner morphological gradient.
    #
    # The SciPy way
    im = morphology.grey_dilation(im, (3, 3)) - im
    # The OpenCV way
    # im2 = cv.dilate(im, None) - im

    # Binarize.
    mean, std = im.mean(), im.std()
    t = mean + std
    im[im < t] = 0
    im[im >= t] = 1

    # Connected components.
    lbl, numcc = label(im)
    # Size threshold.
    min_size = 200 # pixels
    box = []
    for i in range(1, numcc + 1):
        py, px = numpy.nonzero(lbl == i)
        if len(py) < min_size:
            im[lbl == i] = 0
            continue

        xmin, xmax, ymin, ymax = px.min(), px.max(), py.min(), py.max()
        box.append(Rectangle(xmin, ymin, xmax, ymax))
        # Four corners and centroid.
        # box.append({'points': [(xmin, ymin), (xmax, ymin), (xmax, ymax), (xmin, ymax)], 'centroid': (numpy.mean(px), numpy.mean(py)), 'width':xmax - xmin, 'height':ymax - ymin})
        # box.append({'p1': (xmin, ymin), 'p2': (xmax, ymax), 'centroid': (numpy.mean(px), numpy.mean(py)), 'width':xmax - xmin, 'height':ymax - ymin})
    '''
    children_processed = []
    nested = []
    for b in box:
        print "----"
        if b in children_processed:
            print "Time to skip"
            skip = True
            continue
        else:
            skip = False
            print "Don't skip"
        if skip:
            print "Failed to skip"
        children = [x for x in box if (b['p1'] != x['p1'] and (b['width'] * b['height']) > (x['width'] * x['height']) and b['p2'] != x['p2'] and b['p1'][0] < x['p2'][0] and b['p2'][0] > x['p1'][0] and b['p1'][1] < x['p2'][1] and b['p2'][1] > x['p1'][1])]

        children_processed += children
        if children:
            b.update({'children':children})
        nested.append(b)
    '''
    return im.astype(numpy.uint8) * 255, box
Example #9
def general_cc_var_num_channels(img, diff_order=0, mink_norm=1, sigma=1, mask_im=None, saturation_threshold=255,
                                dilation_size=3, clip_range=True):
    # img must have first dim color channel! img[c, x, y(, z, ...)]
    dim_img = len(img.shape[1:])
    if clip_range:
        minm = img.min()
        maxm = img.max()
    img_internal = np.array(img)
    if mask_im is None:
        mask_im = np.zeros(img_internal.shape[1:], dtype=bool)
    img_dil = deepcopy(img_internal)
    for c in range(img.shape[0]):
        img_dil[c] = grey_dilation(img_internal[c], tuple([dilation_size] * dim_img))
    mask_im = mask_im | np.any(img_dil >= saturation_threshold, axis=0)
    if sigma != 0:
        mask_im[:sigma, :] = 1
        mask_im[mask_im.shape[0] - sigma:, :] = 1
        mask_im[:, mask_im.shape[1] - sigma:] = 1
        mask_im[:, :sigma] = 1
        if dim_img == 3:
            mask_im[:, :, mask_im.shape[2] - sigma:] = 1
            mask_im[:, :, :sigma] = 1

    output_img = deepcopy(img_internal)

    if diff_order == 0 and sigma != 0:
        for c in range(img_internal.shape[0]):
            img_internal[c] = gaussian_filter(img_internal[c], sigma, diff_order)
    elif diff_order == 1:
        for c in range(img_internal.shape[0]):
            img_internal[c] = gaussian_gradient_magnitude(img_internal[c], sigma)
    elif diff_order > 1:
        raise ValueError("diff_order can only be 0 or 1. 2 is not supported (ToDo, maybe)")

    img_internal = np.abs(img_internal)

    white_colors = []

    if mink_norm != -1:
        kleur = np.power(img_internal, mink_norm)
        for c in range(kleur.shape[0]):
            white_colors.append(np.power((kleur[c][mask_im != 1]).sum(), 1. / mink_norm))
    else:
        for c in range(img_internal.shape[0]):
            white_colors.append(np.max(img_internal[c][mask_im != 1]))

    som = np.sqrt(np.sum([i ** 2 for i in white_colors]))

    white_colors = [i / som for i in white_colors]

    for c in range(output_img.shape[0]):
        output_img[c] /= (white_colors[c] * np.sqrt(3.))

    if clip_range:
        output_img[output_img < minm] = minm
        output_img[output_img > maxm] = maxm
    return white_colors, output_img
Example #10
def dil_td_msg(chosen_td_msg, dilate_shape=(4, 4)):
    """

    :param chosen_td_msg:
    :param dilate_shape: (h, w); a larger h makes the dilation extend further in the vertical direction
    :return:
    """
    chosen_td_msg_dil = np.zeros((16, 200, 1000), dtype=np.uint8)
    for i, td_msg in enumerate(chosen_td_msg):
        chosen_td_msg_dil[i] = grey_dilation(td_msg, dilate_shape)

    return chosen_td_msg_dil
Example #11
def streamAmplification(arr):
    streamAmp = arr.copy()
    for i in range(len(streamAmp)):
        for j in range(len(streamAmp[i])):
            if streamAmp[i][j] < 14:
                streamAmp[i][j] = 0
    morphed = morph.grey_dilation(streamAmp, structure = create_circular_mask(35))
    minVal = np.amin(morphed)
    morphed -= minVal
    maxVal = np.amax(morphed)
    morphed /= maxVal if (maxVal != 0) else 1
    return morphed
Example #12
def imdilate(I, size = (3,3), soft = 0.0):
    """
    'soft' is a weight to be used if dilated image is to be combined back with original one for smoothing.
    soft=0 means no smoothing (image fully dilated), soft=1 returns original image (no dilation).
    """
    from scipy.ndimage.morphology import grey_dilation
    if I.ndim > len(size):
        size = size + (1,)*(I.ndim - len(size))     # append missing 1's at the end
    J = grey_dilation(I, size)
    if soft:
        J = (1-soft)*J + soft*I
    return J
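A brief usage sketch for imdilate, assuming a 2-D float image; soft=0.5 blends the dilated result with the original half and half:

import numpy as np

I = np.random.rand(50, 50)
J_hard = imdilate(I)            # plain 3x3 grey dilation
J_soft = imdilate(I, soft=0.5)  # 0.5 * dilated + 0.5 * original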
Example #13
def construct_dilate_parallel(pars):
    """
    """

    from scipy.ndimage.morphology import generate_binary_structure, iterate_structure, grey_dilation

    A_i, dims, expandCore, d = pars
    A_temp = np.reshape(A_i.toarray(), dims[::-1])
    if len(expandCore) > 0:
        if len(expandCore.shape) < len(dims):  # default for 3D
            expandCore = iterate_structure(
                generate_binary_structure(len(dims), 1), 2).astype(int)
        A_temp = grey_dilation(A_temp, footprint=expandCore)
    else:
        A_temp = grey_dilation(A_temp, [1] * len(dims))

    dist_indicator_i = scipy.sparse.coo_matrix(
        np.squeeze(np.reshape(A_temp, (d, 1)))[:, None] > 0)

    # search indexes for each component
    return dist_indicator_i
Example #14
def group_into_puffs(puffBoolean, opts):
    '''
    takes the puffBoolean image and the options dictionary, returns the puff ids and their locations
    '''
    print('Grouping Into Puffs')
    puffBooleanDilate = grey_dilation(puffBoolean, (opts['dilateT'], opts['dilateXY'], opts['dilateXY']))
    results=bwconncomp(puffBooleanDilate, puffBoolean) #join all puffs together
    del puffBooleanDilate
    print("%d puffs detected" % results.NumObjects)
    puff_idx = [puff for puff in results.PixelIdxList if len(puff) >= opts['minPuffSize']]
    print('%d puffs remaining because some were too small.' % np.size(puff_idx, 0))
    return puff_idx
Example #15
def paramvsNpart(Npx, pxlen, rmin, rmax, N_part_min, N_part_max, N_partstep,
                 tipFunc, h, tipparname, tipmin, tipmax, tipstep, N_sample,
                 paramFunc, filename):
    '''tipFunc must be a function.
    paramFunc must be a function of the surface / image only.
    '''
    N_part = np.linspace(N_part_min, N_part_max, N_partstep)
    tip = np.linspace(tipmin, tipmax, tipstep)

    out = open(filename, 'w')
    out.write(
        str(Npx) + ' ' + str(pxlen) + ' ' + str((rmax + rmin) / 2) + ' ' +
        str(h) + ' ' + str(mf.Ncp(pxlen, Npx, (rmin + rmax) / 2)) + ' ')
    out.write('#Npx, pxlen, avR_part, h_tip, Ncp(avR_part)\n')
    for m in tip:
        out.write(str(m) + ' ')
    out.write('#tip_' + tipparname + ' (rows)\n')

    for i in range(N_sample):
        print('N_sample = ' + str(i + 1))
        z_param = []
        img_param = []
        xyr = []
        z = mf.genFlat(Npx)

        for N in N_part:
            out.write(str(int(N)) + ' ')
            print('N_part = ' + str(int(N)))
            z, xyr = mf.genUnifIsolSph(z, pxlen, int(N), rmin, rmax, xyr, True)
            #      mf.plotfalsecol(z,pxlen)
            z_param.append(paramFunc(z))
            #    print('max height surface=',h_max(z,10))

            for m in tip:
                if tipparname == 'angle': tip_ar = tipFunc(pxlen, h, angle=m)
                if tipparname == 'r': tip_ar = tipFunc(pxlen, h, r=m)
                img_ar = mph.grey_dilation(z, structure=-tip_ar)
                img_param.append(paramFunc(img_ar))
        #       print('max height image=',h_max(z,10))

        out.write('#Npart\n')
        for j in range(len(N_part)):
            out.write(str(z_param[j]) + ' ')
        out.write('#Surface par\n')

        for i in range(len(tip)):
            for j in range(len(N_part)):
                out.write(str(img_param[i + j * len(tip)]) + ' ')
            out.write('\n')
        out.write('\n')
    out.close()
    print('data printed in ' + filename)
Example #16
    def update_dilated_map(self):
        self.inflated_grid = np.array(self.map_data.data)

        self.inflated_grid = np.reshape(self.inflated_grid, (self.map_data.info.width,self.map_data.info.width))
        self.inflated_grid = morphology.grey_dilation(self.inflated_grid, size=(self.n,self.n))
        self.grid_2d = self.inflated_grid
        self.inflated_grid = np.reshape(self.inflated_grid, (self.map_data.info.width*self.map_data.info.width))
        self.manual_paint()
        

        #plt.imshow(self.grid_2d, cmap='hot', interpolation='nearest')
        #plt.show()
        return self.inflated_grid
Example #17
    def GenerateHeightmap(self, cloud, tableHeight):
        '''Generates image representing contents of descriptor volume.
    - Input cloud: Cloud of the entire scene (besides the table), in the base/world reference frame.
    - Input tableHeight: Location of the top of the table surface in the z direction. (Assumes table
      is normal to z axis and objects are above the table in the +z direction.)
    '''

        X = self.GetHandPoints(cloud)
        self.image = self.ComputeHeightmap(X, tableHeight)
        self.image = grey_dilation(self.image, size=3)
        self.image = self.image.reshape(
            (self.image.shape[0], self.image.shape[1], 1))
        return self.image
Example #18
def dilateROIMask(filename, dilation_size):
    import numpy as np
    import nibabel as nb
    from scipy.ndimage.morphology import grey_dilation
    import os
    
    nii = nb.load(filename)
    origdata = nii.get_data()
    newdata = grey_dilation(origdata , (2 * dilation_size + 1,
                                       2 * dilation_size + 1,
                                       2 * dilation_size + 1))
    nb.save(nb.Nifti1Image(newdata, nii.get_affine(), nii.get_header()), 'dilated_mask.nii')
    return os.path.abspath('dilated_mask.nii')
Example #19
def plotTipDep(z, pxlen, h, R_mu, R_sigma, N_part_real):
    R_tip = np.linspace(0.01, 20, 10)
    N_part_est = []
    for R in R_tip:
        tip = mf.genParabolicTip(pxlen,h,r=R) 
        img = mph.grey_dilation(z, structure=-tip)
        N_part_est.append(partNum(img, pxlen, R_mu, R_sigma)[0])
        print(R)
    
    plt.figure()
    plt.plot(R_tip, np.array(N_part_est) / N_part_real, color='r', marker='o')
    plt.xlabel(r'$R_{tip} [nm]$')
    plt.ylabel(r'$N_{part,est} / N_{part,real}$')
    plt.title(r'$N_{part,real} = $' + str(N_part_real) + r'$, \mu_R = $' + str(R_mu) + r'$nm, \sigma_R = $' + str(R_sigma) + r'$nm$')
    plt.grid()
Example #20
def find_location(result, part, image_dim, part_dim, margin, max_collisions):
    """find a collision free location"""
    size_with_margin = part_dim + 2 * margin
    mask = np.zeros((size_with_margin, size_with_margin))
    mask[margin:part_dim + margin, margin:part_dim + margin] = part
    mask = grey_dilation(mask, size=2 * margin + 1, mode='constant', cval=0.0)
    im_end = image_dim - size_with_margin + 1
    here = (np.random.randint(0, im_end), np.random.randint(0, im_end))
    collision_count = 0
    while collision(mask, size_with_margin, here, result):
        collision_count += 1
        assert max_collisions is None or collision_count < max_collisions, "too many collisions"
        here = (np.random.randint(0, im_end), np.random.randint(0, im_end))

    return (here[0] + margin, here[1] + margin), collision_count
Example #21
 def map_callback(self,msg):
     """
     receives new map info and inflates the map
     """
     old_map = np.array(msg.data)
     side_length = int(round(np.sqrt(old_map.shape[0])))
     old_map = old_map.reshape((side_length, side_length))
     print(old_map)
     new_map = grey_dilation(old_map, size=(13,13))
     new_map = new_map.reshape(side_length**2).tolist()
     print(len(new_map))
     new_msg = OccupancyGrid()
     new_msg.data = new_map
     new_msg.info = msg.info
     self.pub.publish(new_msg)
Example #22
def realizeSingleNumber(info, size=28, dataset='training'):
    palette = np.ones(
        (size, size, 3), dtype='float32') * colorMap[info['bgcolor']]

    num_sample_idx = np.random.randint(len(numPools[dataset][info['number']]))
    num_sample = numPools[dataset][info['number']][num_sample_idx][1]

    if info['style'] == 'stroke':
        mask = grey_dilation(num_sample, (3, 3)).reshape((size, size, 1))
        palette = palette * (1 - mask)

    mask = num_sample.reshape((size, size, 1))
    palette = palette * (1 - mask) + (mask * colorMap[info['color']]) * mask

    return palette
Example #23
def stream_amplification(arr):
    """
    Attempts to amplify only streams with impoundment index.
    """
    streamAmp = arr.copy()
    for i in range(len(streamAmp)):
        for j in range(len(streamAmp[i])):
            if streamAmp[i][j] < 14:
                streamAmp[i][j] = 0
    morphed = morph.grey_dilation(streamAmp,
                                  structure=create_circular_mask(35))
    minVal = np.amin(morphed)
    morphed -= minVal
    maxVal = np.amax(morphed)
    morphed /= maxVal if (maxVal != 0) else 1
    return morphed
Example #24
    def _make_table(self, size):
        """ creates a weight map of the table

        :param size: y resolution of the generated table
        :return: map of the table
        """
        weight = 100000
        pixel_per_mm = size / 2000
        x_size = int(size * 1.5)
        y_size = size
        wall_size = int(size / 20)
        #array = 1 + np.random.random((y_size, x_size)) / 2
        array = np.ones((y_size, x_size))
        for y in range(wall_size):
            array[y, :] = weight - weight / wall_size * y
        for y in range(y_size - 1, y_size - wall_size - 1, -1):
            array[y, :] = weight - weight / wall_size * (y_size - y - 1)
        for x in range(wall_size):
            array[:, x] = weight - weight / wall_size * x
        for x in range(x_size - 1, x_size - wall_size - 1, -1):
            array[:, x] = weight - weight / wall_size * (x_size - x - 1)
        array[0:math.ceil(580 * pixel_per_mm),
              math.floor(978 * pixel_per_mm):math.ceil(2022 * pixel_per_mm
                                                       )] = weight  # Stairs
        # yellow start
        array[math.floor(767 * pixel_per_mm):math.ceil((789 + 50) *
                                                       pixel_per_mm),
              0:math.ceil(400 * pixel_per_mm)] = weight
        array[math.floor((1211 - 50) * pixel_per_mm):math.ceil(1233 *
                                                               pixel_per_mm),
              0:math.ceil(400 * pixel_per_mm)] = weight
        # green start
        array[math.floor(767 * pixel_per_mm):math.ceil((789 + 50) *
                                                       pixel_per_mm),
              math.floor(2600 * pixel_per_mm):math.ceil(3000 *
                                                        pixel_per_mm)] = weight
        array[math.floor((1211 - 50) * pixel_per_mm):math.ceil(1233 *
                                                               pixel_per_mm),
              math.floor(2600 * pixel_per_mm):math.ceil(3000 *
                                                        pixel_per_mm)] = weight

        array[math.floor(1800 * pixel_per_mm):math.ceil(2000 * pixel_per_mm),
              math.floor(1100 * pixel_per_mm):math.ceil(1900 *
                                                        pixel_per_mm)] = weight
        array = morphology.grey_dilation(array, size=(11, 11))
        return array
Example #25
def reconstruct(srs, prc, ln_mask, cmask, kernel):
    '''
    reconstruct coast line by dilation
    srs - source dem
    prc - dem for processing
    ln_mask - mask with correction boundary
    cmask - mask with unmodified boundary
    kernel - structure element for dilation operation
    kernel5=np.asarray([[1,0,1,0,1],[0,1,1,1,0],[1,1,1,1,1],[0,1,1,1,0],[1,0,1,0,1]])
    kernel3=np.asarray([[1,0,1],[0,1,0],[1,0,1]])
    '''
    rec = grey_dilation(prc, structure=kernel)
    rec_m = rec + np.median((srs - rec)[ln_mask == 1])
    min_val = -29  #np.min(srs!=-9999)
    rec_m[rec_m < min_val] = -9999
    rec_m[cmask == 1] = srs[cmask == 1]
    return rec_m
Example #26
 def _draw(self, xbins, ybins, sigma):
     # Convert np.float64 to int.
     xbins = int(xbins)
     ybins = int(ybins)
     sigma = int(sigma)
     coo, left, right, bottom, top = ConvertHeatmap(xbins, ybins).convert(
         self._coordinates)
     heatmap = coo.toarray()
     heatmap = grey_dilation(heatmap, size=(sigma, sigma))
     self._ax.clear()
     self._ax.imshow(
         heatmap.T,
         extent=[left, right, bottom, top],
         cmap="hot",
         interpolation="nearest",
     )
     self._fig.canvas.draw()
Example #27
def _extract_peaks(specgram, neighborhood, threshold):
    """
    Partition the spectrogram into subcells and extract peaks from each
    cell if the peak is sufficiently energetic compared to the neighborhood.
    """
    kernel = np.ones(shape=neighborhood)
    local_averages = convolve(specgram, kernel / kernel.sum(), mode="constant", cval=0)

    # suppress all points below the floor value
    floor = (1 + threshold) * local_averages
    candidates = np.where(specgram > floor, specgram, 0)

    # grayscale dilation is equivalent to non-maximal suppression
    local_maximums = grey_dilation(candidates, footprint=kernel)
    peak_coords = np.argwhere(specgram == local_maximums)
    peaks = zip(peak_coords[:, 0], peak_coords[:, 1])

    return peaks
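A minimal usage sketch, assuming convolve and grey_dilation come from scipy.ndimage (the names the function body relies on) and using a random array in place of a real spectrogram:

import numpy as np
from scipy.ndimage import convolve, grey_dilation

specgram = np.abs(np.random.randn(128, 256))
peaks = list(_extract_peaks(specgram, neighborhood=(5, 5), threshold=0.5))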
Example #28
def seeds_knossos(dset_info, seeds, comp, write_pf, fixval=None, fill_edges=True, dilate_seeds=None):
    """"""
    dset_name = dataset_name(dset_info)
    
    annotationfile = os.path.join(dset_info['datadir'], 
                                  dset_name + '_knossos', 
                                  'annotation_' + comp + '.xml')
    objs = get_knossos_controlpoints(annotationfile)
    for i, obj in enumerate(objs):
        # ...
        if fixval:
            objval = fixval
        elif obj['name'] is not None:
            objval = int(obj['name'][3:7])
        else:
            objval = i
        # ...
        points = []
        for _, coords in obj['nodedict'].items():
            points.append(knossoscoord2dataset(dset_info, coords))
        # ...
        if fill_edges:
            for edge in obj['edgelist']:
                point0 = knossoscoord2dataset(dset_info, obj['nodedict'][edge[0]])
                point1 = knossoscoord2dataset(dset_info, obj['nodedict'][edge[1]])
                points_z = np.linspace(point0[0], point1[0], 100).astype(int)
                points_y = np.linspace(point0[1], point1[1], 100).astype(int)
                points_x = np.linspace(point0[2], point1[2], 100).astype(int)
                p = [[z,y,x] for z,y,x in zip(points_z, points_y, points_x)]
                points = points + p
        # ...
        for point in points:
            try:
                seeds[point[0],point[1],point[2]] = objval
            except:
                pass
        print(objval, points)
    if dilate_seeds is not None:
        seeds = grey_dilation(seeds, size=dilate_seeds)
    
    writeh5(seeds, dset_info['datadir'], dset_name + write_pf, 
            element_size_um=dset_info['elsize'])
    
    return seeds
Example #29
def boxes(orig):
    img = ImageOps.grayscale(orig)
    im = numpy.array(img)

    # Inner morphological gradient.
    im = morphology.grey_dilation(im, (3, 3)) - im

    # Binarize.
    mean, std = im.mean(), im.std()
    t = mean + std
    im[im < t] = 0
    im[im >= t] = 1

    # Connected components.
    lbl, numcc = label(im)
    # Size threshold.
    min_size = 200 # pixels
    box = []
    cnt = 0
    dt = datetime.now()
    for i in range(1, numcc + 1):
        py, px = numpy.nonzero(lbl == i)
        if len(py) < min_size:
            im[lbl == i] = 0
            continue

        xmin, xmax, ymin, ymax = px.min(), px.max(), py.min(), py.max()
        print(xmin, xmax, ymin, ymax)
        doc_ref = db.collection(u'trainingCollection').document("trainingImage").collection("125464").document(str(cnt))
        data = {
            "position": [int(xmin), int(xmax), int(ymin), int(ymax)]
        }
        doc_ref.set(data, merge=True)
        cnt += 1
        
        node = Node(xmin, ymin, xmax, ymax)
        tree.insert(node)
        # Four corners and centroid.
        box.append([
            [(xmin, ymin), (xmax, ymin), (xmax, ymax), (xmin, ymax)],
            (numpy.mean(px), numpy.mean(py))])
    # for i in range(0,len(box)):
    #     print(box[i])
    return im.astype(numpy.uint8) * 255, box
Example #30
    def inflate_map(self):
        rospy.loginfo("[NAVIGATOR]: Inflating Map")

        # convert to matrix
        map_matrix = np.reshape(self.map_probs,
                                (384, 384))  #TODO: Magic numbers
        map_matrix_inflated = morpho.grey_dilation(map_matrix, size=(3, 3))

        # flatten back to list
        self.map_probs_inflated = map_matrix_inflated.flatten().tolist()

        if self.map_width > 0 and self.map_height > 0 and len(
                self.map_probs_inflated) > 0:
            self.occupancy = StochOccupancyGrid2D(self.map_resolution,
                                                  self.map_width,
                                                  self.map_height,
                                                  self.map_origin[0],
                                                  self.map_origin[1], 8,
                                                  self.map_probs_inflated)
Example #31
 def extractGrid(self, msg):
     # TODO: extract grid from msg.data and other usefull information #,footprint=np.ones((3,3)), , structure =np. ones ((1, 1))
     #self.grid = morphology.grey_dilation(msg.data, size=(3,3))
     #self.grid = msg.data
     self.res = msg.info.resolution
     self.width = msg.info.width
     self.gridInfo = msg.info
     self.height = msg.info.height
     self.origin = msg.info.origin
     self.position = msg.info.origin.position
     self.orientation = msg.info.origin.orientation
     #print(self.origin, self.position, self.orientation, self.res)
     if (self.lookAroundSteps == 0):
         self.lookAroundSteps = 2 * float(self.robotDiameter) / float(
             self.res)
         self.lookAroundSteps = int(round(self.lookAroundSteps, 3))
         print("lAS: ", self.lookAroundSteps)
         tmp = round(self.lookAroundSteps)
         if (tmp >= self.lookAroundSteps): self.lookAroundSteps = int(tmp)
         else: self.lookAroundSteps = int(tmp) + 1
         self.lookAroundStepsTmp = self.lookAroundSteps
     self.grid = np.reshape(msg.data, (self.height, self.width)).T
     #for j in range(self.height):
     #    str = ""
     #    for i in range(self.width):
     #        if (self.grid[i][j] >0): str += "#"
     #        else: str += " "
     #print(str)
     print("UPDATED")
     self.grid = (morphology.grey_dilation(self.grid,
                                           size=(self.lookAroundSteps,
                                                 self.lookAroundSteps)))
     self.threshould = int(self.lookAroundSteps * 0.65) - 1
     #for j in range(self.height):
     #    str = ""
     #    for i in range(self.width):
     #        if (self.grid[i][j] >0): str += "#"
     #        else: str += " "
     #print(str)
     #self.threshould = 3
     self.Update = False
     self.gridUpdated = True
Example #32
def part_two_visualized(x,SIZE=20001):
    '''Solves part two'''
    from scipy.ndimage.morphology import grey_dilation
    grid = np.zeros((SIZE,SIZE),dtype=np.uint16)
    
    for wire in x:
        newgrid = np.zeros((SIZE,SIZE),dtype=np.uint16)
        pos = (SIZE//2,SIZE//2)
        for entry in wire:
            newgrid,pos = trace_grid(newgrid, pos, entry)
        grid = grid + newgrid


    intersects = np.argwhere(grid > 1)

    grid = np.zeros((SIZE,SIZE),dtype=np.uint32)
    
    for wire in x:
        last = 10000
        newgrid = np.zeros((SIZE,SIZE),dtype=np.uint32)
        pos = (SIZE//2,SIZE//2)
        for entry in wire:
            newgrid,pos,last = trace_grid_more(newgrid, pos, entry, last)
        grid = np.maximum(grid, newgrid)
        # grid[SIZE//2,SIZE//2] = 1

    dists = []

    for i in intersects:
        dists.append(grid[i[0],i[1]])

    idx = np.argwhere(dists == min(dists))[0][0]

    pgrid = grid
    pgrid[SIZE//2,SIZE//2] = 3
    pgrid[intersects[idx][0],intersects[idx][1]] = 4
    pgrid = grey_dilation(pgrid, size=(SIZE//500, SIZE//500))
    plt.figure()
    plt.imshow(np.transpose(pgrid),cmap='plasma',aspect='auto')
    plt.show()

    return min(dists)
Example #33
    def compute_1pl_message(in_mess, pert_radius):
        """Compute the outgoing message of a lateral factor given the
        perturbation radius and input message.

        Parameters
        ----------
        in_mess : numpy.array
            Input BP messages to the factor. Each message has shape vps x hps.
        pert_radius : int
            Perturbation radius corresponding to the factor.

        Returns
        -------
        out_mess : numpy.array
            Output BP message (at the opposite end of the factor from the input message).
            Shape is (vps, hps).
        """
        pert_diameter = 2 * pert_radius + 1
        out_mess = grey_dilation(in_mess, (pert_diameter, pert_diameter))
        return out_mess - out_mess.max()
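The dilation here is just a sliding maximum over a window of side 2 * pert_radius + 1. A self-contained sketch of that operation with made-up sizes:

import numpy as np
from scipy.ndimage import grey_dilation

in_mess = np.random.randn(10, 12)   # vps x hps, hypothetical shape
pert_radius = 2
out_mess = grey_dilation(in_mess, size=(2 * pert_radius + 1,) * 2)
out_mess = out_mess - out_mess.max()  # same normalization as above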
Example #34
 def g6_envelope(self, **args):
     """output the envelope of g6"""
     r,g6,g = np.transpose(self.g6(**args))
     #smooth g6
     sg6 = gaussian_filter1d(np.copy(g6),1)
     maxima = np.flatnonzero(
             np.where(np.exp(
                     sg6 - grey_dilation(sg6,size=[3])
                     )>0.9999, 1, 0)
             )
     #keep only positive maxima further than the largest maxima
     maxima = [m for m in maxima[g6.argmax():] if g6[m]>0]
     envelope = np.column_stack((r[maxima],g6[maxima]))
     np.savetxt(
         os.path.join(self.path,self.head + '_total_env.g6'),
         envelope,
         fmt='%f',
         delimiter='\t'
         )
     return envelope
Example #35
def threshold_mask(mask, image,  value  ) :
  n=mask.max()
  npix=3
  
  for i in range(1,n+1):
  
    icount=0
    tmp_mask_old = 0
    while(icount<100):
      tmp_mask = np.equal( i,  mask  )
      
      if not np.any(tmp_mask):
        continue

      mask[:]=mask*(1-tmp_mask)
      
      massimo = (image*tmp_mask).max()
      print(" MASSIMO ", massimo)
      tmp_mask_2 =   morph.grey_dilation(tmp_mask,  footprint=np.ones([npix,npix]),
                                         structure=np.zeros([npix,npix]))

      print(" Npunti , value ", tmp_mask.sum(), tmp_mask_2.sum(), value)

      tmp_mask = np.less( value*massimo,  image  )*tmp_mask_2

      print(" Npunti  ", tmp_mask.sum())
     
      
      mask[:]+=tmp_mask*i
     
      if ( tmp_mask-tmp_mask_old).sum()==0:
        break
      tmp_mask_old = tmp_mask


      print(" ================ ")
      print(mask.sum())

      icount+=1

  return mask
Example #36
 def update_map(self):
     self.DynamicMap = np.zeros((120, 180))
     dilate = 15
     cylinders = self.cyl.get_cylinders_xy_and_radius(
         self.laser_data.ranges)
     for i in range(len(cylinders)):
         x = int(
             (cylinders[i].cx + self.mav.drone_pose.pose.position.x) / 0.05)
         y = int(
             (cylinders[i].cy + 4.5 + self.mav.drone_pose.pose.position.y) /
             0.05)  #Correct later
         obstacle = self.node(x, y)
         for collied_node in self.near(obstacle, dilate):
             if collied_node in self.nodeList:
                 self.DeleteBranch(collied_node)
         if x > 120 or y > 180 or x < 0 or y < 0:
             continue
         self.DynamicMap[x - 1][y - 1] = 100
     self.DynamicMap = morphology.grey_dilation(self.DynamicMap,
                                                size=(dilate * 2,
                                                      dilate * 2))
Example #37
def smear_on_low_freq(image,
                      mapped_image_values,
                      median_size=5,
                      contour_contrast_threshold=200,
                      dilation=3):
    contour_array = cleaned_contour(
        image, contrast_threshold=contour_contrast_threshold)
    contour_array = grey_dilation(contour_array, size=(dilation, dilation))

    value_array = array(mapped_image_values).reshape(
        (image.height, image.width)).astype('uint8')
    img = Image.fromarray(value_array, 'L')

    median_image = img.filter(ImageFilter.MedianFilter(median_size))

    median_data = array(median_image.getdata()).reshape(
        (image.height, image.width)).astype('uint8')
    high_freq_mask = contour_array > 100
    median_data[high_freq_mask] = value_array[high_freq_mask]

    return median_data.flatten().tolist()
Example #38
def nonmaxsup(im, radius, threshold):
    '''
    Non-max Suppression Algorithm.  Returns an image with pixels that
    are not maximal within a square neighborhood zeroed out.
    '''

    # Normalize the image & threshold
    im = im / im.max()
    im[np.nonzero(im < threshold)] = 0

    # Extract local maxima by performing a grey scale morphological
    # dilation and then finding points in the corner strength image that
    # match the dilated image and are also greater than the threshold.
    from scipy.ndimage import morphology
    num_dimensions = len(im.shape)
    neighborhood_size = (int(radius),) * num_dimensions

    mx = morphology.grey_dilation(im,
                                  footprint=np.ones(neighborhood_size),
                                  mode='constant',
                                  cval=0)
    return im * (im >= mx)
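A short usage sketch, assuming a float corner-strength image; with radius=5 only pixels that are maximal within their 5x5 footprint (and above the threshold) survive:

import numpy as np

strength = np.random.rand(100, 100)
suppressed = nonmaxsup(strength, radius=5, threshold=0.3)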
Example #39
def part_two_visualized(x):
    '''Visualization'''
    import numpy as np 
    import matplotlib.pyplot as plt
    from scipy.ndimage.morphology import grey_dilation

    a,b = int(x[0]),int(x[1])
    r = range(a,b+1)
    filtered = filter(is_sequential,r)
    filtered2 = filter(is_special,filtered)
    filtered2 = list(filtered2)
    dat = np.zeros((10,(b-a)//10+1),dtype=np.int8)
    print(a,b,dat.shape)
    for e in filtered2:
        try:
            dat[e%10,(e-a)//10] = 1
        except IndexError as x:
            print(e)
            raise x
    plt.figure()
    plt.imshow(grey_dilation(dat,size=(3,1000)),cmap='viridis',aspect='auto')
    plt.show()
Example #40
    def _get_brodmann_area(self):
        nii = nb.load(self.inputs.atlas)
        origdata = nii.get_data()
        newdata = np.zeros(origdata.shape)

        if not isinstance(self.inputs.labels, list):
            labels = [self.inputs.labels]
        else:
            labels = self.inputs.labels
        for lab in labels:
            newdata[origdata == lab] = 1
        if self.inputs.hemi == 'right':
            newdata[floor(float(origdata.shape[0]) / 2):, :, :] = 0
        elif self.inputs.hemi == 'left':
            newdata[:ceil(float(origdata.shape[0]) / 2), :, : ] = 0

        if self.inputs.dilation_size != 0:
            newdata = grey_dilation(newdata , (2 * self.inputs.dilation_size + 1,
                                               2 * self.inputs.dilation_size + 1,
                                               2 * self.inputs.dilation_size + 1))

        return nb.Nifti1Image(newdata, nii.get_affine(), nii.get_header())
Example #41
def generate_random_missing_phases(X, missing_ratio, width=1):
    """

    Parameters
    ----------
    X : nd-array, complex
        Array to be masked
    missing_ratio : float
        Ratio of missing phases, in [0, 1]
    width : int
        width of the holes

    Returns
    -------
    B : nd-array
        Modified array, with same shape as X, with B[M]=X[M] unchanged and
        B[~M] = abs(X[~M])
    M : nd-array, bool
        Mask, as an array with same shape as X, with False values for masked
        phases and True values for unchanged coefficients
    """
    nb_miss = int(np.round(missing_ratio * X.size))

    M0 = np.zeros(X.shape)
    ind_miss = np.random.permutation(M0.size)[:nb_miss]
    M0.flat[ind_miss] = np.arange(nb_miss) + nb_miss
    for i in range((width - 1) // 2):
        M0 = grey_dilation(M0, footprint=[[0, 1, 0], [1, 1, 1], [0, 1, 0]])
    ind_sort = np.argsort(M0.flat)
    M0.flat[ind_sort[:-nb_miss]] = 0

    M = np.ones(X.shape, dtype=bool)
    M[np.nonzero(M0)] = False

    B = X.copy()
    B[~M] = np.abs(B[~M])

    return B, M
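A possible usage sketch, with a small complex test array and assuming grey_dilation is imported from scipy.ndimage.morphology in the surrounding module; roughly 30% of the phases are dropped:

import numpy as np

X = np.random.randn(64, 64) + 1j * np.random.randn(64, 64)
B, M = generate_random_missing_phases(X, missing_ratio=0.3, width=3)
# Unmasked entries are untouched; masked entries keep only their magnitude.
assert np.allclose(B[M], X[M])
assert np.allclose(B[~M], np.abs(X[~M]))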
Example #42
def saveImg(Npx, pxlen, Npart, R_median, R_std, R_mu, R_sigma, R_tip, h,
            N_sample, **kwargs):
    note = kwargs.get('note', None)
    if not os.path.exists('images/Npart=' + str(Npart) + note):
        os.makedirs('images/Npart=' + str(Npart) + note)
    else:
        shutil.rmtree('images/Npart=' + str(Npart) + note)
        os.makedirs('images/Npart=' + str(Npart) + note)
    for i in range(1, N_sample + 1):
        print('i=' + str(i))
        z = mf.genFlat(Npx)
        z, trash = mf.genLogNormSolidSph(z, pxlen, Npart, R_mu, R_sigma)
        mf.plotfalsecol(z, pxlen)
        plt.savefig('images/Npart=' + str(Npart) + note + '/r=0' + '_' +
                    str(i) + '.png')
        np.savetxt('images/Npart=' + str(Npart) + note + '/r=0' + '_' +
                   str(i) + '.txt',
                   z,
                   header='Rmedian=' + str(R_median) + ', Rstd=' + str(R_std) +
                   ', mu=' + str(R_mu) + ', sigma=' + str(R_sigma) +
                   ', Npxl=' + str(Npx) + ', pxlen=' + str(pxlen) + ', h=' +
                   str(h))
        plt.close('all')
        for r in R_tip:
            print('r=' + str(r))
            tip = mf.genParabolicTip(pxlen, h, r=r)
            img = mph.grey_dilation(z, structure=-tip)
            np.savetxt('images/Npart=' + str(Npart) + note + '/r=' + str(r) +
                       '_' + str(i) + '.txt',
                       img,
                       header='Rmedian=' + str(R_median) + ', Rstd=' +
                       str(R_std) + ', mu=' + str(R_mu) + ', sigma=' +
                       str(R_sigma) + ', Npxl=' + str(Npx) + ', pxlen=' +
                       str(pxlen) + ', h=' + str(h))
            mf.plotfalsecol(img, pxlen)
            plt.savefig('images/Npart=' + str(Npart) + note + '/r=' + str(r) +
                        '_' + str(i) + '.png')
            plt.close('all')
Example #43
def paramDepend(Npx, pxlen, rmin, rmax, N_part_min, N_part_max,
                tipType, h, aspectratio_min, aspectratio_max, aspectratio_step,
                N_sample, calcParam, y_label):
    
    z = genFlat(Npx)
    N_part = np.arange(N_part_min, N_part_max+1, 1)
    aspectratio = np.linspace(aspectratio_min, aspectratio_max, aspectratio_step)
    
    plt.figure()
    plt_colors = [np.random.random(3) for _ in range(len(aspectratio) + 1)] # +1 for the surface itself
    
    for i in range(N_sample):
        
        z_param = []
        img_param = []
        
        for N in N_part:
            print('N = ', N)
            z_N = genUnifIsolSph(z,pxlen,N,rmin,rmax)
            z_param.append(calcParam(z_N*pxlen))
                
            for ar in aspectratio:
                print('ar = ', ar)
                tip_ar = tipType(pxlen,h,ar)
                img_ar = mph.grey_dilation(z_N, structure=-tip_ar)
                img_param.append(calcParam(img_ar*pxlen)) 
        
        plt_label = 'surface' if i==0 else '' # show the label only once
        plt.plot(N_part, z_param, marker='.', color=plt_colors[-1], label=plt_label)
        for j in range(len(aspectratio)):
            plt_label = 'a.r. = '+str(aspectratio[j])  if i==0 else '' # show the label only once
            plt.plot(N_part, img_param[j::len(aspectratio)], marker='.', color=plt_colors[j], label = plt_label)
        
    plt.xlabel(r'$N_{part}$')
    plt.ylabel(y_label)
    plt.grid()   
    plt.legend()
    plt.tight_layout()
Example #44
def overlay_images(dir1, dir2, out_dir, ftype1='.png',ftype2='.png'):
    '''
    Given two directories full of images, load one image from each directory and
    overlay it on the other with a fixed background and foreground tint

    Parameters
    ----------

    dir1 : str
        Path to the directory of images that will be used as the background

    dir2 : str
        Path to the directory of images that will be tinted and overlaid

    out_dir : str
        Path to the directory at which output will be saved

    '''
  

    bg_ims = glob.glob(dir1+'/*'+ftype1)
    fg_ims = glob.glob(dir2+'/*'+ftype2)
    bg_color = (153/255., 204/255., 255/255.)
    fg_color = (255/255., 204/255., 102/255.)

    if len(bg_ims) != len(fg_ims):
        warnings.warn("The two image directories contain different numbers of images.")
    
    for ind, bg_im in enumerate(bg_ims):
        
        fg_im = fg_ims[ind]
        im1 = imread2(bg_im)
        if len(im1.shape)==3:
            im1 = sum(im1, axis=2)/3.

        im2 = imread2(fg_im)
        if len(im2.shape)==3:
            im2 = sum(im2, axis=2)/3.


        im2_norm = im2.astype(double)/255.
        im2_mask = im2_norm < .2
        
        im2_norm[im2_mask] = 0.0
        just_bigger_sizes = im2_norm*im1
        just_bigger_sizes = grey_dilation(just_bigger_sizes, size=(2,2))
        
        im2_norm = im2.astype(double)/255.
#         im2_mask = im2_norm > .2
        im2_norm[~im2_mask] = 0.0
        just_smaller_sizes = im2_norm*im1
        
        just_smaller_sizes = grey_dilation(just_smaller_sizes, size=(2,2))
        
        if ind==0:
            norm_factor1 = max(ravel(just_smaller_sizes)) 
            norm_factor2 = max(ravel(just_bigger_sizes)) 
        just_smaller_sizes = (just_smaller_sizes.astype(double)/norm_factor1)*255
        just_bigger_sizes = (just_bigger_sizes.astype(double)/norm_factor2)*255
        
        
        rgb_bg = concatenate([(just_smaller_sizes*chan)[...,newaxis] for chan in bg_color],axis=-1)
        
        rgb_img = concatenate([(just_bigger_sizes*chan)[...,newaxis] for chan in fg_color],axis=-1)
        
        finim = rgb_bg + rgb_img

        bg_name = os.path.split(bg_im)[-1][:-4]
        fg_name = os.path.split(fg_im)[-1][:-4]
        savestr = out_dir +'/'+bg_name+'_times_'+fg_name+'.png'

        if ind==0:
            cmax0=max(ravel(finim))
        toimage(finim, cmin=0.0, cmax=cmax0).save(savestr)
Example #45
    def S1C1(self, Y_data_f):
        """
        Y_data_f must be a 2D data
        """
        c1_list = []
        height, width = Y_data_f.shape
        
        rot_num = len(self.gabor_thetas)

        s1 = NM.empty((self.band_num, 
                       self.scale_num_in_band,
                       rot_num,
                       height,
                       width), dtype=NM.float64)

        c1 = NM.empty((self.band_num, 
                       rot_num,
                       height,
                       width), dtype=NM.float64)
        
        ### Compute the Normalize factor
        Y_data_f_2 = NM.power(Y_data_f, 2)       
        
        
        ### Compute S1
        for idx_band in range(self.band_num):
            for idx_scale in range(self.scale_num_in_band):
      
                idx = idx_band*self.scale_num_in_band + idx_scale
                filter_size = self.filter_sizes[idx]
                gabor_sigma = self.gabor_sigmas[idx]
                gabor_lambda = self.gabor_lambdas[idx]
                ### TODO -- avoid divide 0
                factor = scipy_f.convolve(Y_data_f_2, 
                                         NM.ones((filter_size, filter_size)), 
                                         mode="constant")
                factor = NM.power(factor, 0.5)

                for idx_r in range(rot_num):
                    theta = self.gabor_thetas[idx_r]
                    gabor_filter=self.GetGaborFilter(filter_size,  
                                                     theta,
                                                     gabor_sigma,
                                                     gabor_lambda,
                                                     self.gabor_gamma)
                    temp = NM.fabs(scipy_f.correlate(Y_data_f, 
                                                     gabor_filter, 
                                                     mode='constant'))
                    self.RemoveBorder(temp, filter_size)
                    NM.divide(temp, factor, temp)
                    s1[idx_band, idx_scale, idx_r,:,:] = temp
                    del temp

        ### Compute C1
        ### pool over scales within band
        for idx_band in range(self.band_num):
            for idx_r in range(rot_num):
                T = s1[idx_band, 0, idx_r,:,:]
                for idx_scale in range(1, self.scale_num_in_band):
                    T = NM.maximum(s1[idx_band, idx_scale, idx_r,:,:], T)
                c1[idx_band, idx_r] = T

        ### pool over local neighborhood
        for idx_band in range(self.band_num):
            grid_size = self.pool_grids[idx_band]
            gap = grid_size // 2
            grid_size = grid_size * 2 - 1
            for idx_r in range(rot_num):
                t = c1[idx_band, idx_r]
                c1[idx_band, idx_r] = scipy_morp.grey_dilation(t, 
                                                               size=grid_size, 
                                                               mode='constant') 
                t = c1[idx_band, idx_r, 0::gap, 0::gap]
                c1_list.append(t)
                        
        del s1
        del c1
        ### subSample            
        return c1_list
Example #46
 def gray_dilation(self, *args, **kw):
     '''see scipy.ndimage.morphology.grey_dilation'''
     return Image(_morphology.grey_dilation(self, *args, **kw)).convert_type(self.dtype)
Example #47
def determine_search_location(A, dims, method='ellipse', min_size=3, max_size=8, dist=3,
                              expandCore=iterate_structure(generate_binary_structure(2, 1), 2).astype(int), dview=None):
    """
    compute the indices of the distance from the cm to search for the spatial component

    does it by following an ellipse from the cm or doing a step-by-step dilation around the cm


    Parameters:
    ----------
    [parsed]
     cm[i]:
        center of mass of each neuron

     A[:, i]: the A of each components

     dims:
        the dimension of each A's ( same usually )

     dist:
        computed distance matrix

     dims: [optional] tuple
                x, y[, z] movie dimensions

    method: [optional] string
            method used to expand the search for pixels 'ellipse' or 'dilate'

    expandCore: [optional]  scipy.ndimage.morphology
            if method is dilate this represents the kernel used for expansion

    min_size: [optional] int

    max_size: [optional] int

    dist: [optional] int

    dims: [optional] tuple
             x, y[, z] movie dimensions

    Returns:
    --------
    dist_indicator: np.ndarray
        distance from the cm to search for the spatial footprint

    Raise:
    -------
    Exception('You cannot pass empty (all zeros) components!')
    """

    from scipy.ndimage.morphology import grey_dilation

    # we initialize the values
    if len(dims) == 2:
        d1, d2 = dims
    elif len(dims) == 3:
        d1, d2, d3 = dims
    d, nr = np.shape(A)
    A = csc_matrix(A)
    dist_indicator = scipy.sparse.csc_matrix((d, nr),dtype= np.float32)

    if method == 'ellipse':
        Coor = dict()
        # we create a matrix of size A.x of each pixel coordinate in A.y and inverse
        if len(dims) == 2:
            Coor['x'] = np.kron(np.ones(d2), list(range(d1)))
            Coor['y'] = np.kron(list(range(d2)), np.ones(d1))
        elif len(dims) == 3:
            Coor['x'] = np.kron(np.ones(d3 * d2), list(range(d1)))
            Coor['y'] = np.kron(
                np.kron(np.ones(d3), list(range(d2))), np.ones(d1))
            Coor['z'] = np.kron(list(range(d3)), np.ones(d2 * d1))
        if not dist == np.inf:  # determine search area for each neuron
            cm = np.zeros((nr, len(dims)))  # vector for center of mass
            Vr = []  # cell(nr,1);
            dist_indicator = []
            pars = []
            # for each dim
            for i, c in enumerate(['x', 'y', 'z'][:len(dims)]):
                # mass center in this dim = (coor*A)/sum(A)
                cm[:, i] = old_div(
                    np.dot(Coor[c], A[:, :nr].todense()), A[:, :nr].sum(axis=0))

            # parallelizing the process of the construct ellipse function
            for i in range(nr):
                pars.append([Coor, cm[i], A[:, i], Vr, dims,
                             dist, max_size, min_size, d])
            if dview is None:
                res = list(map(construct_ellipse_parallel, pars))
            else:
                if 'multiprocessing' in str(type(dview)):
                    res = dview.map_async(
                        construct_ellipse_parallel, pars).get(4294967)
                else:
                    res = dview.map_sync(construct_ellipse_parallel, pars)
            for r in res:
                dist_indicator.append(r)

            dist_indicator = (np.asarray(dist_indicator)).squeeze().T

        else:
            raise Exception('Not implemented')
            dist_indicator = True * np.ones((d, nr))

    elif method == 'dilate':
        for i in range(nr):
            A_temp = np.reshape(A[:, i].toarray(), dims[::-1])
            if len(expandCore) > 0:
                if len(expandCore.shape) < len(dims):  # default for 3D
                    expandCore = iterate_structure(
                        generate_binary_structure(len(dims), 1), 2).astype(int)
                A_temp = grey_dilation(A_temp, footprint=expandCore)
            else:
                A_temp = grey_dilation(A_temp, [1] * len(dims))

            dist_indicator[:, i] = scipy.sparse.coo_matrix(np.squeeze(np.reshape(A_temp, (d, 1)))[:,None] > 0)
    else:
        raise Exception('Not implemented')
        dist_indicator = True * np.ones((d, nr))

    return dist_indicator
Example #48
def determine_search_location(A, dims, method='ellipse', min_size=3, max_size=8, dist=3,
                              expandCore=iterate_structure(generate_binary_structure(2, 1), 2).astype(int), dview=None):
    """
    restrict search location to subset of pixels

    TODO
    """
    from scipy.ndimage.morphology import grey_dilation
    from scipy.sparse import coo_matrix, issparse

    if len(dims) == 2:
        d1, d2 = dims
    elif len(dims) == 3:
        d1, d2, d3 = dims

    d, nr = np.shape(A)

    A = csc_matrix(A)

    IND = False * np.ones((d, nr))
    if method == 'ellipse':
        Coor = dict()
        if len(dims) == 2:
            Coor['x'] = np.kron(np.ones(d2), list(range(d1)))
            Coor['y'] = np.kron(list(range(d2)), np.ones(d1))
        elif len(dims) == 3:
            Coor['x'] = np.kron(np.ones(d3 * d2), list(range(d1)))
            Coor['y'] = np.kron(np.kron(np.ones(d3), list(range(d2))), np.ones(d1))
            Coor['z'] = np.kron(list(range(d3)), np.ones(d2 * d1))
        if not dist == np.inf:             # determine search area for each neuron
            cm = np.zeros((nr, len(dims)))        # vector for center of mass
            Vr = []    # cell(nr,1);
            IND = []       # indicator for distance

            for i, c in enumerate(['x', 'y', 'z'][:len(dims)]):
                cm[:, i] = old_div(np.dot(Coor[c], A[:, :nr].todense()), A[:, :nr].sum(axis=0))

#            for i in range(nr):            # calculation of variance for each component and construction of ellipses
#                dist_cm = coo_matrix(np.hstack([Coor[c].reshape(-1, 1) - cm[i, k]
#                                                for k, c in enumerate(['x', 'y', 'z'][:len(dims)])]))
#                Vr.append(dist_cm.T * spdiags(A[:, i].toarray().squeeze(),
#                                              0, d, d) * dist_cm / A[:, i].sum(axis=0))
#
#                if np.sum(np.isnan(Vr)) > 0:
#                    raise Exception('You cannot pass empty (all zeros) components!')
#
#                D, V = eig(Vr[-1])
#
#                dkk = [np.min((max_size**2, np.max((min_size**2, dd.real)))) for dd in D]
#
#                # search indexes for each component
#                IND.append(np.sqrt(np.sum([(dist_cm * V[:, k])**2 / dkk[k]
#                                           for k in range(len(dkk))], 0)) <= dist)
#            IND = (np.asarray(IND)).squeeze().T
            pars = []
            for i in range(nr):
                pars.append([Coor, cm[i], A[:, i], Vr, dims, dist, max_size, min_size, d])

            if dview is None:
                res = list(map(contruct_ellipse_parallel, pars))
            else:
                res = dview.map_sync(contruct_ellipse_parallel, pars)

            for r in res:
                IND.append(r)

            IND = (np.asarray(IND)).squeeze().T

        else:
            IND = True * np.ones((d, nr))
    elif method == 'dilate':
        for i in range(nr):
            A_temp = np.reshape(A[:, i].toarray(), dims[::-1])  # , order='F')
            # A_temp = np.reshape(A[:, i].toarray(), (d2, d1))
            if len(expandCore) > 0:
                if len(expandCore.shape) < len(dims):  # default for 3D
                    expandCore = iterate_structure(
                        generate_binary_structure(len(dims), 1), 2).astype(int)
                A_temp = grey_dilation(A_temp, footprint=expandCore)
            else:
                A_temp = grey_dilation(A_temp, [1] * len(dims))

            IND[:, i] = np.squeeze(np.reshape(A_temp, (d, 1))) > 0
    else:
        IND = True * np.ones((d, nr))

    return IND
Example #49
def grow_mask(input, npix  ) :
  output =  morph.grey_dilation(input,  footprint=np.ones([npix,npix]),
                                structure=np.zeros([npix,npix]))
  return output