def measure(self, position_root, timepoint, annotations, before, after):
        print(f'Measuring position {position_root.name} - {timepoint}')
        measures = {}
        derived_root = position_root.parent / 'derived_data'

        mask = self.get_mask(position_root, derived_root, timepoint,
                             annotations)
        if mask is None:
            return [numpy.nan] * len(self.feature_names)

        measures['mask_area'] = mask.sum() * self.microns_per_pixel**2

        moments = ski_measure.moments(mask, order=1)
        centroid = numpy.array(
            [moments[1, 0] / moments[0, 0], moments[0, 1] / moments[0, 0]])

        centroid_distances = []
        for adjacent in (before, after):
            if adjacent is not None:
                adj_mask = self.get_mask(position_root, derived_root,
                                         adjacent['timepoint'], annotations)
                if adj_mask is None:
                    centroid_distances.append(numpy.nan)
                    break
                adj_moments = ski_measure.moments(adj_mask, order=1)
                adj_centroid = numpy.array([
                    adj_moments[1, 0] / adj_moments[0, 0],
                    adj_moments[0, 1] / adj_moments[0, 0]
                ])
                adj_dist = ((centroid - adj_centroid)**2).sum()**0.5
                centroid_distances.append(adj_dist)
        measures['mask_centroid_dist'] = numpy.sum(
            centroid_distances) * self.microns_per_pixel
        return [measures[feature] for feature in self.feature_names]
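For reference, a minimal standalone sketch of the centroid step this method relies on (synthetic masks and an assumed microns_per_pixel value, not the original class):

import numpy as np
from skimage import measure

def centroid_microns(mask, microns_per_pixel=1.3):
    # microns_per_pixel is a placeholder value for illustration
    m = measure.moments(mask, order=1)
    return np.array([m[1, 0] / m[0, 0], m[0, 1] / m[0, 0]]) * microns_per_pixel

mask_a = np.zeros((100, 100), dtype=np.uint8)
mask_b = np.zeros((100, 100), dtype=np.uint8)
mask_a[40:60, 40:60] = 1
mask_b[45:65, 42:62] = 1
print(np.linalg.norm(centroid_microns(mask_a) - centroid_microns(mask_b)))
# -> centroid displacement between the two masks, in microns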
Example 2
def test_scale_phosphene():
    img = np.zeros((200, 200), dtype=np.double)
    img[90:110, 70:90] = 1
    img_area = skim.moments(img, order=0)
    for scale in [0.9, 1, 1.5, 2, 4]:
        scaled = imgproc.scale_phosphene(img, scale)
        scaled_area = skim.moments(scaled, order=0)
        npt.assert_almost_equal(scaled_area, img_area * scale**2)
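As background for the comparison above: moments(img, order=0)[0, 0] is just the total intensity, so isotropic scaling by scale multiplies it by scale**2. A standalone check (not part of the pulse2percept test suite):

import numpy as np
from skimage.measure import moments

img = np.zeros((50, 50))
img[20:30, 20:30] = 1.0                  # 10 x 10 square of ones
m00 = moments(img, order=0)[0, 0]
print(m00, img.sum())                    # both 100.0: m00 is the total intensity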
Example 3
def transform_rot(image):
    
    # Need a black background (0 boundary condition)
    # for the rotational transform.
    # That's why we use Sobel or an inverse transform here.
    #image_ref = filters.sobel(image)
    image_ref = 1.0 - image
   
    # Center of mass to be used as rotation center
    m = moments(image_ref,order=1)    
    cx = m[1, 0]/m[0, 0]
    cy = m[0, 1]/m[0, 0]
    com = cx, cy
   
    # This next step is mathematically sound, but the rotation angle
    # it generates varies drastically with changes in the watch image,
    # so it is not robust enough for universal alignment.
    # Therefore we add an extra rotation step after it.
    # Ascertaining rotation angle from FFT transform
    ind1 = np.arange(image.shape[0],dtype=float)
    ind2 = np.arange(image.shape[1],dtype=float)[:,None]
    angle = np.angle(ind1 - com[0] + 1j * (ind2 - com[1]))
    exp_theta = np.exp(1j * angle)
    angle_rot = np.angle(np.sum(np.sum(image_ref * exp_theta, axis=1)), deg=True)
    # Creating a temporary rotated version of the input image
    image_rot_aux = transform.rotate(image, angle_rot, resize=False,
                                     center=com, mode='nearest')

    # Second rotation step based on Inertia tensor
    # Again need 0 boundary condition away from object and
    # thus Sobel or inverse transform
    #image_ref = filters.sobel(image_rot_aux)
    image_ref =  1.0 - image_rot_aux

    m = moments(image_ref,order=2)
    Ixx = m[2, 0]/m[0, 0] - np.power(cx,2)
    Iyy = m[0, 2]/m[0, 0] - np.power(cy,2)
    Ixy = m[1, 1]/m[0, 0] - cx*cy
    inertia = [[Ixx, Ixy],[Ixy, Iyy]]
    w, v = np.linalg.eig(inertia)
    idx = w.argsort()[::-1]   
    w = w[idx]
    v = v[:,idx]
    cross = np.cross(v[:,0],v[:,1])
    # Ensuring eigenvectors satisfy right-hand rule
    if (cross < 0):
       v[:,1] *= -1
    
    # Ascertaining rotation angle from inertia tensor eigenvectors
    angle_rad = np.arctan2(v[1,0],v[0,0]) + np.pi/2
    angle_rot = np.degrees(angle_rad)
    
    # Creating final rotated version of input image
    image_rot = transform.rotate(image_rot_aux, angle_rot, resize=False,
                                 center=com, mode='nearest')

    return image_rot
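As a point of comparison (a sketch, not the author's pipeline): for a binary foreground, scikit-image's regionprops exposes the inertia-tensor orientation directly, which can stand in for the hand-rolled eigendecomposition above. Sign conventions differ between implementations, so the rotation direction may need flipping for a given image.

import numpy as np
from skimage import measure, transform

def align_major_axis(image, threshold=0.5):
    # Hypothetical helper: rotate `image` so that the major axis of the
    # inverted foreground (1.0 - image, as above) ends up vertical.
    mask = (1.0 - image) > threshold
    props = measure.regionprops(mask.astype(int))[0]
    angle_deg = np.degrees(props.orientation)   # regionprops reports radians
    centroid_rc = props.centroid                # (row, col)
    return transform.rotate(image, -angle_deg, resize=False,
                            center=centroid_rc[::-1], mode='edge')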
Example 4
def modified_hausdorff(image1, image2, metric=cv2.NORM_L2):
    ''' Compute the Modified Hausdorff distance. '''

    # Align center with centroids
    h1, w1 = image1.shape
    h2, w2 = image2.shape
    m1 = measure.moments(image1, order=1)
    m2 = measure.moments(image2, order=1)
    xc1, yc1 = int(m1[1, 0] / m1[0, 0]), int(m1[0, 1] / m1[0, 0])
    xc2, yc2 = int(m2[1, 0] / m2[0, 0]), int(m2[0, 1] / m2[0, 0])
    # integer offsets: np.pad and the slicing below require ints
    dx1, dy1 = (xc1 - w1 // 2), (yc1 - h1 // 2)
    dx2, dy2 = (xc2 - w2 // 2), (yc2 - h2 // 2)

    # Contour extraction
    _, contours1, hierarchy1 = cv2.findContours(image1.copy(), cv2.RETR_CCOMP,
                                                cv2.CHAIN_APPROX_SIMPLE)
    _, contours2, hierarchy2 = cv2.findContours(image2.copy(), cv2.RETR_CCOMP,
                                                cv2.CHAIN_APPROX_SIMPLE)

    # Contours drawing
    padded1 = np.zeros_like(image1)
    padded2 = np.zeros_like(image2)
    idx = 0
    while idx >= 0:
        padded1 = cv2.drawContours(padded1, contours1, idx, (255, 255, 255))
        idx = hierarchy1[0][idx][0]
    idx = 0
    while idx >= 0:
        padded2 = cv2.drawContours(padded2, contours2, idx, (255, 255, 255))
        idx = hierarchy2[0][idx][0]

    # Padding
    padded1 = np.pad(padded1, ((max(0, -dy1), max(0, dy1)),
                               (max(0, -dx1), max(0, dx1))),
                     mode='constant',
                     constant_values=0)
    padded2 = np.pad(padded2, ((max(0, -dy2), max(0, dy2)),
                               (max(0, -dx2), max(0, dx2))),
                     mode='constant',
                     constant_values=0)

    # Distance computations
    h1, w1 = padded1.shape
    h2, w2 = padded2.shape
    h, w = max(h1, h2) + 2, max(w1, w2) + 2
    dx1, dy1 = (w - w1) // 2, (h - h1) // 2
    dx2, dy2 = (w - w2) // 2, (h - h2) // 2
    base1 = np.zeros((h, w), dtype=np.uint8)
    base2 = np.zeros((h, w), dtype=np.uint8)
    base1[dy1:dy1 + h1, dx1:dx1 + w1] = padded1
    base2[dy2:dy2 + h2, dx2:dx2 + w2] = padded2
    dist1 = cv2.distanceTransform(255 - base1, metric, cv2.DIST_MASK_PRECISE)
    dist2 = cv2.distanceTransform(255 - base2, metric, cv2.DIST_MASK_PRECISE)
    h12 = dist1[base2 == 255].mean()
    h21 = dist2[base1 == 255].mean()

    return max(h12, h21)
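For comparison, a sketch that computes the modified Hausdorff distance directly from contour point sets (e.g. from skimage.measure.find_contours) instead of distance transforms; pts1 and pts2 are assumed (N, 2) arrays:

import numpy as np
from scipy.spatial.distance import cdist

def modified_hausdorff_points(pts1, pts2):
    d = cdist(pts1, pts2)           # pairwise Euclidean distances
    h12 = d.min(axis=1).mean()      # mean nearest-neighbour distance, set 1 -> set 2
    h21 = d.min(axis=0).mean()      # mean nearest-neighbour distance, set 2 -> set 1
    return max(h12, h21)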
Example 5
def test_rotation():
    (trainX, trainY), (testX, testY) = mnist.load_data()
    fig, ax = plt.subplots(4, 10, figsize=(16, 10))
    index = 10
    for i in range(index, index + 10):
        M_norot = moments(trainX[i], order=1)
        trainX[i] = rotate(trainX[i], 90, order=1, reshape=False)
        M_rot = moments(trainX[i], order=1)

        print("avant", M_norot[1, 0] / M_norot[0, 0],
              M_norot[0, 1] / M_norot[0, 0])
        print("apres", M_rot[1, 0] / M_rot[0, 0], M_rot[0, 1] / M_rot[0, 0])
        ax[0][i - index].imshow(trainX[i])

    plt.show()
Example 6
def centroid_com(data, data_mask=None):
    """
    Calculate the centroid of an array as its center of mass determined
    from image moments.

    Parameters
    ----------
    data : array_like
        The image data.

    data_mask : array_like, bool, optional
        A boolean mask with the same shape as `data`, where a `True`
        value indicates the corresponding element of `data` is invalid.

    Returns
    -------
    centroid : tuple
        (x, y) coordinates of the centroid.
    """
    from skimage.measure import moments

    if data_mask is not None:
        if data.shape != data_mask.shape:
            raise ValueError('data and data_mask must have the same shape')
        data[data_mask] = 0.

    m = moments(data, 1)
    xcen = m[1, 0] / m[0, 0]
    ycen = m[0, 1] / m[0, 0]
    return xcen, ycen
Example 7
    def huMoment(self, img):
        h = 1 - img
        m = moments(h)
        cr = m[0, 1] / m[0, 0]
        cc = m[1, 0] / m[0, 0]
        mu = moments_central(h, cr, cc)
        return mu
Example 8
def fitOnImageEllipse(data):
    '''
    Returns the lengths of the long and short axes and the angle of the
    long axis to the horizontal for the best-fit ellipse based on
    image moments.

    usage: longAxis, shortAxis, angle = fitOnImageEllipse(N_by_M_image_as_array)
    '''
    # source:
    #     Kieran F. Mulchrone, Kingshuk Roy Choudhury,
    # Fitting an ellipse to an arbitrary shape:
    # implications for strain analysis, Journal of
    # Structural Geology, Volume 26, Issue 1,
    # January 2004, Pages 143-153, ISSN 0191-8141,
    # <http://dx.doi.org/10.1016/S0191-8141(03)00093-2.>
    #     Lourena Rocha, Luiz Velho, Paulo Cezar P. Carvalho
    # Image Moments-Based Structuring and Tracking of
    # Objects, IMPA-Instituto Nacional de Matematica Pura
    # e Aplicada. Estrada Dona Castorina, 110, 22460
    # Rio de Janeiro, RJ, Brasil,
    # <http://ieeexplore.ieee.org/stamp/stamp.jsp?tp=&arnumber=1167130>

    m = moments(data, 2)  # super fast compared to anything in pure python
    xc = m[1, 0] / m[0, 0]
    yc = m[0, 1] / m[0, 0]
    a = (m[2, 0] / m[0, 0]) - (xc**2)
    b = 2 * ((m[1, 1] / m[0, 0]) - (xc * yc))
    c = (m[0, 2] / m[0, 0]) - (yc**2)
    theta = .5 * (np.arctan2(b, (a - c)))
    w = np.sqrt(6 * (a + c - np.sqrt(b**2 + (a - c)**2)))
    l = np.sqrt(6 * (a + c + np.sqrt(b**2 + (a - c)**2)))
    return l, w, theta
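A quick cross-check sketch on a synthetic ellipse (assumed input, not part of the original module): the returned angle should track regionprops.orientation up to sign convention, while the axis lengths differ from regionprops by a constant factor, since this formula uses sqrt(6 * ...) rather than scikit-image's 4 * sqrt(eigenvalue) normalization.

import numpy as np
from skimage import measure
from skimage.draw import ellipse

img = np.zeros((200, 200))
rr, cc = ellipse(100, 100, 30, 60, rotation=np.deg2rad(20))
img[rr, cc] = 1.0

long_axis, short_axis, angle = fitOnImageEllipse(img)
props = measure.regionprops(img.astype(int))[0]
print(long_axis, short_axis, np.degrees(angle))
print(props.major_axis_length, props.minor_axis_length, np.degrees(props.orientation))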
Example 9
def centroid_com(data, mask=None):
    """
    Calculate the centroid of a 2D array as its center of mass
    determined from image moments.

    Parameters
    ----------
    data : array_like or `~astropy.nddata.NDData`
        The 2D array of the image.

    mask : array_like, bool, optional
        A boolean mask with the same shape as ``data``, where a `True`
        value indicates the corresponding element of ``data`` is
        invalid.  If ``mask`` is input it will override ``data.mask``
        for `~astropy.nddata.NDData` inputs.

    Returns
    -------
    xcen, ycen : tuple of floats
        (x, y) coordinates of the centroid.
    """

    from skimage.measure import moments
    data = _convert_image(data, mask=mask)
    m = moments(data, 1)
    xcen = m[1, 0] / m[0, 0]
    ycen = m[0, 1] / m[0, 0]
    return xcen, ycen
Example 10
def centroid_com(data, mask=None):
    """
    Calculate the centroid of a 2D array as its center of mass
    determined from image moments.

    Parameters
    ----------
    data : array_like or `~astropy.nddata.NDData`
        The 2D array of the image.

    mask : array_like, bool, optional
        A boolean mask with the same shape as ``data``, where a `True`
        value indicates the corresponding element of ``data`` is
        invalid.  If ``mask`` is input it will override ``data.mask``
        for `~astropy.nddata.NDData` inputs.

    Returns
    -------
    xcen, ycen : tuple of floats
        (x, y) coordinates of the centroid.
    """

    from skimage.measure import moments
    data = _convert_image(data, mask=mask)
    m = moments(data, 1)
    xcen = m[1, 0] / m[0, 0]
    ycen = m[0, 1] / m[0, 0]
    return xcen, ycen
Example 11
def fitOnImageEllipse(data):
    '''
    Returns the lengths of the long and short axes and the angle of the
    long axis to the horizontal for the best-fit ellipse based on
    image moments.

    usage: longAxis, shortAxis, angle = fitOnImageEllipse(N_by_M_image_as_array)
    '''
    # source:
    #     Kieran F. Mulchrone, Kingshuk Roy Choudhury,
    # Fitting an ellipse to an arbitrary shape:
    # implications for strain analysis, Journal of
    # Structural Geology, Volume 26, Issue 1,
    # January 2004, Pages 143-153, ISSN 0191-8141,
    # <http://dx.doi.org/10.1016/S0191-8141(03)00093-2.>
    #     Lourena Rocha, Luiz Velho, Paulo Cezar P. Carvalho
    # Image Moments-Based Structuring and Tracking of
    # Objects, IMPA-Instituto Nacional de Matematica Pura
    # e Aplicada. Estrada Dona Castorina, 110, 22460
    # Rio de Janeiro, RJ, Brasil,
    # <http://ieeexplore.ieee.org/stamp/stamp.jsp?tp=&arnumber=1167130>

    m = moments(data, 2) # super fast compared to anything in pure python
    xc = m[1,0] / m[0,0]
    yc = m[0,1] / m[0,0]
    a = (m[2,0] / m[0,0]) - (xc**2)
    b = 2 * ((m[1,1] / m[0,0]) - (xc * yc))
    c = (m[0,2] / m[0,0]) - (yc**2)
    theta = .5 * (np.arctan2(b, (a - c)))
    w = np.sqrt(6 * (a + c - np.sqrt(b**2 + (a-c)**2)))
    l = np.sqrt(6 * (a + c + np.sqrt(b**2 + (a-c)**2)))
    return l, w, theta
Example 12
def get_moments(image):
    ''' Image moments do not perform well in this scenario; I have not
    investigated the cause.
    '''
    order = 7
    imm = measure.moments(image, order=order)
    return imm
Example 13
def centroid_com(data, mask=None):
    """
    Calculate the centroid of a 2D array as its center of mass
    determined from image moments.

    Parameters
    ----------
    data : array_like
        The 2D array of the image.

    mask : array_like (bool), optional
        A boolean mask, with the same shape as ``data``, where a `True`
        value indicates the corresponding element of ``data`` is masked.

    Returns
    -------
    xcen, ycen : float
        (x, y) coordinates of the centroid.
    """

    from skimage.measure import moments
    data = _convert_image(data, mask=mask)
    m = moments(data, 1)
    xcen = m[1, 0] / m[0, 0]
    ycen = m[0, 1] / m[0, 0]
    return xcen, ycen
Example 14
def get_hu_moments(samples):
    print "getting hu moments..."
    features = []
    for sample in samples:
        '''
        sample = np.array(sample)
        th = 200
        img_binary = (sample < th).astype(np.double)
        img_label = label(img_binary, background=255)
        regions = regionprops(img_label)
        if regions == []:
            print "no regions"
        for props in regions:
            minr, minc, maxr, maxc = props.bbox
            roi = img_binary[minr:maxr, minc:maxc]

        '''
        sample = np.array(sample)
        sample = sample.astype(np.double)
        m = moments(sample)
        cr = m[0, 1] / m[0, 0]
        cc = m[1, 0] / m[0, 0]
        mu = moments_central(sample, cr, cc)
        nu = moments_normalized(mu)
        hu = moments_hu(nu)
        features.append(hu)
    return features
Example 15
    def calculateCentroid(self, mask, bbox):
        m = measure.moments(mask)
        c = np.array((m[0, 1] / m[0, 0], m[1, 0] / m[0, 0]))

        # centroid is (x, y) while measure returns (y, x) and bbox is (y, x)
        self.centroid  = np.array((c[0] + bbox[1], c[1]+ bbox[0]))
        self.blob_name = "c-{:d}-{:.1f}x-{:.1f}y".format(self.id, self.centroid[0], self.centroid[1])
Example 16
File: lab1.py Project: Yak73/Dip
def decision(fruit1, avg_momnt1, avg_momnt2, type_sample="Testing"):
    """
    Decision which fruit fits each image and calculate error

    ARGS:
    fruit1 - name of 1st fruit
    avg_moment1 - average moment value of dataset[fruit1]
    avg_moment2 - average moment value of dataset[fruit2]
    type_sample - name of root folder (Training or Testing)
    RETURN VALUE:
    list with error for each image
    (If wrong decision: value error have minus)
    """
    os.chdir(type_sample + "/" + fruit1)
    img_list = os.listdir(path=os.getcwd())
    decision_list = []
    for img in img_list:
        image = io.imread(img)
        img_grayscale = color.rgb2gray(image)
        edges = filters.sobel(img_grayscale)  # FILTER
        cur_moment = float(moments(edges, order=0)[0, 0])
        err1 = abs(cur_moment - avg_momnt1)
        err2 = abs(cur_moment - avg_momnt2)
        if err1 < err2:
            decision_list.append(err1)
        else:
            decision_list.append(-1 * err2)
    os.chdir(os.getcwd() + "/../../")
    return decision_list
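The avg_momnt* arguments above are presumably per-class averages computed the same way; a hedged sketch of how they might be produced (hypothetical helper average_moment, mirroring the per-image measurement in decision):

import os
from skimage import io, color, filters
from skimage.measure import moments

def average_moment(type_sample, fruit):
    folder = os.path.join(type_sample, fruit)
    vals = []
    for name in os.listdir(folder):
        img = io.imread(os.path.join(folder, name))
        edges = filters.sobel(color.rgb2gray(img))
        vals.append(float(moments(edges, order=0)[0, 0]))
    return sum(vals) / len(vals)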
Example 17
def bscan_cut(bscans, onh_ybox=[-1, -1]):
    if onh_ybox != [-1, -1]:
        cut_scans = bscans[onh_ybox[0]:onh_ybox[1], :, :]
    else:
        # either no coordinates were entered, or they weren't detected
        cut_scans = bscans[120:136, :, :]
    avg_scans = np.mean(cut_scans, axis=0)
    #Email from Mayank to Eric on 8-15-2015 on Mayank and Robert's formula to resize bscans to 1 px**2
    #height = y_height*0.84
    #width = 1600 px
    height = np.round(avg_scans.shape[1] * 0.84)
    width = 1600
    scan = tf.resize(avg_scans, (height, width), order=3, mode="reflect")
    #drop to 8 bit, otherwise the averaging takes a very long time. Which seems strange...
    scan_s = scan.astype("uint8")
    scan_m = sf.rank.mean(scan_s, selem=mp.disk(50))
    #Worried there are issues with top hat. May smear in noise that becomes part of the center of mass computation.
    scan_b = mp.white_tophat(scan_m, selem=mp.square(500))
    m = sm.moments(scan_b, order=1)
    y = int(np.round(m[0, 1] / m[0, 0]))
    ymin = y - 300
    ymax = y + 300
    cut_scan = scan[ymin:ymax, :]

    return cut_scan.astype("float32")
Example 18
def centroid_com(data, mask=None):
    """
    Calculate the centroid of a 2D array as its center of mass
    determined from image moments.

    Parameters
    ----------
    data : array_like
        The 2D array of the image.

    mask : array_like (bool), optional
        A boolean mask, with the same shape as ``data``, where a `True`
        value indicates the corresponding element of ``data`` is masked.

    Returns
    -------
    xcen, ycen : float
        (x, y) coordinates of the centroid.
    """

    from skimage.measure import moments
    data = _convert_image(data, mask=mask)
    m = moments(data, 1)
    xcen = m[1, 0] / m[0, 0]
    ycen = m[0, 1] / m[0, 0]
    return xcen, ycen
Example 19
def get_hu_moment_from_image(image):
    """
    Compute the 7 Hu's moments from an image.
    This set of moments is proofed to be translation, scale and rotation invariant.

    Parameters
    ----------
    image: array-like
        a 2d array of double or uint8 corresponding to an image

    Returns
    -------
    (7, 1) array of double
        7 Hu's moments

    References
    ----------
    http://scikit-image.org/docs/dev/api/skimage.measure.html#skimage.measure.moments
    """
    order = 7
    raw_moments = moments(image, order=order)
    cr = raw_moments[0, 1] / raw_moments[0, 0]
    cc = raw_moments[1, 0] / raw_moments[0, 0]
    central_moments = moments_central(image, cr, cc, order=order)
    normalized_moments = moments_normalized(central_moments, order)
    hu_moments = moments_hu(normalized_moments)
    return hu_moments
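The positional cr, cc signature above belongs to older scikit-image releases; on recent versions (>= 0.16), where moments_central takes a center tuple, an equivalent sketch of the same Hu pipeline looks like this:

import numpy as np
from skimage.measure import (moments, moments_central,
                             moments_normalized, moments_hu)

def hu_moments(image):
    image = np.asarray(image, dtype=float)
    m = moments(image)
    centroid = (m[1, 0] / m[0, 0], m[0, 1] / m[0, 0])   # (row, col)
    mu = moments_central(image, center=centroid)
    nu = moments_normalized(mu)
    return moments_hu(nu)                               # 7 invariants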
Example 20
def extract_features(roi, props):

    features = []

    m = moments(roi)
    # print(m)

    cr = m[0, 1] / m[0, 0]
    cc = m[1, 0] / m[0, 0]

    mu = moments_central(roi, (cr, cc))
    nu = moments_normalized(mu)

    #finding Seven Features
    hu = moments_hu(nu)

    # seven features to be put into feature list
    features.extend(hu)

    # print(features)

    features.append(roi.shape[1]/roi.shape[0])
    features.append(props.eccentricity)
    features.append(props.convex_area/props.area)
    features.append(props.orientation)
    features.append(props.euler_number)
    
    return np.array([features])
Example 21
def collect(path, mean, std):
    img = io.imread('./images/' + path + '.bmp')
    hist = exposure.histogram(img)
    th = get_threshold('./images/' + path + '.bmp')
    img_binary = (img < th).astype(np.double)
    img_label = label(img_binary, background=0)
    regions = regionprops(img_label)
    boxes = []
    features = []
    for props in regions:
        box = []
        minr, minc, maxr, maxc = props.bbox
        if maxc - minc < 10 or maxr - minr < 10 or maxc - minc > 120 or maxr - minr > 120:
            continue
        box.append(minr)
        box.append(maxr)
        box.append(minc)
        box.append(maxc)
        boxes.append(box)

        roi = img_binary[minr:maxr, minc:maxc]
        m = moments(roi)
        cr = m[0, 1] / m[0, 0]
        cc = m[1, 0] / m[0, 0]
        mu = moments_central(roi, cr, cc)
        nu = moments_normalized(mu)
        hu = moments_hu(nu)
        features.append(hu)

    feature_arr = normalize(features, mean, std)
    return (boxes, feature_arr)
Example 22
def extract_features(path, show, tag):
    img = io.imread('./images/' + path + '.bmp')
    hist = exposure.histogram(img)
    th = get_threshold('./images/' + path + '.bmp')
    img_binary = (img < th).astype(np.double)
    img_label = label(img_binary, background=0)

    # Show images
    if show == 1:
        io.imshow(img)
        plt.title('Original Image')
        io.show()

        plt.bar(hist[1], hist[0])
        plt.title('Histogram')
        plt.show()

        io.imshow(img_binary)
        plt.title('Binary Image')
        io.show()

        io.imshow(img_label)
        plt.title('Labeled Image')
        io.show()

    regions = regionprops(img_label)
    if show == 1:
        io.imshow(img_binary)
        ax = plt.gca()

    features = []

    for props in regions:
        minr, minc, maxr, maxc = props.bbox
        if maxc - minc < 10 or maxr - minr < 10 or maxc - minc > 120 or maxr - minr > 120:
            continue
        if show == 1:
            ax.add_patch(
                Rectangle((minc, minr),
                          maxc - minc,
                          maxr - minr,
                          fill=False,
                          edgecolor='red',
                          linewidth=1))
        roi = img_binary[minr:maxr, minc:maxc]
        m = moments(roi)
        cr = m[0, 1] / m[0, 0]
        cc = m[1, 0] / m[0, 0]
        mu = moments_central(roi, cr, cc)
        nu = moments_normalized(mu)
        hu = moments_hu(nu)
        features.append(hu)
        if (len(path) == 1):
            tag.append(ord(path))

    if show == 1:
        plt.title('Bounding Boxes')
        io.show()
    return features
Example 23
def test_moments(anisotropic):
    image = np.zeros((20, 20), dtype=np.float64)
    image[14, 14] = 1
    image[15, 15] = 1
    image[14, 15] = 0.5
    image[15, 14] = 0.5
    if anisotropic:
        spacing = (1.4, 2)
    else:
        spacing = (1, 1)
    if anisotropic is None:
        m = moments(image)
    else:
        m = moments(image, spacing=spacing)
    assert_equal(m[0, 0], 3)
    assert_almost_equal(m[1, 0] / m[0, 0], 14.5 * spacing[0])
    assert_almost_equal(m[0, 1] / m[0, 0], 14.5 * spacing[1])
Example 24
def MM_area(im1):
    import numpy as np
    from skimage.measure import moments

    I = np.array(im1, dtype=np.double)
    M = moments(I)
    area = M[0, 0]
    return area
Example 25
    def compute_hu_moments(i):
        b = cells_aligned_padded[i].astype(np.uint8)
        m = moments(b, order=1)
        hu = moments_hu(
            moments_normalized(
                moments_central(b, cc=m[0, 1] / m[0, 0],
                                cr=m[1, 0] / m[0, 0])))
        return hu
Example 26
    def calculateCentroid(self, mask):
        m = measure.moments(mask)
        c = np.array((m[0, 1] / m[0, 0], m[1, 0] / m[0, 0]))

        # centroid is (x, y) while measure returns (y, x) and bbox is (y, x)
        self.centroid = np.array((c[1] + self.bbox[1], c[0] + self.bbox[0]))
        self.blob_name = "coral-" + str(self.centroid[0]) + "-" + str(
            self.centroid[1])
Example 27
def test_moments_coords():
    image = np.zeros((20, 20), dtype=np.double)
    image[13:17, 13:17] = 1
    mu_image = moments(image)

    coords = np.array([[r, c] for r in range(13, 17) for c in range(13, 17)],
                      dtype=np.double)
    mu_coords = moments_coords(coords)
    assert_almost_equal(mu_coords, mu_image)
Example 28
def test_moments_coords():
    image = np.zeros((20, 20), dtype=np.double)
    image[13:17, 13:17] = 1
    mu_image = moments(image)

    coords = np.array([[r, c] for r in range(13, 17)
                       for c in range(13, 17)], dtype=np.double)
    mu_coords = moments_coords(coords)
    assert_almost_equal(mu_coords, mu_image)
Example 29
def testKNN():
    trainFeatures, trainLebels = extractFeatures()
    knn = neighbors.KNeighborsClassifier()
    knn.fit(trainFeatures, trainLebels)
    #score = knn.score(trainFeatures, trainLebels)
    testNames = ['test1', 'test2']
    #testNames = ['test2']
    testFeatures = []
    testLabels = []
    testTruth = []
    correct = 0
    #textPosition = []
    for i in range(len(testNames)):
        classes, locations = readPkl(testNames[i])
        img = io.imread(testNames[i] + '.bmp')
        #testTruth = ['a']*7+['d']*7+['m']*7+['n']*7+['o']*7+['p']*7+['q']*7+['r']*7+['u']*7+['w']*7
        ret, binary = cv.threshold(img, 0, 255, cv.THRESH_BINARY | cv.THRESH_OTSU)
        #ret, binary = cv.threshold(img, 0, 255, cv.THRESH_BINARY | cv.THRESH_TRIANGLE)
        th = ret
        img_binary = (img < th).astype(np.double)
        img_dilation = morphology.binary_dilation(img_binary, selem=None)
        img_erosion = morphology.binary_erosion(img_binary, selem=None)
        img_label = label(img_binary, background=0)
        regions = regionprops(img_label)
        io.imshow(img_binary)
        ax = plt.gca()
        thresholdR = 15
        thresholdC = 15
        for props in regions:
            minr, minc, maxr, maxc = props.bbox
            # Computing Hu Moments and Removing Small Components
            if (maxr - minr) >= thresholdR and (maxc - minc) >= thresholdC:
                #textPosition.append((maxc, minr))
                roi = img_binary[minr:maxr, minc:maxc]
                m = moments(roi)
                cr = m[0, 1] / m[0, 0]
                cc = m[1, 0] / m[0, 0]
                mu = moments_central(roi, cr, cc)
                nu = moments_normalized(mu)
                hu = moments_hu(nu)
                testFeatures.append(hu)
                
                testLabels.append(knn.predict([testFeatures[-1]]))
                
                indexFix = locationFix(locations, minr, minc, maxr, maxc)
                if indexFix is not None:
                    if testLabels[-1] == classes[indexFix]:
                        correct += 1
                
                plt.text(maxc, minr, testLabels[-1][0], bbox=dict(facecolor='white', alpha=0.5))
                ax.add_patch(Rectangle((minc, minr), maxc - minc, maxr - minr, fill=False, edgecolor='red', linewidth=1))
        plt.title('Bounding Boxes')
        io.show()
    print(correct, len(testLabels))
    correctRate = correct / len(testLabels)
    print(correctRate)
Example 30
def extractFeature(name, showall, showbb, flag):

    (img, regions, ax, rthre, cthre) = extractImage(name, showall, showbb,
                                                    flag)

    Features = []
    boxes = []

    for props in regions:
        tmp = []
        minr, minc, maxr, maxc = props.bbox
        if maxc - minc < cthre or maxr - minr < rthre or maxc - minc > cthre * 9 or maxr - minr > rthre * 9:
            continue
        tmp.append(minr)
        tmp.append(minc)
        tmp.append(maxr)
        tmp.append(maxc)
        boxes.append(tmp)
        if showbb == 1:
            ax.add_patch(
                Rectangle((minc, minr),
                          maxc - minc,
                          maxr - minr,
                          fill=False,
                          edgecolor='red',
                          linewidth=1))
        # computing hu moments and removing small components
        roi = img[minr:maxr, minc:maxc]
        m = moments(roi)
        cr = m[0, 1] / m[0, 0]
        cc = m[1, 0] / m[0, 0]
        mu = moments_central(roi, cr, cc)
        nu = moments_normalized(mu)
        hu = moments_hu(nu)

        area = (maxr - minr) * (maxc - minc)
        # add convexity
        p = perimeter(img[minr:maxr, minc:maxc])
        con = (area / (p * p)) * 4 * math.pi
        convex = np.array([con])
        hu = np.concatenate((hu, convex))

        # add density
        den = area / float(props.convex_area)
        dense = np.array([den])
        hu = np.concatenate((hu, dense))

        Features.append(hu)

    # print boxes

    plt.title('Bounding Boxes')
    if showbb == 1:
        io.show()

    return Features, boxes,
Example 31
def test_moments():
    image = np.zeros((20, 20), dtype=np.double)
    image[14, 14] = 1
    image[15, 15] = 1
    image[14, 15] = 0.5
    image[15, 14] = 0.5
    m = moments(image)
    assert_equal(m[0, 0], 3)
    assert_almost_equal(m[1, 0] / m[0, 0], 14.5)
    assert_almost_equal(m[0, 1] / m[0, 0], 14.5)
Example 32
def get_inertia(mask, mu=None):
    """compute inertia tensor and eigenvalues from mask, if moments are give the function is much faster"""
    if mu is None:
        mu = measure.moments(mask)

    inertia_tensor = measure.inertia_tensor(mask, mu)
    inertia_eigen = measure.inertia_tensor_eigvals(mask,
                                                   mu=mu,
                                                   T=inertia_tensor)
    return inertia_tensor, inertia_eigen
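A minimal usage sketch (synthetic mask; get_inertia as defined above, with central moments passed in so they are not recomputed):

import numpy as np
from skimage import measure

mask = np.zeros((64, 64), dtype=float)
mask[20:40, 10:50] = 1.0                     # a wide rectangle
mu = measure.moments_central(mask)           # central moments, reused below
tensor, eigvals = get_inertia(mask, mu=mu)
print(tensor.shape, eigvals)                 # (2, 2) tensor and its two eigenvalues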
Example 33
def find_centroid(pic):
    from skimage.measure import moments
    import numpy as np

    if len(pic.shape) > 2:
        pic = np.copy(pic).reshape(list(pic.shape)[:-1])
    M = moments(pic)
    centroid = (M[1, 0] / M[0, 0], M[0, 1] / M[0, 0])

    return centroid
Example 34
def test_moments():
    image = np.zeros((20, 20), dtype=np.double)
    image[14, 14] = 1
    image[15, 15] = 1
    image[14, 15] = 0.5
    image[15, 14] = 0.5
    m = moments(image)
    assert_equal(m[0, 0], 3)
    assert_almost_equal(m[0, 1] / m[0, 0], 14.5)
    assert_almost_equal(m[1, 0] / m[0, 0], 14.5)
Example 35
def getcentroid(filename, box):
    ia.open(filename)
    imgdataraw = ia.getregion()
    headerlist = ia.summary()
    ia.close()

    x0,x1,y0,y1 = box
    imgdata = np.squeeze(imgdataraw) #in units of Jy/beam

    xpix = imgdata.shape[0] #confirmed that imgdata is x,y
    ypix = imgdata.shape[1]

    xcen, ycen = headerlist['refpix'][0], headerlist['refpix'][1]
    RA0, DEC0 = headerlist['refval'][0], headerlist['refval'][1]
    deltaRA, deltaDEC = headerlist['incr'][0], headerlist['incr'][1]

    bminor = headerlist['restoringbeam']['minor']['value']
    bmajor = headerlist['restoringbeam']['major']['value']
    ang = headerlist['restoringbeam']['positionangle']['value']

    beamsize = np.pi*bminor*bmajor/(4*np.log(2)) #in arcsec^2

    print "The beam shape is %.3f, %.3f, %.3f" % (bmajor, bminor, ang)

    cellsize = headerlist['incr'][1]*206265 #in arcsec
    
    print "The image size is %d pixels" % imgdata.shape[0]


    #background calculations
    bg1 = imgdata[50:xpix-50 ,50:150]
    bg2 = imgdata[50:xpix - 50, ypix - 150:ypix - 50]
    bgmean = 0.5*(np.mean(bg1)+np.mean(bg2))

    rms = np.sqrt(np.sum((bg1-bgmean)**2+(bg2-bgmean)**2)/(float(bg1.size)+float(bg2.size)))
    print "The image rms is %.3e" % rms

    #m = moments(image = imgdata[xpix/2-75:xpix/2+75,ypix/2-75:ypix/2+75], order = 1)

    #print "The image centroid is (%.3f, %.3f)" % (xpix/2-75+m[0,1]/m[0,0], ypix/2-75+m[1,0]/m[0,0])

    m = moments(image = imgdata[x0:x1,y0:y1], order = 1)

    print "The image centroid is (%.3f, %.3f)" % (x0+m[0,1]/m[0,0], y0+m[1,0]/m[0,0])
    centroidRA = (RA0+deltaRA*(x0+m[0,1]/m[0,0]-xcen))*180/np.pi*1/15.
    centroidDEC = (DEC0+deltaDEC*(y0+m[1,0]/m[0,0]-ycen))*180/np.pi
    ra = np.zeros(3)
    ra[0] = int(centroidRA)
    ra[1] = np.abs(int((centroidRA - ra[0])*60))
    ra[2] = (np.abs(centroidRA)-np.abs(ra[0])-ra[1]/60.)*3600
    dec = np.zeros(3)
    dec[0] = int(centroidDEC)
    dec[1] = np.abs(int((centroidDEC - dec[0])*60))
    dec[2] = (np.abs(centroidDEC)-np.abs(dec[0])-dec[1]/60.)*3600
    print "The coordinates are RA %d:%d:%.3f, DEC %d:%d:%.3f" % (ra[0], ra[1], ra[2], dec[0], dec[1], dec[2])
Example 36
def test_moments_central_deprecated():
    image = np.zeros((20, 20), dtype=np.double)
    image[5:-5, 5:-5] = np.random.random((10, 10))
    center = moments(image, 1)[[1, 0], [0, 1]]
    cr, cc = center
    with expected_warnings(['deprecated 2D-only']):
        mu0 = moments_central(image, cr, cc)
        mu1 = moments_central(image, cr=cr, cc=cc)
    mu_ref = moments_central(image, center)
    assert_almost_equal(mu0.T, mu_ref)
    assert_almost_equal(mu1.T, mu_ref)
Example 37
def _irafstarfind_moments(imgcutout, kernel, sky):
    """
    Find the properties of each detected source, as defined by IRAF's
    ``starfind``.

    Parameters
    ----------
    imgcutout : `_ImgCutout`
        The image cutout for a single detected source.

    kernel : `_FindObjKernel`
        The convolution kernel.  The dimensions should match those of
        ``imgcutout``.  ``kernel.gkernel`` should have a peak pixel
        value of 1.0 and not contain any masked pixels.

    sky : float
        The local sky level around the source.

    Returns
    -------
    result : dict
        A dictionary of the object parameters.
    """

    from skimage.measure import moments, moments_central

    result = defaultdict(list)
    img = np.array((imgcutout.data - sky) * kernel.mask)
    img = np.where(img > 0, img, 0)    # starfind discards negative pixels
    if np.count_nonzero(img) <= 1:
        return {}
    m = moments(img, 1)
    result['xcentroid'] = m[1, 0] / m[0, 0]
    result['ycentroid'] = m[0, 1] / m[0, 0]
    result['npix'] = float(np.count_nonzero(img))   # float for easier testing
    result['sky'] = sky
    result['peak'] = np.max(img)
    flux = img.sum()
    result['flux'] = flux
    result['mag'] = -2.5 * np.log10(flux)
    mu = moments_central(
        img, result['ycentroid'], result['xcentroid'], 2) / m[0, 0]
    musum = mu[2, 0] + mu[0, 2]
    mudiff = mu[2, 0] - mu[0, 2]
    result['fwhm'] = 2.0 * np.sqrt(np.log(2.0) * musum)
    result['sharpness'] = result['fwhm'] / kernel.fwhm
    result['roundness'] = np.sqrt(mudiff**2 + 4.0*mu[1, 1]**2) / musum
    pa = 0.5 * np.arctan2(2.0 * mu[1, 1], mudiff) * (180.0 / np.pi)
    if pa < 0.0:
        pa += 180.0
    result['pa'] = pa
    result['xcentroid'] += imgcutout.x0
    result['ycentroid'] += imgcutout.y0
    return result
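A sanity-check sketch of the FWHM expression used above: for a circular Gaussian it reduces to the familiar 2 * sqrt(2 * ln 2) * sigma (this sketch uses the newer center= keyword of moments_central):

import numpy as np
from skimage.measure import moments, moments_central

sigma = 3.0
y, x = np.mgrid[0:61, 0:61]
img = np.exp(-((x - 30)**2 + (y - 30)**2) / (2 * sigma**2))

m = moments(img, 1)
center = (m[1, 0] / m[0, 0], m[0, 1] / m[0, 0])
mu = moments_central(img, center=center, order=2) / m[0, 0]
fwhm = 2.0 * np.sqrt(np.log(2.0) * (mu[2, 0] + mu[0, 2]))
print(fwhm, 2.0 * np.sqrt(2.0 * np.log(2.0)) * sigma)    # both ~ 7.06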
Example 38
def test_moments_central_deprecated():
    image = np.zeros((20, 20), dtype=np.double)
    image[5:-5, 5:-5] = np.random.random((10, 10))
    center = moments(image, 1)[[1, 0], [0, 1]]
    cr, cc = center
    with expected_warnings(['deprecated 2D-only']):
        mu0 = moments_central(image, cr, cc)
        mu1 = moments_central(image, cr=cr, cc=cc)
    mu_ref = moments_central(image, center)
    assert_almost_equal(mu0.T, mu_ref)
    assert_almost_equal(mu1.T, mu_ref)
Example 39
def main(raw_images,ntrans,Phi,coords_r,coords_c):

    import w_subimg

    lines = ntrans * ['line_id']
    epoch = Phi
    nepoch = len(Phi)

    # centroid position for each transition and epoch and blob
    # (middle col refers to the blobs and last col refers to x,y)
    centroid_array = np.zeros((ntrans*nepoch,2,2))
    flux_array = np.zeros((ntrans*nepoch,2))

    for j in range(2):

        coord1_r, coord2_r = coords_r[2*j:2*(j+1)]
        coord1_c, coord2_c = coords_c[2*j:2*(j+1)]
        for i in range(ntrans*nepoch):

            if raw_images[i][1] == 'zero.fits':
                raw_images[i][2] = raw_images[i][2] * 0

            # call the sub-image function
            subimg = w_subimg.main(raw_images[i][2],raw_images[i][5],coord1_r,coord1_c,coord2_r,coord2_c)

            subim_r, extent = subimg[0:2]
            row1, row2, col1, col2 = subimg[2:]

            # extracting the subimage
            img = subim_r.astype('double')

            if raw_images[i][1] != 'zero.fits':
                # pos = feature.blob_dog(img, threshold=1e-14, min_sigma=1.5, max_sigma=2.0, overlap=0.5)
                pos = feature.peak_local_max(img, threshold_abs=1e-14, min_distance=1)
                mom = measure.moments(img)
                '''
                The following properties can be calculated from raw image moments:
                Area as: m[0, 0].
                Centroid as: {m[0, 1] / m[0, 0], m[1, 0] / m[0, 0]}.
                '''
                centroid = [mom[0,1] / mom[0,0], mom[1,0] / mom[0,0]]
                # centroid2 = get_centroid(img)
                # center of img
                center = (((raw_images[i][2]).shape)[0] - 1) / 2
                cent_col = (center - (col1+centroid[1])) * raw_images[i][5]
                cent_row = (row1+centroid[0] - center) * raw_images[i][5]
                centroid_array[i,j,:] = [cent_row,cent_col]
                # *REMOVING* THE FLUX CONSERVATION *PER PIXEL* CORRECTION FACTOR
                flux_array[i,j] = np.sum(img) * (raw_images[i][5] / 0.1)**2
                if pos.shape[0] != 0:
                    pos_col = (center - (col1+pos[:,1]).flatten()) * raw_images[i][5]
                    pos_row = ((row1+pos[:,0]).flatten() - center) * raw_images[i][5]

    return centroid_array
Example 40
def extract_features(img):
    # This function extracts our features from an image. It basically
    # computes 8 (rather than 7) Hu geometric moments. To do this we
    # first compute the raw moments, then centralize and normalize them
    # before computing the Hu moments (plus the extra invariant i8).
    m = moments(img)
    cr = m[0,1] / m[0,0]
    cc = m[1,0] / m[0,0]
    mc = moments_central(img, cr, cc)
    mn = moments_normalized(mc)
    hu = moments_hu(mn)
    i8 = mn[1, 1] * ((mn[3, 0] + mn[1, 2])**2 - (mn[0, 3] + mn[2, 1])**2) \
        - (mn[2, 0] - mn[0, 2]) * (mn[3, 0] + mn[1, 2]) * (mn[0, 3] + mn[2, 1])
    return append(hu, [i8])
Example 41
def centroid_com(data, mask=None):
    """
    Calculate the centroid of a 2D array as its "center of mass"
    determined from image moments.

    Invalid values (e.g. NaNs or infs) in the ``data`` array are
    automatically masked.

    Parameters
    ----------
    data : array_like
        The 2D array of the image.

    mask : array_like (bool), optional
        A boolean mask, with the same shape as ``data``, where a `True`
        value indicates the corresponding element of ``data`` is masked.

    Returns
    -------
    centroid : `~numpy.ndarray`
        The ``x, y`` coordinates of the centroid.
    """

    from skimage.measure import moments

    data = np.ma.asanyarray(data)

    if mask is not None and mask is not np.ma.nomask:
        mask = np.asanyarray(mask)
        if data.shape != mask.shape:
            raise ValueError('data and mask must have the same shape.')
        data.mask |= mask

    if np.any(~np.isfinite(data)):
        data = np.ma.masked_invalid(data)
        warnings.warn('Input data contains invalid values (e.g. NaNs or infs), '
                      'which were automatically masked.', AstropyUserWarning)

    # Convert the data to a float64 (double) `numpy.ndarray`,
    # which is required for input to `skimage.measure.moments`.
    # Masked values are set to zero.
    data = data.astype(float)  # np.float was removed in NumPy 1.24
    data.fill_value = 0.
    data = data.filled()

    m = moments(data, 1)
    xcen = m[1, 0] / m[0, 0]
    ycen = m[0, 1] / m[0, 0]

    return np.array([xcen, ycen])
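A minimal usage sketch (assumes the module-level imports of the original file, e.g. numpy as np and the warning machinery):

import numpy as np

data = np.zeros((21, 21))
data[8:13, 8:13] = 1.0                  # 5 x 5 block centred at (10, 10)
print(centroid_com(data))               # -> array([10., 10.])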
Example 42
    def momentos_hu(self):
        """
            Compute the 7 Hu moments

        """

        m = measure.moments(self.imagemTonsDeCinza)

        row = m[0, 1] / m[0, 0]
        col = m[1, 0] / m[0, 0]

        mu = measure.moments_central(self.imagemTonsDeCinza,row,col)
        nu = measure.moments_normalized(mu)
        hu = measure.moments_hu(nu)

        valores = list(hu)

        nomes = [m+n for m,n in zip(['hu_'] * len(valores),map(str,range(0,len(valores))))]

        tipos = [numerico] * len(nomes)

        return nomes, tipos, valores
Example 43
    def get_moments(self):
        """
        Return moments from frame

        Returns
        -------
        moments : pandas Series object with the following keys
                  - m10 : row position of centroid
                  - m01 : col position of centroid
                  - mupr20 : higher moments
                  - mupr02 : higher moments
                  - mupr11 : higher moments
        """
        frame = ma.masked_invalid(self)
        frame.fill_value = 0
        frame = frame.filled()
        frame *= self.ap_weights # Multiply frame by aperture weights

        # Compute the centroid
        m = measure.moments(frame)
        m10=m[1,0]
        m01=m[0,1]
        moments = np.array([m10,m01])
        moments /= m[0,0]

        # Compute central moments (second order)
        mu = measure.moments_central(frame,moments[0],moments[1])
        
        mupr20 = mu[2,0]
        mupr02 = mu[0,2]
        mupr11 = mu[1,1]

        c_moments = np.array([mupr20,mupr02,mupr11])
        c_moments/=mu[0,0]

        moments = np.hstack([moments,c_moments])
        return moments
Example 44
def shape_params(data, data_mask=None):
    """
    Calculate the centroid and shape parameters for an object using
    image moments.

    Parameters
    ----------
    data : array_like
        The 2D image data.

    data_mask : array_like, bool, optional
        A boolean mask with the same shape as ``data``, where a `True`
        value indicates the corresponding element of ``data`` is
        invalid.

    Returns
    -------
    dict :  A dictionary containing the object shape parameters:

        * ``xcen, ycen``: object centroid (zero-based origin).
        * ``major_axis``: length of the major axis
        * ``minor_axis``: length of the minor axis
        * ``eccen``: eccentricity.  The ratio of half the distance
          between its two ellipse foci to the length of the
          semimajor axis.
        * ``pa``: position angle of the major axis.  Increases
          clockwise from the positive x axis.
        * ``covar``: corresponding covariance matrix for a 2D Gaussian
        * ``linear_eccen`` : linear eccentricity is the distance between
          the object center and either of its two ellipse foci.
    """
    from skimage.measure import moments, moments_central

    if data_mask is not None:
        if data.shape != data_mask.shape:
            raise ValueError('data and data_mask must have the same shape')
        data[data_mask] = 0.

    result = {}
    xcen, ycen = centroid_com(data)
    m = moments(data, 1)
    mu = moments_central(data, ycen, xcen, 2) / m[0, 0]
    result['xcen'] = xcen
    result['ycen'] = ycen
    # musum = mu[2, 0] + mu[0, 2]
    mudiff = mu[2, 0] - mu[0, 2]
    pa = 0.5 * np.arctan2(2.0*mu[1, 1], mudiff) * (180.0 / np.pi)
    if pa < 0.0:
        pa += 180.0
    result['pa'] = pa
    covar = np.array([[mu[2, 0], mu[1, 1]], [mu[1, 1], mu[0, 2]]])
    result['covar'] = covar
    eigvals, eigvecs = np.linalg.eigh(covar)
    majsq = np.max(eigvals)
    minsq = np.min(eigvals)
    result['major_axis'] = np.sqrt(majsq)
    result['minor_axis'] = np.sqrt(minsq)
    # if True:   # equivalent calculation
    #     tmp = np.sqrt(4.0*mu[1,1]**2 + mudiff**2)
    #     majsq = 0.5 * (musum + tmp)
    #     minsq = 0.5 * (musum - tmp)
    #     result['major_axis2'] = np.sqrt(majsq)
    #     result['minor_axis2'] = np.sqrt(minsq)
    result['eccen'] = np.sqrt(1.0 - (minsq / majsq))
    result['linear_eccen'] = np.sqrt(majsq - minsq)
    return result
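A self-contained sketch of the covariance-to-axes step above, on a synthetic anisotropic Gaussian (sigma of 12 px along x and 5 px along y; uses the newer center= keyword of moments_central):

import numpy as np
from skimage.measure import moments, moments_central

y, x = np.mgrid[0:101, 0:101]
data = np.exp(-(((x - 50) / 12.0)**2 + ((y - 50) / 5.0)**2) / 2.0)

m = moments(data, 1)
center = (m[1, 0] / m[0, 0], m[0, 1] / m[0, 0])          # (row, col)
mu = moments_central(data, center=center, order=2) / m[0, 0]
covar = np.array([[mu[0, 2], mu[1, 1]], [mu[1, 1], mu[2, 0]]])
print(np.sqrt(np.linalg.eigvalsh(covar)))                # ~ [ 5., 12.]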
Example 45
NewXs=np.arange(PosMinCtr-ExtSrch,PosMinCtr+ExtSrch,10)
#[a,b,c]=np.polyfit(FitXs,FitPnts,2)
#Xmax=

FitPnts=SegmntSMPadd[PosMaxCtr-ExtSrch:PosMaxCtr+ExtSrch]



Col=ss.medfilt(ZProjLog.max(axis=0),25)
MaxPos=Col.argmax()
scipy.misc.imsave('ZProjLog.jpg', ZProjLog)
scipy.misc.imsave('ZProj.jpg', ZProj)
#HoughRadii=np.arange(MaxPos-MaxHough,MaxPos+MaxHough,5)
#hough_res = hough_circle(ZProj, HoughRadii)

M=meas.moments(ZProj.astype('uint8'))
cx = M[0, 1] / M[0, 0]
cy = M[1, 0] / M[0, 0]
[cxc,cyc]=ZProj.shape
yCorr=int(cy-cyc/2)
xCorr=int(cx-cxc/2)
ZProj1=np.roll(ZProj,-xCorr,axis=0)
ZProj2=np.roll(ZProj1,-yCorr,axis=1)
pp.matshow(ZProj2)




Col=ss.medfilt(Img.max(axis=0),5)
Row=ss.medfilt(Img.max(axis=1),5)
MR=np.median(Row)
Example 46
    def geom_moments_sk(self, order):
        return measure.moments(self.image, order=order)
Example 47
imshow(imgLabel, cmap = cm.Greys_r, interpolation = 'none')


# Assignment 4 - Morphology # - not done yet
img = array(Image.open('figure_problem_set_4.tiff'))
img = 1 * (img > 1)
imshow(img, cmap = cm.Greys_r, interpolation = 'none')
equivTable, imgLabel = regionLabel(img)
imshow(imgLabel, cmap = cm.Greys_r, interpolation = 'none')


# HOMEWORK 6
# Assignment 3 - calculate moments
img = skimageIO.imread('img_moment.tif').astype(float)
imshow(img, cmap = cm.Greys_r, interpolation = 'none')
moments_deg1 = skimageMeasure.moments(img, order = 1)
x0 = moments_deg1[0, 1] / moments_deg1[0, 0]
y0 = moments_deg1[1, 0] / moments_deg1[0, 0]
momentsCentral_deg2 = skimageMeasure.moments_central(img, x0, y0, order = 2)
m00 = momentsCentral_deg2[0, 0]
m11 = momentsCentral_deg2[1, 1]
m02 = momentsCentral_deg2[0, 2]
m20 = momentsCentral_deg2[2, 0]
orientation = degrees(arctan2(2 * m11, (m20 - m02)) / 2)

# HOUGH TRANSFORM HOMEWORK
img = skimageIO.imread('img_hough_circle.tiff').astype(float)

# Enhance edges by Sobel operator
h1 = array([[1, 2, 1], [0, 0, 0], [-1, -2, -1]])
imgH1 = convolve2d(img, h1)
Example 48
    def compute_hu_moments(i):
        b = cells_aligned_padded[i].astype(np.uint8)
        m = moments(b, order=1)
        hu = moments_hu(moments_normalized(
            moments_central(b, cc=m[0, 1] / m[0, 0], cr=m[1, 0] / m[0, 0])))
        return hu
Example 49
def shape_params(data, mask=None):
    """
    Calculate the centroid and shape parameters of a 2D array (e.g., an
    image cutout of an object) using image moments.

    Parameters
    ----------
    data : array_like or `~astropy.nddata.NDData`
        The 2D array of the image.

    mask : array_like, bool, optional
        A boolean mask with the same shape as ``data``, where a `True`
        value indicates the corresponding element of ``data`` is
        invalid.  If ``mask`` is input it will override ``data.mask``
        for `~astropy.nddata.NDData` inputs.

    Returns
    -------
    params : dict
        A dictionary containing the object shape parameters:

        * ``xcen, ycen``: The object centroid (zero-based origin).
        * ``major_axis``: The length of the major axis of the ellipse
          that has the same second-order moments as the input image.
        * ``minor_axis``: The length of the minor axis of the ellipse
          that has the same second-order moments as the input image.
        * ``eccen``: The eccentricity of the ellipse that has the same
          second-order moments as the input image.  The eccentricity is
          the ratio of half the distance between the two ellipse foci to
          the length of the semimajor axis.
        * ``angle``: Angle in radians between the positive x axis and
          the major axis of the ellipse that has the same second-order
          moments as the input image.  The angle increases
          counter-clockwise.
        * ``covar``: The covariance matrix of the ellipse that has the
          same second-order moments as the input image.
        * ``linear_eccen`` : The linear eccentricity of the ellipse that
          has the same second-order moments as the input image.  Linear
          eccentricity is the distance between the ellipse center and
          either of its two foci.
    """

    from skimage.measure import moments, moments_central
    data = _convert_image(data, mask=mask)
    xcen, ycen = centroid_com(data)
    m = moments(data, 1)
    mu = moments_central(data, ycen, xcen, 2) / m[0, 0]
    result = {}
    result['xcen'] = xcen
    result['ycen'] = ycen
    mudiff = mu[2, 0] - mu[0, 2]
    # keep the angle in radians, as documented above
    angle = 0.5 * np.arctan2(2.0 * mu[1, 1], mudiff)
    if angle < 0.0:
        angle += np.pi
    result['angle'] = angle
    covar = np.array([[mu[2, 0], mu[1, 1]], [mu[1, 1], mu[0, 2]]])
    result['covar'] = covar
    eigvals, eigvecs = np.linalg.eigh(covar)
    majsq = np.max(eigvals)
    minsq = np.min(eigvals)
    result['major_axis'] = np.sqrt(majsq)
    result['minor_axis'] = np.sqrt(minsq)
    # equivalent calculation of major/minor axes:
    #     tmp = np.sqrt(4.0*mu[1,1]**2 + mudiff**2)
    #     musum = mu[2, 0] + mu[0, 2]
    #     majsq = 0.5 * (musum + tmp)
    #     minsq = 0.5 * (musum - tmp)
    #     result['major_axis2'] = np.sqrt(majsq)
    #     result['minor_axis2'] = np.sqrt(minsq)
    result['eccen'] = np.sqrt(1.0 - (minsq / majsq))
    result['linear_eccen'] = np.sqrt(majsq - minsq)
    return result
Example 50
def position(data, coords_row, coords_col):

    import numpy as np
    import w_subimg
    import copy
    from skimage import measure
    from skimage import feature

    ntrans = len(np.unique(data['lineID']))
    nepoch = len(np.unique(data['JD']))
    Phi = np.unique(data['Phi'])

    # http://www.python-course.eu/passing_arguments.php
    # https://jeffknupp.com/blog/2012/11/13/is-python-callbyvalue-or-callbyreference-neither/
    dic = copy.deepcopy(data) # <- THIS IS VERY IMPORTANT! KEEP copy.deepcopy()!
    dic['coords'] = [] # new key: centroid coordinates (x,y)
    dic['dist'] = [] # new key: centroid position
    dic['pa'] = [] # new key: position angle E from N

    raw_images = [
                    data['fileName'],
                    data['image'],
                    data['pixScale']
                ]

    for j in range(ntrans):

        for i in range(nepoch):

            index = i + j * nepoch

            # print(j, i, index, raw_images[0][index], coords_row, coords_col)

            if raw_images[0][index] == 'zero.fits':
                raw_images[1][index] = raw_images[1][index] * 0

            # call the sub-image function
            subimg = w_subimg.main(raw_images[1][index],
                                   raw_images[2][index],
                                   coords_row[0], coords_col[0],
                                   coords_row[1], coords_col[1])

            subim_r, extent = subimg[0:2]
            row1, row2, col1, col2 = subimg[2:]

            # extracting the subimage
            img = subim_r.astype('double')

            mom = measure.moments(img)
            '''
            The following properties can be calculated from raw image moments:
            Area as: m[0, 0].
            Centroid as: {m[0, 1] / m[0, 0], m[1, 0] / m[0, 0]}.
            '''
            centroid = [mom[0,1] / mom[0,0], mom[1,0] / mom[0,0]]
            # centroid2 = get_centroid(img)
            # center of img
            center = (((raw_images[1][index]).shape)[0] - 1) / 2
            cent_col = (center - (col1+centroid[1])) * raw_images[2][index]
            cent_row = (row1+centroid[0] - center) * raw_images[2][index]

            # position angle measured E from N
            dist = np.sqrt(cent_col**2 + cent_row**2)
            pa = (2. * np.pi - np.arctan(cent_col/cent_row)) / np.pi * 180.

            dic["dist"].append(dist)
            dic["coords"].append([cent_row,cent_col])
            dic["pa"].append(pa)

    return dic
Example 51
    def estimate_params(self, detrenddata=False):
        """
        Estimate the parameters that best model the data using its moments

        Parameters
        ----------
        detrenddata : bool
            a keyword that determines whether data should be detrended first.
            Detrending takes *much* longer than not. Probably only useful for
            large fields of view.

        Returns
        -------
        params : array_like
            params[0] = amp
            params[1] = x0
            params[2] = y0
            params[3] = sigma_x
            params[4] = sigma_y
            params[5] = rho
            params[6] = offset

        Notes
        -----
        Bias is removed from data using detrend in the util module.
        """

        # initialize the parameter array
        params = np.zeros(7)
        # iterate at most 10 times
        for i in range(10):
            # detrend data
            if detrenddata:
                # only try to remove a plane; anything more should be done
                # before object instantiation.
                data, bg = detrend(self._data.copy(), degree=1)
                offset = bg.mean()
                amp = data.max()
            else:
                data = self._data.astype(float)
                offset = data.min()
                amp = data.max() - offset

            # calculate the moments up to second order
            M = moments(data, 2)

            # calculate model parameters from the moments
            # https://en.wikipedia.org/wiki/Image_moment#Central_moments
            xbar = M[1, 0] / M[0, 0]
            ybar = M[0, 1] / M[0, 0]
            xvar = M[2, 0] / M[0, 0] - xbar**2
            yvar = M[0, 2] / M[0, 0] - ybar**2
            covar = M[1, 1] / M[0, 0] - xbar * ybar

            # place the model parameters in the return array
            params[:3] = amp, xbar, ybar
            params[3] = np.sqrt(np.abs(xvar))
            params[4] = np.sqrt(np.abs(yvar))
            params[5] = covar / np.sqrt(np.abs(xvar * yvar))
            params[6] = offset

            if abs(params[5]) < 1 or not detrenddata:
                # if the rho is valid or we're not detrending data,
                # break the loop.
                break

        # save estimate for later use
        self._guess_params = params
        # return parameters to the caller as a `copy`, we don't want them to
        # change the internal state
        return params.copy()
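A standalone check of the moment formulas in the loop above, on a synthetic correlated Gaussian (rho = 0.4, assumed values; axis naming follows the snippet, where axis 0 is treated as x):

import numpy as np
from skimage.measure import moments

y, x = np.mgrid[0:201, 0:201]
rho, sx, sy = 0.4, 10.0, 6.0
z = (((x - 100) / sx)**2
     - 2 * rho * ((x - 100) / sx) * ((y - 100) / sy)
     + ((y - 100) / sy)**2)
data = np.exp(-z / (2 * (1 - rho**2)))

M = moments(data, 2)
xbar, ybar = M[1, 0] / M[0, 0], M[0, 1] / M[0, 0]
xvar = M[2, 0] / M[0, 0] - xbar**2
yvar = M[0, 2] / M[0, 0] - ybar**2
covar = M[1, 1] / M[0, 0] - xbar * ybar
print(covar / np.sqrt(xvar * yvar))      # recovers rho, ~ 0.4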
Example 52
File: uns.py Project: vassiliou/uns
    def properties(self):
        """Return a set of metrics on the masks with units of distance """
        imgH, imgW = self.image.shape  # Image height, width

        # Don't overemphasize one dimension over the other by setting the max
        # dimension to equal 1
        imgL = np.max([imgH, imgW])
        imgA = imgH * imgW  # Total number of pixels
        if self._properties is None:
            # Must load contour into single variable before checking self.hasmask
            # If mask exists, only then can we access x,y components of contour
            C = self.contour
            if self.hasmask:
                D = {}
                D['hasmask'] = True


                # Area metric is normalized to the number of image pixels.
                # Sqrt converts units to distance.
                D['maskarea'] = np.sqrt(np.count_nonzero(self.image)/imgA)
                # Contour-derived values
                x, y = C
                D['contxmin'] = np.min(x)/imgL
                D['contxmax'] = np.max(x)/imgL
                D['contymin'] = np.min(y)/imgL
                D['contymax'] = np.max(y)/imgL

                D['contW'] = D['contxmax'] - D['contxmin']
                D['contH'] = D['contymax'] - D['contymin']

                # Image moments
                m = measure.moments(self.image, order=5)
                D['moments'] = m
                D['centrow'] = (m[0, 1]/m[0, 0])/imgL
                D['centcol'] = (m[1, 0]/m[0, 0])/imgL

                # Hu, scale, location, rotation invariant (7, 1)
                mHu = measure.moments_hu(m)
                for i, Ii in enumerate(mHu):
                    D['moment_hu_I{}'.format(i)] = Ii

                # Contour SVD is converted to two coordinates
                # First normalize and centre the contours
                D['contour'] = self.contour

                contour = (self.contour.T/imgL - [D['centrow'], D['centcol']]).T
                D['unitcontour'] = contour

                _, s, v = np.linalg.svd(contour.T)

                D['svd'] = s*v
                D['svdx0'] = D['svd'][0,0]
                D['svdx1'] = D['svd'][0,1]
                D['svdy0'] = D['svd'][1,0]
                D['svdy1'] = D['svd'][1,1]

                # Width by medial axis
                skel, distance = morphology.medial_axis(self.image,
                                                        self.image,
                                                        return_distance=True)
                self.skel = skel
                self.distance = distance

                #
                D['skelpixels'] = np.sqrt((np.sum(skel)/imgA))  # number of pixels

                # distances should be restricted to within mask to avoid over-
                # counting the zeros outside the mask
                distances = distance[self.image>0]/imgL
                q = [10, 25, 50, 75, 90]
                keys = ['skeldist{:2d}'.format(n) for n in q]
                vals = np.percentile(distances, q)
                D.update(dict(zip(keys, vals)))
                D['skelavgdist'] = np.mean(distances)
                D['skelmaxdist'] = np.max(distances)

                self._properties = D
        return self._properties
Example 53
    def moments(self):
        """Spatial moments up to 3rd order of the source."""

        from skimage.measure import moments
        return moments(self._data_cutout_maskzeroed_double, 3)
Example 54
def hu_feature(gray):

    moments = measure.moments(gray)
    hu = measure.moments_hu(moments)

    return normalize(hu)
Example 55
    def moments(self, order=3):
        '''see skimage.measure.moments'''
        return measure.moments(self, order)
Example 56
def get_centroid(image):
    m = measure.moments(image)
    return m[0, 1] / m[0, 0], m[1, 0] / m[0, 0]
def getHuMoments(image):
    img = image.copy()
    return measure.moments_hu(measure.moments(img))